From 4f6f0e03ff654081a9e9e83b4952aeaa900ca071 Mon Sep 17 00:00:00 2001 From: Rajasekar Date: Sat, 24 May 2025 22:54:49 +0530 Subject: [PATCH] Schema V2: - Added schema visualizer - Based on the model class, predict all the schema details and relationships. --- .DS_Store | Bin 8196 -> 0 bytes .github/ISSUE_TEMPLATE/bug_report.md | 38 - .github/ISSUE_TEMPLATE/feature_request.md | 20 - .github/PULL_REQUEST_TEMPLATE.md | 28 - .github/workflows/ci.yml | 19 - .github/workflows/codeql-analysis.yml | 75 - .../workflows/proto-schema-compatibility.yml | 62 - .gitignore | 45 +- CNAME | 1 - CODE_OF_CONDUCT.md | 128 - CONTRIBUTING.md | 154 - Dockerfile.connect | 34 + Dockerfile.user | 24 + LICENSE | 201 - Makefile | 31 - README.md | 523 +- SECURITY.md | 21 - _config.yml | 1 - asset/ds_classification.png | Bin 19516 -> 0 bytes asset/logo copy.png | Bin 81369 -> 0 bytes asset/logo.png | Bin 64039 -> 0 bytes asset/sample_schema.png | Bin 88228 -> 0 bytes cert.yaml | 7 + connect/build-connect-trigger.yaml | 42 + connect/k8s_connect.yaml | 179 + connect/pom.xml | 251 + .../opsbeach/connect/ConnectApplication.java | 23 + .../opsbeach/connect/config/Neo4JConfig.java | 22 + .../connect/config/SchedulerConfig.java | 38 + .../opsbeach/connect/core/App2AppConfig.java | 37 + .../com/opsbeach/connect/core/BaseDto.java | 37 + .../com/opsbeach/connect/core/BaseModel.java | 58 + .../opsbeach/connect/core/BaseRepository.java | 19 + .../connect/core/BaseRepositoryImpl.java | 110 + .../opsbeach/connect/core/enums/AuthType.java | 19 + .../connect/core/enums/ServiceType.java | 28 + .../opsbeach/connect/core/enums/TaskType.java | 26 + .../core/hibernate/HibernateConfig.java | 41 + .../core/specification/IdSpecifications.java | 236 + .../connect/core/utils/Constants.java | 114 + .../connect/core/utils/TaskUtils.java | 40 + .../github/controller/ActivityController.java | 40 + .../controller/ClientRepoController.java | 70 + .../github/controller/CommentController.java | 40 + 
.../github/controller/DomainController.java | 33 + .../controller/EventAuditController.java | 41 + .../github/controller/GitHubController.java | 72 + .../github/controller/ModelController.java | 35 + .../controller/PullRequestController.java | 48 + .../controller/SchemaFileAuditController.java | 27 + .../github/controller/WorkflowController.java | 48 + .../connect/github/dto/ActivityDto.java | 29 + .../github/dto/AutoCompleteModelDto.java | 10 + .../connect/github/dto/ClientRepoDto.java | 34 + .../connect/github/dto/CommentDto.java | 32 + .../connect/github/dto/DashboardDto.java | 22 + .../connect/github/dto/DomainDto.java | 28 + .../connect/github/dto/EventAuditDto.java | 33 + .../connect/github/dto/GitHubDto.java | 36 + .../connect/github/dto/GithubActionDto.java | 19 + .../opsbeach/connect/github/dto/ModelDto.java | 40 + .../connect/github/dto/PullRequestDto.java | 40 + .../github/dto/SchemaFileAuditDto.java | 31 + .../connect/github/dto/WorkflowDto.java | 90 + .../connect/github/entity/Activity.java | 56 + .../connect/github/entity/ClientRepo.java | 107 + .../connect/github/entity/Comment.java | 46 + .../connect/github/entity/Domain.java | 28 + .../connect/github/entity/EventAudit.java | 75 + .../opsbeach/connect/github/entity/Model.java | 52 + .../connect/github/entity/PullRequest.java | 69 + .../github/entity/SchemaFileAudit.java | 43 + .../connect/github/entity/Workflow.java | 67 + .../github/repository/ActivityRepository.java | 8 + .../repository/ClientRepoRepository.java | 8 + .../github/repository/CommentRepository.java | 8 + .../github/repository/DomainRepository.java | 9 + .../repository/EventAuditRepository.java | 8 + .../github/repository/ModelRepository.java | 23 + .../repository/PullRequestRepository.java | 8 + .../repository/SchemaFileAuditRepository.java | 20 + .../github/repository/WorkflowRepository.java | 8 + .../github/service/ActivityService.java | 51 + .../github/service/ClientRepoService.java | 368 ++ .../github/service/CommentService.java 
| 48 + .../connect/github/service/DomainService.java | 75 + .../github/service/EventAuditService.java | 131 + .../connect/github/service/GitHubService.java | 656 +++ .../connect/github/service/ModelService.java | 208 + .../github/service/PullRequestService.java | 229 + .../service/SchemaFileAuditService.java | 395 ++ .../github/service/WorkflowService.java | 467 ++ .../metrics/controller/SlaController.java | 52 + .../opsbeach/connect/metrics/dto/SlaDto.java | 30 + .../opsbeach/connect/metrics/entity/Sla.java | 38 + .../metrics/repository/SlaRepository.java | 8 + .../connect/metrics/service/SlaService.java | 77 + .../scheduler/SchedulerTaskService.java | 69 + .../connect/scheduler/SyncScheduler.java | 40 + .../controller/DomainNodeController.java | 39 + .../controller/OrganizationController.java | 44 + .../schemata/controller/TableController.java | 83 + .../connect/schemata/dto/FieldDto.java | 100 + .../connect/schemata/dto/RedshiftDto.java | 109 + .../schemata/dto/SchemaValidationDto.java | 17 + .../schemata/dto/SchemaVisualizerDto.java | 32 + .../connect/schemata/dto/TableCsvDto.java | 28 + .../connect/schemata/dto/TableDto.java | 123 + .../schemata/dto/TableFilterOptionsDto.java | 8 + .../connect/schemata/entity/DomainNode.java | 38 + .../connect/schemata/entity/Field.java | 179 + .../connect/schemata/entity/Organization.java | 33 + .../connect/schemata/entity/Table.java | 167 + .../connect/schemata/enums}/EventType.java | 2 +- .../connect/schemata/enums}/ModelType.java | 2 +- .../connect/schemata/enums}/SchemaType.java | 2 +- .../connect/schemata/graph/SchemaGraph.java | 244 + .../schemata/graph/WeightedSchemaEdge.java | 23 +- .../processor/SchemaFileProcessor.java | 17 + .../schemata/processor/avro/AvroSchema.java | 133 + .../processor/avro/AvroSchemaGenerator.java | 172 + .../processor/avro/AvroSchemaParser.java | 262 + .../schemata/processor/json/JsonSchema.java | 146 + .../processor/json/JsonSchemaGenerator.java | 154 + 
.../processor/json/JsonSchemaParser.java | 322 ++ .../schemata/processor/protobuf/Loader.java | 16 + .../ProtoFileDescriptorSetLoader.java | 63 +- .../processor/protobuf/ProtoProcessor.java | 122 + .../processor/protobuf/ProtoSchema.java | 215 + .../protobuf/ProtobufFileGenerator.java | 173 + .../repository/DomainNodeRepository.java | 18 + .../schemata/repository/FieldRepostory.java | 17 + .../repository/OrganizationRepository.java | 10 + .../schemata/repository/SchemaRepository.java | 10 + .../repository/SchemaRepositoryImpl.java | 33 + .../schemata/repository/TableRepository.java | 55 + .../schemata/service/DomainNodeService.java | 78 + .../schemata/service/OrganizationService.java | 56 + .../schemata/service/RedshiftService.java | 81 + .../schemata/service/TableService.java | 848 +++ .../schemata/validate/FieldTrigger.java | 14 + .../schemata/validate/FieldValidator.java | 32 + .../connect/schemata/validate/Result.java | 7 + .../connect}/schemata/validate/Rules.java | 3 +- .../schemata/validate/SchemaTrigger.java | 26 + .../schemata/validate/SchemaValidator.java | 460 ++ .../connect/schemata/validate/Status.java | 5 + .../connect}/schemata/validate/Validator.java | 2 +- .../task/controller/ConnectController.java | 77 + .../task/controller/TaskController.java | 43 + .../opsbeach/connect/task/dto/ConnectDto.java | 72 + .../opsbeach/connect/task/dto/TaskDto.java | 55 + .../opsbeach/connect/task/entity/Connect.java | 102 + .../opsbeach/connect/task/entity/Task.java | 55 + .../task/repository/ConnectRepository.java | 12 + .../task/repository/TaskRepository.java | 7 + .../connect/task/service/ConnectService.java | 276 + .../connect/task/service/TaskService.java | 102 + .../src/main/resources/application-url.yml | 52 + connect/src/main/resources/application.yaml | 80 + .../main/resources/db/migration/V1__Init.sql | 261 + .../schemata/protobuf/schemata.proto | 590 ++ .../github/service/ActivityServiceTest.java | 90 + .../github/service/ClientRepoServiceTest.java | 361 
++ .../github/service/CommentServiceTest.java | 85 + .../github/service/DomainServiceTest.java | 111 + .../github/service/EventAuditServiceTest.java | 167 + .../github/service/GitHubServiceTest.java | 551 ++ .../github/service/ModelServiceTest.java | 294 + .../service/PullRequestServiceTest.java | 256 + .../service/SchemaFileAuditServiceTest.java | 415 ++ .../github/service/WorkflowServiceTest.java | 435 ++ .../metrics/service/SlaServiceTest.java | 113 + .../scheduler/SchedulerTaskServiceTest.java | 95 + .../connect/scheduler/SyncSchedulerTest.java | 52 + .../schemata/graph/SchemaGraphTest.java | 111 + .../processor/avro/AvroSchemaTest.java | 147 + .../processor/json/JsonSchemaTest.java | 156 + .../processor/protobuf/ProtoSchemaTest.java | 182 + .../service/DomainNodeServiceTest.java | 158 + .../service/OrganizationServiceTest.java | 83 + .../schemata/service/RedshiftServiceTest.java | 67 + .../schemata/service/TableServiceTest.java | 563 ++ .../task/service/ConnectServiceTest.java | 278 + .../connect/task/service/TaskServiceTest.java | 166 + .../src/test/resources/application-test.yaml | 60 + .../src/test/resources/github-response.json | 28 + .../resources/schema_1/avro/avro_testing.avsc | 954 ++++ .../test/resources/schema_1/csv/sample.csv | 8 + .../schema_1/descriptors/entities.desc | Bin 0 -> 14021 bytes .../test/resources/schema_1/json/product.json | 86 + .../schema_1/json/productListFiltered.json | 90 + .../src/main/schema/Brand/brand.desc | Bin 0 -> 90269 bytes .../src/main/schema/Brand/brand.proto | 48 + .../src/main/schema/Product/product.desc | Bin 0 -> 95310 bytes .../src/main/schema/Product/product.proto | 59 + .../repository/src/main/schema/campaign.desc | Bin 120154 -> 106537 bytes .../repository/src/main/schema/campaign.proto | 105 + .../repository/src/main/schema/category.desc | Bin 0 -> 90274 bytes .../repository/src/main/schema/category.proto | 44 + .../repository/src/main/schema/user.desc | Bin 119863 -> 108012 bytes 
.../repository/src/main/schema/user.proto | 141 + .../src}/org/schemata/protobuf/schemata.proto | 21 +- .../test/resources/schema_2/roughFile.json | 3 + connect/test-connect-trigger.yaml | 10 + document.sh | 2 - homecert.yaml | 17 + https-redirect-config.yaml | 9 + install.sh | 21 - pom.xml | 578 +- schemata-api-ingress.yaml | 44 + schemata-web-ingress.yaml | 19 + score.sh | 2 - shared-lib/pom.xml | 123 + .../sharedlib/SharedLibApplication.java | 14 + .../sharedlib/controller/CacheController.java | 30 + .../dto/AuthenticationResponseDto.java | 39 + .../com/opsbeach/sharedlib/dto/ClientDto.java | 24 + .../sharedlib/dto/ExceptionDetailDto.java | 26 + .../sharedlib/dto/GenericResponseDto.java | 21 + .../com/opsbeach/sharedlib/dto/JweDto.java | 41 + .../com/opsbeach/sharedlib/dto/JwtDto.java | 37 + .../opsbeach/sharedlib/dto/KeyStoreDto.java | 25 + .../com/opsbeach/sharedlib/dto/LoginDto.java | 26 + .../opsbeach/sharedlib/dto/PermissionDto.java | 25 + .../sharedlib/dto/RefreshTokenDto.java | 16 + .../sharedlib/dto/RegisterClientDto.java | 20 + .../sharedlib/dto/RegistrationDto.java | 31 + .../com/opsbeach/sharedlib/dto/RoleDto.java | 28 + .../opsbeach/sharedlib/dto/SessionDto.java | 28 + .../com/opsbeach/sharedlib/dto/UserDto.java | 71 + .../exception/AlreadyExistException.java | 8 + .../exception/BadRequestException.java | 8 + .../exception/BaseExceptionHandler.java | 185 + .../exception/ClientEmptyException.java | 8 + .../exception/CompletableFutureException.java | 8 + .../sharedlib/exception/EncodeException.java | 8 + .../exception/EnumValidationException.java | 32 + .../sharedlib/exception/ErrorCode.java | 94 + .../sharedlib/exception/ErrorSource.java | 131 + .../sharedlib/exception/ExceptionDetail.java | 140 + .../exception/ExceptionResolver.java | 29 + .../exception/ExceptionResponseCreator.java | 91 + .../exception/FileNotFoundException.java | 8 + .../exception/GoogleCloudException.java | 8 + .../exception/InvalidDataException.java | 13 + 
.../exception/LoggedOutException.java | 8 + .../exception/PreConditionException.java | 8 + .../exception/RecordNotFoundException.java | 8 + .../exception/SchemaNotFoundException.java | 24 + .../exception/SchemaParserException.java | 11 + .../exception/ServicesException.java | 27 + .../exception/UnAuthorizedException.java | 29 + .../exception/UserExistException.java | 8 + .../sharedlib/repository/CacheRepository.java | 82 + .../sharedlib/response/ResponseMessage.java | 60 + .../sharedlib/response/SuccessCode.java | 21 + .../sharedlib/response/SuccessMessage.java | 34 + .../sharedlib/response/SuccessResponse.java | 41 + .../sharedlib/security/ApplicationConfig.java | 30 + .../security/AuthenticationProvider.java | 133 + .../security/AuthenticationToken.java | 17 + .../security/BaseAuthentication.java | 61 + .../sharedlib/security/CacheBeanConfig.java | 34 + .../sharedlib/security/EmailConfig.java | 35 + .../com/opsbeach/sharedlib/security/HMac.java | 41 + .../sharedlib/security/JweLibrary.java | 38 + .../security/JwtAuthenticationFilter.java | 157 + .../sharedlib/security/RSAKeyGen.java | 72 + .../sharedlib/security/RSAMechanism.java | 66 + .../security/SecurityConfiguration.java | 119 + .../sharedlib/security/SecurityUtil.java | 90 + .../sharedlib/service/App2AppService.java | 182 + .../sharedlib/service/AuthService.java | 52 + .../sharedlib/service/CacheService.java | 37 + .../sharedlib/service/EmailService.java | 23 + .../sharedlib/service/GoogleCloudService.java | 108 + .../sharedlib/service/JwtTokenService.java | 39 + .../sharedlib/service/SecretService.java | 19 + .../opsbeach/sharedlib/utils/Base62Util.java | 17 + .../opsbeach/sharedlib/utils/CacheUtil.java | 247 + .../opsbeach/sharedlib/utils/Constants.java | 74 + .../opsbeach/sharedlib/utils/DateUtil.java | 247 + .../opsbeach/sharedlib/utils/FileUtil.java | 221 + .../opsbeach/sharedlib/utils/FutureUtil.java | 51 + .../opsbeach/sharedlib/utils/JsonUtil.java | 115 + .../sharedlib/utils/OnboardStatus.java | 47 
+ .../com/opsbeach/sharedlib/utils/Status.java | 36 + .../opsbeach/sharedlib/utils/StringUtil.java | 92 + .../utils/YamlPropertySourceFactory.java | 55 + .../main/resources/application-messages.yaml | 88 + .../src/main/resources/application.yaml | 22 + .../src/main/resources/private_pkcs8.pem | 28 + shared-lib/src/main/resources/public.pem | 9 + .../java/org/schemata/SchemataExecutor.java | 92 - src/main/java/org/schemata/SchemataMain.java | 16 - .../java/org/schemata/app/DocumentApp.java | 27 - .../java/org/schemata/app/SchemaScoreApp.java | 40 - .../org/schemata/app/SchemaValidatorApp.java | 46 - .../org/schemata/compatibility/Result.java | 7 - .../SchemaCompatibilityChecker.java | 7 - .../org/schemata/compatibility/Summary.java | 40 - .../java/org/schemata/domain/Constraints.java | 34 - .../java/org/schemata/domain/Depends.java | 4 - src/main/java/org/schemata/domain/Field.java | 110 - src/main/java/org/schemata/domain/Link.java | 4 - src/main/java/org/schemata/domain/Schema.java | 157 - .../java/org/schemata/domain/Subscribers.java | 4 - .../exception/SchemaNotFoundException.java | 23 - .../exception/SchemaParserException.java | 11 - .../java/org/schemata/graph/SchemaGraph.java | 176 - src/main/java/org/schemata/json/Json.java | 43 - .../java/org/schemata/printer/Console.java | 16 - .../org/schemata/provider/SchemaParser.java | 10 - .../avro/AvroSchemaCompatibilityChecker.java | 14 - .../provider/avro/AvroSchemaParser.java | 123 - .../provider/dbt/DbtCatalogMetadata.java | 19 - .../provider/dbt/DbtCatalogParser.java | 108 - .../provider/dbt/DbtManifestParser.java | 159 - .../dbt/DbtSchemaCompatibilityChecker.java | 90 - .../provider/dbt/DbtSchemaParser.java | 24 - .../schemata/provider/protobuf/Loader.java | 13 - .../provider/protobuf/ProtoProcessor.java | 163 - .../ProtoSchemaCompatibilityChecker.java | 120 - .../provider/protobuf/ProtoSchemaParser.java | 34 - .../org/schemata/validate/FieldTrigger.java | 13 - .../org/schemata/validate/FieldValidator.java | 29 
- .../java/org/schemata/validate/Result.java | 7 - .../org/schemata/validate/SchemaTrigger.java | 20 - .../schemata/validate/SchemaValidator.java | 30 - .../java/org/schemata/validate/Status.java | 5 - src/main/resources/avro/brand.avsc | 9 - src/main/resources/logback.xml | 24 - src/main/resources/schema/brand.proto | 49 - src/main/resources/schema/campaign.proto | 90 - src/main/resources/schema/category.proto | 49 - src/main/resources/schema/product.proto | 57 - src/main/resources/schema/user.proto | 161 - .../org/schemata/protobuf/constraints.proto | 31 - .../org/schemata/protobuf/subscribers.proto | 32 - .../java/org/schemata/ResourceLoader.java | 56 - .../org/schemata/SchemataExecutorTest.java | 49 - .../org/schemata/graph/SchemaGraphTest.java | 125 - .../provider/avro/AvroSchemaParserTest.java | 66 - .../provider/dbt/DbtCatalogParserTest.java | 68 - .../provider/dbt/DbtManifestParserTest.java | 64 - .../DbtSchemaCompatibilityCheckerTest.java | 23 - .../provider/dbt/DbtSchemaParserTest.java | 14 - .../provider/protobuf/ProtoProcessorTest.java | 97 - .../ProtoSchemaCompatibilityCheckerTest.java | 21 - .../schemata/validate/FieldValidatorTest.java | 46 - .../java/org/schemata/validate/RulesTest.java | 14 - .../validate/SchemaValidatorTest.java | 66 - src/test/resources/avro_schema/brand.avsc | 35 - src/test/resources/dbt/catalog.json | 470 -- src/test/resources/dbt/manifest.json | 4889 ----------------- src/test/resources/dbt_change/catalog.json | 470 -- src/test/resources/dbt_change/manifest.json | 4889 ----------------- src/test/resources/schema/entities.proto | 35 - user/build-user-trigger.yaml | 35 + user/k8s_user.yaml | 104 + user/pom.xml | 175 + .../com/opsbeach/user/UserApplication.java | 23 + .../com/opsbeach/user/base/App2AppConfig.java | 36 + .../java/com/opsbeach/user/base/BaseDto.java | 34 + .../com/opsbeach/user/base/BaseMapper.java | 15 + .../com/opsbeach/user/base/BaseModel.java | 53 + .../opsbeach/user/base/BaseRepository.java | 19 + 
.../user/base/BaseRepositoryImpl.java | 65 + .../com/opsbeach/user/base/BaseService.java | 141 + .../com/opsbeach/user/base/Constants.java | 43 + .../user/base/hibernate/HibernateConfig.java | 41 + .../base/specification/IdSpecifications.java | 97 + .../controller/AuthenticationController.java | 122 + .../user/controller/ClientController.java | 63 + .../user/controller/JwtController.java | 63 + .../user/controller/SessionController.java | 35 + .../user/controller/UserController.java | 57 + .../java/com/opsbeach/user/dto/ClientDto.java | 24 + .../com/opsbeach/user/dto/UserDetailDto.java | 47 + .../java/com/opsbeach/user/dto/UserDto.java | 31 + .../java/com/opsbeach/user/entity/Client.java | 31 + .../java/com/opsbeach/user/entity/Jwt.java | 33 + .../com/opsbeach/user/entity/Permission.java | 26 + .../java/com/opsbeach/user/entity/Role.java | 27 + .../opsbeach/user/entity/RolePermission.java | 30 + .../com/opsbeach/user/entity/Session.java | 36 + .../java/com/opsbeach/user/entity/User.java | 79 + .../com/opsbeach/user/entity/UserRole.java | 30 + .../opsbeach/user/mapper/ClientMapper.java | 35 + .../com/opsbeach/user/mapper/JwtMapper.java | 30 + .../opsbeach/user/mapper/SessionMapper.java | 32 + .../com/opsbeach/user/mapper/UserMapper.java | 75 + .../user/repository/ClientRepository.java | 16 + .../user/repository/JwtRespository.java | 10 + .../user/repository/PermissionRepository.java | 14 + .../repository/RolePermissionRepository.java | 14 + .../user/repository/RoleRepository.java | 14 + .../user/repository/SessionRepository.java | 10 + .../user/repository/UserRepository.java | 15 + .../user/repository/UserRoleRepository.java | 14 + .../user/service/AuthenticationService.java | 299 + .../opsbeach/user/service/ClientService.java | 94 + .../com/opsbeach/user/service/JwtService.java | 73 + .../user/service/RolePermissionService.java | 74 + .../opsbeach/user/service/RoleService.java | 130 + .../opsbeach/user/service/SessionService.java | 35 + 
.../user/service/UserRoleService.java | 19 + .../opsbeach/user/service/UserService.java | 362 ++ .../com/opsbeach/user/utils/Constants.java | 16 + user/src/main/resources/application-url.yml | 6 + user/src/main/resources/application.yml | 38 + .../main/resources/db/migration/V1__Init.sql | 149 + user/test-user-trigger.yaml | 10 + validate.sh | 2 - 415 files changed, 27216 insertions(+), 15388 deletions(-) delete mode 100644 .DS_Store delete mode 100644 .github/ISSUE_TEMPLATE/bug_report.md delete mode 100644 .github/ISSUE_TEMPLATE/feature_request.md delete mode 100644 .github/PULL_REQUEST_TEMPLATE.md delete mode 100644 .github/workflows/ci.yml delete mode 100644 .github/workflows/codeql-analysis.yml delete mode 100644 .github/workflows/proto-schema-compatibility.yml delete mode 100644 CNAME delete mode 100644 CODE_OF_CONDUCT.md delete mode 100644 CONTRIBUTING.md create mode 100644 Dockerfile.connect create mode 100644 Dockerfile.user delete mode 100644 LICENSE delete mode 100644 Makefile delete mode 100644 SECURITY.md delete mode 100644 _config.yml delete mode 100644 asset/ds_classification.png delete mode 100644 asset/logo copy.png delete mode 100644 asset/logo.png delete mode 100644 asset/sample_schema.png create mode 100644 cert.yaml create mode 100644 connect/build-connect-trigger.yaml create mode 100644 connect/k8s_connect.yaml create mode 100644 connect/pom.xml create mode 100644 connect/src/main/java/com/opsbeach/connect/ConnectApplication.java create mode 100644 connect/src/main/java/com/opsbeach/connect/config/Neo4JConfig.java create mode 100644 connect/src/main/java/com/opsbeach/connect/config/SchedulerConfig.java create mode 100644 connect/src/main/java/com/opsbeach/connect/core/App2AppConfig.java create mode 100644 connect/src/main/java/com/opsbeach/connect/core/BaseDto.java create mode 100644 connect/src/main/java/com/opsbeach/connect/core/BaseModel.java create mode 100644 connect/src/main/java/com/opsbeach/connect/core/BaseRepository.java create mode 
100644 connect/src/main/java/com/opsbeach/connect/core/BaseRepositoryImpl.java create mode 100644 connect/src/main/java/com/opsbeach/connect/core/enums/AuthType.java create mode 100644 connect/src/main/java/com/opsbeach/connect/core/enums/ServiceType.java create mode 100644 connect/src/main/java/com/opsbeach/connect/core/enums/TaskType.java create mode 100644 connect/src/main/java/com/opsbeach/connect/core/hibernate/HibernateConfig.java create mode 100644 connect/src/main/java/com/opsbeach/connect/core/specification/IdSpecifications.java create mode 100644 connect/src/main/java/com/opsbeach/connect/core/utils/Constants.java create mode 100644 connect/src/main/java/com/opsbeach/connect/core/utils/TaskUtils.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/controller/ActivityController.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/controller/ClientRepoController.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/controller/CommentController.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/controller/DomainController.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/controller/EventAuditController.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/controller/GitHubController.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/controller/ModelController.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/controller/PullRequestController.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/controller/SchemaFileAuditController.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/controller/WorkflowController.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/dto/ActivityDto.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/dto/AutoCompleteModelDto.java create mode 100644 
connect/src/main/java/com/opsbeach/connect/github/dto/ClientRepoDto.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/dto/CommentDto.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/dto/DashboardDto.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/dto/DomainDto.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/dto/EventAuditDto.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/dto/GitHubDto.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/dto/GithubActionDto.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/dto/ModelDto.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/dto/PullRequestDto.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/dto/SchemaFileAuditDto.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/dto/WorkflowDto.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/entity/Activity.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/entity/ClientRepo.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/entity/Comment.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/entity/Domain.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/entity/EventAudit.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/entity/Model.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/entity/PullRequest.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/entity/SchemaFileAudit.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/entity/Workflow.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/repository/ActivityRepository.java create mode 100644 
connect/src/main/java/com/opsbeach/connect/github/repository/ClientRepoRepository.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/repository/CommentRepository.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/repository/DomainRepository.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/repository/EventAuditRepository.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/repository/ModelRepository.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/repository/PullRequestRepository.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/repository/SchemaFileAuditRepository.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/repository/WorkflowRepository.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/service/ActivityService.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/service/ClientRepoService.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/service/CommentService.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/service/DomainService.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/service/EventAuditService.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/service/GitHubService.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/service/ModelService.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/service/PullRequestService.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/service/SchemaFileAuditService.java create mode 100644 connect/src/main/java/com/opsbeach/connect/github/service/WorkflowService.java create mode 100644 connect/src/main/java/com/opsbeach/connect/metrics/controller/SlaController.java create mode 100644 connect/src/main/java/com/opsbeach/connect/metrics/dto/SlaDto.java create 
mode 100644 connect/src/main/java/com/opsbeach/connect/metrics/entity/Sla.java create mode 100644 connect/src/main/java/com/opsbeach/connect/metrics/repository/SlaRepository.java create mode 100644 connect/src/main/java/com/opsbeach/connect/metrics/service/SlaService.java create mode 100644 connect/src/main/java/com/opsbeach/connect/scheduler/SchedulerTaskService.java create mode 100644 connect/src/main/java/com/opsbeach/connect/scheduler/SyncScheduler.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/controller/DomainNodeController.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/controller/OrganizationController.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/controller/TableController.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/dto/FieldDto.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/dto/RedshiftDto.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/dto/SchemaValidationDto.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/dto/SchemaVisualizerDto.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/dto/TableCsvDto.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/dto/TableDto.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/dto/TableFilterOptionsDto.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/entity/DomainNode.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/entity/Field.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/entity/Organization.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/entity/Table.java rename {src/main/java/org/schemata/domain => connect/src/main/java/com/opsbeach/connect/schemata/enums}/EventType.java (86%) rename {src/main/java/org/schemata/domain => 
connect/src/main/java/com/opsbeach/connect/schemata/enums}/ModelType.java (85%) rename {src/main/java/org/schemata/domain => connect/src/main/java/com/opsbeach/connect/schemata/enums}/SchemaType.java (86%) create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/graph/SchemaGraph.java rename {src/main/java/org => connect/src/main/java/com/opsbeach/connect}/schemata/graph/WeightedSchemaEdge.java (71%) create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/processor/SchemaFileProcessor.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/processor/avro/AvroSchema.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/processor/avro/AvroSchemaGenerator.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/processor/avro/AvroSchemaParser.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/processor/json/JsonSchema.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/processor/json/JsonSchemaGenerator.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/processor/json/JsonSchemaParser.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/processor/protobuf/Loader.java rename {src/main/java/org/schemata/provider => connect/src/main/java/com/opsbeach/connect/schemata/processor}/protobuf/ProtoFileDescriptorSetLoader.java (67%) create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/processor/protobuf/ProtoProcessor.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/processor/protobuf/ProtoSchema.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/processor/protobuf/ProtobufFileGenerator.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/repository/DomainNodeRepository.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/repository/FieldRepostory.java create mode 100644 
connect/src/main/java/com/opsbeach/connect/schemata/repository/OrganizationRepository.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/repository/SchemaRepository.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/repository/SchemaRepositoryImpl.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/repository/TableRepository.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/service/DomainNodeService.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/service/OrganizationService.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/service/RedshiftService.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/service/TableService.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/validate/FieldTrigger.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/validate/FieldValidator.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/validate/Result.java rename {src/main/java/org => connect/src/main/java/com/opsbeach/connect}/schemata/validate/Rules.java (80%) create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/validate/SchemaTrigger.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/validate/SchemaValidator.java create mode 100644 connect/src/main/java/com/opsbeach/connect/schemata/validate/Status.java rename {src/main/java/org => connect/src/main/java/com/opsbeach/connect}/schemata/validate/Validator.java (84%) create mode 100644 connect/src/main/java/com/opsbeach/connect/task/controller/ConnectController.java create mode 100644 connect/src/main/java/com/opsbeach/connect/task/controller/TaskController.java create mode 100644 connect/src/main/java/com/opsbeach/connect/task/dto/ConnectDto.java create mode 100644 connect/src/main/java/com/opsbeach/connect/task/dto/TaskDto.java create mode 100644 
connect/src/main/java/com/opsbeach/connect/task/entity/Connect.java create mode 100644 connect/src/main/java/com/opsbeach/connect/task/entity/Task.java create mode 100644 connect/src/main/java/com/opsbeach/connect/task/repository/ConnectRepository.java create mode 100644 connect/src/main/java/com/opsbeach/connect/task/repository/TaskRepository.java create mode 100644 connect/src/main/java/com/opsbeach/connect/task/service/ConnectService.java create mode 100644 connect/src/main/java/com/opsbeach/connect/task/service/TaskService.java create mode 100644 connect/src/main/resources/application-url.yml create mode 100644 connect/src/main/resources/application.yaml create mode 100644 connect/src/main/resources/db/migration/V1__Init.sql create mode 100644 connect/src/main/resources/schemata/protobuf/schemata.proto create mode 100644 connect/src/test/java/com/opsbeach/connect/github/service/ActivityServiceTest.java create mode 100644 connect/src/test/java/com/opsbeach/connect/github/service/ClientRepoServiceTest.java create mode 100644 connect/src/test/java/com/opsbeach/connect/github/service/CommentServiceTest.java create mode 100644 connect/src/test/java/com/opsbeach/connect/github/service/DomainServiceTest.java create mode 100644 connect/src/test/java/com/opsbeach/connect/github/service/EventAuditServiceTest.java create mode 100644 connect/src/test/java/com/opsbeach/connect/github/service/GitHubServiceTest.java create mode 100644 connect/src/test/java/com/opsbeach/connect/github/service/ModelServiceTest.java create mode 100644 connect/src/test/java/com/opsbeach/connect/github/service/PullRequestServiceTest.java create mode 100644 connect/src/test/java/com/opsbeach/connect/github/service/SchemaFileAuditServiceTest.java create mode 100644 connect/src/test/java/com/opsbeach/connect/github/service/WorkflowServiceTest.java create mode 100644 connect/src/test/java/com/opsbeach/connect/metrics/service/SlaServiceTest.java create mode 100644 
connect/src/test/java/com/opsbeach/connect/scheduler/SchedulerTaskServiceTest.java create mode 100644 connect/src/test/java/com/opsbeach/connect/scheduler/SyncSchedulerTest.java create mode 100644 connect/src/test/java/com/opsbeach/connect/schemata/graph/SchemaGraphTest.java create mode 100644 connect/src/test/java/com/opsbeach/connect/schemata/processor/avro/AvroSchemaTest.java create mode 100644 connect/src/test/java/com/opsbeach/connect/schemata/processor/json/JsonSchemaTest.java create mode 100644 connect/src/test/java/com/opsbeach/connect/schemata/processor/protobuf/ProtoSchemaTest.java create mode 100644 connect/src/test/java/com/opsbeach/connect/schemata/service/DomainNodeServiceTest.java create mode 100644 connect/src/test/java/com/opsbeach/connect/schemata/service/OrganizationServiceTest.java create mode 100644 connect/src/test/java/com/opsbeach/connect/schemata/service/RedshiftServiceTest.java create mode 100644 connect/src/test/java/com/opsbeach/connect/schemata/service/TableServiceTest.java create mode 100644 connect/src/test/java/com/opsbeach/connect/task/service/ConnectServiceTest.java create mode 100644 connect/src/test/java/com/opsbeach/connect/task/service/TaskServiceTest.java create mode 100644 connect/src/test/resources/application-test.yaml create mode 100644 connect/src/test/resources/github-response.json create mode 100644 connect/src/test/resources/schema_1/avro/avro_testing.avsc create mode 100644 connect/src/test/resources/schema_1/csv/sample.csv create mode 100644 connect/src/test/resources/schema_1/descriptors/entities.desc create mode 100644 connect/src/test/resources/schema_1/json/product.json create mode 100644 connect/src/test/resources/schema_1/json/productListFiltered.json create mode 100644 connect/src/test/resources/schema_1/protobuf/repository/src/main/schema/Brand/brand.desc create mode 100644 connect/src/test/resources/schema_1/protobuf/repository/src/main/schema/Brand/brand.proto create mode 100644 
connect/src/test/resources/schema_1/protobuf/repository/src/main/schema/Product/product.desc create mode 100644 connect/src/test/resources/schema_1/protobuf/repository/src/main/schema/Product/product.proto rename src/test/resources/descriptors/changed_model.desc => connect/src/test/resources/schema_1/protobuf/repository/src/main/schema/campaign.desc (55%) create mode 100644 connect/src/test/resources/schema_1/protobuf/repository/src/main/schema/campaign.proto create mode 100644 connect/src/test/resources/schema_1/protobuf/repository/src/main/schema/category.desc create mode 100644 connect/src/test/resources/schema_1/protobuf/repository/src/main/schema/category.proto rename src/test/resources/descriptors/model.desc => connect/src/test/resources/schema_1/protobuf/repository/src/main/schema/user.desc (55%) create mode 100644 connect/src/test/resources/schema_1/protobuf/repository/src/main/schema/user.proto rename {src/opencontract/v1 => connect/src/test/resources/schema_1/protobuf/repository/src}/org/schemata/protobuf/schemata.proto (98%) create mode 100644 connect/src/test/resources/schema_2/roughFile.json create mode 100644 connect/test-connect-trigger.yaml delete mode 100755 document.sh create mode 100644 homecert.yaml create mode 100644 https-redirect-config.yaml delete mode 100644 install.sh create mode 100644 schemata-api-ingress.yaml create mode 100644 schemata-web-ingress.yaml delete mode 100755 score.sh create mode 100644 shared-lib/pom.xml create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/SharedLibApplication.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/controller/CacheController.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/dto/AuthenticationResponseDto.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/dto/ClientDto.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/dto/ExceptionDetailDto.java create mode 100644 
shared-lib/src/main/java/com/opsbeach/sharedlib/dto/GenericResponseDto.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/dto/JweDto.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/dto/JwtDto.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/dto/KeyStoreDto.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/dto/LoginDto.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/dto/PermissionDto.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/dto/RefreshTokenDto.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/dto/RegisterClientDto.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/dto/RegistrationDto.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/dto/RoleDto.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/dto/SessionDto.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/dto/UserDto.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/exception/AlreadyExistException.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/exception/BadRequestException.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/exception/BaseExceptionHandler.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/exception/ClientEmptyException.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/exception/CompletableFutureException.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/exception/EncodeException.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/exception/EnumValidationException.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/exception/ErrorCode.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/exception/ErrorSource.java create mode 100644 
shared-lib/src/main/java/com/opsbeach/sharedlib/exception/ExceptionDetail.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/exception/ExceptionResolver.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/exception/ExceptionResponseCreator.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/exception/FileNotFoundException.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/exception/GoogleCloudException.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/exception/InvalidDataException.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/exception/LoggedOutException.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/exception/PreConditionException.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/exception/RecordNotFoundException.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/exception/SchemaNotFoundException.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/exception/SchemaParserException.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/exception/ServicesException.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/exception/UnAuthorizedException.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/exception/UserExistException.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/repository/CacheRepository.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/response/ResponseMessage.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/response/SuccessCode.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/response/SuccessMessage.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/response/SuccessResponse.java create mode 100644 
shared-lib/src/main/java/com/opsbeach/sharedlib/security/ApplicationConfig.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/security/AuthenticationProvider.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/security/AuthenticationToken.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/security/BaseAuthentication.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/security/CacheBeanConfig.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/security/EmailConfig.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/security/HMac.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/security/JweLibrary.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/security/JwtAuthenticationFilter.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/security/RSAKeyGen.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/security/RSAMechanism.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/security/SecurityConfiguration.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/security/SecurityUtil.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/service/App2AppService.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/service/AuthService.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/service/CacheService.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/service/EmailService.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/service/GoogleCloudService.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/service/JwtTokenService.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/service/SecretService.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/utils/Base62Util.java create 
mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/utils/CacheUtil.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/utils/Constants.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/utils/DateUtil.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/utils/FileUtil.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/utils/FutureUtil.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/utils/JsonUtil.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/utils/OnboardStatus.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/utils/Status.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/utils/StringUtil.java create mode 100644 shared-lib/src/main/java/com/opsbeach/sharedlib/utils/YamlPropertySourceFactory.java create mode 100644 shared-lib/src/main/resources/application-messages.yaml create mode 100644 shared-lib/src/main/resources/application.yaml create mode 100644 shared-lib/src/main/resources/private_pkcs8.pem create mode 100644 shared-lib/src/main/resources/public.pem delete mode 100644 src/main/java/org/schemata/SchemataExecutor.java delete mode 100644 src/main/java/org/schemata/SchemataMain.java delete mode 100644 src/main/java/org/schemata/app/DocumentApp.java delete mode 100644 src/main/java/org/schemata/app/SchemaScoreApp.java delete mode 100644 src/main/java/org/schemata/app/SchemaValidatorApp.java delete mode 100644 src/main/java/org/schemata/compatibility/Result.java delete mode 100644 src/main/java/org/schemata/compatibility/SchemaCompatibilityChecker.java delete mode 100644 src/main/java/org/schemata/compatibility/Summary.java delete mode 100644 src/main/java/org/schemata/domain/Constraints.java delete mode 100644 src/main/java/org/schemata/domain/Depends.java delete mode 100644 src/main/java/org/schemata/domain/Field.java delete mode 100644 src/main/java/org/schemata/domain/Link.java 
delete mode 100644 src/main/java/org/schemata/domain/Schema.java delete mode 100644 src/main/java/org/schemata/domain/Subscribers.java delete mode 100644 src/main/java/org/schemata/exception/SchemaNotFoundException.java delete mode 100644 src/main/java/org/schemata/exception/SchemaParserException.java delete mode 100644 src/main/java/org/schemata/graph/SchemaGraph.java delete mode 100644 src/main/java/org/schemata/json/Json.java delete mode 100644 src/main/java/org/schemata/printer/Console.java delete mode 100644 src/main/java/org/schemata/provider/SchemaParser.java delete mode 100644 src/main/java/org/schemata/provider/avro/AvroSchemaCompatibilityChecker.java delete mode 100644 src/main/java/org/schemata/provider/avro/AvroSchemaParser.java delete mode 100644 src/main/java/org/schemata/provider/dbt/DbtCatalogMetadata.java delete mode 100644 src/main/java/org/schemata/provider/dbt/DbtCatalogParser.java delete mode 100644 src/main/java/org/schemata/provider/dbt/DbtManifestParser.java delete mode 100644 src/main/java/org/schemata/provider/dbt/DbtSchemaCompatibilityChecker.java delete mode 100644 src/main/java/org/schemata/provider/dbt/DbtSchemaParser.java delete mode 100644 src/main/java/org/schemata/provider/protobuf/Loader.java delete mode 100644 src/main/java/org/schemata/provider/protobuf/ProtoProcessor.java delete mode 100644 src/main/java/org/schemata/provider/protobuf/ProtoSchemaCompatibilityChecker.java delete mode 100644 src/main/java/org/schemata/provider/protobuf/ProtoSchemaParser.java delete mode 100644 src/main/java/org/schemata/validate/FieldTrigger.java delete mode 100644 src/main/java/org/schemata/validate/FieldValidator.java delete mode 100644 src/main/java/org/schemata/validate/Result.java delete mode 100644 src/main/java/org/schemata/validate/SchemaTrigger.java delete mode 100644 src/main/java/org/schemata/validate/SchemaValidator.java delete mode 100644 src/main/java/org/schemata/validate/Status.java delete mode 100644 
src/main/resources/avro/brand.avsc delete mode 100644 src/main/resources/logback.xml delete mode 100644 src/main/resources/schema/brand.proto delete mode 100644 src/main/resources/schema/campaign.proto delete mode 100644 src/main/resources/schema/category.proto delete mode 100644 src/main/resources/schema/product.proto delete mode 100644 src/main/resources/schema/user.proto delete mode 100644 src/opencontract/v1/org/schemata/protobuf/constraints.proto delete mode 100644 src/opencontract/v1/org/schemata/protobuf/subscribers.proto delete mode 100644 src/test/java/org/schemata/ResourceLoader.java delete mode 100644 src/test/java/org/schemata/SchemataExecutorTest.java delete mode 100644 src/test/java/org/schemata/graph/SchemaGraphTest.java delete mode 100644 src/test/java/org/schemata/provider/avro/AvroSchemaParserTest.java delete mode 100644 src/test/java/org/schemata/provider/dbt/DbtCatalogParserTest.java delete mode 100644 src/test/java/org/schemata/provider/dbt/DbtManifestParserTest.java delete mode 100644 src/test/java/org/schemata/provider/dbt/DbtSchemaCompatibilityCheckerTest.java delete mode 100644 src/test/java/org/schemata/provider/dbt/DbtSchemaParserTest.java delete mode 100644 src/test/java/org/schemata/provider/protobuf/ProtoProcessorTest.java delete mode 100644 src/test/java/org/schemata/provider/protobuf/ProtoSchemaCompatibilityCheckerTest.java delete mode 100644 src/test/java/org/schemata/validate/FieldValidatorTest.java delete mode 100644 src/test/java/org/schemata/validate/RulesTest.java delete mode 100644 src/test/java/org/schemata/validate/SchemaValidatorTest.java delete mode 100644 src/test/resources/avro_schema/brand.avsc delete mode 100644 src/test/resources/dbt/catalog.json delete mode 100644 src/test/resources/dbt/manifest.json delete mode 100644 src/test/resources/dbt_change/catalog.json delete mode 100644 src/test/resources/dbt_change/manifest.json delete mode 100644 src/test/resources/schema/entities.proto create mode 100644 
user/build-user-trigger.yaml create mode 100644 user/k8s_user.yaml create mode 100644 user/pom.xml create mode 100644 user/src/main/java/com/opsbeach/user/UserApplication.java create mode 100644 user/src/main/java/com/opsbeach/user/base/App2AppConfig.java create mode 100644 user/src/main/java/com/opsbeach/user/base/BaseDto.java create mode 100644 user/src/main/java/com/opsbeach/user/base/BaseMapper.java create mode 100644 user/src/main/java/com/opsbeach/user/base/BaseModel.java create mode 100644 user/src/main/java/com/opsbeach/user/base/BaseRepository.java create mode 100644 user/src/main/java/com/opsbeach/user/base/BaseRepositoryImpl.java create mode 100644 user/src/main/java/com/opsbeach/user/base/BaseService.java create mode 100644 user/src/main/java/com/opsbeach/user/base/Constants.java create mode 100644 user/src/main/java/com/opsbeach/user/base/hibernate/HibernateConfig.java create mode 100644 user/src/main/java/com/opsbeach/user/base/specification/IdSpecifications.java create mode 100644 user/src/main/java/com/opsbeach/user/controller/AuthenticationController.java create mode 100644 user/src/main/java/com/opsbeach/user/controller/ClientController.java create mode 100644 user/src/main/java/com/opsbeach/user/controller/JwtController.java create mode 100644 user/src/main/java/com/opsbeach/user/controller/SessionController.java create mode 100644 user/src/main/java/com/opsbeach/user/controller/UserController.java create mode 100644 user/src/main/java/com/opsbeach/user/dto/ClientDto.java create mode 100644 user/src/main/java/com/opsbeach/user/dto/UserDetailDto.java create mode 100644 user/src/main/java/com/opsbeach/user/dto/UserDto.java create mode 100644 user/src/main/java/com/opsbeach/user/entity/Client.java create mode 100644 user/src/main/java/com/opsbeach/user/entity/Jwt.java create mode 100644 user/src/main/java/com/opsbeach/user/entity/Permission.java create mode 100644 user/src/main/java/com/opsbeach/user/entity/Role.java create mode 100644 
user/src/main/java/com/opsbeach/user/entity/RolePermission.java create mode 100644 user/src/main/java/com/opsbeach/user/entity/Session.java create mode 100644 user/src/main/java/com/opsbeach/user/entity/User.java create mode 100644 user/src/main/java/com/opsbeach/user/entity/UserRole.java create mode 100644 user/src/main/java/com/opsbeach/user/mapper/ClientMapper.java create mode 100644 user/src/main/java/com/opsbeach/user/mapper/JwtMapper.java create mode 100644 user/src/main/java/com/opsbeach/user/mapper/SessionMapper.java create mode 100644 user/src/main/java/com/opsbeach/user/mapper/UserMapper.java create mode 100644 user/src/main/java/com/opsbeach/user/repository/ClientRepository.java create mode 100644 user/src/main/java/com/opsbeach/user/repository/JwtRespository.java create mode 100644 user/src/main/java/com/opsbeach/user/repository/PermissionRepository.java create mode 100644 user/src/main/java/com/opsbeach/user/repository/RolePermissionRepository.java create mode 100644 user/src/main/java/com/opsbeach/user/repository/RoleRepository.java create mode 100644 user/src/main/java/com/opsbeach/user/repository/SessionRepository.java create mode 100644 user/src/main/java/com/opsbeach/user/repository/UserRepository.java create mode 100644 user/src/main/java/com/opsbeach/user/repository/UserRoleRepository.java create mode 100644 user/src/main/java/com/opsbeach/user/service/AuthenticationService.java create mode 100644 user/src/main/java/com/opsbeach/user/service/ClientService.java create mode 100644 user/src/main/java/com/opsbeach/user/service/JwtService.java create mode 100644 user/src/main/java/com/opsbeach/user/service/RolePermissionService.java create mode 100644 user/src/main/java/com/opsbeach/user/service/RoleService.java create mode 100644 user/src/main/java/com/opsbeach/user/service/SessionService.java create mode 100644 user/src/main/java/com/opsbeach/user/service/UserRoleService.java create mode 100644 
user/src/main/java/com/opsbeach/user/service/UserService.java create mode 100644 user/src/main/java/com/opsbeach/user/utils/Constants.java create mode 100644 user/src/main/resources/application-url.yml create mode 100644 user/src/main/resources/application.yml create mode 100644 user/src/main/resources/db/migration/V1__Init.sql create mode 100644 user/test-user-trigger.yaml delete mode 100755 validate.sh diff --git a/.DS_Store b/.DS_Store deleted file mode 100644 index df58dd54aa75a239108fbef8a3163629d0d177af..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 8196 zcmeI1-EPw`6vvPE)pQY1@geO7DH6BS7!)QDH>Dq-Nzh4a2SR{K+J-G!noLc)f~HBm zhIilfa$+BO(i#ZOKM4Bk^_4 zQ)xk;xdtnsCwfml?b8N*q`{Q8IbjGG0)~JgU}QruIm9)y+(y~Geoj_VG)!&bDN zD68Zol$?ZaS?CptkgY@KNOuwyWld=a7y{D-cGWfd0wR&?NFT>h-inNpycBw&g`Mq#?Hck z`nh|2&fVjR*0gDtwy6qb8@5SLXR3eX)Hd}&qDLdNQ-o~h0z!Lo zB-`|ARx3;@qgq5O!>PVvre(ZVEdvJO4(hvL5 zxEhVZek`>ub-^lH#ZGDMY2Tc@W*>&C6yk9IqU!61x2 zVWg0RepS*Xb$nj(xM%u*+!}^K6!NTQjZMdL9{ZR-E;!8`EuIg*DVg{GIbBmR1TF@F zMHS|n4cnN%|9>$$W1=(!41qr)AeQQ_dKJ>M9}M}s@LJnJeuvD7{U*vv2@1I!hm}7B fV9) -``` - -Make the necessary changes. If the changes you plan to make are too big, make sure you break it down into smaller tasks. - -### Making the changes - -Follow the recommendations/best-practices noted here when you are making changes. - -#### Code documentation - -Please ensure your code is adequately documented. Some things to consider for documentation: - -* Always include class level java docs. At the top class level, we are looking for information about what functionality is provided by the class, what state is maintained by the class, whether there are concurrency/thread-safety concerns and any exceptional behavior that the class might exhibit. -* Document public methods and their parameters. - -#### Logging - -* Ensure there is adequate logging for positive paths as well as exceptional paths. As a corollary to this, ensure logs are not noisy. 
-* Do not use System.out.println to log messages. Use the `slf4j` loggers. -* Use logging levels correctly: set level to `debug` for verbose logs useful for only for debugging. -* Do not log stack traces via `printStackTrace` method of the exception. - - -#### Exceptions and Exception-Handling - -* Where possible, throw specific exceptions, preferably checked exceptions, so the callers can easily determine what the erroneous conditions that need to be handled are. -* Avoid catching broad exceptions (i.e., `catch (Exception e)` blocks), except for when this is in the `run()` method of a thread/runnable. - -Current Schemata code does not strictly adhere to this, but we would like to change this over time and adopt best practices around exception handling. - -#### Backward and Forward compatibility changes - -If you are making any changes to state stored, either in Zookeeper or in segments, make sure you consider both backward and forward compatibility issues. - -* For backward compatibility, consider cases where one component is using the new version and another is still on the old version. E.g., when the request format between broker and server is updated, consider resulting behaviors when a new broker is talking to an older server. Will it break? -* For forward compatibility, consider rollback cases. E.g., consider what happens when state persisted by new code is handled by old code. Does the old code skip over new fields? - -#### External libraries - -Be cautious about pulling in external dependencies. You will need to consider multiple things when faced with a need to pull in a new library. - -* What capability is the addition of the library providing you with? Can existing libraries provide this functionality (may be with a little bit of effort)? -* Is the external library maintained by an active community of contributors? -* What are the licensing terms for the library. 
For more information about handling licenses - -#### Testing your changes - -Automated tests are always recommended for contributions. Make sure you write tests so that: - -1. You verify the correctness of your contribution. This serves as proof to you as well as the reviewers. - -Identify a list of tests for the changes you have made. Depending on the scope of changes, you may need one or more of the following tests: - -* Unit Tests - - Make sure your code has the necessary class or method level unit tests. It is important to write both positive case as well as negative case tests. Document your tests well and add meaningful assertions in the tests; when the assertions fail, ensure that the right messages are logged with information that allows other to debug. - -* Integration Tests - - Add integration tests to cover End-to-End paths without relying on _mocking_ (see note below). You `MUST` add integration tests for REST APIs, and must include tests that cover different error codes; i.e., 200 OK, 4xx or 5xx errors that are explicit contracts of the API. - -#### Testing Guidelines - -* **Mocking** - - Use [Mockito](https://site.mockito.org/) to mock classes to control specific behaviors - e.g., simulate various error conditions. - - **DO NOT** use advanced mock libraries such as [PowerMock](https://github.com/powermock/powermock). They make bytecode level changes to allow tests for static/private members but this typically results in other tools like jacoco to fail. They also promote incorrect implementation choices that make it harder to test additional changes. When faced with a choice to use PowerMock or advanced mocking options, you might either need to refactor the code to work better with mocking or you actually need to write an integration test instead of a unit test. - -* **Validate assumptions in tests** - - Make sure that adequate asserts are added in the tests to verify that the tests are passing for the right reasons. 
- -* **Write reliable tests** - - Make sure you are writing tests that are reliable. If the tests depend on asynchronous events to be fired, do not add `sleep` to your tests. Where possible, use appropriate mocking or condition based triggers. - - ### Creating a Pull Request (PR) - - * **Run tests** - - Before you create a review request for the changes, make sure you have run the corresponding unit tests for your changes. You can run individual tests via the IDE or via maven command-line. Finally run all tests locally by running `mvn clean install -Pbin-dist`. -* **Push changes and create a PR for review** - - Commit your changes with a meaningful commit message. - -```text -$ git add -$ git commit -m "Meaningful oneliner for the change" -$ git push origin - -After this, create a PullRequest in `github `_. Include the following information in the description: - - * The changes that are included in the PR. - - * Design document, if any. - - * Information on any implementation choices that were made. - - * Evidence of sufficient testing. You ``MUST`` indicate the tests done, either manually or automated. - -Once the PR is created, the code base is compiled and all tests are run via ``travis``. Make sure you followup on any issues flagged by travis and address them. -If you see test failures that are intermittent, ``please`` create an issue to track them. - -Once the ``travis`` run is clear, request reviews from atleast 2 committers on the project and be sure to gently to followup on the issue with the reviewers. -``` - -* Once you receive comments on github on your changes, be sure to respond to them on github and address the concerns. If any discussions happen offline for the changes in question, make sure to capture the outcome of the discussion, so others can follow along as well. - - It is possible that while your change is being reviewed, other changes were made to the master branch. 
Be sure to pull rebase your change on the new changes thus: - -```text -# commit your changes -$ git add -$ git commit -m "Meaningful message for the udpate" -# pull new changes -$ git checkout master -$ git merge upstream/master -$ git checkout -$ git rebase master - -At this time, if rebase flags any conflicts, resolve the conflicts and follow the instructions provided by the rebase command. - -Run additional tests/validations for the new changes and update the PR by pushing your changes: -``` - -```text -$ git push origin -``` - -* When you have addressed all comments and have an approved PR, one of the committers can merge your PR. -* After your change is merged, check to see if any documentation needs to be updated. If so, create a PR for documentation. - diff --git a/Dockerfile.connect b/Dockerfile.connect new file mode 100644 index 0000000..df3244a --- /dev/null +++ b/Dockerfile.connect @@ -0,0 +1,34 @@ +#stage 1 +FROM maven:3.8.5 as builder + +WORKDIR /app + +ADD pom.xml . +ADD shared-lib/pom.xml ./shared-lib/pom.xml +ADD user/pom.xml ./user/pom.xml +ADD connect/pom.xml ./connect/pom.xml + +RUN mvn -pl shared-lib verify --fail-never +ADD shared-lib ./shared-lib +RUN mvn -pl shared-lib install + +RUN mvn -pl connect verify --fail-never +ADD connect ./connect +RUN mvn -pl shared-lib,connect package + +#stage 2 +FROM ubuntu:23.10 +WORKDIR /connect + +COPY --from=builder /app/connect/target/connect-1.0-SNAPSHOT.jar connect-1.0-SNAPSHOT.jar + +RUN apt-get update && \ + apt-get install -y openjdk-17-jdk && \ + apt-get install -y protobuf-compiler && \ + apt-get clean + +ENV JAVA_HOME /usr/lib/jvm/java-17-openjdk-amd64 +ENV PATH $PATH:$JAVA_HOME/bin + +EXPOSE 7080 +CMD ["java", "-jar", "connect-1.0-SNAPSHOT.jar"] \ No newline at end of file diff --git a/Dockerfile.user b/Dockerfile.user new file mode 100644 index 0000000..c3158f7 --- /dev/null +++ b/Dockerfile.user @@ -0,0 +1,24 @@ +#stage 1 +FROM maven:3.8.5 as builder + +WORKDIR /app +ADD pom.xml . 
+ADD shared-lib/pom.xml ./shared-lib/pom.xml +ADD user/pom.xml ./user/pom.xml +ADD connect/pom.xml ./connect/pom.xml + +RUN mvn -pl shared-lib verify --fail-never +ADD shared-lib ./shared-lib +RUN mvn -pl shared-lib install + +RUN mvn -pl user verify --fail-never +ADD user ./user +RUN mvn -pl shared-lib,user package + +#stage 2 +FROM openjdk:17 +WORKDIR /user + +COPY --from=builder /app/user/target/user-1.0-SNAPSHOT.jar user-1.0-SNAPSHOT.jar +EXPOSE 7083 +CMD ["java", "-jar", "user-1.0-SNAPSHOT.jar"] \ No newline at end of file diff --git a/LICENSE b/LICENSE deleted file mode 100644 index 261eeb9..0000000 --- a/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. 
- - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." 
- - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/Makefile b/Makefile deleted file mode 100644 index 8953bff..0000000 --- a/Makefile +++ /dev/null @@ -1,31 +0,0 @@ - -.PHONY:check_java -check_java: -ifeq (, $(shell which java)) - $(error "Schemata depends on Java17. JDK17 not found in $(PATH)") -endif - -.PHONY: check_maven -check_maven: -ifeq (, $(shell which mvn)) - $(error "Schemata uses maven as a build tool. Maven not found in $(PATH)") -endif - -.PHONY: compile -compile: check_java check_maven - mvn clean compile - -.PHONY: test -test: check_java check_maven - mvn clean test - -.PHONY: package -package: check_java check_maven - mvn clean package - -.PHONY: proto-gen -proto-gen: - protoc --proto_path=src/opencontract/v1/org --proto_path=src/main/resources/schema --descriptor_set_out=model.desc --include_imports --include_source_info ./src/main/resources/**/*.proto - -.PHONY: build-all -build-all: proto-gen package \ No newline at end of file diff --git a/README.md b/README.md index 9bd3d5b..46f6a6f 100644 --- a/README.md +++ b/README.md @@ -1,487 +1,90 @@ -Schemata logo +# opsconnect +Repo for OpsBeach -# Schemata +# compaile +maven clean install -Schemata is a schema modeling framework for decentralized domain-driven ownership of data. 
Schemata combines a set of -standard metadata definitions for each schema & data field and a scoring algorithm to provide a feedback loop on how -efficient the data modeling of your data warehouse is. Schemata support ProtoBuf & Avro formats. +# run +Run as spring boot application -

- Latest Release - License - - - -

+# create secrets for service accounts ------- +https://cloud.google.com/kubernetes-engine/docs/tutorials/authenticating-to-cloud-platform#importing_credentials_as_a_secret +` +kubectl -n schematalabs create secret generic cloud-task-sa-key --from-file=cloud-task-sa-key.json=/Users/sathish/workspace/opsconnect/aldefi-sa/cloud_tasks_sa_cred.json +kubectl -n schematalabs create secret generic cluster-sa-key --from-file=cluster-sa-key.json=/Users/sathish/workspace/opsconnect/aldefi-sa/cluster_sa_cred.json +kubectl -n schematalabs create secret generic compute-sa-key --from-file=compute-sa-key.json=/Users/sathish/workspace/opsconnect/aldefi-sa/compute_sa_cred.json +kubectl -n schematalabs create secret generic bucket-sa-key --from-file=bucket-sa-key.json=/Users/sathish/workspace/opsconnect/aldefi-sa/buckets_sa_cred.json +` +# infra setup -Before we jump on let's walk through Garbage-In Garbage-Out (GIGO) problem in the data lake +gcloud config set project prodenv1 +create vpc/subnet +create cluster +install gcloud/gcloud components update +gcloud init +gcloud auth login +gcloud config set project just-site-344717 +gcloud container clusters get-credentials opsconnect --zone us-central1-c --project just-site-344717 -# The Garbage-In Garbage-Out (GIGO) Problem +gcloud set project prodenv1 +gcloud container clusters get-credentials schematalabs --region us-central1 --project prodenv1 +kubectl get nodes -A data Warehouse is a centralized repository that allows you to store all your structured and unstructured data at any scale. -As the definition suggests, Data Warehouse focuses on centralized data storage to break the organization’s data silo. The -central repository removes entry barriers to integrating and analyzing various data sources in an organization. -However, as the data warehouse grows, the complexity of the data management grows. +create artifact registry +create cloud task queue +create sql postgres, create db opsbeach +create neo4j instance -1. 
The data producer generates data and sends it to the data lake. -2. The consumers down the line have no domain understanding of the producer and struggle to understand the data lake - data. -3. The consumers then connect with the data producer to understand the data. The producer side’s domain - expertise depends on human knowledge that may or may not be available. +gcloud compute firewall-rules create schematalabs-neo4j-https --allow tcp:7473,tcp:7687 --source-ranges 0.0.0.0/0 --target-tags neo4j --network schematalabs-vpc -Data Warehouse becomes a technical debt rather than a strategic advantage as it grows. +gcloud compute images list -# How do Schemata solve the Garbage-In Garbage-Out (GIGO) problem? +gcloud compute instances create schematalabs-neo4j --network-interface=network=schematalabs-vpc,subnet=schematalabs-subnet --scopes https://www.googleapis.com/auth/cloud-platform --image-project launcher-public --tags neo4j --image=neo4j-community-1-4-3-6-apoc -## Schemata enables domain-oriented data ownership -Schemata focuses on treating data as a product. The feature team that works on the product feature has the domain understanding of the data, not the data's consumer. -Schemata enables a feature team to create, attach metadata, catalog the data, and store it for easier consumption. -The data curation and the cataloging of the data at the data creation phase bring more visibility and make it easier for consumption. -The process also eliminates the human knowledge silo and truly democratizes the data. -It helps the data consumers not worry about the data discovery and focuses on producing value from the data. 
+ssh into vm, sudo vi /etc/neo4j/neo4j.conf, +dbms.default_advertised_address=35.238.222.192 -## Schemata facilitates decentralized data modeling +# Bolt connector +dbms.connector.bolt.enabled=true +#dbms.connector.bolt.tls_level=DISABLED +dbms.connector.bolt.listen_address=0.0.0.0:7687 +dbms.connector.bolt.advertised_address=35.238.222.192:7687 -Traditionally upfront data modeling came with a cost. Often a centralized data architecture/ modeling team coordinates with multiple teams to design an enterprise data modeling. -It is hard for one individual human to hold the entire company's data architecture in their head. -The data modeling tools don't reflect the current state of the data modeling. -Decentralized data modeling is the only scalable approach, and Schemata enables the bottom-up crowdsourcing data modeling approach to democratize data access in an organization. +# HTTP Connector. There can be zero or one HTTP connectors. +dbms.connector.http.enabled=true +dbms.connector.http.listen_address=0.0.0.0:7474 +dbms.connector.http.advertised_address=35.238.222.192:7474 -## Schemata brings DevOps principles to data modeling +# HTTPS Connector. There can be zero or one HTTPS connectors. +dbms.connector.https.enabled=false +#dbms.connector.https.listen_address=0.0.0.0:7473 +#dbms.connector.https.advertised_address=35.238.222.192:7473 +setup ssl policy for https -The decentralized data modeling principle brings a unique collaborative approach to managing the data asset's lifecycle. -It brings all the proven devops principles like ownership, accountability, collaboration, automation, continuous improvement, and customer-centric action to the data management. +neo4j://35.238.222.192:7687 -## Schemata ensures the connectivity & integrity of the data model -Data is inherently social in nature. The significant challenge of decentralized data management is that the lack of connectivity among the data will degrade its usability of the data. 
-Schemata is an opinionated data modeling framework that programmatically measures the connectivity of the data model and assigns a score to it. We call this Schemata Score. -Observability metrics like SLO & Apdex Score inspired the formation of Schemata Score. -A lower the Schemata Score means lesser the data connectivity of a data model. -It allows the teams collaboratively fix the data model and bring uniformity to the data. +--- -## WIP: Schemata Ruby on Rails Experience for Data Engineering +change cloudbuild, manifest files to point to correct Artifact repository,cluster(name, zone, region) -🚧 This is still under development. Watch this space for more details soon. +create bucket, compute, cluster, cloud tasks sa +create actual bukcts to store bootstrap data +update path in config files -# Design +create secrets... +kubectl -n schematalabs create secret generic cloud-task-sa-key --from-file=cloud-task-sa-key.json=/Users/sathish/workspace/opsconnect/aldefi-sa/cloud_tasks_sa_cred.json +kubectl -n schematalabs create secret generic cluster-sa-key --from-file=cluster-sa-key.json=/Users/sathish/workspace/opsconnect/aldefi-sa/cluster_sa_cred.json +kubectl -n schematalabs create secret generic compute-sa-key --from-file=compute-sa-key.json=/Users/sathish/workspace/opsconnect/aldefi-sa/compute_sa_cred.json +kubectl -n schematalabs create secret generic bucket-sa-key --from-file=bucket-sa-key.json=/Users/sathish/workspace/opsconnect/aldefi-sa/buckets_sa_cred.json -Schemata frameworks contain two parts. +kubectl apply frontendconfig, ingress -📘 **Schema metadata annotations:** -The metadata annotations enrich the context of the schema definitions. It enforces a few mandatory metadata fields such -as the owner of the schema, the domain it represents, and further classification of the Schema into Entity stream & -event stream. 
-🎼 **Schemata Score:** +docker run -p7474:7474 -p7687:7687 -v $HOME/neo4j/data:/data -v $HOME/neo4j/logs:/logs -v $HOME/neo4j/import:/var/lib/neo4j/import -v $HOME/neo4j/plugins:/plugins --env NEO4J_AUTH=neo4j/password --env NEO4J_PLUGINS='["apoc"]' --env NEO4J_apoc_export_file_enabled=true --env NEO4J_apoc_import_file_enabled=true --env NEO4J_dbms_security_procedures_unrestricted='*' us-docker.pkg.dev/prodenv1/dozer-nio4j/graphstack/dozerdb -A ranking function parses all the metadata and assigns a score for each Schema definition to define how integrated the -Schema design is and validate if all the Schema definition adheres to the Schemata metadata annotations. - -## Schema Metadata - -### Core Metadata (shared across Schema and Fields) - -```protobuf -// CoreMetadata is the set of attribute apply to both the Message & Field -message CoreMetadata { - // Mandatory Metadata: description of the entity - optional string description = 50001; - // Optional Metadata: additional comments about the entity - optional string comment = 50002; - // Optional Metadata: Any related entity that has "hierarchy" or "has a" relationships. - optional string see_also = 50003; - // Optional Metadata: Additional link reference for further reading. - // It could be a confluent page, An ADR or RFC or a Slack message link. - optional string reference = 50004; -} -``` - -### Schema Metadata - -```protobuf -extend google.protobuf.MessageOptions { - - // message.description is a Mandatory Metadata - CoreMetadata message_core = 60001; - // Mandatory Metadata: owner of the entity. Usually it is the team name. - string owner = 60002; - // Mandatory Metadata: domain = 'core' indicates the entity is common across all the domains. - // Other possible domains are `sales`, `marketing`, `product` etc - string domain = 60003; - // Mandatory Metadata: define the type of the message. - Type type = 60004; - // Status of the entity. 
You can have `testing`, `production` or `staging` depends on the lifecycle of schema definition. - string status = 60005; - // Slack or Teams channel name to communicate with the team which owns ths entity - string team_channel = 60006; - // Slack or Teams channel name to alert for any validation errors. - string alert_channel = 60007; - // Type of the event. Set if the Type = 'EVENT' - EventType event_type = 60008; -} -``` - -### Field Metadata - -```protobuf -extend google.protobuf.FieldOptions { - // message.description is a Mandatory Metadata - CoreMetadata field_core = 70001; - // Set true if the field contains classified data (Optional). - bool is_classified = 70002; - // Set the classification level if is_classified is true (This is Mandatory if is_classified set to true) - string classification_level = 7003; - // Specify the product type. product_type is an useful annotation to represent a field in a business perspective. - // (e.g) user_id can be an INT field, but in the system design it could represent External Users rather than internal users. - string product_type = 70004; - // Set true if the field is a primary key. - bool is_primary_key = 70005; -} -``` - -## Schema Classification - -Schema Classification - -At any point in time, the data producer should provide two types of data products. - -### Entity - -Entity streams represent the current state of the Entity. In the classical Data Warehouse concepts, Entities typically -represent the dimensions. 
- -**Sample Entity Definition** - -```protobuf -message User { - - option(message_core).description = "This is the description of the users table"; - option(message_core).comment = "The comment added after thought"; - option(message_core).see_also = "db.user MySQL table"; - option(owner) = "Platform"; - option(domain) = "Core"; - option(type) = ENTITY; - option(team_channel) = "#team-platform"; - option(alert_channel) = "#alerts-platform"; - - int32 id = 1 - [(field_core).description = "Unique identifier for User", (is_primary_key) = true]; - - string name = 2 - [(field_core).description = "Name of the user"] ; - - string email = 3 - [(field_core).description = "email id for the user", (product_type) = "username", (is_classified) = true, (classification_level) = "LEVEL1"] ; - - bool is_active = 4 - [(field_core).description = "define the active status of the user. `true` == active; `false` = inactive`", (field_core).comment = "should refactor to non-binary status"]; - - string timezone = 5 - [(field_core).description = "preferred time zone for the user"] ; -} -``` - -### Event - -Event streams are typically immutable. Event streams represent the state change of an Entity. In the classical data -warehouse concepts, Event streams represent the facts. Event streams will not have a primary key field. - -Events classified further into three types. - -#### Type 1: Lifecycle - -Lifecycle event captures the state changes of an Entity. (e.g) User created, User deleted et al. 
- -**Sample Lifecycle Event** - -```protobuf - -enum ActivityType { - CREATED = 0; - DELETED = 1; - UPDATED = 2; -} -message UserEvent { - option(message_core).description = "This is the description of the users table"; - option(owner) = "Platform"; - option(domain) = "Core"; - option(type) = EVENT; - option(event_type) = LIFECYCLE; - option(team_channel) = "#team-platform"; - option(alert_channel) = "#alerts-platform"; - - User previous_user_state = 1 - [(field_core).description = "Previous version of the user entity before the mutation"]; - - User current_user_state = 2 - [(field_core).description = "Current version of the user entity before the mutation"]; - - ActivityType activity_type = 3 - [(field_core).description = "Lifecycle event type for the Users table"]; - - google.protobuf.Timestamp timestamp = 4 [(field_core).description = "Timestamp of the activity"]; -} -``` - -#### Type 2: Activity - -ACTIVITY event captures the events that resulted from one Entity changing the state of another Entity. -(e.g.) User A purchases Product B. The ACTIVITY event is often the result of a business transaction. 
- -**Sample ACTIVITY Event** - -```protobuf -enum UserActivityType { - VIEW = 0; - READ_REVIEW = 1; - VIEW_DESCRIPTION = 2; -} -message UserActivityEvent { - option(message_core).description = "This is the description of the users table"; - option(owner) = "Product"; - option(domain) = "Growth"; - option(type) = EVENT; - option(event_type) = ACTIVITY; - option(team_channel) = "#team-growth"; - option(alert_channel) = "#alerts-growth"; - User user = 1 [(field_core).description = "User entity reference"]; - Product product = 2 [(field_core).description = "Product entity reference"]; - UserActivityType activity_type = 3 [(field_core).description = "Type of the user activity"]; - google.protobuf.Timestamp timestamp = 4 [(field_core).description = "Timestamp of the activity"]; -} -``` - -#### Type 3: Aggregated - -Aggregated event captures the computed metrics over a specified window of time. (e.g) Number of views by a User for a -Product. - -**Sample Aggregated Event** - -```protobuf -enum TimeUnit { - SECONDS = 0; - MINUTES = 1; - HOURS = 2; -} -message UserActivityAggregate { - - option(message_core).description = "This is the aggregated user activity view count. 
The event aggregated by user & product"; - option(owner) = "Product"; - option(domain) = "Growth"; - option(type) = EVENT; - option(event_type) = AGGREGATED; - option(team_channel) = "#team-growth"; - option(alert_channel) = "#alerts-growth"; - - User user = 1[(field_core).description = "User entity reference"]; - Product product = 2 [(field_core).description = "Product entity reference"]; - int64 count = 3 [(field_core).description = "Aggregated count of the user activity per product", (product_type) = "activity_count"]; - int32 windowTime = 4 [(field_core).description = "Max window time for the aggregation"]; - TimeUnit window_time_unit = 5 [(field_core).description = "TimeUnit of window for the aggregation"]; - google.protobuf.Timestamp timestamp = 6 [(field_core).description = "Timestamp of the activity"]; - -} -``` - -# Schema Score: - -## The goal of Schemata Score: - -Schemata Score is the core part of establishing a feedback loop to maintain the integrity of the decentralized domain -ownership to build data products. In a decentralized data management world, The feature teams (domain owners) define the -Schema to track the Events & Entities. Often it goes to a central schema group to validate the Schema since the feature -team visibility is limited to its domain. It brings the human into the loop and kills the purpose of distributed data -ownership. The workflow is also harder for a centralized team since it is hard for one human to keep the entire -organization's Schema in their head. - -The intuition behind the Score is to see if we can programmatically find out which event or entity is less connected in -the Schema to improve the connectivity of Schema. Schemata Score provides schema abstraction and the feedback loop to -help model the Schema with less communication overhead. - -Schemata construct a Directed Weighted MultiGraph to represent the Schema definitions (Entity & Events). 
The Graph walk -algorithm derives the Schemata Score indicating how connected each entity is. - -## How it works? - -Let's take a sample schema and walk through how Schemata score is computed. - -Schema Modeling - -## How the Schemata score computed? - -Every type of schema have its own unique properties. So we can't apply the same scoring technique to each type if -schema. - -### Entity Score: - -```math -Score = 1 - ((Total Incoming Edges + Total Outgoing Edges)) / Total Edges in the Graph -``` - -If you run the Schemata Score for User you'll get 0.222 - -```shell -./score.sh org.schemata.schema.User -Schemata score for org.schemata.schema.User : 0.222 -``` - -If you run the Schemata Score for Product you'll get 0.389 - -```shell -./score.sh org.schemata.schema.Product -Schemata score for org.schemata.schema.Product : 0.389 -``` - -The Schemata score indicates that Product Entity much more connected than User Entity - -#### Lifecycle Events Score - -```math -Score = Total Outgoing Edges > 1 ? 1 : 0 -``` - -User event captures the lifecycle of User Entity. 
So if you run Schemata Score for UserEvent, it will give you 1.0 - -```shell -./score.sh org.schemata.schema.UserEvent -Schemata score for org.schemata.schema.UserEvent : 1.0 -``` - -### Activity & Aggregated Events Score - -```math -Score = 1 - ((Total Outgoing Entity Vertex + Total Outgoing Vertex of all Entity Vertex Connected by ACTIVITY or Aggregated Event) / Total Entity Vertex in the Graph) -``` - -If you run schemata score for CampaignCategoryTrackerEvent you will get **0.4** - -```shell -./score.sh org.schemata.schema.CampaignCategoryTrackerEvent -Schemata score for org.schemata.schema.CampaignCategoryTrackerEvent : 0.4 -``` - -if you run schema score for CampaignProductTrackerEvent you'll get **0.8** - -```shell -./score.sh org.schemata.schema.CampaignProductTrackerEvent -Schemata score for org.schemata.schema.CampaignProductTrackerEvent : 0.8 -``` - -The CampaignProductTrackerEvent connect to Product, which has high connectivity with other dimensions such as Brand & -Category where CampaignCategoryTrackerEvent is the leaf dimension. The score indicates a clear schema modeling issue. - -## Schema Score Classification - -***Excellent:*** 0.75 to 1.00 is excellent. - -***Good:*** 0.50 to 0.75 is good. - -***Requires Attention:*** 0.25 to 0.50 require attention - -***Blocker:*** less than 0.25 is a code blocker - -# Curious to Try? - -## Download and install Protobuf Open Contract definitions - -Go to the home page of your project and run the following command - -```shell -/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/ananthdurai/schemata/main/install.sh v1)" -``` -It will add the opencontract/v1/org/schemata/protobuf/schemata.proto file in your project home. -Make sure you include the schemata.proto path while compiling the proto files. -(e.g) -```shell -protoc --proto_path=opencontract/v1/org --proto_path= --include_imports -``` - -The code ships with an example ProtoBuf schema definition for easier understanding. 
- -## Prerequisites - -The project requires the following dependencies - -1. JDK 17 -2. ProtoBuf -3. Makefile -4. Maven - -## How to execute - -🏃 compile the project - -```shell -make build-all -``` - -### Directly via the packaged jar - -``` -alias schemata="java -jar target/schemata-1.0.jar" -schemata --help -schemata score --source=src/test/resources/descriptors/entities.desc --provider=PROTOBUF org.schemata.schema.CampaignCategoryTrackerEvent -schemata validate --source=src/test/resources/descriptors/entities.desc --provider=PROTOBUF # this has some validation errors you can inspect -schemata document --source=src/test/resources/descriptors/entities.desc --provider=PROTOBUF # see JSON representation of schema -``` - -### Via convenience scripts - -🏃 To validate the schema definition - -```shell -./validate.sh -``` - -🏃 To see of Schemata Score - -```shell -./score.sh org.schemata.schema.CampaignProductTrackerEvent -``` - -🏃 To see the JSON documentation - -```shell -./document.sh -``` - -## Using protobuf descriptors for your own data model - -Compile the protobuf descriptors using `protoc` to output -binary [google.protobuf.FileDescriptorSet](https://github.com/protocolbuffers/protobuf/blob/b48ba578dd01adfebeb4fac0887db1eeb163e00f/src/google/protobuf/descriptor.proto#L57-L59) -files. - -```shell -protoc --include-imports --descriptor_set_out=mymodel.desc -I path/to/schema -I path/to/protocol/schemas path/to/schema/**/*.proto - -./score.sh validate -s=mymodel.desc -p=PROTOBUF -``` - -## Supported Providers: - -✅ Avro. - -✅ ProtoBuf. - -✅ dbt - -## InProgress: - -🚧 Support for JSON schema. - -## TODO: - -🚧 Support for Thrift. - -🚧 Add visualization layer to show the graph representation of the Schema. 
(Looking for contributors) - -# Contributing - -See [CONTRIBUTING.md](CONTRIBUTING.md) +gcloud beta container --project "prodenv1" clusters create-auto "autopilot-cluster-1" --region "us-central1" --release-channel "regular" --network "projects/prodenv1/global/networks/schematalabs-vpc" --subnetwork "projects/prodenv1/regions/us-central1/subnetworks/schematalabs-subnet" --cluster-ipv4-cidr "/17" --binauthz-evaluation-mode=DISABLED diff --git a/SECURITY.md b/SECURITY.md deleted file mode 100644 index 034e848..0000000 --- a/SECURITY.md +++ /dev/null @@ -1,21 +0,0 @@ -# Security Policy - -## Supported Versions - -Use this section to tell people about which versions of your project are -currently being supported with security updates. - -| Version | Supported | -| ------- | ------------------ | -| 5.1.x | :white_check_mark: | -| 5.0.x | :x: | -| 4.0.x | :white_check_mark: | -| < 4.0 | :x: | - -## Reporting a Vulnerability - -Use this section to tell people how to report a vulnerability. - -Tell them where to go, how often they can expect to get an update on a -reported vulnerability, what to expect if the vulnerability is accepted or -declined, etc. 
diff --git a/_config.yml b/_config.yml deleted file mode 100644 index c419263..0000000 --- a/_config.yml +++ /dev/null @@ -1 +0,0 @@ -theme: jekyll-theme-cayman \ No newline at end of file diff --git a/asset/ds_classification.png b/asset/ds_classification.png deleted file mode 100644 index c085004fdc5bac9a16def8d3d3c4871d661a7b0e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 19516 zcmeIac{r5s`!{}%C8S6y%GOE?Cdrbu4M})MDC?V&bsEY#22+Vj@-9X8Ld00gzRZj% zNfJYhiD70WhGA@Dm@zZvck6R}zrW}C%7kUy3Xr$ zooA2!va{N{dCz750Jd6RIPU-e5_kX*bCwhZYwlQI<$(_|zcaRH0N_KaG~ZhsEbqO3 z!NC>)5b6McN&tX0unIK=0O7|0fbll~7`*}jg^--)%O(H-$^Pro6$_zI$YFDs6U^T~ zf3p|alwN9gLr=}KI)OldFZwh^o#w6anzLK_@dI;na|i_D-Me>fZEXz=4P-KTVq!v9 zS9fJ)B`Yh-($bR0<0U622LuEV2m}`wm&KXIj>@kd9v&PH=hv@aU%q^K`0yd0&xgTa z>FMbV2BW#TxvHvaadB~IXsEETu(!8Y$ma@InZHK*=T~?oWfj6TmTtusVPA z3fB2^96Du@&^B3&9=TsQ6#0e}A!IKLDZ|1Jys-R@Fcrml61T`*B`_u`M*0)KN2(VHj%-YsMViF>{O;2`f?K^z{uzqBH{>+uAVdm(gc-C;Nn*Sk6kX6xP zhl3X1u;iH47$zcEBVzU-TeZX5&*(a7d@=n?sQT0E)S>`PZNRW0Io|`a_cSUA;q};v z21%fEk=t7fPD`#PO-L&%E9c}wz?c3v_}6RSx-v7rr`AhYHxL8>bJL^q(eu10Y7CV% zHxMKO$gHfd7TNX8MG<(8Ra*hW?=9y480oeA7S;X)nG(;S8dK5`DFD7kkUbDRWGYl= z8$nQhTw2n%pKEw{(VRryApA0K}oHdNIz(Sn}Zu`pArAPVFaOx3X~ z2a@{^%^zJ`e(Sjfh1dHU5SI6TbEy=NtMTuq|3BI|@`-^wxhKU@m}LF{ka*47pGf4kAG{O2UM^Kq zGZg${&76Ny4}yyLycvz=mRBo!nTESUWB}P68s+JAoSu|?{W?GbCGl9^nbiJr)!(d1 zb*EGu784Wrr_-Tw^=mh(2HR!3*tRR<{JBnXL%`&(7w^BZVq!Y6-=SaUakAQY2<@&< z0x|j@(vTkKchB^nK2P4e`YF&-?A6gTof9h^)4vfX^Dm**k^!$utHL3FWM89O94_z4 zE_5FfD+oK0d82~mqChI_^j%yeMupwsX)vhnmGq%mOR{-dtvb+Krsy3Ws`S+$Qiwp* zOlRSSZy@Ao^yH;+W_wJ90j~g&gL&1Q)7j3{82>w~X0c#xa_Ws7DB2VTBz)OEx%Sjc z+p4al>+z!{z?`Z+S8!>8Mbv!ND7i3EpucFr>7KkEOi;d9EXV>*qYDz8BEpe(tV-%M zT@^?QGc&Nmeh^%9rq-lFe{yt12jbZeQd;K?_xM*P+1#lJ27BVhv(<=JLF?iK!acia zYLhuG$Mb2V(ZC_J6i|EQ;?;@LrpCq@7Ym!!+wPvq_I`CQu;0}Wub&!9jE>X7aT^C+ zaZSO{;!f?)3hiD^LlMbc8bd9IODg_Ft(9CnpUs_^dpqyMv+CnmVgLRf_7Qe zjQI^Lkp<)rn0LoFzO&Eni0;#^G}HXZ{>rI 
z`=&J5VCic@qU(&po>z$=Phk~PPv}Bq@Z$>Re8hP9*R-geUx0wkj(H+w1jGK_PU~rfVblKSG12aapQ+NKZ{funZXkRe(E;w)#xlfGSn9X0bJ%aNf=sfW*+hof* zV-bF_YMWjQwzp$KcH%oJrQ?S>IQkCg*{i+lk0BgpTx_lF5U&g&Mf6`5R z7mImyiv8$5&^n@$2eYpcJOZ~4=3_bMX!q3mDtRpm>_m?+cWVePdj2YVc)?i2q*cu8 zWz97wc~f0HAxN|+6<))CTFgls6a!p$9w@JL&X((~F&?33w7)u<9p`5uyT~S1NA2ST z3QZUG<5>w_cGB_EaCPzc$A0E| zx#p7n{N{z*Ch6^y$Fh?d-~{;U2yFY(y-pSoo~DLCWKPp95V=!Dr*;e%3?36;5X7n3 zn>_t$EX32ZOM!J*o-+jT+#=8?KM~fS%H0M|nor-SKvQU2qz;^N*VC?&wMFeEAuTu6 zvM`iAGh4!J3|1id#vqBZS~2H3hj3Mu@c5sgC&%{TX{3)6XK#k(;XtjXzw_3^)|ne* z^v5hqo3|1^zeGMF}NNUIZPC_N~-atOombSVs({#0>yOUEJZg`O(sh!ebPVfpO8&4A6rK zl)pO1-9=%WX`BR>KNWX!VudSh_u+c$v?L*uH{t-iR@2X|U-HYR8vCo2ZGPjZ28^!G zt$sKsY6O@rjLDjsEUBi+Mkj1Rc}5B0MLplOxm%fZVEIAv!t*XTu=ZP^7DtMaQ2V8Z zm~SzgT~b0(xU?2W#GLaFaYg?1Fd!sB6YmHW1w!malmWyAl;~+$aKW0|2kyY82l0^C zNn(b;@;~4%t-75C^q%`K(J4NUAj0Pk*g$}hF6nEi)_)L308~u0j`V_pBUBs+fkJ&D z>kf>!pc{Jteuk7N17MeaPwdBg#XUz@1E_PR(GyK6E(gJZeMT2*ws^&8@q~pgj?k^Z zLa@DOtZD0ts%i9dI_FMRE90m#O%5^MP4XODRD=!pc*TuJjNY0{FgLh}y>fqHFxl=U zDlPg=v&*;fC1=$`_7u=(J*SKCNEFqeEv#R~x|^Dbb$CA&HEVXxusSEetNa~=tE7lcIN{|y?L{T$32|3d915u zHh}tA@W*DU+@M^*?pToYwfDn4N9;pc9Sb@We~f(=e|_0nCrqB*{xF|va5sZ8Og<-nzT?wrtzmI4$z$@o8WU>7YrPsyA}*F z_4AVBqeB6thdjfm zs(O3cV=pZjnzU-*Qy$5juP)i@A#LHPNp zu}QN9Hs#uTH|n0BlWFnh53@40e?Jl5Tade`x^-EyZ&t=R_=V4Z6cJ4G4XV08^- zl|=^!0C^Pzl;U~;1#3`EGuviqh6sf*7*s?4Mi|#tyemcf- z6CxoU6Y#?4wQ1GvDLL1r&l|Or-VK&E8`3@773SZW4T`*iDHd08T{{0qWb9F?;ibo` zdM@(U-x88 z%|4;>MhA|Gs%EDOc>|?GhlCUhL!DUd@W6;~bcZULD0j1GMkiV(@cz*XxnItzdCS`?b92d&-!`ib<}YRs%+ccjRS5d((bDG zfLMVX=sxU|*>D>kT7nKmg5icEv3Ji##jkB(+g<;SiihVmoQ;2X{%@;|;ui04z06C4 z*6X(;gU+@}f`G~ClcQVF)4}>mhqzs%6;guz8-Ph)?H=bEtPH;zD48GLg6^Z_24$_8 zp9--3`|svmw65vpGLDNDzN)x?;(Do+=np!OZ%2r6kym_GDb9ff+uxJzD8W{5qkZn9 z18}s!26gUag#-s#JyYtHIV-*J3@Aw*mRY#`{*Fg`mf#BN9KBAJwt9&&6AOv^6%!+p zV+gi(w)x%3kcP&r?6shi z4Xs~SJ8O{h7}GRd^yJp6N}|{%bthEn5+!)NdcM0c3wOK>@y0$#CG3{$(VFDpOOs6Z z1j&$J7vyMN?Mx^`2)cQeon0gC;$;HOZ{yGN_Rvh#6H?V{&5I 
zUV0&!ENm$2K4{E1KT~(7AnW*E>#SZnQ7Tls1yc9*dbGoQWgIr(+c!}VyWkh+Gyq9G zHoIvJNhw5RtWGCky=JerYnOrTR5sB1&5iX~eR-n%+0P8Et?d2yM+dA?Ug21RarUT`_??HGDJPMx#SSdH=;d zw24J}fsw1p0elHNIeGSB?UaYD;y))LW|jGqc^NI=i@wd*tUW!MNi0|kB>RKY#&eZj zAifK5=O2|_sJ7x(8{GnF(Or+eH%C`hY3O;M24_7xHy8cWt7oR9X}nvtuD=21Id>r% z9GO~G*@hXd<{}RhyDxiRsmsccpIoia-$_%btMZs%u4h_-FHe(w>&g#wBJ1ALZ*eYM zENdLZ^{ndzAkE6M2E`4-yuJZcGdX9EtLDe>H9_q5m3(wtdZ7*Y=I!E~_Ig;^aI*Hd z&y>UF`6tb&X0MKbB0K26fx}Z%WwtK3cGQL2c>EMI{7@8dmH@3Xa3K=c9b}$eIXWKq zqw%Gg9If}j5#tXk^0a=&y|HE_rp^mw?6CE1Gq$Dn@app!<4Tga;j+u{v!4uem0dKV zDb{yf9T6Y+301Utp)8*6TbJ=9Z#&INO&=nI{m}xn{v0uBeO0%TC=anyE)_AExt-VW z#wE9VwZ73W-+mA88IWW)?SaaOB{`?q)d|4w@cAt42Fq4ytzg(@F3TNtE*_j{-|-Sr zQRJQA<}5vG(2s1;<~l|A)jv(O8%F!+kt4T%B*+Mexpu)4rV=Rsw@<`vd8_8cnNc>Af@&y%M@rjqBxqSjMIMNEwKM{j6Nk^HpeXrG^H<2$0mp!Met z>$l13g;_FXml+bG)2+ zLyjW+fSUQ6;EDvA75Csj0>)E-;1zTt%dUhC5K{HirfZccO^Rs+FAHsW|1z%2jdxyF z{8nfWn8R=%3>_ptD&{JEO!gYxNgIk{B$HKi)|?uFB!4@zU2E!P^O6hv#>KYrSIjwvk&QlN{yT(C1>IBVOn@n@^L(? zr^d!m&bGAu8ot#8#Cn6 zdfpF&aguN7rfq@;SP|udX`idP^Tz&{f@~)ZQtV*&-$}&|w7)kp=i^qKl&Fn;$HXYHzd^MyZN!L;{PEti@l-$%?TZ`-MZe|(AJByYW24bpDVptBkq==$lC zSZOpGvkJ>rpb5}YDu@1T7R$%VzZ72Xy5D$X%}Ku5=3m@guEO$SVFs-+&i_Nz1Oykz z)KXUxlu3>LCv8uOLYgx?p8o$5|J$4Sx4ZT~Ij^`^Nkl8qE6zY3gmx-`4z*yl&ZHcd zgU~UW=!&+*r6Y7`Ivj^QXD?e2wOJx!4tjy`%`&xP>HQiK#CBc~=z83%jCNE9O+MN+ zc0cHQKYG5+M%6jr7P}+2;NUh}&hRVcKHb1W2lwj(1u-wvy&vJ{ZA9c*eNu_v8F zA(d``+zta|>r&}JSKsdZwa2$0+wA+2nGI%Hh*QnK3R~aGDt*|2{b|O_{L_J3ZWg?a z;9_u>%_~(XQi_YebGA@CSJAB1dcpi6c5m*2RW=_y4?aUyUnr-p{jd@qXBQN$B&i90 z6f5Rm!X7q}X>~g|=-0YbN43Qs?i9?CaMh)WuBQ*iH#SXvU#pH?k{4a+q?T`9Z2y?w zwXKtG=EnUFkH6Iz$I~UBRcse8;4s z-`ft%U*!YwZh#F*fIOP%m3yCrxYzUczih}OrARTxuh9+xMpHAzCJpUP@+ zzAD6Z_;TQ}(~&sZQZ~nIrOBLw9T>_SnHPMdZ@DW|!R1&PtS+nF66Kp0c}fDbxc`DE)PF@}?Egramg6a#IKr2ReE^EbO>Kaz zt2si&KorkDugk<5s|es@F_{t^@VgTQ8k01jbjW(O4G2*_Ysfo>d5Jg!pzdQ=+^6dX zl0^XJ$&`#dGGaPqAXyYJ=~sf|R^0o8;NL<}c}=FTT0gyhQ7X7SxK+@d(FML_mw>fK zw&U;3RLH;n&NF&_c14(PD*SfN@AGTytF^tG+6EfszP-lqSD2eZc&!aWAu~hGNH_74 z!sZM3iPh6s{Pqwj;hC 
zy<5`#{5O+->*PP|cGxR917x25Jvzk>dv&=xImV!f?g?V| zA8Db8SgOn_g7NlOQ64qsXRI09r_X?AaIiz(zf+nQ(Wn4o%q8p}c`3BF1JV{k%yQ1S zR8B8WMVoyh)Q6X4=5-1C*D`^Utuy!aj2mThWMhK^R9<7{OI};$(Ml-NQV@#%mFh#@c3Kmn{KQ0+lDUq}JY|On4vNrh* zOql+kb$4!RO^Mc$6X&WQ6=Ei?Y_mIT=r?%J>@)~LYiAo?3^Ao$**08zr>9&z_F)9M z*F!(_g+|MDV}!EYa4l=LSni}mY4iwtO518!h1`dV>n)};LuQpDO`E%(MSO`k`C6@Z z=8&;+_ju(;=qcwKEvx&gwKFhd<&^QtU13iLgS+}(OE3`>8LRO|w0LeI*3~=*^BfUC z>qFOpVycxPHc;)SE^3bW*V}vRUBS}w$KgLCfH1CaR=grI>>MNclht~^PDK{~7hrOl z8RBDeZ^bFaGtqWWr9oZH_V%yoVj5TU5WCOZY0UFqJ@{Aw9ivl!(4VU0+FJ@D@i9w# ziPyFAZlW9&1J2NGnR(|gn+^U7*kt&CRQs9*qRy8wOtAFEp!Ri5z&X;r9#PXOaTU91 zU%&4d*kPa`%39her+233RW#-A=JSiis(K-3p@!GhTSHE~(w1F0pSs8JZART|R{gH9 zf+!oX^o9hMFdHSu4N7U(-cvCk51kEUzGS(2R~U$^+wG_d`gT_y(Zy=TjD0zD8T0x` zk~eqF;Q5-p@CcJCL@?GO+YI{G;^l>cfr)q%+{>TBj1@wOw)wdw<~(vWhAJGdU1!DH zVF0R-64}aP4g~FgYtH?czrH}&&J#L9@v_2YiTLLTi{;UL;S@BEpK)$=yp_|JU*ajZ zr7hASDbf!q1W|B{6Xx6i#_a5q{PlzEZ&Njq>qo;QBEEQG(-6K3oLF9!W5>-5KKnHE zldy|g^t4-sn=O2f2wx_}3&;Z>hW5qtyd0}!RE;yJw)ZiTkz8z>1t-)!ZazfL4vl+e zbi9SGR7&cc7HEe59nnK@sCpi^7R|uV=74e>GPy}zw|MCWj^H&+X@8tfqw}&_@co%2 zbERrm4V+q$=aJauLRg4^LqF#{^Le#!;MfDJ*^KPoH8^!Q|N0d#VNX1H$9P7m!o=tG zm}nty-o%pK-AW$#1z(S0SWM>&FH?l;F{zhhXXXv+IAq~7#O0{kZ=06q|3K<<$Cdix z>zg<${4108aqr{=7qT}ktHs0}Wzo$CAKJCV>3qdElP41<6-qP4i`Glk)D`Nf`3aa?KJFSglko8zA$ zPDK^rbbIaGBFSM4rGdg5<^usiR(+-ONuV1Kk1OvV1i^P|DtJ~%9f%wSsCxM4JGOo) z%_wku@mta2ekkMZ)Q&@MET~G9iOHHBDch+wSVfIx+l=hH1((m}^JBQ_UEQbq4!^NC ztKh6J+~Szo^l8yRM}9mZj+O|g*^fFkwyHIaDRHDDclz7+T|dTl2-L{5aXN&K*wC>T-3>V0pNZ&%Vq>j#18V+UfYU-=HN z$@+U!MEJYY8;-?hA>Q0c8%|8GSI-$WDDN?@*=>rSijYP!;uC-p3)!t`0U54^FR94{ z<{Np5#MR9-mV@7Sr37Uac=yu>yKDp)ks5k zd&$u}Zdbi`rE9E!*R!Xtmh)85VunZRb6?I==7F8=H;u)*}r5`lbKb5gOM204T8%i{fW3 z+et&r0(I~uBACKCIp;2u7JBZ?7kv;7zhDfB1Ht^_XurbXc*W86TB$`e`vw@~)`|41 z!ZyDGV^tQT*-XnRuU$1=wi*rco?cf{EA^^>Gki>3zwerOvYl8Isk)6eG4Jp*NZfjztl5!Wu<|8x$^r7^WE$oGxwhW- z2Dy_R7}2uUp!+lUVHca3qfp2-)t83ECB!5+4L*S7!L3_Hf8KIg4aD^ft~AFfzw&ga zx>r^8JboD$UeC3vf7+1UOGL~=X#8J@I@TzXv3Qtzluo|ibr4RZu8#_I-95j%3?;dU 
z8G$SMJmsz=Btk$mifI)0n2# z8&+K6(A*a1_A_xUHN?oTlTx1^1^qCV-Len>Go`*z1Fx9NKKN8-kl~&%3GOw*9(kwu zpvafh-86-i-N8NcOJ*-z>XaM@76GZWGBav40LEQCWwr}|N&cXEw z+d3(&DnDbTuDM7BIvI?WWH{10az}yI>xmg`rZpVT#TlATGuZl z#D`O?p2+va8_hWF)uP~e&tQ7`M0DKr3+8C@O!9PlV9PnZn%@y-Y2Zal)$zk;d(C?yX(xXJ?_Mv>e+)~tq}O3ZP|-!41RT%?W9lSOK1uA5Z6LS zgr<*Y`8H>|YnLc+6OUJ!c~sm?<|2$#Tr?D60|_*)7WHO+HArgy4kc=Ee**3f@bi1bZe)X@=Y&-@Su5yTPW+_X1QT-1TzBNj zwubmbzwF)Gtd!gP@Ye5`cijVXe?B%XZJfh>T{qP>!! z(>r*4o>TgzhBvsJkr|(35ujb&g?Q>^xVu<2t@Yn$@6^q6BHQ39yN*r#IvaDx(txU} z32o@7qxl8=$(z~aO`LZTDKEPJ{!qRiG4wNyFxIOLqSulVNlq(!&&YiurG%fL=KZW{ zaQVayq$r>2Q7~2`SY~%mJ{3JpInmFVt988>&CJez@blI%dv_l3W+O%W`{YqiRN6Nw zQB1wUt`8o{*K1T}n^&;Ge(3mgwa+Y5m>!w(xm;$6DC1&&kKVy3j5#B{;P+0PgNac7 z4lSwobMzG`9lJ>mI+D5`s4_a6Yr7sAy`_Us~J!o_#!=@v`LV-s3OM0QR z+;ghzZuKr&w|}?GrMMAgV(M$Z{$$sA+T~dNv7=N48mR`+(cZNb)4#^G(AC^eN~noR zAa+OW)DF!4*c>+#bd7Bn6uhZjC6n)!hCt+zQg?S3epwlJVvnM`hW~D@+j_kvz#NuH zEV&MD3nkJElb)~Uj}W^Fj5f6EvB2nbH<^{*+2IIWzqFtrx?AuMogmK9ofYHMH++0& zattlbDay=Iw=HyGu?gHi`Zq-hrkXlgos|2G zux#m>H92$SmUp-`(stfH*c7-MMu;GlJ`^JQ*~R?v$-wM%FU9UTB zz+SY$Z+rh2r-8XsH@;_uL_}RBf{83a-V}Fg25*%a0N%LeBI!U#9oK_3J~7)f?zD}T zHo2*J#JElOU)l(6YuydgdTi<&k0RyCVk;Y(664E8#wsIkf*$ZH9-ylBtcXX&X@s_T zI#0mM(CiL7HP7?qhy6a60uD=FX`U>AO z!z;tVN&5E#S^f#EJW>2?dA;=cwY#57&t4pFRE>%!qi###+rvpiF@lwjdmrCSB~ zl0CHKq4hq;J#Kb!h8_~F!>t=L6;bZRyi{$DmsNRfk5U_Z|Gt?bPqUCnG@iRVAC}=S z7F9=>KMBkDQiHXf;&y(-m>GSffI~f(`*Z4_{!JWQ5pH~ZHW+!i_BC8p%WZ!24Z}FP zXf=KFf?rf1YTisDj`TPn_|!FCXE+C)JcNU==MYbV{hlXx)4PFH#cRnjo6#o5&=Ni3 zU)#D1JNprDl5D3)NxxyBb?7{`Sdhf_XIPTK)HQV|UggYRyEC@mu?AX_QB6ibJ8 zhcAsI{5@euhFmN2M9|J$j{_fI^RKTQ_#m~AW^}lm(Kf!T@*6Q;hYUt!M}((Ssq!>> z3hbQ`aFS`bE%5s&Mtm1JC&*&Y0f^w=A^C00tk6oi;@gRim&18@6NT z2nou`y)>0EDJ(V(>M&s%K7-GNj17+8CA8B>$k1ugS{yTTJy44^za zH&8KZv@X~t1*_RLCF@(C05upCs}Fb6zE>Y{o{FohtiQe5no=Io2gTb@|MPxN_owvf z@ZBY7W)`|NMKj)dC|WRX3W@80{B1&xO&eM+k2`=x%tS6aGWQZ+joqMb!KOU<>?0Nh z%c*c1uQ3+ED3*<{ErFrprd3Q0F05#VP2?KFmC5=EyJUR^Ewy`A#}_A-ZsTHgr&lCB 
zFq;s~CdRFew|b^N>rLl!AfC#PG}@&vE)|YMwfGDg_vgLL{<5YSFS}e81rNp{4AA^Q zRyBt|eAqnEi*8@Y#K{dr?OObHw=Uob18`M#r07j7wfCsY_$=S!>tLyS{iG z%NlO5nnF)jD^m+?QRZzBi09Lm4D;%K<)uK3F|1g>v-|cn{sFZ|zB?CnX%7fdpWGGWaPN32T8wja`MBF3HhCrUs8e-ZhCg; z6um)BVJ0TJrQ*qBr=Nz@EW1j$c~AYT>=OyuN?vBXB_O1aqtu@~+j6Z3yeFGA$q&1% zd?&p@Y3SXQjj<|Ge?*|0^VFbXru{_1yav|AmgaxtQ=o7~)r=FaN2YFDf1>fZhoE`^ z)i9oEzv7{ly3UYuj3`h~wfX%%i>Y?z@4={#$f*Dc)a4q8@cs36?nkwHFZp*fx36~R z6OD+^wMj)gH*)B#9x6ExN%8Z}L}k=Sg#G1lUa3mQ*7o(AHt%p!FRWrF#CCWi@$Pnl zL=v(6Mwp}CV*=XPf2PylDHf?i?oSci%1E1WdO0-H?ps=NlzC6;OiS)Nsgcrb@0TZAja}1rZA3XJkBxcP6h5-$FZo7CgSMlo^P%M$_vX;qdI8V=fH`1_8u8}uymn0Jy5}Jte?7Oi z3}jov#Gcc;YaFIqUTt7r!kZZ_w@%)GeV%SRQeIgtFvnc+oD*{Pb}jiX#a2ko>NdVN z!qWmbV()H?ZA4zUp8P+sX29p_M4PO{cC2^#0ln#B{Wb=2-Z^9`@@#mSda84_H<7f} z8r5I0t1Zxo?Dt}=)iA)exwQNJ9_Hxg?YOdHQN8F|FxU`x>jF4=Y(qRjBQ3%cdJumg zkk=Tab8WQ}WRfq6srbW06-=?JV8J1(m&qNY^}}J{33yGJ27_(T)~V36D57do3m3qM zvgnuwZ4f&-2mUns!+E1s8xa(V7mGSS6dL)1QeZC@l~k)jiyiq#%utH~N99W|Ttz6; zNFWvXsQAP4t>gmx#)(p^c_NcAQF$u&yGzXw1A%4cNASAJV>%JFSz4B?0Pf4^!J>NI zk{(h~wu4wy>|1)ETRRbP!gZu_b}?eqP9r*_9nEDzoePtFxi7PaW^YE?vwGV_B>%G(1S2J zVcKI&p`3u4d*x=ZC{s?hsuc%aXe!i=fn1cMxeP%A?NOtiw+Y%y@ZiYCrNWy1_zd(- zAL)fx&&8rB_o@YpCyWES)<5Ks4DJzc8h56)%rNQ66CjY5a}YvV{G;lFtN5y*MNB8l zmzz2ibz3KQpzsu#=L|wErnygR5?W7nSw(k65R#|E_6Vy!J#9$VBA{C3_m1$)ag|IP4ds9GbtnJEE8925zM&Ka}de17D0~spCh>T zR@KCR?-EQ$r0#)5HvKx^6vfwsTSL_|r#|np$+8PJNGT=+x8IyGG*5Pe$T8DXcDF;$!&8?^$&+--*ZD7Og=fo^hGdkKg6OM3=TryQ_UdMa?YTv`Xf-(*<0dVc-hYUBMlFS_vz^#fmW)sYZ{To?Xn^moBuSw zT*P7GFXd(O3mafefK^{IQPn=SuTj z=pPk)4-9fWNHZcBSZSgIwsP2c*F?&*8Ds4lu_(17Fi*7W&kUD-IqUTjHifPpLwY60 z?55Ru*q}Nxj^d$R#e}Xh?49Ko<8u)(>4jStchWp4f6N#R@ysr+L^B*VD57QEQ4L3j zx-NZ7ikT$t9Ka?1n7sp`NOXGM{4w@No+}W~O_95Q+h>9uwXUHuI>?M8qsAHvwAg<% z@y3`x7BL%MK(bI;vt>*AQCgAI#&N?x;`)zIi>o~l#ZveZJ) zg;leKxj$>pA?bzSqPiKTzm{J+r6Kk#tGroJR%GNt@UypI%b`4I&@r?4U1Lb&k4~^p zlcA&w!J}!~_?DJG(rV&aWaqAIS5_2DtB{^N6s~9a1I#2f54sPePEB`=2Ir=d*v{RP zUHj-Y*`aYe;=YAO?h?j^X_0>ST~wfrj{Z@j0*za!RS+~HY%5EL({w{dLT|wzg#gRktrL*jb 
zrCFpYRrUV7(cENJZtf-ZK4>1ea&Poh4c;*4T!iNGaBgwv`gLaI>d-`i*_cm%A@eC< zPR{jG1I5%(la3|FcwggRZMoJI{PayYHoB0d(lOfE!$(|BvzqyR{mts%=eE;o4|}5& zu9xU*zpe_5nYL|$Qg+gUAgb126lNj@6M&p3h)^}4AR|bj2Cc;Xc;o%vsK|&7x_4lp zq#9GNQvH<+e0j84F|`^yH;Le#zILZlbJ?-C(<@qwFIk=8 zafT`|rIVzT(n0bm&5of_!ZY<{ZGYYJwdRdk1OqUe1K$P>k2POSa}GaI-lJxgoHTXA zwz)FR`(@NQ#tqvj%}ol<5{mA&fBb~Kpet}T`QJ7F*2e!$mhkxfTEs@GEnM~_Ubqj8 z!u)0~xxhR2iQ%$wWg2$~A3#_ILpCk{^!a^=>&a+0d;_2KX4im8P)E$Wf|k0$A5iMI zHU|9;W4{z`XvdYnVf5B^E^?d5K-ouOCW<$j6RZtzfdKi8GZj z&ftMTTgo1bY}+#q&<<+>jSzM{=y3CU!H}#2)QZZVf`SjgljGLS4gdg zkLh5HH-kxWvjNrdoR)pOqCgYlA-Zdc$91ph=o{(j8tLdAJAT~g`0d+ zJUGm}v1-=4(okhZDO6-4WGE;oR2gY;RVXMJCnzWo86rIJt4am;w)MCM#!R zd|i|7@6R3UFLDinfzf(7XbUA4C37vJh=&ExOW#ym#!Nv0iUGJsgo1^Jhk^m_prJs( zFRZ`!(7-+2zn=$NLc#tX0|g}n{0opi8}!c;7^iHQf9^qKPcL=S=Td_L043ng2i)44y1W9r+uGPU^SKLB{vN>x+&?{Lr3C*T;$kgG zsjZ+47PEIU1#_{mv9M7JA%nqS0jD=+e5&G-f2RX~2~t|PxH#~!vbwpsvAA)t*gKiC zvh(utva-Ekeer@B7{To9VdwJ7o!QQr>dzwotRrsfY~p0;;9_ZS2Yy=j)oXiK7ePwO zr;Yyg^9N2BOS6CPWas=BEdW8*r#GzZENra**ECai%m2@`r#FA5{l@Fhb^=d}@rl{n zIyjj+I|E<|u?zg(;6I-I8@xZ`RZX4kZCszQ(6F;~5qj}=%6~on?^#MtmZkt%o~He4 zm;d?fzsDe1Pb$96f41^X^iJPk|FFU6rg`?J(4>+=PIe~>PJ7Y1ql*l2-bP5=_U$TFxFtNTC z*$3yoAM-+e4lga8_u+hTPr+%g@kpS0YbIl%x~P7k`s;)Dp|b|7*M?RG5A**0a_3|1};8hj$$s1L{97 zL_%mfCki>X5EKv?>OU^!^=O3u0tEto1Do9&$n*`&@}HnUdXZcvFix_XtR`MLaFU*+ zQ^88Gn7?uO&lQ0UZm_q(1r1-|i&(^psx$ek_&15e(z^rS{oPxc$3K#Zaj8S>J^G&~ zuV|FjOd`5d9GCe%{CHSQxm+g;OOI}6{k#3Th##ZA%5qh?_ZcF*;vZ;BLLskp3&e}X z1k)};lZ);GDU%}67XAMa0|E~k3t;F^*S-=8=k*o&Lu^Q)26Cu3A1J36N$~zJ)gY0u zgJIJx=JtG6%3Yfw*i(PTI)O*%5t=_E|3fKb3~A2YZrO!#C1EK?I%jz1`9JfMVTZ0e zPc}`4u9VlEB;gVM;uoC>#pwRyK>Xd^-S-{lRp#GlK;9rFhmqJD(+_Mmpq&hz9tV8z zAoo=_hX1<>999T*T>{GpF%QCT-i1IxtE}>r%+NX7pofvDwZr`#CqWZ!HES9Z;P3Wl zH%w|M36@->D2ssq6od~+P&kdpg{05_EcY*g2%!T+0aL+UocKRQ3f%IBF-yfQR(`&p;u_PBa!U%%>@O(F zU}*VrCJb=;AKdewAYvwKf&C-={`ZK+;w-{;4u41&O$s1B$^roK!g{cT{tC380`Qg# zdHxR-;{5>DVnG6v{zBx3`OhcsuL@wHCEIlThq>S?ST;)hnS80vCmv-p0US<%;{ZYb 
z5ila?kG&GiU6@8s@YAsZ@N8^EjQjzfQWV$_Q^rCZ`-u?@;()RVY^VBpkC^=`p1;1K zGLFZ;fR%vYJ!BN1afJol3g&OA@k94l6f$E2`L|<1zSCXhpX`+v7Z+=gxvn1)>I1;P zQGm*z*2#M>@tb7+0*KeOC#T21zv&JL1n1RDw(rrp(d_%M#~Fcwa1_E%i^8S>*cun1 z=gWyeaAI*P>BsNX4@(wEPzS7(UUjlc!z!b}(Q|@Pp2n(^16WKZ6>8v932y}aSXIw^ zyrK-Z#C_h+U45fB!JATsCx$PHq?YR~{Jv?%C*=vX=$fS9;(H1`a=(3y%ldU@|d z>&M?yGj10Yk6Vt$7SuRzDehG|a@W?^?{PXg)*ksX6?Px+&C0Q8MZ;{5)n+!i9*78f zo@c)bxEp7eQpoAMJ0gA)LqUwF4hJoNQW5q>{P`1Sz@Gsg!f{#VG7)5Xa} z-Dc?yP-}TvqqH8+5})tZxNSe6<5_sgmc~tARvJDQ;Iw^WbbR(^`p|5)wKRpxPFG1T zktt`o=&JzUel-J!+_wEh#_sHTF3=JO_>jXz84v^_ zo3gnEZX$I{R7^GGH5d+dqr%IX=jz4>D!h_PScy5l*Gn-DRtWts?sqA|K?5(sb;(49HhPGT#QCxuc*R61=HSW@y(c z(^YVLICZOWn&i7kq}4iw+YmMBUCB~UR7H~Bc(@O*91Uk}Q%ur#$IEcO?PDR&>1=dZ zhK6;b1TX0-Mj8{so`S&}jnnkgNsCVU`&GUPa%85>22CzG+RQ3GK7toVbHPondy=)a zC2`S=wzcayIXNQ3*V~J&&0KS})hS#yv(o%-M^l$?5+iy?W@k&5Z0AO$`QPqm8}O&m zRZK}}JADDAzB6ET%}kO}&e93RPDzm6!ir1aQKkAg-^ieK8^ix4aLI}m_zY&%c%Gkd zFk-W$U%%$iTy66la69Pu;hCe=-}vJk<$gfij}a-3b8gG`C?6w=#=u45?a}NX<3@HCH#N>#oGLm*I#C4JO(w2Oy^%TNWM zQS)M`f<{Af#A-m(%34YWyL+D$BPvdUUJVgMr@B;fV>sVqKB_**3dZdUMvZ(~?R_T} zgf>MQQ5IiS_QR^CqP*N-TBapurtpW}#aLLET;h2Dq_yRE;*S~=y}VLRj5d3g$xnH; zF5-oHwYXNul8BQe1jNGtRWD8L=0G*zqbfkDuBIk;QpZ(Gb67ty z)!)@r@P3S`7}FD~XF?Cy3b!y+n%>U@b-NicKDeqNkoYp_UXH!AA+ zY?E#9rS9%aj`Zw>9(8*wftP~c!l%lIYmw2^t9%iLP5sQL^)wzO(oqRivSLl`6s z9Iclc7&atD(ht3sk6ykVRF-Sx5Ai{c8U3?F14lS6O083j|M%@{CX z?_l+jWJ>$y5Aog|VGoyJDbQnw2DUc&sf9`CPttPUvpUMWVjbSJZ*nVarCX5OC}Zpu zKQekjyE7A^Vq!w|u+_M=BbU{vK3b1Pd3-AS_Ndn193zAU`erj%4vKwP-f!=vwSxBH zO_$TatB+)YvIl(LSO=By+>5-i4^CDC>Z(AP;pQm0F^fsKECaWE^o2^zwE~x`2-n-6 ztm8%}Y456A;V+M*l1dDcS$VEpQaP>!Z>LR^c7|S6ezkjKN-Sg9-GDz#qV{?*-r|;Z zIML{AUfyZ>gGGV$GPj^Kr!#>`qho^|r_rH6Yf|p+T8(wL7?#sJT7PvckTr|2$O^Zb za#^CiIBfnLuI`$o+{ld5C$)@3?^;zu2f0o)-^l%!2H(9O%noFd=eW6jCJK2F(oX4j z1j9?g0j-%-*2G+GP#M7!k;N9Iczqpn{)Ilcg!K8Obfiat3aq@f(=e5&S*EMit1mge z7bFl3!TjXY-p$4+6;k^18Mjh#FM@&)887;- zsDo9&IO4-8KleV64aVjKG+D|O-vg40bMq58wtbQ=bD*R|

`rvu7E#l1;u@kk!Wpy>6Rgp_N=eCQ<^+T<^%q(!Iqyg(*^)zgJ-h&(x8m?O? z;vFvP_mx{89E}PsEFrqfmJy}CrI!@M$9M5zZRgW>bk#KPXKIUhmCirZacWv*!KJzy z7lFJ9Vy$nQ%d$CZYf7A#>c&oSOD%rDyKY|%Ep#GypWStLXIPcbO1ctUo6QbM!9H(; z)0~Az-B${jByVe}x=dHEn{7-h5HBPOuyOP$EP7cwco!izK7Wy!&S370TQ1Fpq_B zt{oduJk>A$RZNgyuepSu)a?2ERc9HJqq^3Tj4cnRxUGIwB_Gx%!R~ZXeT&nBnVM1) zi91>TemyR{hREl`t~0K;pGIphc8w2R_qayh2g+Ie3Pw36v^zLBn3&D|6@N-|I2h1( zah-nIqr&&|$1dmIc^3V9Z^rw9y-uQu!;ju|`b&$g=aMwvC)tr&&NkUxr%2Lv=tKHp zkhJb&-H)rU>5s0Hmd+4=8kPvnSVL6ldfH2{5w^OHw?YMuJ^J^R2mLh zDCeW_s+%zlDk*Q+eD^cY^XBaH-wB-7+y>cT8mCJyc-FjwXZBrI=*w64nH58NmouboxQXHoTe#s!Jm{3PgDuuX^NbL@l%rlqZXF3y|3}tgn<}4MnRF7fv zM>J1N$PaInULqjIQ!CIWOk+dY!aI=fI=s9~nH{Y82UANjZ z)6It|f9?85; zdHF0(eXUSe*lUgq*Dl~0Qs%uLHg+P^(0CyVOC-|oe6EGhhX#3BOoy$%5)3m@8Qm$S zXTM7+AnKVoAhpf@cC4*jRAn+&T!vh=3M~kg?$@hBq!%*kUr&iCaY(`->BvRjz zU;Q~>!)Q!dlD-#DQ2Ekck|32~(&<#RZ`uUAg=KR2&;aVkx+ajjv;UKej!^1_A7Rjy zP&HN$i>f`{Dn2^LGdVOl%=L!Whs);k#M`~aR{n$fVRya5jA?JVutc`!SLt+l{{5Or((8a zPdNQQW^E(3^lth(s=~($Ef|FJJiWHJwX>m49~K=`>Nke3WDD*?u-az_E;Ay|E8jgP zWqP@$&X$RjqC_Lg?-9#}N^eNDy+|0y=*&}b6==Kpn&O$dF4IFQW1i*6fVw>{Cy|jr zzxO&Ylr_}4+-cCPBh+nreJ2rn(zB%}(>s&Q>(--I?RlwJUszEC=A(XCD%|ATEzcgb z2~MX@wpo)2#660xcjKg$#OpkO4e%7Zz5%nr4tB8V<>X@wP5I&+r1V#KWHF)?OCz69 zO|!ishwYGuw5721Kv&@h_;C`goyyQV8%$Pwlp^y$H1H&=4BX4*r!f(DpBHSZ%{;m$ z&SG9ncE6|8Bgh!HrxCnU&DtV4+;*NS#NhW}zEN2CT~%=qvsrxwc342?3l^b=&yw13 ztL$(aPXd~Nu~F0cz0w$BEl2+LNThC0FSF*Ned08oq324*YP8f&)vN#esKoA z$8J`ExWf!Xu$Uf;yarV*PFe6I9UO!OEpAwKW5;{-`?;IhsQs^VzgeVM?h2=Pa+TGe z{pemr^Mw@~9b;ob*Rd;MJwI+o&%r zOoT@#fp8n#Nu(A;%6MK2D~$xB;0;(WE%6jlGunsNr`Of_X0n;2SijEL{zMn-T-^YRvI<;f(&yeQ&qmJ`hmz zgWw=(n-*DhmcsMdD|KvjWcZGSx^x%Q67aKxpRP}MHGY0!_RPwhB=Nd8k6XC)%GcAa z&Q-BgBTBdJ%;%IYJr69MkrvLBP&E)1r;}xQDBGA|S$=PKb+~XocSoMTHG#M=Qja(yp#*h`Mrk&9G2>C_;TJ3VXvw6tZJ_ehw=cA*1|8ft zm^$kz2`zbO{K=BkLs8hyXWSY@$&Q={YeJr|>FGqR4OHFG&$CgMKIrQ!%TUk+CSGy- zsUX%!(=6%3;vP0O_#6C*^@c+w<#@MW)N1)EDopYZD6h_cX!uwsIX=I6Bk&Z9ar?Dz zs26G?%oNyp8gVq9aBwq)f76>F0Ib+q*rX)9$^v#F7-a#)sl-^kmBu+FFdlucAUh=wg4cZUdkZ-?* 
zky1ABu3Vxwm0z&n@b$e1|8rkTS*v)aJFo4F z!Zav3-=6dR)Oo?_~ZtuUl|^t~YFH@HZ_{n=qy z*eXcRq2n$6PKymCXR`%G^nAxuodLlQD*6Tac@=hU`Vg!UO&nqr`L}kFvYQ;DN#YDG|>uFRfCii4Oo~M1q-U9JkdNEKDT`?eQ;Sd zQykPCxYUscdwKAu(LIA&We5&yU(3rQ4sSQ?Cvm29S>#j@70E}!~IlECP`38=BFu)l5JFD4MHcBrV2uPVYD67vi`$VU-N~~XQpPo z)Qb}sZQ3MEt>;YFTi>O{Nn@TdLNA!$YOXNS)LRLC3JTCq%@N|3l0W{F3XMhw(rlf! z({`vXlUuBlX{in-b4M={*uP%Qq!zOimUli}Dxr_@9}Nvm5$+ShYO{~$*?-~k)FV}M zxT^G3N~^Qc2z&e5Hbcd&s@i52yq_hg!GW1rfWde1nL;#3&y8%hwpymh4$M81}t;A8fd_Fg=c@Qhtr6 z8rWKc69<4#r&7u%f1hYwkvluIXjb*OkEc1?Vm|7TRUJpu$~CsW?$DG7*03H+-0a;&|$Jgwlz=u+WbCg;2!;Oy?(O0bImC-80# zQWXxxa@+5T%*Kus>_3%4T{xZ)Xlt0qZJrv?e`J*SCdHe^>qYi(aTLWv>Lm2m2^Hxw zbTNfjDr)c~=fKD7sw&2X&NOQHVC@a3)TV4s&5S=0cXL{r<;j?75RzS-$pnT1;9XWZREH!26oO=NCuTnDwg5LcZ>u zs+0#mrE@~GxiU|ZM&X%axbW>w<+&&Q9h5a;j5E_33k)aIVqgihac> zE3fy0`sxe!BTwd=Z>Gsz0S(qxR)wf(T;?kv&0Nv;zDAW$_EkbYP|HCj&g&d(AwywYTt!9Ur!Xsw2cW;D}QV@Q& zt_SNsV9ltyT5au;?`xqyCA{JiHAv&UQNET0#oDewVr<_DTzh>c8ibaIUd~YT=bt0J8UNg!ZtM}MLyFD3xClxU_S$7#K_xAg` zRQ#1F^;zY|GIp&^>nf8;p2ESxuMcPok4f*`<1Ksa-)_0dkV#)G4lD2hiG%Fk0xFyz zZp!PqDTg`Xu3wNlMkEBlprgeF5)2tIJmQC7zZtq`Q4`Sl?9*;aC=^D~o<4%R~uYq*rR3@pI2H6u)J< zxOk2tHh#oZ_FY!|Da|A;iXfQ!!wQ-exp=uy)x^ZaAu!<0(-)*H3R<*X|B*e!dxrhe za;Y_TkCk&`&2ZYRssCV66-S%?<0$rUt2m0Vs%&~Hd`M{vEZKF>JB!6Tlrea#!f=nQ zDNMun5NN>8su+-XK> z^R{>WVf(&%cx0R`?v0k1;3~_G0UA6B&7&(_m&3s=>iIPX>-sX0X1I)B5$CTZ)^`ew zn&nfAt9L9k2GYl)(HFjsIG7V}@i+aAZg zYE+pm)%@HXkU?-yiETDfcjafG+S*!)omQqGremrU2w-T%p}qnV(b0*1bWoPeE2i6s z$NAl7!$Jw&N4#}5RrWJ=8JX^v5)*NzRGEC)7sOnntyhneyv?kyaF@KS%-q%ZfG~$_ z51tUdG*qhr9Ed6Rtf^`2k`G@+9Y}xsQ~14LVv|vMDdLX*9cis+1kOA|m#d(pwKaq6 ze(T+*;$nKsP1#XO_^20H7!ncO(ka}JV!EHW(p8G)XAh5C=ja^NB!Lb35s5_#X#7BI zoWw1EQrd{{{tddsZa36hrNs$~Tbjjtm2fCYBNICuFQFD$zD}&b1-Llme~zS6Mi59Q z9{f}r`_6Y8b9QnJk3@t*=J`{h!<#Au*0HhijRdBI#%Z!lIi>alJk1}Pw-8Lwlg$Ae z(w+n4OV_$19c9)Th&Lh_sMb6=GYHln@A`?yiyMnIqPi`OCO?5IUSKLCrwN%15qONB zPwMtH9{EUDD*4D2IX7h}3_PcQA4&&ZBuAeMg>hhvy-dc&^72PS;tg-buZzRU0MDy^ z+7eMSv&$AEbP2PET1JbHK7ihs~NeI46J%g?EY`qr9Zi7HBXLzlS6 
zl&4}D<7b^K!o!+T)aXW5TRUFZxJrEEYX&2M(>?xM>)?-RbXdK$-|50WVr)S$l;Nn* zzk9IIIg)`1$pWbsc8v8Da zF5RVPD1H821Tsp|ZfY*(Bbeda@5ncH*z~DJ;nR-&kD6%%kI=mPORVBSjv^R)NcHOt z-PM>zReiI}xms&{PA;xpG`Mwt)e*F;tODl1jvg2+gm}OD_Sp9k_`YlX;_WGiuV!(A z3JDK^s*f-!s3q+aZOyTw)+;h4APn&T>iDx1jYE>4qZIDcdRzP%#X41X64Krg2EA$kU-!+<{j``g2FE* zF4veToK{npm&&+>tlYfv&}DrEVJ}G4DL{8erz?4No772Vg+;F|cJa}{|0ssv)ZZU+ z3SOqOK*47zv^*MoyMaE$NQZvfj!a941gRYkLBx9sH~hXzGW@jIiFv&VP|3%ZLhGb< z7&N}$x1!^z>XqiH^MRIGKEd*^O8YL?eq?PsZSvJD-o~7k=sxiBKF{?q&# zo%^D8^39Jsv|(^GGjOWS#J(5&<3MBd`uoj%9{5IBrWxXwV(4A-13K<C^Q@77_Zp9F%`7=5i1h zqjh1&RI?U05xY$?k&@$-&=;se!Es;qz`54u#^l9sl`k-l8Ek-X5mC8r^;AK;m)TaL z%nMkxAHWH(;k-*}_b>df!bLvjKyGYCqxYWG$DMv zCVppjaZ%BhIgwJ!DsYthJ=+gzNzOw?JE)X6R znplSa%T!3V72@$Gjnl>6c+w?NeUZ6pK@0Q2(WNcFg`2aQC5-=(1eG!41*xL5E~8uM zRzm3%200c>Qu%c#w691u1f)2`I70Qe&hV))(El@5wB$|5E3r><(?MY*HO;1XTt4Rz zZ9-8yZvVPEF3+1u?gK^WJ`+1VcaZ-*K>L9#QvH8wC}lz0K1!i9Y^+wVhqTl4r>`AN z7-+}`Fv1fadjrBG8v0>2YrJ+TXXOvRF;k8mNH*>`dT_29H!raqrGII?s6YS}Y$fG!Kw*ftZ+~Q`+wCOYJ|A$~gY?vb2;M zcB69|m2>VS_=lHZk?epnSIc`IixH@j(o#l#V!}X^Kt-ZoruZP}#Q668#xskX{)D## zv72?!b`UT+VEUZ;!P6E{jHu*QnXFZNcS=S2KgIFqis>X_en?|(hqsoEB;qiQoyVt|;5pcUs5H@S2_I}{(#_9GC( z*&&1d>KvetmuuW%YsT=jT64Q|Qdjx$-|Cn5WhM9A6vE(U9!p;hJd0D=_;nNR6oE`f zYTE(>_&n(T$qSa+OCu@W`|R^GltPUoOqMkb-UqYgB9pGQx-|6mazu0jtSHbj1L*in zOmgms&-q}T?E55wQD5syg2X0V@G70a!}?IC##oPCldbxCb+#I~nH+x?EGCDzTPRG?H&4!qyea~JE=&StPqlV>zr2E*%wTFfXoNI?00@h*JUkdh&2^tN zWZGB?uhFDHRo@?}4L9PfzR~NW0qR*~Ubu`g?BG6o(&b>2~=>}E1K{QZ6Ie%g(Ud{%iHFPPCUM1V=BE}iV% zrNr$iYW+Bn^wvkVuS2{{t4XrOMSTWHBtQUBMMytKh2CrSV%{?8zGf%MFGgQ|Z?9pr z473!XF*Hw*o^v_I-Jh>Wg%Jhu|(BZX#JzMS&{wO;oV)0gjr zMI><3<9L{}w!$HgqOdAWzO#`+9*8HC*oRZv`n19>Ya%s*W~v=mkjQVdZA`@LovatC z$o6K+!aS(WS#pptSz>eMKo|( z#S34+&Mzn>#6agrru)$uV$*)Cp6+7DVs~3{q^IiBbG>eI^uTCED6b@(PlnXVlFTjG zFSXj5{%9UHr6%HI!Uh~npg0#E+8~J9%&n&Q7fXd$zfn{#7StX->iz3{o8$Xv*T?iN z%?cf20cZ*px~FpyNTVi=5CzN0*$^@#wwSgkv9=6hm}{zlpdTZA$@c+~BpUjuX*H}F@l zp0``>D{KMf9Ljon#INcP*a|H^zX_tmFFOxuc?krKa;t?MU%w73ND%x18ezg1&={4Z 
zR8g0pO6uZN%)26xG0*f)f1$`i^D{DqWfkx1laj{Z0;wjQA54zpwf=iYB8Xt~6#O)d=2Ujj4hgkVvR~`({sl-fQnD#w)`_X%CjWTL#8tpVV@Uw~ zrN+1HP))0;gyXC|tzT((zd)1yG8IOQ6xbclSfS&wRuq~`V&*jmN*GxxA?BZ$eDjP%>2YVge zl1)xeYq{ma5HzC<3q@Qmxv?&_8LPj5mS+or1wtw~AX3sp%*I^z)Nk0;@(JB-R=eXA zaaYyE_k=C&dl(oFV>2p4PZdO^nw7O~ftwc6WQZd1;MWj98KOvwLY@Uf;lF8lg^%z3 z?(1XHOUtq6TLn#GB3m6Q$1qnSd(N8^M+r$rqFH=f@&`p~CpaXq0>5pPcI@b;E=#K< z!#4&?I$jDhsJ9J#=SwB^+u=H`yM~X1=Ljl>RufangynSee=SQ4vE%cHEcG|$nbM;+5rTmmuKHYh{k zM;&PRNaYBb`rb^gouRg>7S#)^3=Q&jzhIu$m#E$x&Py^%Q>^@YlNkHHRM$GiA(ul9 zKPZ6?mLss3(P~oL*h>@o@Fn)g4Rc(u+)J}_i`&jGQe&Y#q~+__v19oT>t!Jhh&WHC zgRDFxX(=?<{w^|u8jX3zt5~>fEOEfFXkKvJPXJsW>v&@MXb9bFofn`DOPNG@Z+8+xP+{yV zI?`ZrPmB{I>Pp5oPZZnPZO!!-DhJYuit`cO^3E@1+(vG=K<#!>?Ca)2=LTIQ*%I|I z3kf*MIEjs{`TJ}+5wOVXtZMuA19SdP~Xyav^S zl@&p<#U`za3@uLZ2l$%S)}>sDVOsmiryfg_Q&VBJVZTyeq+S_%Ffu#`{q}`lV1y8c z>4WyYFIOwPwZtfdVV~OD-%(%T6O6 z9)!*WU0oC(7~h|S(jtrRncJJiCMoo}x%BeR7AKFauDv1X6y3hl>wY?BL`;I; zr-Sk&>}#B;fwHiE&WHwqP2oeixL-9a7Z)yzDYT`iRlEB*%be2<9ZK1awGs!iO%hVi zrw?K5o9XUfVgz_2s#nzXOX)U1`y*rYs%xiGIn9LO$8$Q(^rQSv<|r?~u6jNebX(sVcz_0|;yILfGjA_4XYY$>3Qyvs*2d_A zEz?(VpE&IY2}QJ7nvy1G*3qS0Iy;+E|Jfb67$D@c(ufo_!Dsz)^DKafPK#Z)(G%6y z-lnF&sTTeGMR3D{*{5<1(t%4H<2fWdN_oI{{eVrb!kP|gPQ_bcaH&3y#pP9R*YSIg z``fso^5>dluLlaRtk9$1cUC1|bg@e*`v$Vitdmo;RdW%f zlIc^npSdKzL^xo30L-Awf`bBzIBcz&clB9?#L)2nP>Bsu+MI+@Hm%++Zz^%VXfTO^ zMwg4THPY=R4llyxo>INRqiIR0lz}S4q3M~HLfksq3h$Xv_S7Yp+vh5;nko|m_EdJ~ z!VL8J#1kA15FK1@>S>4@G;D)x?G$Y?amJfH3PvYY3iGE@+TTGPSusD{2BHFnF6-Ad zuNFLKzgW|{9!ybhS)LU{D@v~D`Br0eoC{V$bqpGkwRmW4VHTMmjj0d9RDwn#TNfA>pwX6tH zIb`9vQ=e_{z;3I9O!Ea$jVel%L7vV62z^11_gAB{Nw+=|4itp{!UH+JjHZtGHey@-S z73FZkM}4*`_RTa4Xh}VHf{x$oFk* zzzWHHZK?R}S;>7g(CC!y0LUS-IU+fwe0J-C4d@L*t{ScP+nJa@eT{U?&`0NDntoh# z5{Io<`0-rJly^f;?{v83&P+^*+qZBss`qprKkqlR8jnk>Y)+MoBuIQ|@RG7G)Z#s4{mHS_kEQD9dLBJI z(XTytv(NTWkX#J?^+`a%Zvn3gml~9qdLajUDNk+0_ z!D!5-B#6%dj9$R5&sEBABjUn}O1G=G&8{`F(i}|`4@K58$=9O-{VBhXPcu;822WJ~ zkIXZ}F+x}o#%dk1>2u`^Hph8_rZwW}(RG+BrD2uEE+DdwVF%C%C_9R70|D*50$G@i 
zv${uQmbf9X?}64(I$r4NGCB`qL@+C;cr1i}Ur772GU{++0%3q8 zGU`4S9JIZhwDHDlEzYpXlmwE*>4k2*$}d+US9YNJ36j-(EpR*7wWTW6BFX4dHq@2f ze&h6k0`WtP-`S5~70=#hAY7X0;fe$v-|UXipk-4`P?X_x-Q3yjR(u3NH$@%i;`g zw|tCV+$;lamq0Mj{p(#U9Bhhd#!aCj_qmb!+u6NAow;4 z^L&7OQBg{fmrX&S>^jX(j*;3Mu=IKpfVkstJw^aqrAw7$@hpUc&Pf#X$Gi0gMex5J z{rc;!hA%Ap2d68$JaS~QOW1s?*Hxi)MN9e+?;1`?Wo12;+SXetW(g)`9kd4Hf06*%xBjyd32^T6e(UVERzA6!TZUCi{cb55e0eB zJ7uu(79dxH0CN(oyaBs-omHz@F`H|dVTh4ux6m|NfS)O`)UYz}(!Cc#`lF$w;+)T( zb3@hkLDg18${PYce-Z&9Busx~BUc|p`(sH0Fy7)=;*)KjthKxN_`ZFJ$ zObNwh$_t1k;@rG58wj%k(C4GtPHI_4N2N$aGx&=e&iL#vl6WdA@=rUQf7bv2G4Tj| zn*hIXpq==4w;RkCJqCU%y88n$JbupgVAy- z4Iez3Ze~j@x1&X5>}CX?iVFIllJhq&{a`6*{4g6aXvxHX+xcaPod*m5kEyc^iYwT* za6%xsTX1&^?h+E*f;$AA;O-XO-Q7L7JHg#0*r0>E4L0y5_tvYrRj2sDk26!}bWitQ z{jIebJ*w87prTc^v1Pqc8mq=q=UA4euPsi5%5aa60C+O9Tbz#6t&!(oR_fGQcWTN~b;JkXo$_04`^{H7JsR+Wk>MG>MKI|8eK`hpF`*LkiP<6Qg*B7# zt1qRohvtF zchoE8LASX=sEOhWoG3RVoAdd4`bShk=`PRP+7-lfwapr{sm=1e z&kp{E7zoJkr#dtku2zXS4>5|jh&ep`QZLJ|d&X6Q$%;I0Ulb)^&(6wJa2!O{Zl(Yn=YFw%oV6?0TdA~kO+D=fkMP9$7E zF`^0ilh8yD#WFtoqM&}7F&r$^S07l)5ay&Md19t8qui(|#;B0@&3Kd})JgEQERJLu zTx~jWUf}|qdo>y_66*9AVPAzYb^J+|&x6*(A1j5pjZR6q9)HPjIrM2A&sJ6LgCH?g z2j4WiugByQO>?1q#~%uEvykF}^3KYnHXil2mvc5hP0(S^LW38tt`_H}rV?qa_6A!8 z8o#v9!3TJh?)XF0nt#Q&^O6PKZ0{??TG!~{ zr>3VZi_GA(vVx$R2(+jm5r(WYak&iX4um%92)}?%AaXksNU;aKc7n<m@mSO&Y#y&wntmlIHLD(cBH^`1}~YY8vJ^_)l-PIF*10QR)}6|RnyMQ0DPQKl)a|z{iv7P zDQtxXdAxWlS+rWBk+EFnrl=rGqh*ptK%B9H@?iPU%j8M-cC$3MG&nNF zVsI*JX&}MN%lZ^1FYjmYT3rkKSu`qdmM8c)KdDV=Mw>48kdO-bzRHf>o^#{+@gisB|$3M(w&ZlyR+|6vRY-2Vzjgq)hWs{+h7QWi7_WaMG^ET zb~5QJ+Cub2`0fxwU&RxM&#g>o;JDJm=W^EAQ!iPe)59Mun@ZQ^dX*)=b*n_I1juov zygOUdba3mQEOUTHr)2T=nfH;D3B3ol{f_ypV&+%6D$owH(wyIefSUxW<@C_`Ig-GN zII20Ly+RLy`&Jtv(YF`FIZN5M8vNt>l}pZdWj-e$1ioR#^tc5E zNLY@6L>7wkMHp6pYw0c|8UzoKmvCSyIX(XaT?sBa~wYv|c^K zJ#!@@PJPvrQ?mz+S0c*e9YLv&RaJUjDF&Wr3defO;rA-4!4DjpR)H9AyUtFfDp>{| zE&`BC0Qk5wnbER%Osc%ElJ*8JGG*osm?HsMKncL?57kgzaEus-Cu1pCaoU_7MP-JM z{MB;1H5$c`aUHSvedn)^FegF=&vN8_;V_6e*I6CCb_tC1UVLr3s{UEA($0up5 
zCF{@aFEW2`Ipwa4#!x$h^ZS2?EY)nvvvqPLqpjQ79nW{9(o$Tgs-n|>R{GY)`2+fk z;Jm@ISxmA!5!1`C{_t)fMbNEBQmFVV8NJ z%KmSL<0YXm4mXXAfNfG(aWQg?h^*B4w%peOON?sup>)w7Jpq5FCx|4Aa-t1HVp=kl z;ID?rrn_GRNSqdMj)7H~cffgfR5-Q`%Elo~?eJ&&QRDXQX0pRs{diIR#+%z~&9`Q_?d$VzhLqx2wp?T|vGd27{24B9;b<>+?J;L)P&K4M$m0|3jC`MGi{$^Jo`eU`@mv-tbP*hl8YR$&5ryyBPL-wfSSG zU}J9~`il0!f^a$7s^AWqj9|0*Y~jeYQ_l3!oOWGS!@r`F$!HH|<|HInjvID(qJeEYL>nKtJOxe)Aa3wfrVi*rGAX7fz8C`CV`_u5{i`N&o zorZ+lWJWApY#x{16w7^$nTHtz4SA1cy=RqZwl+HE+BiH40x6dyQGM;^A!Q z3FWxvZ!ShuHvx%ER%0>)m`+@2Jo6NF9W>TUQU-O2D9R0;+5Q>f@3`<+}po z=Qx4BV13Wm!Q~`l+Dv6WQd7p?99?eS6;mCpIb+EJwmKBjCd2E>RwaZA9uKpns$O^j zyl?J%|8fD0Sr~?eRbMaXojBVVxIe~cnqg}vFjlD`$CtjX(}y!i+`ht-7AziHb8Ff~ zU#spUmV#+q%yEC78{3?uklaoW=gw^l$YvIPD>|k{l=Do+Dcf%IYZoM*1n+mb&o+AT zLXNhjK+2Hz`B8U&T@2_c(aQ2QsGL(Rbe2m4l)!qBx#evK^68n1r4FUw`NZ!?t?xIf z?!3=Hr?MQvDk;w_j+QvURG7F~8>I`~dvd`dRm{g!twkb9jJB&#a=bvetMxV=ooJJ6!zYD<awngqPnaOi(mW4CefP0-s(yX?fG|)c{ShkNRuAEc(zae$no)Dw9yzNHY8?q zo37=#hjcS08>D>Oex3Fh7>`|<_H@e;@f)kTp;>1SxmBj?9n+KY(wmmg5M=WxRcTmH z;uv|3R-^hV@)=SuH&xit?Z!mny+th;L5sdNG`%Y>VzPe7TW&F* zDoN)&u#lYcWSzHCdQ-#eKOi>WNAL1XkoYuSGmwq!9a-K#KQ6hB5Y3X=GV8?F%^I8k z2vU46vK(2e@uUxQZy>Q-AIw%ux+Vr~szs$C9&k>MJcRvn;3rr6 zC(Ij0svk{Gt7~gVHE-@o&~zQs;prFFfthJ}p!z)9@(J;BzNO}LjSmQ^{ZkT(M^$%P`t&_wiXpDKy3_R`6BtmsxRr`Tk|K|DZe&@H|S^ z%=EXy)BYjtH&*l%T2A%x#C(y;4 z{N&`MfbBk*3Vf~09S;si@OwV1IGf8U`C+eO zU5mpe=y`V-<9ooM-x6#qmK)Azk#~-(x)*%O9K0A1!rI8es#g`7PUqYTF)Cl&b#y`zEYXf2iHYRzjyxY~@|qup(c z%FUy5aCyaB)_?aXsT$1X)lC>b52$x-#AVmXv=s`N`@`d@qPaZK{1;Leq^Y&7|MDeGV;~ z2*$RLTLYwWy;JskT7C_m{S9V~95e`JG%R8^hr*2S5U8<7zQ#LwUH7;h9G2S)c~=vC z6+!)GL46dfY8TS?XXwa8id2@4?2SExj}*7PWQ^ONwTaoxwvbT9-Rzjl^K$!b6)sOA zH(A80!(FqOJi6i=VJH7?)eOBOajdq}zV7V!)mxrF1S*9q74)M4V_O5ZE1kJg0dUb7c6Vob)6Cw{k*&(~L!xs0%fDF^2A6Sp!QjHH;MbLBtb7vI>WP|Dn z_q%pfXwBtq2(11XT3@L{%sLAOS`lY^Tq8R77Il?ZS5Musj-d;=Brj8(uQo-Rmxmnt z_&pKXY?`}ZJvgx6w0np`&eCrmr}YjGV9m)fWMK;IwP?cCLq)G2=DCayYiAbv9|I2T z1oaP!ChTWGjkZh`s7*;bydG~pM|mG;XI0s{WxVtjScC*c&R}`Yn 
zAJC@KSHYBqqr8{9W7@8J+wbBPs(Ar|i#>B%544`{mwRMefVa4JEZ6b4$Aab(EQ>VR zFvr5ffMyXtmSoUah7)tAp7#DSRKu~c-TyGpy6H84(wG=ic(Lh9MZ)7DO79>!$H8R9 zna<~;4o^l_?m7QDP0v*tY9O8zm~JZpo%-QU<*7Q>DVPw-K>xnG%Mh~V-xiSALh-S^09hN-@e@COtC*>3d^n|YVeBTcnYO5ozx z-mx_e-xlL0D*(373B0Ikw$Z4dwuVxtQ)IYmiq?6n&OzY7#q(qAVFsIb$`g1uof}yH z=lL|~=uQckc0_Le0OxN>756m$**N2zik(O<1bew^8FDpDTD3`Jq-V<2j@I)?RoI|#?SINV1LH0|d^xv97`Rp2>v-L?qh#kuMU7W;b=QA%AQKJf z22T=2nqR()9`ZF1XRFQAGqN-RiEj& zu0rz2zrTBLN5odBS&c5UqUf@pI$k)?NW*?ITa~%~r15$$xBeH_U15IaParp&%m-U) zQ%n2i=IdFwKRwO7=Z+va5?*1Fah4AGJk$AV{q_#S9R?;C?;NFS)QSBi4L6z$b?M}W zflY+*tdqs6d;`HRA1HVozrv$QJNA0Wk5v?m2M~-o?CaK2Chwtn;2}5$BUT!va-BzL0ho8~a7w z_4_NUw+D7q`zQ)R1RQyCfCOs|M)V!eDez)QfWn-T08!!?!{-R^q9FVILdpSsT)SoT!myK7wok2`KOp)`%Zo2;qYc#GTL81 zvO(rMbNUH3leS&=XhDoJBp7}s$3c3ndYd2WaNAfC;-B!Sgk`CX6@gE<-7)FhXF2K9{ zCp>fis?jUVubun)gvOmTernyQ&^>_yN&}4#Z^aH7kLl+PXK4L#6ggEe<c?}*!s57aDvR~ z6-UhTl~weBE^9NzVXBP|@&QZ^Xtw6HQtf;x@tn*-A@A=dtEG82s7Wk{1JGr&OeHHM z3pkS|34YntRFvyHvuzA2{zpSh9nsATyKTsL%gcYw=Een%Yvgp9Y*>CWiI6J%AX4ev3q1%i#)xO(;lWN$jx6Y5sDJrW#KUjd^9072LF0` z#}{^OvZOoZ46;-Y!{Vq&LKcRR&YThs@GA0YgASX?K``^uRFFtFt^&efhLqSwB(G!2 zh+<8aKHlaRcC)${m1supNkmONSS$jWZkc^Luy$(*E=CxaKQyvsZrSdwP5^M;KS7Md z(#RS5ot+3cx5OB_QDV-&DN|LlyBliTb&l*jc);4chR)62Gx~dUmIET|i;N9Lr{9A$ zRVdqvRZ=&x;II+xiZ8iA_Zl9)EQzz}kRJ zJry3l;arZ2?XB1Nqg{oM$s zo;I5zcYn+_an`x>Wh~g?ZWyY$3z0rj9ZbtREs)TKPvd zQS|j=m7@KLRnkQ=rYpIh*(4@jp#u37QFhslv7wjAN5~VpUm?kH8q5XFW{R1?(0-<7 z+%J(E=Z9D_mrsyOUEVpwVNx?UMh{$G-r--8AC3LRQeek)u+qq4!Tn<@KxrM$HVI6z z_D+>06k_9tf>h_)VkLo2ivbqjz!1yzxy%Jzuh8*9{9L z!(MP#y#2!fQoX-7Ed_p0#X8cVoe&%A?VA(8S{SAkZP^XI!M5 z2xidrvAWgKx#5OVYSx>K02DaDn%~**&N{lby(trMK%|Dwa#2v+C>J!`#dBvR^{|Pz zbbd--!P6D#Mn)rI1qUA?C{|1Yr|qQMH?+%yRG_s_t@omfB9lsG-!*UE2n5;m72+_;9zv=rSydzS9z<+ zJ_bz|V7O70-*n$GI-FuA@xjx915U2#a}JRKgS@i{_SW01*za!y;d&$_~x8H%ufGY%xL1_p{L|QT8>@}H%$g{SA54(qzXkZwA;8<7kK+U zU+$!DY(kqVC-h5<4^d4%9+|FMR%S90_JQp&L7eeXp%o|Fo6z@Y=Y05s`5^69E?p#v zZ1<~KrYP2Nrf_k1|M-$kcm1DiiFaA?*kZj3V%>-hCP6lx;BBn-t4z1K-RbVM`a+p? 
z*-_@{bBuzUh5SaN2AkR9?i^Nu3zVlTx1KN0=S_4?l{8H2@uh6s92FNHL0o5ME)|?> zWj6|uh-~BkBIQ@Ur?QbBjWQXA1&2}x8-Frm6T08N{$ue2w(h~6(Wn6i9e?{!I+80w zQHQGJ5qrxs9{khR*0uy%)#Tnv-L=?AnIwc>3(G*ig$y2*?Pt#cg!b)Kw)wWw9O(^D z=O!G!obdeCw1XM;9#gYPLT-@T7SUA{PTb)*m=wc-Tv7zK=ySZG_Iak>JJ7Bd|GHMR zUCO31))*#h-$+U?Ym>AdD_w55Ot7E+YL99WH+Peo)@RuN5YWNUFEgtnTDreKuxoh} zlX}#Au_-~uVqosYdgmjw8+`hZboAV^A-zi*Ri`aoTF(r)YQGa% zLv#25gG;&9=*dX^E`TNL7tYqboiVZrtG#jfbKh5nVs!S^#YRDfci~EN$e)t3#dfQ+ zV-l)(;1K7`&U%ib2EwhOhQG$;^e_TnCaGTQcUO=U8(k(k6!DM#7?`!k;+0W}Ucklc z7_vSR!isapT8xcQ<$Q?@4=z~A}1w)INFdf5$=b0DK%)A%A3;bB^q z71=?g*V`VoyEL8-bcW`4#9R{yVN%h>uN-cBO{xAvSvatz)$z|)SH8uHT%k;62vtSCIY|+_l5c=hLl3*qMbs9FF7AJ?KhO{D z#J0lgW0X-t8K>kT!;spGuD2R9gyKtbXaDlf?SNTE>NZN*b4(^7>W4UVe`svtJU8z~ zHDmJ@iSE`OL9(CP;P|xcn0EmpFAZ()W;2)X)BU;X&dioDf*Sn$hA2)GQy(SnM&}iz zr=@JsVyo)bsQYw2#pV1b!G0Y$?9>y3F$$if=YI*b(NN0z&ErMea~tY?x6(8bPjAT5 zvB7SX(<8t7?f{dfqkTn9pYu-TQ@V&_zu+bQ1zz$Y*8*;$ZD3(7ibi`m9I)MOE}PX_ z<0!JUePr+vK{jg!VTdhNPK9TbO-`f6HLm}k4wn9_Sd4m2wYXko_4k-lV2004)x}%T z`O8XIqs*i|dry;T>SrUE^VJFCxqo!oX~NCnl%1gxGFRqH{*dY?H4`OP<>Nhqo|}M?#`?T^^fYqU*eECKEI!0Q_OMT zWbg3;6A3}9*9Xj*O?l(5*sOjaqKFd#E=ju{&dk>f{GO7P8(_=k<0G~Nr=3so!aEJ& zL=}yz!}T*>L#cx-Uyi}QM!icRQtS{6fFF#^|T+#M2=-yiIuzfL!FqNSpcyY0QEvLUVrXZ;_G4>p9#;Asw7BkY`)HK?%F zep_&J_*A1O^WjNA_0M3;a-yZ_el%51pcHveir&b~8?O}6$23$$U`$WIHGR>uCH98nl^g7l`s+LPqoKi!D5XYU;rXJag|u#-%K0OFXqUQS9x>O}R`VY0SmR1~$Da|h57p}k_J3-(3TZ~W ztlB(O)tkM*$6u(VP*vA-ijL{aDh~0uIT{W+HPzlK{%}@D4aX43Y_z+~Cv!9Io~kCt zdTOqL|cuYHDnnh0ATzLEcdlNKRy;r{K(OP!tYt1usrGeTio)bG!ueT?E2eE_Ze$ zg`I~O3dCG1z;hcwu|;UOe|H@kshR3Yt9v?}pRUHrlqyTr)6Gh%!MxN+me&wN<6Aej z8hqK4z030-$Ycn`RB}R{YN=v-4BrFP&H(lkmYg@Aa1J+S2mDS6V79?crGc)a@rX$f zWLKm#%3*34;N8*g#O(f|qY(w16t~o-^<_GoiRivR<}WB&b}f6L(==}Kn+MB-&WaB) zvdi}8L%{f+IdD5b=l1ZCCq$Rg7Q&FVJc=fezzbAOZO>;YR>uf27)Ab`j{h4b$|6a=Tl zrthE}zRrY`i{Djk0uQP@Z=0ikA5zAA9U6&2TjQ~Ge+u4I^Oaq|MCtlMS zibVP)uM<{p6R_j8XP3A`9@UIHp&PL|Ec;keZ4Ld1&?nMIqU5P9 zcOk{;&KE++9)3@Ldc#56@lxY^i9?#3dZICyDeSlIF0WZsO+!UMt|#Rpuk(H9AQkjX 
z_E)y391)5)Thu!`)$8r?Y8U~DcTfYxrY(x&MShFa=KSZ|%2t)Uu-dxS>ww4>E4xp z-N3tqh4#|hYzjQuWirl+hE|IflFF z!~G;f#TcfN<01Dt>xHs>^atmK5aml=zel*)uATAdT=Kx$&hCF(S4e*myQuw{oNg!Y zmx))-kZRbr)6h5_BH-x@IV1-`}cXeo$ zGf798xc|qMG3{oFJW67 z)GDB?6VD89&l~ot-JHQ~u##b!+s{2mL$^2&Pj$!y4B1Kn@`#)v&ZnwVog8$+8AB5C zSDGflL(xMS7y1sDs~0q@kg&y}@?fk~^1$3agdh9gjb305GLR9L7iW(>QTo3M4054d zYZgaYnl5PUwD#@DF|wVMJH9FKf8((=s-5_^Rf1k~*wRsBbu`=HX}(1fT|%*GN(k+^|X-uMH7?BAz~Lp!6O;HgKb zbZ@S-X1+czI5|UFx}`3rAitu8@K^C!$r0Vx(Gr{g9+hT^ZFU|J5^^}ljWa@zCbeKM zaq%Kef4#6HgWiS1DT~Yg8g6R!-SI9~yeZjl>EHX0@_CM@#}sH-EC{gY871yz#{&QR zf@fg8OY*%=HcRv;Z*P%nW>8xozj8cYadAo9N)7UMHO03lFy%SZTo)thZ3uXvYFMqt zevIh8wC*3yvt3fyYZ{Cf4ef6B$T-1<-YWKeBhCOVDR(4YPEzI$ZG>yi>K_;gj_t78 z%|D7ri~jit-}Al~HKQ(P2E_<1axTryiGv{^il=jZ@C=j`*&vm;l&N7z3J#Whfe4xsW!PoVE}DZEdtNBmQkU!WeqlmK+Q#Ie9X zA?EIUu^MdKxi9-K#EF5msru;6c-YP){T61+CJ8a9G?@&fCcggP8~z)O=KMn$T z9V_xjbR=~K(Y*W;!-Bpx8R;MYvC#h`?V(p(_&apTE01h9Amr`P*9Cz`sVk|9O#wuc zD7yG$K)7OH)~vx`JxuHayWzc|zA!VBS765yXymn8E%1%ls&%<5SW>=($)@3{iphx{lS?y9~(4)p-_prpOrsx&&v@)Tx(!>IF zprb%Bew0tGaQ}0Qk(ggD1CfNnJ1f5|5iCaMK)2BsuR-SzJ%)#cNUias5#i-o?oy@o z$P8J{FFR+^qGU?;5Y29}ySuyXDC(l>f6cFO2V;Bx=Q(D<{yy@v6i)4~%Sa-zOl(?l z0v2Z4^(4MtE{9q`Lk2AOe+V{JS_JNccy$2XHO*eo-MQw@@HF;t^z1zIGx&sR2r6Rz zhEn$9NZ9|Al=2S29g(l&LbpOYTHroRD%G`MNk7Zf! 
zKn3KP%S%UwcaA=21g+Y@Qj#D)M_e zExy7VcjKG0fp6XcJa!wjL-7yMbV-^Q&a!rUpf1w%j(bbOnVyr7=*lAX|~CZqa-+!XJGHx_-K_djodU%hssSn`zH<+n)~CtHFNM{?`qH8*||pvbAU zdl{WGZ73e6k5lh$r}0Bx&|sIfZ9cEpa(FR!AlF16Pw<~-u$Iad98K{5LITfZ(+j%3 zy198}**t&8S+})-BtH{ugnopPgAy@(SaAn4_ck3zR-2FSectGL3VfdLgK596A-uxq z&q1oI$B4}5Z&$;!d0dcuwuG0Jzg~E*t)|^!o|pT|DE?5EfE`;ktVjZ>qlfzDA;P8;)jK$4HtYAFKkHr)q|(_XCqrZ_|MQ0r-uLD>Ml-bv)Jd5Z&o5!_z}hD zn$s}fi_DWFJoH|_?B(OJS0yx5DwjL9zH0_Dp|>eT%xdFJ+S_^sWlzdpT&K zP_jPXxVC)=u@vzt9?8$k;$TPI{bZiXkfpM@e7nb->Fv#3lm{YKEXV%)b=!+L3#%wg zg7_kbO7Bv~+6ElM0uel3H6c$V@m%fO;B4o`;`_;Dk$0WbVhb0)$_n&v(mgl zt#?b%Mko60VfKt0IyVJ-CjXB8rSxc~Av@@#x-CjET`;mLEkvl4Z$mP_f#i?gB5b`1 zbXl8_h4wX*iDsB~g_lX$KHbAN+%Qlbq5n4atK#-!>>v9vtE0u`&RFXZ^@q zc|6_^-wg|`O9y%6jQ4bWpV6p$xT?(5r|YD>(}R*ijY&j57`S_QOtG7*PCKDB!jhhP zU##&y*V~QZG$69xA*OU;g%L-R2NDZSDQcd8aBE5wLJxpWOY7^W`n~@g5%I84OgQNW zq@_ksQ1EDbIAspdeXHQPwN1BPo!?t4Shh248#-VeiVbjA6nMEnxAJ_tKl=K1KN>p| z_vgdnnDSvKQ(8sVFMXwYaZJ73_4S3c*<9_E@ zGx|*X9g+U<3eS%=XY-9-uinqze=+9iLyIfrB=dgvJE+PDg3nv5nXH1QQfn#o)5fOI z7n@FaJ1ADsC6_q{W_tK2XzRK8)W zU3>XJQmAE;E#{(wBvkRHW7K?&pmHAuI*h(hf?W#*rOTzxY0TB>x>EI5&vF2Ul_PFc8)Rg%i@AIBR zU?>^7g4FN1yv#QD~bm#g>gD(S!A1Fic3fF#?VnQc*w* zDZdclQ>d=ak(1%G=t4@cae~tp?%_7w@>@Kq_Z|bCSrY}@wU)&afvUyn6Z+5nGplDU zw?Zpoj-MEJC%OH#TSo*Rg&jg;jJ}E7QcLiFT_& zE))HyjtI@BiW1fAg;M4O-8}>^W4t}n7gncSz71zVrUc4^eAuk@d(Zw-sdMPQo<3vF z#e}~T?=AL5;`7ZI&3?4LhrUl;m~EuA<$G5;Je3H+^z7$FJKKWIb!Vot%?m>hY~6R- z2nHryN@Y{Qp)Y&su0t=XNBVYtiESXE%miG~yHKY)jIwE(a+SD7jtox2p6yPF+X2<- zj30a5^jOt`8UjVNqfK5TXp(wbnmx__uUCezy!t*XScC$m2&mAh_A+yK;z=L-Wyc1} zn(hwA&i{J8jV1Hk` zvdZpue`q<>E+QHAX`AhhCYoF5G9?9XwGP0xanXKbF&+7R39mqV zU>#NVP8BmX##R~t^U&O|pEoHs=VkuiIlu}B(gL%J+Z?ZLjS&WPf?CbQwxye!-clo} z;*2p)D_GPJYl0Q5?t>f`&y^h(E=mTr(#tu2sB#cB9$!S+^6}`8R1N|@g&I4en@(BP ze40QYiH;}|YJ=9R1SyrF)-RsIpn%f_LsCRjuqtir!3y?~v+k;*7N4*wphd13frH3r z_7N}(K^M0Dt?#DciHcu%=mnZQtsd_*O{a<3!HkJH6u|mRD+}^pVTUkLQT+I0IqK73fGK2EkGsW)q&Xh zggid4mrPFLygwh#__gOg)L%tR!3qKv%pZiTD>Jd^#W>;Zc 
zE<08kezrqX$l;^N4wc?WbacL+u;BVxI}swAK3eh9Ts|=BHT7i1vGTxiFjGOO{J7R* zO1mMGFr`(q$>z1ZAu@@G&@+5`3H#z!^?;gk_nI0hi`f5GAXmb{b`a#(Hm+51oyf$7 z2-wPdb-pinK;ZyoNVY;fX!NZ9tOjaVGrLN&fSy~_)o!O$qSn&@*2><-mb^1cwu-Rv zZ&{fs&L0hiw>pHOZN)s-GFfj{Jn(_%sbl>e@TxReGuOF1Hh=nl7)Cydzy35*bt>!- z`mFy$gKYdr%i@4>Tc(dmqm&|>_+B`CS~1b5=C`@|tU{Zg?&xL3k-fy(&SzFWK%Ex} zexS9Y!^+?R-GGb023(~P0g!k+GSgbF>45|D{L;rLi6yk_-pzxDb*U_Iy$C2;6 zij?!=xF%+RwkTcHjx}4zi`LM=^P7Le9KGUPG2KSjw@YDHMmHjhh8YFK>*Oyf-e97? zJ;Q$~jG=qB2aGt8U-L_sii;ihGk(6iUY!(+V*~PPBrV%H357NZSw23K57fMBbGGOB zC;1jx(}78;2F^KA>nAK==Ch`DCl;eJEf|Wfwh0SVytwVJp&8)1A^{qF20!;ziq8OT z%Xv^Z2D3L-Y&$qqWt}OSDy@2FD1o0VU3do)&LN1>`aQ)`*yb2V$9u@TD{Ar4QDaXs zmej_S2b3>%RK?TX8#m|GG9F{2cv0!?mS`qBp}0axZC)`@wKUEIw5LGba%;G~>~jDH zt3T%mWjgk8WKzX#Q(j1ze-B(Ebihc9zM0!~Z+R>20CZIHG}uCopV_~RiHG`5zPl)Y z{c6|#us$fHbMDgJ_tN{Hyv(3q%{cXr;;WZF&rmCL&)tWoL*lUQrh#30c{6#NP3U@OtynQi>p`p2q??OuCc>!&wX%nR#2!JOVEJCke1HL6l~ zs^cKd0nx=6&1NmqU9Mbb@V-8)1L%||a;iC~ymK%6!;!aA24Bv%Iy6xvTYSmUywr5(1k?09pqm{vifcX!FmlX=aV3()kX@y2*hOJdazX`Q|~Y;(e>_vR` zAa(#d#WkE^uDuNN|IBWZ_mA>_a^E~tALHChJ5zQmI38%mGlFl8U%g?`TA(Gu`NvK&h z=+O__!0Tvy^MP#D?@&xHF}`WN1a<%0T5K1up%1@;phW^vh|hFWP`)wZUe$<&6s5Ez zPB?n!fzA3?=M?Q@t!^BV$0PaxSA@R4S+75}Ubh0{AdBD;dgcW8q6l828r=J&wuZx9 zGSAp>ptekP!d~_up_s>%&SxzM{v+s`>FvrwXJ9i0SuntCAc;k88{tNkwIk0u|H|>z z0vHTrD=21(wMn6GoLIRxguIkmtbc-}x!M@N4q!^Juh>S?o4u>`JB&?Q-4KQ|SRla=qcwd+}u}o=Z6>|S(bD42KuBA4M zv|lF0g{?nYrpBbVGc0P4OEhy7J$s9@kJ+|6f9%&>k!ZttRN9@+TfZ#4XtnV(#}&}G zZv9WQ?orsHT4@pEaqcs#zX%_>c|D7?+r#8l!tV#3Vk z0HBRtyEH_EyaAqHq)QMpTQSR?!G3RGHhg9-O{D6t-DvZ*`GGRsqD>}X($xllVFvv` zsasP0rA%o*S+|-URU4fcQsS->ZV^t*b+aN#HFnKN`l&gzQP{2TtJr=faD{Pk9dV4A_iaC3Nli|7>R8qBe^iYSUP{Ncfszh?~S#bGl%tt}ywJej* z)lMD60=jKh5&Coz++h=a(<{$oWR2nJ1ml9?xsZiWiy1KhJo7N_GJelZn!)IYELBQ$ zJ5oN&(|KtCTb2|&8p%F-g)O_q>6LTFdlo4q7BbH%JWs6^l!kGh&b*G=+66_^MYCJu z|}b#ZSXW<-7XK!%>7^Vp)7r1Oz&#m6I7|gLH*p>^knF;ovlgC8jDq zF-8kbz@rk2;b(QF{cGpB=6YJxZzJU)8LV#~EVn?lE2JMvvmqSKwASD%AOu#qkx2N4 
zytQ$ezIS39Q*qqDx*KfR{q*G7z8{@BH!S*Mn<3&tIUYaqj>uBH{TqIBR6QOA<4SG7 zc=ZnVMtI`lw^Q@hD_*W7JIqvUVGjZk9jb3K&ZJwfr)&22}L3ID3k2be%WP=<8qu?-qx8n%Yh;n7vFv8-$hK-jh>2@##pQz}pdDvz;#m-ybP0LA-JMe{BS^T~H(zvzI#$~mNBVZNJJQ&BNc2B{* zpiNJhvqYCFEXP)(HQA$S@zx!fj9Lg=p2^`Mlq~?jn-sEO%pM}E7~IBaJv)CsbmIjg z5knbFY))oSpM!Hg@QQn)Ap-zNR*-xqQj&{8O$_cuj#9#A;$22yqy^Wpmp#F1dA=r16&jm|*&mYT4w&W} z^*Q)_A~~YyE)pQIq*TDZ51zP(W-C}N+Z1Gbxz4ky=D0qm;JRB!CVpZ`&43TpUHdt{ zZ^I7&~8NF`0T zZP-_s=bU-k%hepmiI{``~1~^tt$^RU^Y(Q#6fk>^xigpIrw}Jfn{4|N1x0R!}F-}0xxkAYpfN=?2VLd ztg2gN*FfVM_a;6E8IKH`EpzOT90R)@wPSwH3~~EoC!Y+bgd(A5?kRYm-8Md#TV$V@!+l+4veq!mx`cLifgwu2#6Yb1e<~qg!P+vCMCg3?m#-6v z#Et;l*1EDO?kINXyWJ$|G)!yEq086D_cO6TugIoXMy(6v^#T za>(s{T(o%?Na=D$&ne3F)LlC7G#(<871Qt4Y&oJJvr9Q zyzGyZ?~^6T_gBO>dA%QCqOSo*d}T_oDjLZQ!9$J*BwyQ$vdtG>9Nt+)WJ#^kyGT5) zAlkO0UgaizU44k!5-<@7hidSYN^mTvpWnbx{M~SZ&$x;U1Bf>VZYh2Z#8bXjb3gaJ zKP^9ZfqlOKZwy4bx|PL=)pj{?ccs7)7eCK?aup7$zS)%%s+NG!LD#L!L}1YVscf@+ za`J{Zjxbi0EzP3+A>eQI2GK{px1#G)izu--zG(YU(@g?MyT+))Idns-dUS`bIefPR zRWlpluW{AH4H60-?8FkM>f%_Kqz71ZUF>0p-l6>ZI8>m7O{p*y7yhMYHF45Iev-!D+mEs5?CrmKAJQflCW{L z_v0t`&n84aA=ks0-UYeZ133Udu`L_oX_v`l`30c2A2WSEvj8V*wWILF=BTjQIEyRU zjfA*~1v}+`{IG&YuS|4qmUE}x4%wcR)Z6O5 zGO}I}pNCB)U#8iI_gKs94 zgbd=0U&(m7*zXYvMrCq*^+I|1#m(!^Yr_^HCjFKMb8R1x% zm`CG*AL~@Nr%nA~LM#$#uW%R(!qDcB{^1Y~cyKkFF+u^U$aX3m#kRv8+|K%1*`=c} zC_lA8v)2nQZnOQ@PJan~iiS771(;f{UHbf;pSf7)`X^ygRwBVv7(p=GY6nh+N^GK| zvK{c+`Z?%*>c6b1b;^8RA4kpoQaW(EOd1JK@~H0p6Fw75ll)f%?qEjykw=&&I5Hak zJ7wJPFoM7r!v91n@x89J&LM6zG&Ro0qd(>$ej#qn({^w@%t~HI1=kNjkpfB<|EW~f z#=X-z79B$!y20YijDDG&@V)bV*k%dmhW)QhuY*gc`U)1M>1I4s(5%gU^ z-p5lGaq=9J-IDJ)wHC7 zky+;3-H1O%ilXDUvJbY$I)O`+WFy>gWWjY;if#ScKt#Bp?%SZN`QwInr|am>%Ajlt zvqEf09thT~q@p{YKmuPL6XYTkiP17kU-?%aPGzq&0c-Xv=ouv%p`F}}8&PN|5{Id0 zcrPraASNX>TCMEm@iNQS=?a|%6!N$$T#iXqXAB;{-u5sD=-E8Z>O_qX{5JEkv6_I_ z`hG8EgK=ga!JaG#08S0i&*3{b!R*~WgN+Z1kbUNoO7MoJchoal9Q$+QBSC{^eUrWt zYDU&$LekcNWosxFnMeeWuMgW!`w4{XyG&X|uJs}Nf;t5>bwsH~*G5%Nj~?1C!`yc) 
zj`K_6%=teLl8eB1g8zl_i9)*IN0az_ne6Rw>d`F+G)KJTyWUvzi4j09YpBN6@eKH9KW?NHK3mS|g_8@Y~W$TDS#N0l}zVB3eu znpd-$>9UZ4ibd>@2a!T+#S*SfU^jO2?nPgzI``sD+Dp(`)E43#Yg4!wIPmEUj%?VtXPczRh3-7Dy_m~YRJ1c zj;F9>7?~QiQH#lI{)s07^EIECVM-6+Z$KR`66l{DwV-1unah$}?w%c;=eCE2U_=qO zauMisEpUSSHr2!v#-P{z-wB|xyP_6;A(}RJsi{YG{@um!c)R%-@^HboN8PfYA+zfe zj>Yq3pVcNMF|b{JuhtKO3_m)QpLg>rHsO(-A3yv*-yO2UJgzLkWJhWsSpZFvAB|n* zXqI$NLv^uB7ZuPD=qsew!V&$eQ&d#uv>-bh(-N!HWn4F$yHXO$V8>Si3;y0?xzwe* zgg``MGsI(JVU1xh?NEaIbGLpqj{c(H$1e>t?_}wr^OBy=Xf1B`44OrhR^WL1X+@a9OV6~ZJg&}a+G zrOS;;WTI5c7H4Y@b!Md=sjcnjS9cYNMEf6b_|Tc*c>$Y;7rPx8s0A4q*fqBvXAjUb zFk3oyqj?9K;XpHF+0NstM^M1MJUHJefx*HN=;`E&w~JBiZ+`^i?niy8tw^aWBXTb`r>ANPkf2Kr8GAgwDKmv@O38 zo32Upy5wyYV+NWk2svS}H>LgdfN9~UvW)5Z*gKS+9oqKar94^eQix2TrNKE6cE7~#FT zW(p)RJD5KR_IJbm`zzb4V8i;7NfwcoGPXZ7-cLz@*Be%UQ@OA3f_LPqtcv`bP)*}G zWIjFx_;)SBX;E2Py}_$bDAfsXR!aUi@iY|8;6V1xZ)cyh*M|u{{-^REAh8uqiH)Q% zho9y9YP=?VX5IXKyrVa&(%a-hf&jV@j4`)Z3-rwV`+2-onN}cwoU}WjfCKM2z9*f? z9~!CH0i=Il?(2Ah(Ly<-qB4?%)=)Xk%q-ahGs`chduKNeIWLBM;{1qcD*uTU4GCHh4~#k zjZ zB-CH8qqa&c0vB+~M>-kTT!t85tvJI74IoFi`)n8N&ySe$Sd?qaE>x0Dt~^FJ4``w9 zUlIc-$K)Z6Zz7Yl#Qi_3p%6dcx7PPgwbsdkx0+xrr7Na-o^XmiYaH z-tkye<<=V%d^)1ro{WS7Bb?QJ)e~U;_ils&GUCc~Kyfr2>-pb(e(bB6U2b^mykgDJ z{a##PiH64vYC3!_!0Ysg^y`9-F2erkjmFOZ1h{>|x#&#-kM5!S9%b8ACJ=q_E@xUp z=J2_1Xhi9H-}JvE_2)gHR8=_+;uXw$dLRT*EDC7!t#w!OgEYJjseTBxa>KO_IB!Gb z<3d2&oTXHya^C>4J?%)Xmg%fYqaxN7w>hc}w^{|P&d#pMX)A^1s|ukG-*F3vv&zRw zR*7e$hvDnxN^ZZo?g(wgFW0Rd+RIzm+K{?IFD8ic-*}uh;?!He$~JM&?=jG;eWBex zXN7u|>S-nGEvJ*ozkzY>R6%~IZWIHxa&nq4c7oUYe)AL}qVs~| ziGZD*P^L>4;*f8iI9k}An)W(dtuIs}XGm8nx_}N`eNYICd%m(dYY*7KjDW$cusY8h70$;M@TOy_!5^uo={{`iIP~QmgMx@ z*V7Nz!-?X2sfEi>aBOBKVKN22({ygYeyyg~sD(B-9CR|BBScJe0VrJAKQEfopy1EM z!sN2#)O0(4i(Z9ijC)D?k}u3yZwyH;ILqwAJuIWZxY*R)L%Z?_;J4#)U(Yi+9ePLE z;jH!xav-uVFw81f#1eP+r#TM$q)b@Acwbfm%~*d7lN=eOUR}?Y7*^`dm8RRd@qpKvD!rZf0LF@}NKQK?a~QL}H3WUeWag~KJ}QdvHe;64n`!0xj_b z5e;c>525JTEzqu_&BSd<#_+MLE z^SCROPU(;Qb>D-)q@xMj)^wna6}yn!y5M7K59?6?UkAcOIz#wz4ISu@v^Iyq=KCwK 
z>H@$ak(KuLGz?bkc$;v!CaEqmWNIaaz;Y7f_!T@T3c-7L2zV~{NuaWx7U^czANi~} z2?sNk3J#U?YCUwp%zEZdd#7dtsV-tDX*dQ5GX|q+jvG*mOzEWQR#hu>0J~hE?d*FL zuenXws?lSnTym&1KJg~wfL@a0o(ievK+7fdhg!LbI&zcOq|9@ zWj8L@QcRsNYrqd= z2~bIDGCKWe0D1fY9T%S$W_{ea&d)FLo6=|vPc{o|e~glU6$LvqtDAHsqy(3C`Q`5=hfLYG=;=_PP2=mta_iyXT5 z$49p@%PLo_16~#7mn%7EyoYUwJ$(Wqfz9{r`}&iHhO<9d-&C`~Pjs41ml8Al>bjfG zW>md@|I-bQ)n66VB;uwV|Dd2-x&B&IL9FEe@22Ng3|Ck5k*9gSItWD#U*0Y18`$wK zGs{Cx3)xh7ycSa7Asz~%V@tu)x?^}m#(Pyc|iyWr3;s?_BS&V5z=x`2+Wga|FQXRA< zZ})f~f>hTv^Ey@bQ2y+w$t)RsUzM|t zo`_L)xe5rPdX)Ai<-RE@N* z1iA-ne}`b!HIgWRw_3aJ?z_+nc0;txnvYs*nsCsb5X-TyKI{tPyWO((A!#cL+!tc0 z6J6Pr(!0Cbo?luzlJcXj>r?QCKxk@AwT%{K*mrhrCmr_KrwC;fEL?*zTF<+~9sGdl z*YSy>_8#;Ns8VzeIukSs21a+BmLiu>Bc7>}Fa#TOTuAEK)4P0no1DjA8XQ?4#c50| z)jmGlZ{Dy?+kNl4sHCj#=Y_Q`H8F>N+X8EMBUMT^Bvg6r_aD-tmjj_<$(8Qj0bEpM zh}iV9 z1AcS^VGV*^qDYDFY*l|&_Ww!dy-v^W{P7bEad+EI@eeEf(!+h<27K~@ubb%wg0Mz7B6 zV&mD2CmaSl1>eK~Yx!190mb?j6ik1FA1`=5Zzi+3!CHTCI|+WZ^D4qhgQqL3AT77} z^LK`g7^-x`ctLXUn7PRQS<}qJ2$nOAp!u+nJyaI5osGf`5i%l2p zg}*+JgT#m^ZnTP7xL!MMN_no(zaHa5!hDm|JBbeogaZ3S_y@NxqJKYGwO2jzdoGj6^v!FVOjpkID<%%bVu^-LG*e&o(m3wzmNlmf~UW# zXHuC2k=g@^Q2&zQoUeRtYPoY;Y*7~$4+F^_1@TjptAaHwMWqpdf*UkqWAeGx93x9w%x^!la75AG|bf^p#CL0@r-RgHNpf4Ut_>od|7-%6d#>O7pN$i2Tl9&%pFiIx0DrKJq( zx8Lc9ouidlCEg?)Eo7JKWCxd>#LwbpI+)BH+#Yl#5iW5fT6;oCX7<5ekmL(q#m8Qd z!KVbbne|zHxAA11v{@|UYTC(oIb%U{-*V^@;AQDHTr#nI)%@r>3+!q`bSNT<_hFs9 zpzgTTw#_M{?O@-4kRt8ytIR7(>JZ<28JTK9jZZC`n{jAtb*3z=`*@xy$}E7J;+k88 zrAld5!L&0?^^yq8*TT6{zE;cai1%!>M!tQCNIPm^x#2)Ktxi(W{422ge3&KiX?L!| z>~xjARZ-fOm?)+qYM5vW=8JAK23Q>@K3`E+8Xc3`%tOk@U`~5EsUVk{ta|(1uZXL5 zo{oYy1~N8E`$8bp=>2E%M5on1*q;t)Rw>yN#5GwVoYZ-+nhKmyy;yNMb}Hm4jN4(_ zyddtL(^*CHu&?Y&GY4vRYZ5aLpdEOJK0lJ8ghHg?B4L?sMZ)CrR(&2(RZ|)~zhxT7 zvXT0h)2o%M>x+)lQ2h|dCWKv*OeeicG1UcVBXub>1_N?Yw^JIzSufL`IsGJlOEi%T zV|9LF_k)qRB783oYUZQO_Mwy31^vBQWVxW582S9w)m8I>FP}qkIM6_{-ZfFSvG-Ls znA6K!%eB;h#I%3-t1ngL^+;~Yjh$X6%{trEa*fnPsvnp*jej-#_nX6PM5wK6YmMm` zp4ml$-FnidFZ!wZ#gzA%VYf4gOM!*J^46*gv)b#dxuQXv&Yl(wj4jInhKst&;R>PX 
z80L>^B0EA8o|54TlNyC9k`YiExGRb3+-y!N7-gSD60iQanX5~;z)RwGn> zDs_rnFT;wJyeq{&)==)kdm8>s{Ih+x7y%Uf*kkH()mkL+hg(y$=@wAr;;M?Cfy}no z<1{r*%~i*&!Va$y_d@w+Ht`Xvm(c4y6=u`Dl}a-M$AjLvf3!W53P)E2-FO>w9z}+5 zN)iH>4OPih4^A9frI}F_G1&m=_B}b16k2Z27>~nXZJbs2&>YXa75GGlGbzrx#76V^ zKif_Zgi&doMAlTjfSEqg4k6F_X1ibH4R&~IK-fplW^#mSC+kN(kTmU*R){Q$j$=J;w$ zM+??2*z_fSq#et3>dXa8%_@P^b=iey3MHKzRjtw^0{B7f*Tk;k@4+`t+{?YBredM= z4l+d(7CaUwrwCb)tBo&VVvs`jRxXI6UwV$Q#B;Q4Nw%`Kimcq5kdY)K5R?_^PLeCZ zhCE{+LjOhC_eZkwX~wKikjB(0~6G^*7oYXti*R6xQfsCLZ_xr-_| zuNB4&OHEBzNXGls&DsdjVA`cMZuM-x?C*5QZaX6_;TW$cl9Y%@hKj8SoS^#X`6GVs z#p`)`iHY~cm!}g|At>lO5DaK6STiffli+i%B9IiOV4T|tRjB_gOtHvJ-MfO z)~CZY!4fWMn(lyMj}*@`d%71kM;mDEqj)?X-DyFfN0;X~ewfw4UZ%pdAXtn!TT+EM z;sa$k7#=G8A@wQmbAB%N9LN2$lmM%gHUb#U%-EQ5z!+?_7#Z<0LV2U?ubkYmh%6%$ zxVf{f%(eNaF7EOvAvpNWHPPABC&ID+FVAJ1>{2f-K2!G#WWN3f=h(W%a-(d z=r}dfsOfVb0tiM(yZJLtCd=frKW> zwZ9ds+V=JEC(9jRLe#K#?S$xFU4)50q^O)qMF|qT%?kvMSiw%6@mD!3S{fU9&%M{q zI6ytnSSRW(?lCv-Eobe0XfVpeV9$kXYaC48=0a&ZeF_TbFThjAqcoYkL#biM3!saL z`~Vym1k@tejww?Gh- zS^QbN-bVo3mKj#$r{eQ74MluoHR4pSsBvU&kXlCr4v*+5n{7oTd~vUGqDV}iP-k_l zG2ee@_Y;AEUp5Eu{V-Y#BVnEv6S%mf7@_xiZ4%g-mhtM~l-xfN$JfUO4CdbPo+Ribh)$XGqR%Ibh1iotmg~f^6Gf=aAH3%li1$lDu8&hSw zIMg{Kx)zY(J4^C{I&I!XIcH|JMacmm+JrYB)}n924_2!UgQpaK4vce=R~;ISC?`^w zWEYe$6DCn!)<)S_35AD%+QtbT!0zE=!rq|(X^AxCN=PMDjPtX}# zoCgrn8)UQ7WpcKdjAnYvO5e_uhDY(9Bd>rE_@jPUQ`X6NZVF%SCDkyO)-!kIQkb}( z8HxkH?L2?`t#{x3`48Q#cav($VR=Fu08il6B{qrMCj;rGl(Nw7ae24)%!yMnP1&l&8633|Blkh z|Vv5^sD)AP=X{Qk39ep?ruW z`9~$YK4h3(&dlU`aj`3``CP(*?l}o2GrgvU0qQv&LH!d6paAF*%excF<8MV7Z6$8V zmjCrBNNVP`BaKUO)3|8mA;Wo#2H# zh^N-lc#5p;hB#W1`fCWTL81o!dWHD8JuR)jl}knat%yNYMMaSc)nc@jbyOZZ#P6DS z{J7KIbFrG=CG2ieD5KZA$qY2G>JX_+{u|zyX252%)6{Iz-{@F?^VO6lRSuD-m01eU z?mIN-=8eO7lKNzE?<-Wil1*C{!Kv?s=N>F-M_W(GeK`CB`WOY-4ka(}Ee@Ej5D}~- zs6$MLu1rr)6Wu5)=IcxTqWs!w#OUZ@d@!I8zCy`uxud|bjLY)#w++(S2%VH0C@ZY8J^ zfoyRIW%IeMUyBx5A7{+D&(AV=4h@?ucsV98&$R>nX7-ZNS5qX2^ux;77cd2OAZKUi z`R+^FLq#9Q5S6y+75_|DW5SN(Q7IWDiF3rnjl*B8j5e=`W~9$|e&qIr*flxI`xF#2 
z<^9KG@KLT#)0Z|MRL-tWis(%vjDBZtv0e$rP@Ej9OW)Ff$zy;@)OtreV6AS7pPe_gBh6KMGSFG%!VWkgkgqs3wI`t!n+>!c2gl3~!p%+C z4Dk@XikE|lPS95dfrc=DixZ288`AS^N>$^SEYqEOy5_SK)A2U@8!foTA>$axnD`98 z#m;yh3t@E_u!{QP;z0TFDLT3xN1k6@VW1#Nl5DKHz=c4~4vhkm zF*<||m#TY=OU-U13D9i6#44NX1N;Ss+hz!j>Bn(+)qkyr{zF6~u!0GZ{D>3|Jm0L3 z*%Nv8s%4dy1B+*2oGG%1`CeW3O24U6$DZ3p-GQBzoYsg7@BNOCIk7oALT&N#=ievU z!W+9qbGyp0R7GP-vozD^ zLPWF6UP)tD)QCqeTa1;f<*I#vC+p^i1b##?GxjN)O#zT$QoYABr``{vAtACbs6`kP z!D`o^_TS~D#Rf^VR|bzIX2cIx>J3=Rw!8KE%Wi>}S1KxRhO`)q#-3Rfyzg4d7+~5S zWFv`(&Ad~VEgj}oR{2djjwIgC4VSlu>tz%N7`xi47&S+vYG~|jh~2O9bIW0q+4hFz zmqQ60u!K=wY!__4tqGi5?Z1eGz&2D;0tKCniKdA{*T{I^GHXey83|DaDiUp4_i2G( z>6!2D{7+)9ltc{*vVpw6MMN3c9v*n{v5W!cvIXcSdk*@R15U1pP0J0JQq@9B!qtrd znMU_sGnSAHVa}PbUMc>(T+f5hq6x3f zH`h1&{&c@Z_|Yc6z34YltOV~y3(vjB?X#_erhgkpbg)fyg4e@#=bug)3I`4v@%f=SzCwAB#_walh2E zdmv0@b$2lP%#NS(nZuJ7z5*JXXHFI@u?Hqj43nf{sOjwczl;7yBlrVt$)6tqqVw)D zi;;=FaC=YtWqjN*S3txt6aEXT0RIO*EDFV81Ld}N zT3Yyx_GB~5VFdhI%Tm2D)%)`U*Tk&L+@kn3rYwiCu+~11hg_US9}B}DALgGU(+J;gh9m5F{jxbxn?sw)-6xx7?7s@D zCJs3_zrPwdB2EX#w`Tp(vt3w4qiN}u6_1;du1;66qkPjF;50c9`D?}HIS6_h3=keh zi=&}A!N2|fLyaC(#GF9vc(wl5K(GRoZKL9?x6l3LuC%R#{7aP~;(h7gBGQ`VPh%dk zSt|cwUho|t>UZA@sc3U>xLoJJ5&86?p!R`XGb5QKZYI_K6n}6FwI>dv5t6p^@_?~B zqKxU!^g6ZgA9lC8#|>EC`&;E!oQwrKYnDG;$G5>>&NzUnR@e8R?XWL57nLV zb}ch_mm!{cFzPTVKeZL78_Yr1j(bf^E)Qkw+FKvrkOg8;``}!}ADO$2rZKMY$?!^dnlT5^VGRfK(l(sB<;T8#%zqa*y68`IAe(KM0-*+q_ISCW`0CY{lK zp!)eT>+2i!*LKMOio%)p`+g4bydfH3s!}&b{|(F!7_u$jYKx-aY8*#)+NEFr!TbP~ zG6!yxtB@_L{`bqi*&3Y=869$Gg{>pzU@Gg-N3V4?H3v0?gqo|p+NqnC1#F+@IeRo_ ziUkphm4$8g(_@p%d^A5KE>$m_FN;x~S+pHT z+(bHG=^Eqn{&H~CF;Q$Jl$AKQz7B$a1ftM0WdkwCqcNKr-r968Yka=xvRuxQa{4Z6 z-cK2aUIVZ4q`hA+Bg3@V);*xtZ@Y;I^noxt! 
z!{PaaNhiKEcC56Q+1HJCRZaTOZ*#{Pck`bC(fEveWMHxvfPwH zb7@!|D=)Z87#U0LSM;KR&h76A=9{L?`VlL1F4Z?q0hb!AoiQoB_O+{RS(t3JaKlj- zqq_{eU?em-Fts%zL_-jt_Wxf0KbSp9z>z#@^?d0P&*7)Vd4BTu0gp7`v(u@o_UMc}5}qc3nyiNW!pGmXPZN_JPOc1XsXoDnq#}Cs>M|(M6TTbE9^)*4q6Dn+|J?GD z*+6ka<_F(jSGrGGhh~SwCXk+`VYw4?_3Ses4o6LDcyYd;1@TQ^@VsiZ+$Jr5%{_rf z(4m_$3P}0a`Q7o}N>!3+DsbM{+z`+Cs#7x#V2blEKleN|+ia8u@o#CBFoc2v*paU! zZPW9E%Ky)GflJFnfF>rqO8}P-WH52){7jx$Nk>OUBp$GZufit@Lt|%B>P!87+-DnBY2?O^bZ3o8F@C_qR7ADb=4z|DRayA>(dP`5`^>nP;4wTUqHx+C zXC8gLcse>=3+&Jgfh2i>G zVeq^??H+LHI^VM>|5TEjkcnWnT0%oO;CWeNF zkBew+-U4n{N4{az!q&F^^#aY4qoeH2bvLSY=S?A6_``-m1SYN~*H2!}`xVUA$DUyG z4+_VlH^=XQw!>6+q%^{$Yv>e!z|#JFA|(+&qx<#{H5uGjpi6?~R6X9)x0&S_|3f0n z+epdR`cZ-Pb?#K+WO;Em1x{{PMy4`PUkC~d+GWF&SJDvW?<4mU`})%%S8+oFi$qAA zy0yDi9p9oP5uRWKFPQIGf}l=YEPCFrMW?FOphcP6RA{11fw8#zwMnp>MAI%GDMRF|AVXw%L!yC0+|+8#=+I#(2gI& zJ{|7Pwj~S_7QIQ}VPE_Ld^USZ&=y9-tDN;6fQTn8&~4+|yR`bK1OaA=^assfEK^HV zN+V>h7RQQiPqXur&Yyf`OCb-*`x+#k^UdNT{il% z!pA;-nKZ_jk6qo57#)v&mDb%2X!HHjMQzhc_(~aLj9L4bc+O+={iS1>5u5nWyPC(D z9j1f243p>|#^_QCZd}hv@7+43PYqVX%dC7r?^r9){+QcGQ-Mu^fHa)NKN*@XOajTe zDD-=f|sVc?v-ejSZue`z?LOxK7s+{pUZ3 zQ1PwlZKAuOpSElS7R!yV91eZ;RI2H@(q?kSb$MxtJn!Yt89pmoIxCOgF_|oyuMOWu z;Zxfo7Y3x*Eju1hh^3!LE<$^r`gMi8=`@?SzHu6Ul-%vr9v&BVZAh#(o!94IVTn9c z+r3rh`WhDrjLT*ce`9;R&!jz>D?+5<^u`6OY6at-`Szx-Xc1G#m7}$D6(wnmt4|E-a6o1*;@go^SJz|ZdA{*8mH`0b09+jc`=AcuH%See&N zd-}vCE4246CIn>)zxwj!iDV^R$&aJH#(*U6)!^S@L|C9&VJR-Tl=T2h6Hz;48PKfY zum%5TuOC}sb^p9na<>OT6-52YX#*B^6NH#3U@uGYU$#is^W)vVM zgl4z(NZk2h2a?%+mc8{h6Z$yBcuE25vJiv&7vr%dok@u)f-~n2o0cl;hn39Jv!AX> zOF5IsaUT&=)<)x(HOur=& zE1YT2(%@k>jpDh;t#84DB=M<4WWQv8qvk0y<#@%!eig;6qb8W{%^1Y6frN2dbjx#a zpi0Kmd|pUf8v+9@@Z+mLkWA>iU}y}_;fmox7~OF)ahb>MrTBwG?1=J@Hc@m@KluyM z5Vyhql@wc(`~#26ao?Q+`^9CNrl|98h-K%DEg%~N02c&uiP<2U9a9QZ`! 
z>$0_gM`rGHfo@WOJ$LxMv^zlai`Hf#@4N@Gd;Cx&vDrU5VFWfWwOPbzJ%ok%zzNG> zpsLdZ`hDSp2hC$zTG~B|DN->d2uSYcD01E8z1(fJd|oH~zbpU_&q@PCKn6`8*$h3dxxvxC; zXSYrh2}yJ1$8qPYB@=(|BIwqBv(f_%da;|&rgSF87bG5cBSq4fUXs|s7X<{)$_j0} z>0*_xYG`_idPvL?ppi6(rhhxy zf>A7eDqZ%u`~Y9;wq=F!LR;5ZNd6`{ay(^liQUY-qyvQ?L9|ZHN z3p42H+l5sel^gNpGc^o6a}n{_THhj5}8@M*7L1PY*Iyii|RH-tzB) zu~bB|@kM0CSsuTJdq9gK)n|_$!6cc?X}nBCG)<4xnbvJ?h=wS!sfh3f_e**@-dej} zvzf1Wbz0fX!aTbS86+7juB{#22EZW|D}#C_$NYRYtm@adYbSaIIh7{aa{hccjcKVYbUpqSC=`~c@q^^z+S#cg%gj}C6P;}TX-G3B`Sg%wwAd+(SEK7FG>JpvtJe(j z-$6Q>7EGaS7Un@BIRV~ygp`!4BoG$L<8Hw57ApnLz&*w0sW0*X$Y_YN^B8_b(M<|&6*H!eHPw-&Y z1GR&sQAppN${IB826}A2-w*qho5HUay<6Ce{O0*qBK=WPy-yNW=dB)+2NJ zc1{VVLfJxq^z5bpMcLJkNRaobTLZd{HTm6J z2Ht0{mHnB3i!>`)AI+BD_|f(}S93c47E}_xd;3no%EzB%j1{L zHDT+;#_QsXW3NTaw0Yl!44K1z^CYg4xDa-!fL8bE3%RC^v9A6}Z_c%JsOWcDz}Ol$ z*W6EoM2_cQiJ)O{i1TuGZ?zO}#=f$$a%A>uzgeNEBR=QcT#(^2#f7tR-4y(Q zI$xmymsy-6l1j2)We$#w-?;AdSo4S|2ZBOb-b$X6}0igI1Qs*x6K~T zdOL%~^V*x{{aF(Jy)pBJ%gjxSheJ%)s zZ7q&F)-BfUcMx_pGZoa^yUNcHxaNZ~T27DlfQz?P@VlkBsp$utHg2-QiF#)KO9W|r zdF6lEaXo_n;9aSl#vrvj8FR>6mhcq{J-o3@@0qt*k3ktBek?NUVn%%q%k5v%vUYe7 zl)w1Zp#|c@9H+DZoAaaHsDj=uoj`Z^peD0X%hnBA*>NJ*MU7S2)Q`cxg?f%mmw$dV zEV*nalVJ0F&+tjb1gxOAWc1EE`NSB#nRSXmy4vrl7)G-Hi@kULj^ye4hocQPww;Y_ zV`JO4ZQI(|w!N`!TN`bxiT%vyx^8~|gy-Dn^qC*#%v5*RRMm97UjitQ!0PW(A&>TS zL!BUUY@4A$XB2b-y}ek??x9)A3yoYRwJ1~7SJ8^4`mSpx|$cy`?>5 zJTeFboB<=TxWd!204)hRcnbnS;c$fF1kx}-bylm6-XN2)gx+FeCm2P}h6rgX;8b&8 z!tM+2=rZMq=Cq`@S87tvW1jLib^LiQ`n4T~TrE&aPX%*WJ@zd$0gQnMY#sQYrLFJU zg2r(m(D2SYu1n$oqf7%%9oS3UfQ!{{+9CwwHFMhR={$v?$}g=0w1h>GsKi__{F71W z>1>8AO5cTlD;72VR`TB0l7b?H4%hngwnSEq8?t}BZB7;otN;;jDG%cg$`g2A6=*oT zH31q$5ySO97Ud&e83C%ba4DU0xNRVE zM~VZE$BLX86XsNwwV%`V$?bv5%^x9dr{ZPW!3&cQ*$Z3~=hQH*jn*%s8qLlGgoeab z;Jz20U?FcL4oB8^oi_Y}dVQ%Ck;YcIT&Q%njVbk!lhc#P?uQNspJc4tPsi^Y(WCW) zP85b#+{+d2bQ9J*z0z$#1YSniWk;^uezZ(9mPhd&*MCh<*jB8CaR_Gq0bjq-I5C5D z9tJs|_Jk9b2}2>`zS#VjQQ>r#8Drr0FPgvPv@Bu0LWTde@#3qxo5Bv~HqinO?9j+f 
zx#Rl<*x8nxK{zl~NrTS{X9-9)y+iyzLE&AvWwtS%twV5(T+#vAZsl>^oU1~;%PuwG zc80-XhSJo78#Gy~xb&RikT)X`K*=pNgj_af0$J2=_{UgM+skI}Ydt%TF8IWYw-Uyz zPZgt^%}c@`wQN0n-cMO6;H^6^95{Sd5OmoB@ff$(G|J?0EOG-2g?=TWi<&$wusqgV zIH~HhrYjYxK<#4b2r{Y1=9tOuzW7#IX5?x;U!EKTvrHr^2X*lA4incNgzch!m5rX) z`_&N_ai{mr454ZoI#3v0>I-)m4nsUOL&#bym`@1-P&j;F))O-b2kK|i>h~Rh1uO~} zQ|fFstvxP@F7<>q%xb*c&IyihJQ5rA#q#YerVK^=9i7Psb(1bS@&c~4{%Wkg20%S4 zkk8)C6N#of5H$$P&~0-TX=|T_Usl=dxT!KpTGK^4a*9PJ=0Ovs@EDoUy=i4K6v1Qk zh&&<&m2Tty+w)JpS;yhKbF6B*|0dlOkpex~18;E3>95DaOADUM_13(qAb;m%8NX;( zf%a)YIIXJ{5Hog}jC%umzosp;M`J9so9D=(KId>?6RxUAaqg=jv@{7fURQOG)O>DL zG{!W^EyRTl7ZOv@m>J%B*M5+1)?})x^-kWa3kl@Jm;>EI@1c9&$an9P)@Kc}N^UL3 z-1O-^{F&MuuFL#~v=1*Bg-QzxRVdHuAUR z)~y_Yj=oAHDuoS#Z4>YHvT0&mdXiCR4*NxOtUO2pHg}fq+=nSH?56T+eddj(d+9YF!Zy^+>p{OHY`;dF>kkiD*&k`Tl+C+i|ue zi+Q$4i&g>Zz=o?@HS4w$0>KR5Z}o38S2hs%S?3T{Aw02@A*8_~Eek!*P?JHo+>Wo? z7AiumrMe4Qn%TBrbX~M6{A2W4TzoJ0O;=*hdW?w}@sXG6TM%zfxdQ2e`7Zw&P4MX$ zCZ0kMeD>XOJ$Hk7?NTD}?qS7a(hql0evC{gxpy~+JN>Slpr!=BwHJXHq25(nt`V=) zCZ*j+)@fKcjP^hF6rH9E+(rvay5f27W1XjY0=@gU5q6(;yb zG`5Kw)Cg@Zj5|&(YMf2({sm*j<9K!AozNn=-(lJ6xS#T7dGBDC<)pRs$^tNR!E(AI z*35inh4iw3V0t2Cj%PfzVln9ggPlkLI*=Ew{qRKQ;}BsMSg0yO#Ph<{{n`}37GGe! z(TI*|Z_6znSf2!)K56yF3{!z^#fEcTfbLod^4z23^>{#{jnz&6Q`dCE*?wG_&7CcRHvyYr zC@OSvLI9=7(@pjA{J|zQ!b^1BX=wbsepdBymqO;!*yXIBWe87)l08Y`Uw8(&bCR3l+wI}W}@|$ zqu0;X!%`o)B3_-+YoZN zu})`j<;d;X^UCFMM`li|eKF&bXYq4|Q!7-wI-Sw4Ix}%f`Hv}*pf)<8`nA=hxLs}8 z@+aMFbij96j*H#Zv>Hw3S$pjfK-hq_etw!in$G-9q;w(~j#9Ng!QFi$#82DcFS2^jy+pA7jLcqcB zEwXT3S{!J+A+R!675933)!3M^4GdbgM7x&Yas_zc@R*_bZ`KgjIaxUACrRTgJ-RQ9 zqr1sp$ho*JH2f}dxp9EK6mW<@z~`QlCUu{~*(o-`o$bNrog4608}FzevL>e@M{P_) zp%==3h9D^zb6o}4Wcf%3AZinD)?2cC07-09;;w^be0+++W$E%OT)E*O?#~ykmQUkNsATdEt2agZg)HS*6~42fcx(-BoW+C>RfXotLp| zVA8aReeYvseon=1KQfoB1>rGtOWQ{vu>0nyybKy ztMWq;6Qb`n7(=zRhpzQeNVsCet@?0Lvniu0=o0!V|MT@_=*xXB1`;6O0KHyi? 
z5%NyjB`I3Ig4+Ir10f|zgc6m#i0p(jSU<#nbA#%#xqDTT!}I(`bJ2H!gb7IWN!MS4 zVX!m4A0H4jDZZ$jOr2X+*D>O+wq#k5D`)k{01Q7uu|TW9f?k$Ox_~cmSu!V-S)PdO;4a{M00l#-&GIaqxiD^KQ@rK#7Js;AZ!7QOFRIOi87!*o(R-pTQ0{GG~_bM|nrp#tb) z*%5BLd!t+vIW3xh!<@oA$A}?PFoh`YNf$~O8mmeNK1oUgriUp!s!n@*J2Lh38WJBb z`TANSFy9@;3rDtnYbFeie(CJFEG$L7+f9%E1DR)XxU)A&f8@}0HljcxC+^>&~!2-q{mb+<(q^8XP<-#L>@aqh^$vM`?V-En+zJ2t6?~Iga)H zRcaj&+u*Yo!rfhSM-7w{-S1)Dkanr`S*N%`JDp`ko7^i>Cbvhcac9KY1+?---s z=XnzqzDRuMe;RfWb7eVgGM0{^xoHPnJM3a6XpU{-4PLDy1bP)xlTB!dw*m$2{!xaT zVi^6c#?HX+7Zn4uX2(0fwG2?K2=S(Dw+k!b{WV8cAJjnB|z5xX* zAvW;mrlJkgL5pDgI%D_`fjWp_qTba#O6K|Cx^SucRGHCra@DM$i5OLSzAw6pd01kud+^2K>(>f2;uR zzZ(5d;C1RB?=sCWA)Wuu?)i6!PXI|c^S@jCAFoWLAfohU*pQC@9jfym=ZM1lzYG7r z9{*oMMWM-hwe~7~9PU3y3sEBkFloHJvoj002NW_I0fs^R>kC3)fNTp33&vR(reo)& z;{zDK{~ERfFel)e1aqAK@dgn-sIOVEbJ7_>s#+-txc7eyTFL+rvxDyeP<8*br?3YR zbsGcZ?(Bs^jQ^X~KuHMt)~_>0Y5hVbCiCxe@(K0%iLK4X>6iqPq`}g){m0x2WQDkZ za>gxc?*o{AUL;0Ue&`$+5pN z{l61U5JoUT8HMv7uF?NIfHqUWi;)x)lKFqx>8NQRjC_uc&WD-!BI%S56g66*h2 zBY^#!41&h>BM_LT^FQ1F$8+zWRmcQw=pXy&|8@KT5tWk!;!pqG_`mMr|8C)b^wR(R z3jZ~n{{KCY6zU+eh4RawUEJN3Wo2U-UNbZ8&aT`p)+As^H!C*j-Snjay-iI?8=LV9 zKrb>`EMV{;(0w5sD4s%xUcEhtmZ0an$sGC>7EYGE7Y@_sJ~Ma$^w>Z%NuOq-!6vw7fS6n2cOr`9t{+a4b}VIJGtj=Voi{J5*E zIB=RD-+vInMWwCslXBxnYOPQ@o<^pO_tQu2F=}teV74lwfBhjlJ8;d;ufBoceDz}k z&n_iY_>X55)dKITwe6y<_vS_0WDx%7`tX2>_QZk86QA0tY-VYu-3<7pL-A8kC($o- zp!7h~FsMig1=_p39EIXms#hqU+YaQZwRsMfq30(J5MWTF)fatxBwX6;aR#cqEdEv} zSzJw>y6m-^|5K*^>vz-8bkTl%dZey~WBND8zI(%Wj0-WxhLEqPJlZRPED%ib=x6P+ z&G6Rv_QW#K&Mq2=R4!YuE>N?ZJHr8!1?2MlsdJdUA#Ocb637En0q6Su^CGqk}+X~4e&GY(bM z)y?-YTT#Lm)10_fQR>F#fbN}Wc3je1wwI$8BQfJkW&P3mTPbJIrI$b&N3+{DB&mP* z*le#}Hn}6kX+mClN|nx};g$2TVw~L-=VZ$mojKRN4gGUospL!|h0~a(Urr9b&lS5) z_WLO#I1;HCToKUm8mE=-#eUh^TDFJn@pAI0Ht&A9I-I-!tL-! 
z5J$ibEjm|aFgh16s~gj!hU)loG#iw$LH*NdC2y?e=oVNdJ`8cY5J8`XTO`~fMkmK?!#r_42I~8TdoI{bh!Zdv@tNnZ76Fj`FM=HbS z5A*4gW<}Mb{5vpn?|!`H^Wr`#$5(>)vEWGb@ zwt9D%cX)ewRsPNDaiz)TKCbIfb*0XfPp%mF`B}=KTN*@@Rw4A!u~e!motB-ZR^Q?l zY?3UBP6w5HM6)_~zkXLC{zUJxSbS@@{dYWBv0B@#5d^eq%v6ZnEDbs^YEwb!5ua1W zzV``K_CN|{#wwX6T7SI<(y#CPSI1 zw7Np{yBUP&jCstx@0IBbips{8d+c_ka+&4E)C;Ta3LdxDzDYWJoebYE8Q>^BHZCFP{(JkrwKYgLru= z#8YUAqFUqPzv@zO3ta>F)1$c_T5P!s4qlpL_or%dOxZp*lDIr$Pf zS?Q}%#Gvm2IovF{y~lBv=&6MIcq%ReeoS2BhQqbcEOHx^UBbPZg2?R3g$I&c%y-Sk zoh$cMc|PhAY3p|kU(XaYpq(TOX#n)SJx5q?Gsth;m1?x;rLU;EHChcI(kvb=ktvzL z)DmczT-c4QkZ!mk70~K6D$464c6|JCJ}y%@G(G3wA>Z4oq+QQe)S|@BFF;zi<1#O|v>~Nw7^igo?kb18neQ@b$FH zPcmDo7u1wUEgLKJsM}=heHnxGydGplH*Am^TmT1zSrXu$Bh{jJW{OGt^|?PXcu-s^+TGOd2wjk{r)7f@kwRW` z!#Et45`_3%Ba-KF!aqv#1C>8HlG$9O3|J>KqcUO4en<@kENItgp)WVE+^XFEM2{!2 zS}O;gg;Y~-qQ_ETA8z`q~@`q0emJ6*2&($CU6&`QOq`!SE;(sPf6vMi+V zI2lb7jG%D(x@EPeMoJPlmyC#(i2H*~X;gb0AS*!AvqGueo zBL&M3!W5fI`laApyLE>%5qY<6BcUEwnB$khFcm&h!{On#l~{*r5x_eUA{l@P@eXuZ zY`JNw412d3wp+&U>X?5N7)^i{jfJK=^AW!%4Ed}#7Wo3(CHat&5W(&0PTYHKjLO``41ok@!ogu zPB%|F9E-}i8E#Pz=D|z#it=^~xwfai2z&iV@VWf5^?*!St~<;si!YB4+00G{Cf|?# zS>?CiwrjDto_YjLMlEU~+delB7-Yjsdcsl}PNcw(ABo_@CQ5m;FH* zX1AKm1?UV*o?Qt_hH10Zf?VL>QQdEx+mw}y??CvUd|olmHk)`_Y*u2Z@0j_R#TruXjoceSyLUAu1@z8>KEm9zrxGm^G-z{U2L{k4{vrD52xOB z!G7@ng7>gAB2TyXdGQNHOg*^T>XyHKpIOr67lq!98i>MPF_m)vb$nyvwkl`4zC8J5 zGKn^&eh53&cvRK$W2DY!!rgZt<7~B)rp0PGs*-!s+*pK$L$A;5?4FDIYl&mzy6Kpq zaMQl0U~3FSOqACi+ z85rnT2k8{F%kh4GxLhdkKFgr89wv|~pL-y*#9}^klzNu6y&3Fgj!y-p3Dn#i!b?Em`iiK%@nO@~~JQ0uK3HLGZW7LU97$Y#^#I$>K5! 
z{XGYZ^`yP4?z^Q~A=44sdWd}W_}6-!KSkXZFSFbGZm}il>!UE@4fDiGZJq|NSGwW6 zMpaztB2j;@h}kaiMxf zS|S;=pB3k(rZF~;QQ^A>X2e-C+{tzl4_+?wAH)0%+sxe%zq>Wk zY8y@`3-iy1WZhM&A*GDP)f{9xf#iu9{7uC)xyxmxmEHF%E`8b<;VLd%;YjcOvUpCZ zMoiXlbn+&hOrI<_1~C-dbVcwU;7uqBf7Zzdzde;g5q=`OU&U2j%i}u?NLB)j$@uIJ z4PF5sc`Q0c+SWHu>b-XO97x=PYx0lBm|v2M=F`V4)NMr;Ac2~HWte{9KZvX6>JIw* zMVsAE<$cyN3K77tQx>F;VzgGU950w(zlKD?Y9i6Ed*a0I+_%G$F0d@F@?tI&?S254 zh?VtFEf076iHzZ7$JN~KYM*N(n6?sw3&UR&tvj5OmtQIf*q=pK6w@7(R3_Exb#a}| zJINUyf~WnnyTZbdPi0!|%f7DB(nLdrlfk>--q%y0Kc80)v9+DWoR@Bdy5OGZwVI!W zyP9>-qHv5esm)~yDf=b9Y zkPeAbD=1CSWN{#M8vt(JEhO1J$cJF;J9J`8kv3S7yzV!*vy~EKiSW_$VrdV{lDU^L z`;}zB$8G!a5~14Oelt6}7NktE0Jw%A_p#t%sMQs7oB4JYQ}!0w17Znfb-_%k%X0!IbtEEk#S`r-nHOKE zSV3Gbw=qp@6q`YHV*nj?gk~w(ZQCo0R{IIo^UBoeC&foLWuwM;ZhcV zqd8JSB>Kma2C{)y1uLbxQ9wZ&vyOAQ)`QN~iJXcM+6c;;N6@jhG*=ZUS3}Ho{~Nz> z-uuur{|^Mo!3wcIzla#gxXkx|d#qEZttKDV^kbs@MCNn4 z$3pyN8$~Id$Y0pEQ==?iGL-S2>6UiuWP*&{4~S{++UV1;gKs?;m0ReDO$ZLZnG`Wz zkptBH6oYe-a3y0t@P0*KJu()DKN7BvB5X_cFmxJZ$ZZU0gg8R5`~-x5$+Bs9Li{Da zeDyriNN^U^Yw^kWSO@1h`ke-qFvGS`0&Kf^O}gAz+lxfXc;Wv`<0bOb?&u3~K90zexX=|m`Ls2(c2 z`M{i=iN#}=;5z?3If#v~LJN+DDi6+JnDlUn_n5GN|BcSM>hJWnT z*eDx1E{9+P27km(U!&3y9^12l*rKI3y;o!oM>;XToPdzt!5MFRAZjy&=`fERRVcDw z!a4p%YbYwNf5w7cF}L+JE=THWL|FzDaxO7K>n>}g180P#)g3x1w&^cL>RGO9&pN}_ znM>Wr5d9fH#cXPZ5C{zbH>k!yePJ%O6GGF9`+I9%$DVO=6-%$sStKbi4lPMoCJEeqptTKV?GR%O$9=bGIZ&A z!MMA~^#X>yE&KZTb6X9oCMNVWaV!M~J^F6HWV1vFRO^nGb;H9Ry0KZBjDxrFB5~=1 zA%EwumK*w}xaWtuqgr=y*!6wtsQs{gs_;txWX|CfxE&6tF72v^tsP+CwCwY?YEH0A z>%CyL*4d@dXLL=VJ0WC%VbLEuQbp4*hx}Hb)vQb~*7s zsGGn>^OrL^J*i?@ z3&(-tJihGuzHTj1Um$597?yhz5_fU&yJIhV@b4+dPGQ~i&KR^SNRV=;p`;TlP)!)G zJwjCWkYtwQU|AAwBntf2io zi%39_W#~ESFn^k{D%RhJLPv(7`OLWC9WPUwAf}_Yo5JFbXp7pL*}V5Pey(TYOjkJo zTDIH`kLrI8bR$?YBoO&Y%JRSrIn3r*7Q;nm)g6$=N%~-j_i}!bH7Z#t4c~q*VOxi^ zd_37%{&=k76tAY>$uXHvnz2OxL~^vUnUi=C<_qQ_tOKyoRIn=LO}ok!h-#5S>7Vao zr@A@5;9*dZZY#KRJzzfbxEzvdwYnu0$yjVn%duT7n$0K|Lfd&r+9FgOCIY~79g!lf zpU+@-so}w4IbeXKqygVtJ5h7N35^=7*cZ&Ol&Vy!xQYax;cee#-dGqoR~K3 ztZBh21pD-}3 
zfBz=DBO5tX-I^FgtqNne+c?{4va~~F+7vTj41X#qJ=>&5)X*eQ4j1&SNK*&-Z0%>%jA_7CubN}bOw1tn&0XTNbzQ=62y){>~uoPd8R7I zCFxI)BqRb%PZp#KK;?~O^oDDAdHbK>%zLRO67kqE4)MVu7R_!8#IW^DzrFmvJ^}}p zyi($QDj9VOjd-ijehbbY27@8^7eBd@!NS?dj112jF}E{ud4H08#9jZnh2t0=*%;UM zu+P@9h#SE^`5{eEP=xF2YXE-obiEy&mg+&L>`B;-$;B6=Y@LK4XQeC&|FBU1tJ9I)%$y4?@B^O5aEWo3osLkfi@^hd!IO6ov(Or~{7Wjc92pV+RPzpl z5>qGC(5*PuK+~M9m&KsoY*36p;3wku&wFqEc&h7UZ0D2x;Yp@JlS*^Lg0cWNo7I)4 z8j1D6&srC3Nz~K+odR224$CBUN7g&pt%+BvhhI02VGIJJ?yz@#=GVn`x_n0}1LeZH zZC_3|eTEy&wpr&kbQpmq#J}EGNHnuGxzQm(n+C$V^9333S!w(N0^whwa%5k@9zv5d9S0s1zW%l(}nI^_JpT~MC1GXhxrD?Anj;0TLtj-)$pk^RAVYF;y` z(Irv?-h}+sC3ZO*QgMIc|Fl@FOXa0$a=h|N-2^2)sMW6L8_8ku1i4V~KW4|=s0JM} zGe+sj22_0uutwY=-Of#c*|PtOeflZcd>omdSrn5 zK%+80jVhO5U(HRrx{6)f7}~P0hA4;Yt4*GWp`j1xv#8j7F=$|K@p_M~G)G%oI|8;* zU<}LO>8EXHvg&^OlYGCWi1^yk3#^@jxgU#v&KzaaDwxyj2lInBdXL#A5H2RFNGJf5 zLfLsL$)WWwQ>@W<9D}K$+e5{ideL_bR9|fxoD2-{lLo{&W}>ycSQY;D3@05-Mfjxt+={)}s#x;fGRHj*2qGxG!)<@wh!7tMzhc9+pEC zNmUj|!x5Md57of3a6C0>_YER%pqh?5F29jT_?eNglgqbJ$ir(ewDMwnVh$N2EVxC&8!iSGq530m= zGSw~=m(I^?4w>q`qTqI|(X%r%cJVA}m(YII<#GT^&~uShWM}NxxO2Y^(Za=CZ7fM8 z57IoRj9y1YK3eLvSvV!SWvX~)#Q+)H7o!i^)`7q~`^RS_;o`%Ac=)$TK@QzJkluMx zNUnELzw2N2Z*jWK{=0$6V*#B9zaUfn*{l~tTra*YCX=NTi!|h^+cjqN_I3U?odelE z9qBe}J5Y?-I6j;z1jz#m+2(t|p@y|H2|`OKU2Dy;H5}A6L-tBKj_FU1ir{fs=b-W& zIInTzX#J5{6R+}9j33w-88|CYx&kS>bAnJ0Mm+0>l1fU$IS)EWLBmrnI+@^B=#4c3G(A~AThHHK`?#Rya(B?@g2623Mv%PF;U zDJJ>oH|{MhqKKnm7#cTB0gDY>l^7nSI$^U}`xOQcv^9QxEG~`3Ig2o>!6mF93qd*k zDDFELIQl$N`$QT_%QDU*#Br7-Rm2Or-}fB-5Zx~%;kV)AMK+~gRJCn1{q8^X#wUYGn+f zfKFtAr@Y;KiacL_*y2b!u(*#jIf9>=IjTMf*LMKN?CNwG7W1CgYNxpt-{r0DDWxAQ zIWnc8RS*t;Ag0n2UWerN+=u2swZ<8fIkK;)%(&fqf;~xGvqQ#1%ElK@3W85-dF)Eg zDdL}+%+NSlx&#yeyDC~@8xn>KuEwWVJa&cN{GQqwzq@ZGCP#hRqbO5@X3{$~9+=1h zmhw8|EV0NFIH~6Vk8|MO*y2eq*A>wK84-~^MwAE+w665~U5gz?AS($`l+<-@ z_tw196w5dGtvMor*XJac&wbBUr(rnc7+M@EYU&=Aknb7`%Z{?kiV4!2@i&I--?Lh; z@|7tbL<>th8JAnt-3LBwQPj57$j)9sfjkhGPJ&QY{K2 zc03vhni7(_pOluBX9^Ax~>JK%-C21 
zLMj~O>A6I)La&Be_D=~z!ic_3>-Cy={QC(^i$5rOHQp#gLB37|@&_DTEU2{sjn)51b# zuXGn~Hbqwu3z*ZiGK*t+9R;XyLHium%h|afbTVy2j!bD^$jd(I%BO$SxJX+ONiYb5yxWh zScJvTDVp8_jsTuDqO9%J-s{AFQo0%s`b%~CkrmK6)Ne|enf;cKFwvWsT=6|Kz}QQ! z|0cYL6MQ0f-+j8YKn*+~YRGJW)|J;)I_`!2&?R3B5V{JLP8*f!u`Y}2Ny!(&`H{e5 z7DSw@+J2|oZROhHiN9l3vVUC|Ch}Vjoggr&Y_?53E}vEs=6mM>wMK66H8x_Ya)zh$ z+q-_mSW|i(F{dlQrF;OnF6O5-`>u27r}MU#=Tb1@V?gKguyhq~xNuIJ2h3`b2K#s; zqO<(Py);`$S~m3{RA>a#LmawK#%lvA#gH@~%ePS?!ZW_+z59kdYzFc%p*Uty$YIh} zo9aVCUY`)4QBWZ#{yD;!M+_;YnMbGFd{a+9J9Z^ot%lZbqZZ8Z4O*}g!M+RUl%SYc zD`ZM8FsHLr&Ak@O<;2=NMls}vGtZ($V_IdwqK8q;FVWqP)4HXLY)OhWRKUwOO$1gP z=&Tvihu~PIM-#k*kAMc5J2P1y5=hITs4TeNbPp%6-$9a)6YH|Zt5@A|0bFM;d@nnL z6Kd(&ogfS{Jh`1Fnj!v$2;%bDrAO5PeFhAOXDr3Ur({xfykStO ziymR9ljKl<#udnivkriCG#IbdO``Ih99I)P`WxQC(xlA5Pc!{cij-aeaisB>^5OSe zsmvtJ6`4+kK*7!P*J0vdTy}Vji=A{n{{*QDZv9Z^eEYgSA8t$*^PleT5*MLI?7w=s z?BmqADhRjCEn-E9u%Xxd)R+~TS*0Qa=)=s4g1&!U$!|-&&~**676&B{O>q;v9kAQ4 z7nmJ?zFa?1Z8Uykflou=xSy9<3Zu{pL1`n6CDK1J__M?`1}7lzW^m;D+q1>$b)Td< zQW|EBiX9k-ddP#Xfj*fHuW*I5*5z6@>1=1m)>SrV4 zCy>s>QR+`C-%en%hjRT3_2MW&I-|QyPj|oFG!0%ID&ICB0UM1~>9}$G2~ihLlM~XyO9uq2Y$GKV;Q`ay+5%I|v9DzNCndN}T~`0u6~C3jP$UY$pRu zlcFPUVc3JH%xE+{_%q$1?~}6m*|idU%nkZ?rLZ6kJsySG8>5UWj?wemPNN4eZm0Y7 zV*^`^Kok-p)B2EkTh?g7Q)FnhfS<_Tk_3J?ex64)WF6eeH}ll5{#~EszL%Zx3t_U_ z?V+$$Klu{tVY=(I;#6g$H74Xuimq0gN0L3#ISdy$Wp`K-%;#uVWcrZEqk+^#BBd0GHD< zrR~qoL?JN|Xj4gk?opNqRf}vm$0;s0Cdj-v z?hv0hg`7apKn=5}8v_detiSzN!>QPg(PxRxxVuQ? 
z1kbE)jp7ewzU#kuk=k*y)mGk2r4u$C1&PAiqB`zwur@F-z}dB?oHUwdF5QPpb||pS z!w4h-Gt0reZ@R_^j1dG$v;BMz-_7TCwH7}Tr|$og`}?%%?;^PV2LhemNSOc*Tct|w zJoNW&i_cRB0aqz~9wfS&F7B2>Pt7M3BBE7wA}a@#UWmh=Mdkfr3V9a}F1@%#gCrXJ zn^T7FmM&`5dg8O6FQR#W?0iO5xwm=*kDiB3$!eg(^D(l58`b;t<4>uy%@awdsBBr(#5B#*mAhit~-~o9ZGXz&< zWT3h4`576_o6s*rwYhJew;tVhn^b184q!XO9oGHz*Bw*#EO)ze=%N*m$1iimZVivF z!?hK9C^TR-Xb61^vJsk$5cD)zq$p+tB|MM!^FX}ZIqh*ih)R?$5koGbLnQudmgW9g zc@~M<7^@=W2%8K?k^+mw^IC|2?9!OxgQEDGa&;D_ZUcN+%e5oXFXeUk{~-R+^t22m4b3JBoFk90(nEsXHWcdNmg<71XEw@f_Gqf$e%37J&+O< z&A=k1kCq0yOs@6d96X3K{kl)pA&ue@dNNUUiz3Wa&kQdq+@`Vfxxoq2(=irYCj7Cc z!d3d(0lOte6^)Z4@sr(g-ym(KBWK>@b(yV=P7hE}m^J0~6S-LcJG!q+bj`l9jtGli zgXYa}DsvYb5{e=o7Z+p7RT!HsD*yb7!L_!jMFWBasT^g`2%i?rQmn7~c_1-$q*1ac z?0$`Ow|I5uHQ@8Mj-$O`Ws<6aOp=>10@2H3)c=REEcca(cDot4?@{#AoWf27?bAv~ zNroK9p6)XSPz5%R8TXw*i@-j+P3 z5l3MZoAb=i9r;y(N&yRqNfa|4Q*x{O8{$g27%8G?-leDSyw5<{HL8!%u~%ws1bIv% zZ00=QUyR^Ehk^%5yHpNZ(d}kWZ%Vreo>0WDbVfCc*Ri!US`iRa;m0f^1BmdL{(M%$ z>+F}0NL}AFUd{-fBR$U#I&}W$V4kDD3|eAOyLXb6%4G`R8K9|4v zrGYMFrL%r#?3HS499(>^q_K%wq4xN_5pvv3;Xe@p8KrLURbv?*$s-6C@r2R7%fgnY zPko z8PT(Jk1e*SLGrMAptVQ*4j=pE<{wbsQ=kD7Qe-TPMFTKC0B9h~(PFzw&K%=9P7QJ6 zYrE4S{J=)EmX0i}`%;)j{-L}7C+Xcx)-$Y?6cNrw^`QtigU(CS6=7PpE87lFAnc7K zlVrkZ&E0mS#VtC3$N-&7Xn=nbFYp8%lbXXHv)Fmm?f%esw`6t4FHg_+!*|&G{yCx0 z5d8f$R{JEO?t_BusB?{ZUS+Q+);Qztm|+O;+Yg51W(~2o!k9BkvJpo)S$ZzB*8{uz ztG^neom!xum=x|QtHL;(-DP*%z0#d;Ku08v9&>I_coZ14Ov>uK2i9|=HOmRh@3HR? 
zWy2~ZCeE0kerU6l&;M|~ndGq7oNln5#b*z*?R7_Xg@J?5TP*>y2I{=OWU2MKXQldg zBo?a{vX@Mv{(#Dp{PXC83V`02$m9Bo$CkzK^lXqT6lpKea2T7-8S&x3Kj<19lF<(} z?^EEby}9=Yn(WK)EM^AigNA!NeL9<8Mn~yRloFkRDT}L0M(nEeUB&a?Ix&<=a1MeX z5cMIN)#(6z`pZqKpKl0!8qvOnJ z-x$&Kb>2|GLHQKsYLqGT(+$iwjx~eQ=r-L&hNMOsr=bf?uYL))yoik0ff=f_mCgaYbl43DvR6g1FZ}a{U9>|!jK4s*_HyBfkOe?L~trwbx zKJdMWAoidv>akpYdpxjyjrL{Dll9l=ek)u}VM&SJ|ghEih=UECn|^xE99 zbKqD=@E)u|ePVg0YJ9mijgTIHL?2(IX&_6KZA{ae>!%<9k5>A#P$|*g0p_|U z%^q_VraaM1B+;QkWQ09JaWKs4707PzZGFF-+3&|m_c$4rntWB|g_wH$y)SA1V+I;R zO52f`KaM?*)s2krZi&I-Pv4FdeMRyu>zQr|5^;czY5?Smx4;KHzS~vPfD?3NG#}L| zD?sRHX_)J|2pPQl-sO;^FPOOq(YXQg7!kzM)9BF(E2jnY?)oTsY|)i2cZc43^;rAQ zFpc_{*RV)qRf;RCgs0fa);P+)NBtcHH;ltPQ#0M(FaCLzq~tftkP5I(+bSdVLUF6N zBg-Jpe;E{wRiF+CGxAyz;u!f@2;=k?qzGu%olOfH-L0RmzU>LdEjl^h5( z=xn*iu=I0Z{CB(`&%IyK=)5d^yV0i!N<1Ew+7$0zpq4iI0Y(t|XbZ{A$wZDfl+WZ? z%aC<~&?-j=GqI?nizBjLzfT9gbzh^Jicffo27m+N2$&+|4}E8oveXd_GV;^0n=t3h zuRC(Amdguz-WYxA)8w`s7fL+28s^43D7@BziFtsiZ~39!&?;E|AGlhNptwtb> zlRN$3;i@MVgP!LcHU+}s{Do$+IB(?}9kWp4BfTP%b=$m@jlQD{t7cgVC1F9IyYINm34fSXxOkzlFL>e zI()*!Y;J%y`^ZVX5m;L5Z8Ci7#gm(6)izAHY1HLjMf_Kyvks(Qqz7mPH_Qe}@B?*l z?6=yY-7f_4&3m#isE!xAfn~Ito%TWy|Kc^J1$~F_*g0IjbQ^v*cgY$V+_RaL65H3s z%;&wMqb9*tuSa0fmHuvk5!9|szv=cJLC>4m{Z%p_EY8Q2{(p7x%PjnUP&YSygEggw zEQP<1x?7E(14OKcpIu8ef`1hUMTesi6^B7MA5ItRhv)6VtIgAgK8NqaMb_$p-qhAy zzpI22bgcV!FpdQ`9ZPl0sSIqC8{=#Zr@ocnU^=s;_Myun22XgSUNq{)-}ri@u>lK- zJNGKc^R!3Tv%dVqvBo3^xU7Rrm&_oj0y(sgVQMWqmKA%j1Z zZ5Mbc7eb-+34{i(3G`L}%Y@7>4fJ6Q%P{d$2$g5C^x?}mf_HAAMaTqi1%q>fE&5D; zKG|=>F5(RyYhgT(P(@4>f4!ydoX7I4G(%qv3%T@mW(0N<@E5I@ ztYbtL{Wb+MtKiqpr{}B1Zu>0t`pPi=W8!WYkBwDjHr8aU1!J$^WAiioz;wi4 z-D$+!UjMAIprD0IPTX%J4Bs9)Zbu;AIvokxWnmnKF_Xwx?rJN>BvWageuuUg(h1nK z)3=`Sa=Km9@v59sG+PeWY>Uh~V+E;GW?Rk>u-}-0OeA#m4JL7iWuj7;M8mfNIt4IdtAm_he?WGDWH5ea69F~sp*cWhX z05E2@zk|r|^{fH(nZs-vddO;xw@NqCw)h!m6i}nA*yxzwv4$fTclimEg?izD?ect~ zQl6p~1MlT0A_PgV2+K;s%ZfOe-4n2x#T$LVGW%~~S6NdlLUxFNWqtYnLJEQg=A}cR z03!LM)T_YFd0Ys|kEmq3{zOxE&Q5xDV*X(%?gmRLAd5ya%;+UdxWkC9U8R?2u7XY9 
z0y+K;6`e1RPDYpD?D09D@XESVVs1WJ>(+D&S(zCkJGe(RpoGg?FP;)HqnLTlj#Z|; z?%qjje4n}(^hVDJo!)RxajBk%)VDyi=_(znqzPgI<52Y`*Nj|=Kx2M>IxK2_r1NeU zk0!2Iu-Tz9jK9{F089c3K0Cd^po6meXQ#a*&Tn<@!Pu7lx(tTX9xahKtNS~@ zWc3m=9gW3AepKvSx?^%mKA@%*;Z-bKJMyK5)B~c$X`rvt21i@}3{fTE!u|^4jx5+& z>$VjW3Vq6?gmJfygqgm6uKPEvw&u>dzw{t26Ls_`=J(pVR(A`haEC%)Er=IKDur;r zgBuw5t_fzr6V+X5v!%!p^(X(s#i^@Z7TaxmTt48D!C{8OihvlxC(z~NRywogL%-oa zlVi}0CF}p7qsNIAd?6{g9tx@*35_P8#wAA5-^^9ubP|lnS#2rM23 zS$4JXpqxWst|P%0u>+?PB*4fkp`%m}YA-gI^nECY-xzA9B?1plZdMl?d1=32+fO+` z!^l`Xh-5-d1(w4nh0OB%Em;xid;9nLjn{LNJbvfK2{!Y#lWmAxF9gmDBzibqn+Z1T(0X(( z;tb9M(xs;%#|H#?Z_k>zop6-*d^MBnfJ$#|kE5;K+tUOuNQQUVOD3>wI%A|y*b@H( zV@H@34T?kQJS$}h0QT>GgM)wirxT{dCV~hPZJnYldMkC?Qsc2n;vSC1-Q`SWbhkoKP5evix}uq ze2dRY6M?PEt`Q1`a8$@sRqRfwIeQ@d*5jYX81q+{3y4TmfwI1bRcRD-CscyWOcMz& z4!n!Wa9j*2A<_#u4tCyz>@pi!nMtrWXVDb!H+vJin>VLMUG7RifKr%wFpG8=i?s39 zd+_?#s|t=jSsuf>V#&elj5vG!3agzkf}7ihiL}n9e^>MD*8EA~9K^vd_xAz_a(k&& z2D|PYlL7hqr1spPLigFt_Nn7mC-Xf;4#%L2$P$wQ9Tr$kFK1*=%Cu6dV@#4z^8Irt zqkg4ndH1DqE#J>6%Us|6D%@XsgG`*C-TpmxOTh4Q z+qBhFixrtbNz_i_zrdStP)DLHXi)O0Ae_&2H7LmW^`>2h1*==m+@Xtn!chm&cKTY1 zLN9pfQRDMS&Yf&dqwD;v&7Nj&WNyzs?Ol5 z9+}Uyi1LSuxj#p9eZxUf2D3)}Lodm)EwnS^k%1&frkeUk%fBI-^^E=snb*m%Ktuf| z)dxFkk9YH&rWb4nNZT39$8wN81#sEnw~Nmg`T64#lW1N1k119}P=LN)YZ8nQF?~xQ zm-pc1TE|Dg2^2VaM2U~~4Zb#B_M{{~#wm}}=TntoOX764KvCz^%l`HSun9W`*u6Qx z5s;Ox1!4%zb{AZ2Qxec3+eRwdyJYm8=NhM~o(}v3Sa{6s`2o^NcQ+P`S3a}!=f?lE zN1@`MGt;o4npaK2H@xiENAckxf>%lt0ka#QCRG%Me%(fQ~bbuuDLpr5!x0*mJ^|#mSQ#Lou z@kmnCPpIwgHZ_9^S|cU?VuoaDNQxiM4`q!Q?MmY>g%%Uz+>LlYJxFDOUI$oO7DXVr z@)(VVlj$-|SOu2Gh_r@EE_|AR4vWt2Z>8*KtvjH0 zuw|$s8nikX$aZ@_YQNqd+kCdZOP3xKh04Q3QYt|SqySd)Lx@&K9zO)+tA<9xlW>`C zIS0bk^F%i`6vRe{AXD(g#34&iA2(rhLpQ+D7VjmZu5dd60Rh*IN3~knFDI&W_hdq*h;V7?R6ju#YDmdA7>QC& z*v(>seo?8chcB{T^Dx|`DNm}{gh=cFB%@XORQotG0pUV&S;79b#tf90!0`DJtKCw# zPY&jZ;Tk1VnqyzUKCJpJc39lUKT}y5=|78RuHEUn9!wf39nJvvAW6LhYdMA@N#dWL z31)4SLFm|t3ll@UQrFeJ58F*B7vf3#pkpF3=7hsN*^UUWo%w$BV;`EFC!fvshAmu+ 
zW~Q+WIjCnmidK-xsQrRm>lop!k0iNE$IwUPjNmJ64c!?d_xBe;7I)cc>5)~%C~@5< zUhArz{SecWKNC2%!=eSF8Fz{aYL85V-3eu8*ffPPLmLR%l-=}Mf>Xy!x90-vWTcsvs?nP9%heH6eVd(ouBk)fd zZhbILb$rQqUM%?^F(Caba)(2Ou3wk*e?P-+S(IWXPq6#GbL%eRj8FX`^RcoRg##zv zb{++pW5TdY(}-T`4-#NrtaNOJleL6m$sWmhCI?FCmJTF${631Rp-5-hv{*sM8!lns z<|7Bx4bY=VvWqCs{c9DxP(fHy0(gGePf0+{Wu==NpQ%Ffc-5C;b!@_fdO|-~`gqJpB`B!so>N>=tNC$;|&IdA7L}zOu z*zja5Crk#Lry!x>sV}$vK%)pq)TwEv!Ml=f=RPD1&y@~gU`Hpuicagq&0Pk$3K?5giISi$LQW0r4)Q@0hM^{*;4r z^Up&0Y(MP;gASa>1|BLmlm)o(YytSTiU`o}*hz7?_Qd#srkJhYLeP*LKj6=T#mSy;mC7kG&3fEv#|Rp&si6eSu=*_-%^hIX%8`Bxf4u!Jg@52X=n}G zXK&^gCOrxHFGSIR{&ECthAt4oPRb^ z8yUY*ru(oZh@ijzdekyg9}CA(>!Yn2%@H!{-THLG6SC`H%qRrWWF-+&FiHs$x}#F0 znR}e0-pwxzZY>Sq!stbP8pPi})`aWknZ_GrYjCE|4hbm#<@}UzU2Ao0^N{{1=ysy2 ze7H^WSVUMbjxcUYliNvuzu4dx5&Z1BGnyC`U+aRvx6b(8u*mG1^0?geGrPs>?HE`a zO5kKsT1yu79k*LnkHCV0$A!OIk2++`K0zT%>&9-<>J#XY1gb|aj=V?@wDUV+v(nlu zrRUMc_b)6z8!%&jP!@Hr!GE*mOwNA!e4{bxV<0BdExpIBf)Meqiz?@Jo59yI*;+S| zibq+QsD<9RYd=H_g?Z=$&j+;}5&ysgR&Yle6W(=?j&G2A1HN*Z{Ti=e4(ik07aD<- zqdxX^slR{YW##e{EC~3!&U_&H_c_8$RZk8frw{MAOFj4;3K1_^H)&he1Rd?Ha#|dVbQukh=i|uxA=KSk_#N^uB{)! 
z?OAb;M;v(Eep}Qf!cZchv5>gb_-j-z*#{g!>*UP>j|1_k=P@+1Hzgg_mSjK8!dO+- zU+79TjJKP+mJAOOPffm>xUB3f0#fb0(9+h41=k5nuicv2Osz0C%o^Y=uemIlRlp^8HZ@5F-CjIWf(nVme1#SMPCywFP$w z2|doHw@0R_tn|#p#E%m`5c#I@PbZe^F(0ZKOj#_*BSr^b1F-zDpfk70*;?`@HPZG)1SGF-K#w zxdFN+=YGXITI{9*s~4t45P&|?@9m=CR3@$R*}bcxw_Jm;0G%zvjU_+Bp6{b1FxKwR zm;|8Hjl(NE>}ZVCE0yBACxmM8tEb}ap+{KF4A0$-vPOc5vm8h3i*6Vj8FAsSS&v!w zrn|%xQ1CAXUlI|P&6xxA!i@ahSlwpwj!PCUUHTjFvb@J(0svsG-JiQ1^th*_>K9xXWFL2f5uKw>KKs=8vVm|n{m=Zc7U2SN+RhqFE6*sAH)a|GxqvG6bonlV%tMsTSHaH#bZqO@of2fToB>sGNe!`lqdL>Kk zLI&vQPv}$^|A#WijLBpPoNHGx_~fEnI_&-yLkwDpS~_{BWe;m1z0@~q%e%0(9ga3iGb(T5ZP=A)cT5du& z@f?9EWOF4l$Gml}>x_GqO%9=2{dTo;LKYHvodXaaAwYW)|0qL0STE#g9IUW~A**NR z55IW7m+-@k>hQclF%K2X@Zh?M12Sy?RvQt2pF<#giBcX}ROsk7JZc@(1Sj>~wZeja zTRfqqVB3AI3l@W{>#_Q$s*T3JSR6~eE@zFwZK8T{g5V=B5?N*e2bo5b3108xB|hp= z);HCQoLk|jLv~bp_HMHu5(}qb$r;S3C@NIiMR;M-0)dZgsN!gbfLu`s zG5Ia#0J)(9-5`Tx`3dR7Zf*+kBDMl=Acto6LVRqvprWN2FTAE9?gZ5Nr1EDhh=ER~m` zH)wVv;-Es%$b%A}d;Ff`ZITyV!{0VLMEm4EVVICGtxi_bX$3=V5)wFN=r=K;Q3wY- zm>SHmsWJkpsQS~L>BQ1fv~ zFX$7N)>78F6ifSe*2Fd2ZQXi^s~P_8A0s+#cWtzX|F@l8;hbM{1>pSZHUrlTLr<{Q zYNvhaKYUApp$U@;of=u7V3V06 zl6*-%S-MO86H1o~FE<+evEg$+@96(QZ^-nIRZ$ZG=4$Q{DAcm=7|JfDR`?kNidm}4 zk*oOQm~S?s(u!4mfIBDk;Y_iwS#8>X@QlD3#J65R+H*Ue1uA$fG3MBM(VK?ZG*J?1 zKl!Jt>hHNF?zwV{CBK8hq2E8g3h-=_AhbhCt^5^twx;TrL|2}#?IJ6l{1b~5?Q1|y zh43!j@fZ%wH$^X1Oz-s8HX1!j)#}%Ei09_gp;WnGv+b_uzga9A#uT!gX1KJTSub#W$5zSIEkBe9Eay(LG zqX@Gz9bUcg(Pl*j?fuyy`~~WTnxIOS*UF5ms04b^*53t59XGwc&1xuEwjL>jyWPN` z>BUNC>T;tR^rlHi6+)f}Yt48Sonkp%XJK#H*mI)QW-|g--R5l3od-|laP%rG<2=8` zW;ao02DdU!52b(0WyM7#$AgMBJYY71ZfOM16=HJ%&?REkN@GT{3Y%=;Rx~RChW}&Dj#?|0 z^0CpZVOw(4%)8F|@+DaMubK*{di(&P-@`hJ-Gb##EBKZo??p-86P*<=WY;Gfub0{Y zgIvrx2t(yBAUdsPyXX%_+1#7e3#oE{m3~vc#M{dWmtGJ07yI=P;>#rr8w&GLmIk7h zM)%PkhZ}8g+i_R7@7c3AzR2*K5T?Xo$0mH8eR0klK>M z{~(g0GH-~O9-KD~-XQ;$%2pCS2Urn^N=gcuPj<;nF2J#qTjtCW=l+LRqyJ<6J4C7s zVAET4?e_1;tYEHgb;O6E*28qzZX+?x7xl=vfXZR51W)9)x69yb3$DxgLdEZ{ME_-O zYL<3p~_7i4X4^U~zyXI7KBdBv^8}Si{F=rBI;B6b=pzN7i6YP0aZjmrV2( 
z%VSXo;-Do&6XmUv$&u;Mb|vUW+Rnp(4aH4&;m@H+Hul!P(Bn=b(5Kp8yz6zf1J{0o zVsyi7ElZEld2?C0l}7|qv#zd=HRGbmBq@bnL~@(w|M73JSdrjtut(wvh~YZwpLsDS2!>Ig7$!KaGK87sP`j2cO94Fn;!` z|I)J6UI0*hh-hFdsh;ofz@OKdw(3fwT`;`(h=X^ac)Kf-{ zN=qn_xD=DmV4q7y0O^|FO*@Z$fUal;ep1YGV#p2qFp|}hjAM; z=2BxnlcHLgmqgkLU)wDG0&Mx=Z18)ruo~89A2^NJ@6s`t8zR6KT|Tl;=MQLbw-8g< zi)K5RdZUr>x)#B5TFn6u zL2`hmleW%}{GXX>0|i*yWFJ2-wz259$KJaYb&!`PFNT69P98l&>=%D0Mp0jbU1?DdO$AYvBMoBxZLT z_()-v=~OzV0x~Yj4dflDxonthXXtDyev;u_8BRvv+99(p4B6+i9-k1gWm2j=#d&XQ zi?LoJh0fL8foZ}4SRBen#QP^|JmPr>o4^9D!e5=Z^DfO8G|9n!Uqk0CL``mAy+ovD zz$90wYO|lwLQ)!o-p4=}f16qDSGuFGPZsmro34M$HW3>TK8+&{|H5YX8GP_axxi%E zDb~pDDmO4O+9EQfTPgs)wCa_OJpv#@S4iT*mz-zok}Ku zu(u@a??eu->+fx1DASzZ;bFxf)@Monm!Doa7C`5(vM8A%b^Wb6${ zVa)wd$XS_7HtMmNT8wG?mE$h^+WX%5F6c%%J1JO6jiY*_`}x>u7m&;`Z;|>$b%8d7 z)J4FSd-&Fs*-XU#=6=wpwjE0HgirjZT2a6t4XiINYItCeJlb#rI)mSn($ld(TCQ@6 zGqL!Dx?_uDbe!gm=bW&=WV^UcduR~X8vA>kP-ovB`!}EuCPr8-jA#1SIA6Ea`i4#~ zLPkMOokDg{MgPZNqBdkBSS}POO;3u;q3m=rVlnR!!{KcnP?W}Ez5$LY(nkATRx3R2Aw;p$}Nau~(zp?P|x1#!RA zVBkaN-9cD^q@u6-o*$(5M|N9XX=z$#S~+znphtM(61q-1j8cQb;53~I-Y;NU&W(1r z;!Tha@P(IE>pSl4EG4>oCoS35LWcciTk&1-sSA%5V+DHNUc`3W&2JkOrCE2FWNV8GTi9A`Ny_;CvV|O(@Esy zm~usS8gCXc*t9rnswgdaE@uI}5k&)OE{+6a-Gj>F`qz@5?m@k@K_1PiZLpLLeG^E~ zdfY^6DwoZ6!;w%->VU{#^*;DmX!6Pser@WQ>q47OVaj*_0GAfM+Jxj9&o?|ZE`zM+ zidP&_Cnqgwdr$F(!MJ7W9YldS?WJyX~o7((-Zlxc?Lq- zGTsy24ONa)eSIfA%+vSUV4qOB>Dh}P8IXprE-6hyT`qy^NgojXyzdx_6b_WT8bkFV z8DG+=;<87-hu#)mVR=PWsZeuusu*00=SE%@Fw(!1j767#3rN3eJVxJsU8gNn`5`a8 z%bYHmKu|#7iZQIXCIlgz?ay_dQxl8L^)qD7A<9vwB711b52WeIO~9~xN9A7C`WVjo zkfcYA^0W&1QCKFsYc$_D6BQV+tY1NR{f!Y4AWOK=0SaSb+M-*@L5;vv(Ue;aj(x5( z2^A#}nBWg((&IJiJYswJ7tJHr)SR!K-rN?FFj4*stf?%$nocsLu0QKw|9(LqjmrJx z$?}{ZUi4aAO6$SX#-m}(`K0C|eq}7GV5;6L_Q|mdmUN{$xFG_YU>xL_@qKeM{VmL4$U3wgy{Y-H8 zN#;7jWw#=)$SKIllqW_0+YZG)qOfp1{9=|2hx}5$fJa6}U9V@~GkqDUUbd=AD}h|_ zb)o4s6PISekWOYF{FhwaK7Jx}MfJZh&GnJrhy%>G6CmTapOkXy;+9nJT%=hxR3>H` z<%HcPPoxLhoU_J^DY90m_#w#WOA)!OJjQi#>V^} 
z&&cU=VYxCf#IE=(mfa<$qX{M~hvix;hTneF?G(?mk80G{O&sdfht?4PeE0c~9_+?wudwZL$4Hjj&~N5m7px$eH~F z-*(2Blk%hovYa(c_&DJl>$(Fqe>qxP(m}ghTUZ`M?~hX%W7Q(+!m>c)e~J!}>HV); zX_;Q8TeX5Z^8e%6|9<5&Hx^{R`2YOY<$u2Y?{NN4tp2Zc_5Z&%wm&`*7Gv&=3Gj?V PL9VYdD$>=G#zFrJ#d#=< diff --git a/asset/logo.png b/asset/logo.png deleted file mode 100644 index 52b37b5d43706baf30efef09e3be45e5a0e4c774..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 64039 zcmZ^~1yo$YvNnn{5Zoci;O_434k0+fEx5b8ySuvu2n2U`4+PiX?(l{q_niN(`(~|~ z-MhQ0yQ;f-ckQog!j%*x5#jOR!N9-}rKQAFz`!7cz`(!_0WhE%LM*cs&<%lwsHl>( zs3=g$!PeBm$^;AyC)r3}-%yH05BME7%)gs2^=VGB~J=2L}C*dN8oB;Gh4etOri@ANo)WFsOggn1Ze!A1Tlm#Qm=;L>|PyDIkRM zApczkH~b*Ydl=dTUEu7bG#$afKB0en!ND@JaKOMIaV)-QIBCes@fg`!GZ+}#8k#V; zS=)iA!NB<3ctAyK6DI?po3)jVBaa(D$v-G~K;@5WMiSsZNSwa&lW52*0Yz;cOn@8= zObkpU0`Nc}kk7%`lt)EO;$Q5bJAM*#Cnq}|Mn+dxR|Z#B23rR+MrLkqZbl{+Miv%& z5Cy%XyN#298@-Jq=|4O9cRyk#jz$g^c1{+yHo%X54Ge9ao%l&eJ_h=q&p+jKvM~M6 zNH&iDQVXOY<3|f4GXoRj|HWqFX7T^S_R;cBwtwjL&*At!I^z+wwYGCGadZTUCBV%0 zj{*Lly8npxPkI#-M_ViB4=vPeESv;b{>AdYs{c2a#(&xcIRDM_-tqXBR0j(qX&WaK2ar-u1|MSnkBI;O6#u&~MF$HL&?5eW``>f&-*x{*|1q;X zN)~P=RvKa!)+RRpm`5%ykc$6j%m2x#`5#Vpw*QCozgqsq$;bF%JO5=%|1{Kp)PgKk z0G^NWf2>{rzBUpUwB3Zjq{W24xPhPO!s_b`%)HHeo+r?2JJ`1PA(j{g9?|Cqp~*lH zqX1EWL2*K8XfkLJ#KdG5uxJ3{8>GQyKk5>k&#qQ$kyf(J3!bkBtFE+vvd@YVoz#9q zKX!Q9Zk$d2Z2R%?F(@uBCSd9T>Z33LaG($%V88>rQAB`VfEi0E_J7y-33mhYP?#X% zEC8VPkGl_Y1a7D(Ft*)_H59A=F#DnU$$kExuKa}c?SOgJ8N4^qo()l8yM&*w2mdpa zzmga*D)>wj5P=~6OD-MpV{kuOC?3AF+t#N^K3351Fvt)F0M&xn1b+|U7(j^M&qxm! 
zBxO%5Sa41qQ{;lUgaip+R)qC)I?=}EPt(wv^>~HXoO_;_SJv0|(i@zix6j$NoqyTf zOr|Z4Ja3iWm>;#JmW@Uz@g%($rmB_~yY4m)-Jbs9Z)~3bszzmm@uYjy-YKhn!mZ_5 z_AT5wqTRXBgSFiu3b|``X>9r_J#sphf9=?g2R@97d_4xz1;7kN^`Zob2n*}3N#HND zHII9?bH_3aO54T*3HmDG=a{#+$YESmO2W!dP;E^DwL&3rhQ;<@FmTWm z_{(sR;X#EUK@46`O_zTcEf?0;2Z>>>|Bzk_G9&*yk?s?SPZfWef|&tO%%Jh$w?~=9 zKMd;Ma&v;nhdEm^zWmeb{xJf;5F{=}f3i`8>3@l9`(gODcI@RJbN+9o`#+Qpqcx+# ztbSh4K2r=qBMkZaFVQdPK?7dVcTzYbqaJp;mOi5+C$h|hut7-J5za2yhH(OJz(Gpf zg43Zdd>D`eRMZRlE~asW847^#LjpeMnezlEC#O7J96|T4#mu1KV1HkP=Ez@vJ#Tx@ zvSWts@9b}Ey%#QT76ce_nGai@vR~CUw3!L6XQwduPftbSa3|O(_W$BSR7@NzY_3pJ zq=58e5mHxSXQ9~QksgAry zL@?yeHgj}HyK~&~r2TB`#OLiCGu5(qucaRu6(uI-#-$|{JDs0aw!)h%6ImOF=G{S+6og=@vSZ#!(02b|1r1MDkt>vd9~^H$BZwIcPUudfdU{c~tbiw3^SFdIIb2}uJB3knfY?Eb!~ zGK@2hu$0mF8XMF@t799ZH0=BUFiL^lPdsHC3N*&D+H&h)1D~9Ao+~5fxS5l%4BLLj zqOaX&Y{9rvi`F}Vj3QX;2R5NtUVaKC4-LGh%vjyc*q+;K_Wk9o*)bC_sa{Y4fSgK0 z3!OgQi8+3gTs`HoC5Cc#scT&;%D_UJ>wMS9zBkK=$>+peOiV1Vx?0UVQpr9b6-l+B zn*;^kn!gSJEqSIJ$m5Rb8ysFII>G3xDrI3|A?)q_Nh_J-v$>6RUrhzfmofGLwAlWU z=vZMdIgE_mmGxf|jiW!P*pDEGsi}naCiBS?|@XDCh*$=8ZwTuhFd^W}hDf z_RI)PU|CCT9%s9-$XU>w4~OaSNBvJ!*n{~vUV13WV`MV<8&aS>U8+pb+vX|bk^ zW&D>i;*!Od;#SNi=O&prxS!ID7?yvJ|JHe%!MP+S6$rEKe6wZj`OEL=m%_12IY1%~ zRTi|2LpC^6an%?GG)YH(AX0|ci;v$5OOUQY8>YGk2m~acL1=PBiavE&1+q(ml$Dxj zV6^*+K|m0!_I+`Sva3a#jpUTeM};`Uq_wiVm0XO=i7;NswwLR@+UhlQ{v^owOXWc z+163Wo9Hr8>`7j1H^C3YHB1#)>*7pnju$RPI?c%a}*Z+(xXTm=ly#5nR*(6 zzYHRm#oRUf*bGdvv8b}ndiS_-czj$$Tx4k8keyQOE!Wttpn!HAujg^q=!>1X6jL-# zfEzM6JjGhk%<_B`WHN~x8yhJlE9Q?9MF|68h6cw5WipYe!-9Az83h$@?e8H}p39$r z?FYN1O-(&DevAF#j~e!^&rQd?0PZa7`s#y}V1tq2OZ#1-(b@p>#y?$7b z3TXa9%dL8Ykyb_>Mtj4fe^+4!z@32^bZz0L6cZOFNRzb9%#WV?jOes-vb<>W` zl0g_6=uyk`!2;d_kMmI;3HH=eg)lIB6@9a#ajuq93M?-f~4S|gSMy8^{Fh~|8 z`Z|yWA2DJ(J8>K-W0P*JW?t&=mZYyvg4fh|2+2J$}3)4oex7v z!88fb@t!|M_;+QR$i9p#uRJU{GU}hCDMwf|B`VXPRJrYc`#7m`0n7I)wBKyLhM{bg z{%R&%y;n056NWi!zazo-+}#=;n^FzrL`-YC3l5p&(GZC&?%86^{xvYRn|j-YrdLKj z>+dDvO?yNmAo6DLJZDd(p$UPd+61w~Bmm9ub}2wguXdRtjXt!~ZhP8Oa4*=8KGhN4 
zP=SXrR+KnqL~kfK&v4`ntvg#o>D_A7Ilrrz&h|Cs0c}QUV_KTmzF=BaQDa=-KA(|w=EKwGx>Qg;gzfiOctMi3X%@O0cE zWLc##C5``wGq80@Op3cQgV@6ipP%Df8a8f`QO^h<0d4zYcIfz`dXLub_XuU4@RvEV znSWRa3c$%(V62QAVXvTgy{-$dlz;_w$?KhDoX41_ft9ttj&t_vnB4m-R+3e)^Zfp@ zsy-b5BD!o@1PZ-e3tHCUc@zgf+owE!Z^@Lxt~ zUlfZJD<<1?t$aJ5Og}rA=>ATAVwK^!E72|HdR!K@Uqw_W(MM~9;+Si>;AUbKE# zRjmbbb3oWP*wmU?X)TlTr`w}0x0__r`8QD&%5X}j4}Cl$V^I$Q=sTvk)kg_NzT)f^ zc-nR+hhl5+>OwP4T+bt<*eT0we=EH>#1gqKKn8hqWyt!_6cqy~ zqoXpH6K1^3+c(rQx|ZLt@NgN{>ptCY*ilb%KO-p54LZAQD;=@W9c+b8_V&z`ZX^5p zAb5`5!4*cJh0KdnN54n>P_1_=HuoRBBNb$ne1tzT#L~5&$OgG#o7zwXq$CTI7586l zht$l70wX7RWf3w%!#CrLpx`>%2j0q`%ND;cr^+eL<4|ffvPDNnlN=5+Fr!+2t?2UtWs1nBE$A9h9D=IW<+c3dzKrWjPHPNJ%mX{VCuAz^PfE}>Ym>kIW}k%o!DQ^L>Z@g7(&FzNmZ>vo*H(abVWgzf)v3Znhc4O~ec=g7`l8#~G%+Y8JYc1$RIEp>PyqB$%Nxtc zu1P6lF*R&2&u5?QZsH}w3>*_?3#J|0;L)c3=Ih>Yzn2UKb40SGm1J+I)2bU?4&`oV zBMV9%`O7#pIs>J>Ajrc{C$h>($@o$2%Btf9Tb1D)A9G*w)wcf%RbvEG!!(3D&1A*m zrd0bhN((d+ZYoKsj~HI<5bSyxzS%R=@S^F)Xc)=8BOp@ z0(w+yiFypy^7yn8lW>b@X$(Kn9Cl%GO)S*twUT9YF?xFuAF|@;wfw;G~!j|*Q~>gW?}yq2teu$&R`T?_^NFup<}E7gyPG+N&8Pr(ij}= zyme*m2^LpmF9}GSV}i%$ng;kR-(5Nrus^1m7f0tBUoBJbQq@V?b%U5IF`k-Qkx)L~ z15t#&`A#YfH62;n<71Kj$gs(dKrJe?Z|=*6J7LNvWMO4pKuBceJrD{tcW$89`%Huc z;gBuO)&(0RbolcfJpMc}7Bf-#&1=*cDDee>uw5>ny<#J_kmfX?gEnUscR_- zsF9Re{$PJ^!u)A$Qsfhsb3>)olW=0vwWQ|+$BrG#>ik+zLc$-M6qN5fp4s0rGr>R3 zL~w}1)G3oBSK4JM*uQA#Sv;xV;14?F*rJVDB1#x~f0-0$(ia{++r*Ct5{LR(eku`m zYg*?9goe86|Cvi!oRE&}05?p3z>3EtdG5mM{my!3P4@^TA>$d<_JT^=s1%I3ncl7r7`z}4 z5&;pzw|aQN&YgR6EJTzkrXt&3P)Vlq7`T~Y8spJWAj6rP3tGZ`+6Fh-j3AQ zdmJox1YBt ze;l0{XVo#6YOtAet~u}Qop`(k^X6vMU*$R)Uya_QV@hi$FHf(3dfOyV)&c`2 zJf$B={597yBBwf!(0USM56trNJh^rrUr8jZsf9~~6CpOIShIEFe`k;Xl9vqhu zb%nCLoTjn2ZnXVSKWZwak1Bw;7Z)!vTN6wq|I@sHOwgk(CSgyi51XA~y%mIE@^Zm2 zU>?aS&w^+6t1Bfl(LW8@ky1j}L=o=Gg^aLHRrE{31_RL%AiO1oc!JB@A=-IX4|+7Wb)19wRkWmb4s!J zm@RJPek__&@{l1$BH;Acv*|NC zI>D&4x;a*?Fnmz1%hKcdf!TR#JaLn|u#9Oia?;~o@7-+Q3qD~uM5Vwt%bNMlbUmuv z6Tgl>{%qr#yi}98img1DoVu}fwW$131T@&z%l%OxW`DTs{i%F%M&|?ivn55LhLBCf 
zxt0hY+y(&x2#?#3Ey9RvcO5@}>Ws)&`DfmB+IC(uyxp9u+wCqJG@NE5p*<&jwRMNy z=XY&j=lr=<#tiLt5q^Lvo~q-Pqs39weZb}Vqpl8$^z(Uf5p_?;=A5{C+A_RaWNs0n zwkt4a2y&(KG{WEvCIIB7!vy4Xb)H(?+>Q7B75fEnO4(~k6zPJCuZ$C=5UwyM>i$D{ z7BC=pQg4Sn$4BG=NOPIV$)BxwHsB;S|E)59W;I?U!)h`oTExb-3 z2wP|aHSf;sG=_!mB{O%}rWSWpFq&OSTNN!I)K-sK=kN zbT@eBt0=yR_3~IeUVP5?C!#RyzJ13%I*ILlh3R@GXNBok+kqpp+#HIavn~!>4^jP# zsI(4cyo&SU(ivvppYckCO+5ysY=$o*H$`j4G&-V;yh@c(aKTejhopgbWTfarK&Bv+ z&6wH=@@Kob`$=UpkMcr7uA{%CFekZuOW*I;J04!Yz55rLbJOZQ(IN)HI+|l>=WzcR z+FU|@{X`mq+_&Xm$wz?iL8;LMoX*&Ugj%a*eE_m18vCNunW9%)uThNXf>Gp2Pi`~g z_pm_)cKe~`b*m_=1+L5u?yQ<+Q@nXp{gL&?^wB`E!Hwkmsc26sDbZ4wv>Y?%K}mT0)BZE+0BOcnt1X~Wz{#!dYJNI zpBUekdSL}xOR6+yVT%n}O5mL!N9}(O6rzN{aFY3+dC?I$B`Tq%#i-rDD|qZ;WZ_Fv zZ)S^|ejtOx!y5z z3OSrf!^qNF|3Eqx=a>}tRY@6+)vahgp<1qCB#Jdhge77`lS=OWTDZ{Y&C6sP-yx3i zQ~u;J3pUK!Ql*!b8q7U}Z$e+|%5|!TMs@VHkaE4R{cxU%g9qmGPpbh(+s@GKwJ75H zn}uN3)vRDNN}D;!{mppFZ{54niOM<})31kdHOLdXexNXek{@k%;fZwyk%8o&f;p;| zf)XT3d~X~d-=P`eQ%aHY854`jl4dMd!3)=>;`wBF&2+Pj+gcmz9zG_3TLf74U+-2E z&a+=@$XN%{fo5K3WcX-4*a{fd$}G?t&0xT?TH33yc92~6<+kl(I}eS4hL zi!$VU)%GkmL2wq&)S2e)#a9rh*H5ZO!XL3$xnWtIK&UlHwt0isML0&Uw_ThLRh05`0$NKn~sGef}JCPnXAR@(z?rk}7idB#%^U z!5-5$xNlH`T#$LnmH?)Pz9dek^cbU&i7%7@?F?!aDN_AgCQFK|Su`f2zfNM|9oZ^4 z$U@el6W9CItC#DuYErV&7~o;v>hHm9`g+uSm*J6szGGX)#+17O(>b(&C%<@uP`&a` z>fxY+7n3IcM(&bb@{%|y!gkaW<$?jiw9ziQlBAZkV(2OYOwXsYn6W9j9@Sd<{=01Q0_cclIHBDY!mCqoKv^J| zNNxh?(8=xn4uYm}pk5;VLIWdwKVf3D&X^2#R`AFPnv-nrY#Rep5(i+xQpv%Z3<}Nt zSQ5~!e&^-^*~{RCYj1h))whljKR=lPtc^W#7fMn6ykM%Omp?+2oDi0LMBRC3=T}dx zf=Krga+a2%9zK-ow`IBc$+f`Fg*Z_^w_;)15h=Eb|<0RlHWQ{z4?<2zL zMUaN}5#Q7k&3h00wG@G31xI}<2diZg`gJSJpddO7EA|o+8-p5bA>w-BDa3TfGA+N~ zxk>vpzU@53q^_VD2p5Y%+PaqQ@tV-0Ml9yT` z`+0#uF9J62#Xe8?^ofO~6q2IGM}sR^n!jvkg!9iXU$2%UiTFZ^V+1xv}a83^aCo%6OHHk+rc@+uhnplQI_)2*Y9wIx z_xO)#aQ?!eiSzIIv!gR&-15B;$oP4$x8F_Wm#N+f*ZiB<2vStVUz#TLz{@7TU3&^) zwD^#WHG+EkF^;6MyWariO5V`sI}T%b$b3~!dQ?lktep(dds(0#dFn}x?cl{$-!2tQ z&ih^maVme3K!}dQSA>T@D8Re;288DA%s*vSWlHC5j;%Ji>97JKAVCBm7C(kwD>zU21EI}@V 
zPl!gvLiW`GR3dBHe2`l@Hyr8DfeQ*z*U{M*Hb4*?gbX!lpiP5Z0ZXu=9mHPsTLFcB zGhAx%i~U(WF=F-zk_ zD%Q|chRNJ|Fqg<3{}UFy#L63F1kFH<+qxnO?5w14ZFw>DF^5%u?%Nge%fZ6EQd(jS z()UJ2o^)4tXMTzpyqNhUW0Pfz_w}{UpzSyb&=}h(uWwukS7GcZWNuPmS4<$Ud%#&m zOM^6XU=-+^#~CvGMkaB5+W8dU7+FR3PGgpW-k0R z4IGTp;j}rQkDcz({G5QkW1m+5zXTE_jky_Rg=JOMpOx*q2No5Y$H)?zpZWa{mt%$b zc{Y7HVJtVF7{_`G*rqKZdLK3$gxOh@1tbjF*L7K}{U%}kFw$uXbmHG|4p(HUHr|!B z7eON-2nu@JyGjd5^TsEC)MLyUYH!)|iE(lthQft@+3awfP}5^BL6w`bVCT7jU??!~ zbBksfFiUI9&v`od{Ld(SucHt%F-7^hUNRcQa?I>_#14{!L0jBhN_xTBo_EJfDNS{y zxG+G%)CLmqw!|!y(8dQhwoe_Cy~vRR>2rtU7;^mP5d%y*;_yO`@+qLC4hbQNH5^oh z!|D|@rH9%M!q%?h)IX)OwxI_HMgzZX z?-XwF30UF9j*f;qAuVf3EN9Z@*>}eNhx$Te_o$pN*`)v64g3(h=(pQdcH6^VWR&Fj zx{V_YX4rQ`?oPI+GJB>$TZXwB7nZ&=-6$&bwl1*(+7`ZuFa}wna-=x=W{}$>cyt5Z_IW)X}`3g ztkm3L?x3LqGr);hNFFgYlH2U!H&`V!Z92$9G~SxVO|18+Z8>5p?R5p;=*zA^FHe%W zJZ&}_QYfxhe`yP~9j84f)wl9%k1d~4F^1hrNNFMLbDiW?C9rzvO5Nx#sO2y;)`FN# z7!ZsxmpP<%-wZ%8i!?AtRij6~bj-|QS}-4(JM~nY*=!(q59-W#k~pcX5cSgGJNxxy znC13jPldtxhH(dz6Hvll7@ zw;N9kk5>vn0NQf3YlVCU(c`)sI86&N&qfx}vDuu!GC#ja!b#b-WXl%~G@T~7(SWU1 zKwH9?i!@G{oh_nJ$SGk?+NrnMHtv(Lut-PsgP2DFVH2uzCT3g}B((5i1kb4O zJ?E{gR(|Oc)Gf{aX3_6LBWRGAu)eKa^ojO;xmSy_+e(WI&}d&K=Jz;=6%I9*;T7*G zuk9&Oh)qZ*i)NP`wPN9>R)%wLyw3-BOB-j9|41@=3IS^pj4v=*ZvP1(zbzWo9tc^x zPwXu_!QEVJu`hl7=0ldi6ulv?cK$TP&zMSvP-8CU#f_vMS4d%XhwHB*;| z1jzFFchKi;slQVN>Z^@1MG>%$vWT3-qHK#%2d6__j`Y=8Klb4`dNgan&&^Q9Ha0g> zlkOSC@U2~q(@Ffclqn;m)!_VB~Ci&(@v~-sqM~(-^U)W!$1)*wj za}<2BjX$EiDZHOmi$Z@%&j)_}`S6o*F*!Bz-TLHrr7s~AA{KuAFl8k$P0r4SbLk-e zEIK;31{7o!+_P%9&TnA;#sB; z4ipLlcKggPk4~U34#F^=L8}6oya|UANyZira@^tj#mnW-E^G2IN$fmzLY8!q>?La00%axmw@*bLq`GqOCJ zX}2AJP;QL>Hc&qB`}4%#app@P?fs;GY;T|VGwJx$KVtn{poN-@jU4>zd^|%nw(FF* z6)A+&GcwI>mK~CwI5>QF2D5`*2Ada--LwyD`MJKdN-hO~zK{Cq_oQIBDDjA4S7-T8 z!U(RzZ@A#*)9Uu`fFh+n1*sjr;T~@mID6Foo(y}heF*I9VIT@0K7%tBBDiB2n zaIp;iJL81GLcu4T5B_aK`fw@jjzDiO@gIU zryfhY*;;+*Z9YucsTvUbQW?jJqF$vFh{27J@v;mBIzV0g-La>HlQPWxg6x||qLA#* zti8Cjk0|gFNiYzoVf?2h^F%GsWbW1T+8aPf32rCxh)r-v;O&}G>>y&QRre~9S44Xh 
zo2Ei4>x);S<8X=0DgMu{bNbcLjh8J(9ERVl9*d=;*%Zf34oKT2^sDg`y3V77tj5xa zqXD16si-Q+FM_w3alW+8QpV1LX+};XLGO5)K<-}5zkKY!fKTA|>cp#T-|;e)X&_G@ zmMGU7h>D=0=7-N!a&F(SOO5tEkGW6`++FCX*AuqZd&M#wkDl ztxJRBY~c}xi`+pW$iXUqu3oMQaV=01NGg8E2`@AcHUm?pUAOOG3Rm@+Y2RNMc!-kM zRpafTGx7Eco3us$+{_4h{T(acEX^(KZK8T9|7#Fq%x21KlkN2#xo)RS<}E6DuY!jR zTEna=&%2vpJ6z|-LEPELv8!rYT9nwQVZU@nbK$*oG!;_b+B*!&g@%HJIH|;Cj$hVn z_Y~96NG-&W-VwcT$4srnbN5ZO1)80_SU+ar`F!8qK zzY6xDrXE|UY^1Ecu|$=F#wI9-q!0!Ua?$uUbfJj8zf5U6R~j|(NnCQLZ7aG_|LV?w%2ogk#lX<=(RiCqhfG<_#kG)u5ckrpq~(aPb;QHMNYOBtexC!KTBPz z^*DR0;}uLxb}OM+0?G{j2pV|;gpI&^_mabdaW(E1@McECM8yOF!146Na;yni&O>gt7+UNpI z_IvzN+CMM{&{Lgrn>db0(cKKVV`X6>T9iTUa-ih3VFE<}5tL2>N@E5>+}&|pZY$Xi zgjj3b-@>G~Plu}C`<#9XOOril@NoPTBe2c%a$2KZ|Dzqo88Ny6aZRqs%fC(fhuH`8 z!zv(pQ$2t|z1cW|hO>06TQB_^)9%jH$}hy6mDxBH1L9;#=dPmSdw;~j_h~YLPP+X5 zdt(h%noJb3wV;q0Nqi3U*ta*Aq7t%#ssahCq;VwXjisQG9zsxreWlRgj2BhOg+D^M z32yGB6RL!Od=?pH=N}pBP=XMA9tKuEcmfZ`xH7V1#@MBM&da&vrPBjlrl8f^T130!Ivnz~M0h@b>H$nF}NA&TD-_92EqR z&aLA_g^T5MDf3{<>?l@={XxN%5Ha@qP^s_FmXTa6g`Z!~Wc3%HrJ4{s1&=aqKNeL8 zD0F^ibj5Oc6KQLxK0?^_2<0G7+r+PfG&VfkeY>RNpZa#=2-HEcN!B&A-)4&!NdP5O zeEW!ud?V(?nJq?)b0bxg)t@AZyCDA!CQijT6C#Zm@|S}R0TGHTeG6AL^Nz4Pfw2>z z<#4 z%wN|U-Hit-b9tqqi9o0wjHiw{%B$3D7sR(b{vtB`y9?*`lAfxprxu+R^@Tg7)Oz~1 zUdfU*u)azU)o;%>-3hP1~R zBp~|5a4-tzDqYR%&tTP8Y^&3a&NpY>A18l-I{w;%pA>fG%bxn;G!dMWCx9N~zXP)~GV<^HoUsaQ1I+uPG*gI2{R&wEgt6nn;z_qvcm7dx`Tl&rJ_ z;g@h>(b385M^2i7F=9U+s_G8wU~0Qn3}XdMJ|q2#4~q#GmbWI5p}jA^y@A}@D@)K+ z)xh!oev#P;1utr_{Y)u`ByT0*?%d>Hq-Y)MZJxMsGy=EW=}jr{aDi@uVja$SSI^h_ zGYArX%Dr_ZBBiKbtf;sI;GF^^^OH3aqdb&psKG5El5x9E2LZ#wun5WW_^)!SZP&ao zE6CNd&ubU?>sLA616u55!nbgYZ*<(6nqiUAqF!Env$LwnN;03LCwY__p+vtO`lDQr zl0vyH@9(0vx?MeTF8CLt55;2oEaBVvt8>d^fLF`E4aO2g=WclMn@*-hc)s0XY?JcL zOCQlU1m~)fMPdd?ykC{6$dPrCOq+pA)eBhI;!_kB6nqJN-2K5==dgh1b+so$r_+He zkLety7;{P%H1UHnWo6Zq!Wb0D=wwawSwCznW3s6FE~26Z5tGY<^aGh2kVq-tEu2V6 zq6bC70+asxUP@9ha`Vy5Nr~$HxQB{{25%1&GvHN=?prI~IlZR;6vxP~R&zx(!$RZR 
zcC$6-K|IJ;ZF=vK*e|#!L%Swn*@{7r)tVSNR$soPuF*k%5+ZylxgpJc>)!G`>;U)33yj7r}rOH)yCvDbj9 z(R%lt?$H5r{o7AnfF-H{54^RAzM^%miR0jgn)~U_@XI2x2iBsnE-Spl9JA{_ z**KRuM2Qqp8rcuK%GyZb_Avr~?}?dyWEiOBkRycp{Cq-1zE?gU_pw{4xE-41pS)$! zaQehei}3*pUOIG>TqcARVN5q02j6=7YQ6nPBOMnMnBnbnZu7v<$bIRjxrbcV%)+dGOS-9cnQ)W_!E8VjffJ& z!XDl%hCmUi$$_xmEn#Nf*u(s)DzK!*NG){3-M`?Qx&?%y&D7)xI)B#__01;Q-9npf z-l!hr0~ysVaBep-Tplv{C4%Zozc8$4RbdHN$z-q)W_9}XC6 zK^dW01u1#+4vqSFhrhn^OT7`>h9^8#qeb65M+)|A zUXY?NvW&FVGWM}ftp}xe5p)sd3E)b&@_RTFgBx-4wbMp z&LJi!!k&p4xe}f6*EQLbl9A~>w`oKAfs|7N115t7eq@EOl>YOjqiIuD=WY&`$dx7qW zu18uHRYP(5+^V3-ugZ@a-a_?faVPL!2{Z!eQ;9jweuu(|KR)a!*7haA?Ic6;4#Jgy zPTIF6nB#;{Ldvn(YtgRvrVOj;H`?XDX|EA6pGo_X%qNzPFKgdB{W!502bKfm*x^(Y zo8J2LI}z-moulQ-7wPSk@IMSOvW=skt#qS7Qdhb7`+Hb|4 z_h@J~$jT~UgxY-dtbYGQqOSTjLEMD!J} zBak%$D>HYhODBfb))(Mt8m)$x(7bro8&Qj!x)CBUQkE}mr6LCS)9T4c=zg+MhOuLV zNAR2>@yua8Ggi!4pN`^L7JTj-cIS=?o>Ff>1A{z({ru@>JE;Lw(;4 zaE%A-D{Wgc%C*~c!hpQ8>Y-b7-1Bc$E}P3E(x`}+hGN&#G_0|bxo%o6lI&DZ)eoKT zOL%Ui^^WRrXV2G0TwWrbR?5v@hH=C#@Q{6aQ60MTxs#!H;ESH+my@2aRbA@YzjoAA zR5QW?s@Puke6KGjd+*h9u40YEh~5fb{hh!~zBQTIrr-EuGkJP}XDlStdhY+_w|a`S z*y#G9?^G1-WJeO0o)UE+sO7(8?6*6sp}uiX{6JyRMLbOhaaOM?vArIe`vsjI9xVnP z@yL;X8S<$*F9CIxOxI*uzs-CX@UrQdy1gQ}5tDTyXSJ+MLxG?pvE=E+Ry(ckn=-$P zISz0m1B>^wL-qSwqZ!I6@C3ioz@JiKPrJ-4Yw?XO%w$MeKDtn%?!CN6KCKm{&%rTz z<+p$h>6FVBSX^RGz!H|P|HAjq6x_O#hO0RiuptkYg2!_oKY4P`Z6S-)i>u*5)>x^&{-D=AH;64XF{rF?}dq;r0c5}JQA_4eo zv26`ma*m^*PM19H^LI~9HHFNJjo?J4Z;S>zwN5$<;PLZ;Kz89FIh>f%Z<>Cmye>4+ zbv_55`{gU%#QY|b>F8Ohk?!Ab?f13=ye9KwFpcib%GVAC?nc8=Qu_}>4A<9;?3WmA z2Ja{NaWn5z3JZRHy~My&BsaSKa!;MXDT)YAPjWBxvk5&SHauKI=eVX<9n2ZQW`liT*_WRwC!$#h-J}k)3kZFk zJ3nffodBn~KAjk>B?M#}k2wq}{vs=99mN_{53$1yL5c@iw@4L;a zVk!Zb7eoyRv+FpLiv~3gHH6xQ7&`YMmb;%Zt(6f+^P`h((VlmP#uX1> zGSW$G)~6+ogCq29${IAG0O7Uauc#@ zcwE^z!LX{egpVFXpxqNdpMOB z>?fWp4Le6z!IeN^|MN78>|Prg8H+Q zJEMQ?a)`_6dLZ^U-_M}*F=dJL@iBq8k?7H|p{HnV<4l+8{|^K~`@V`pEsU=iGc`<_ zwlERWcI+&{J72EDtl~U}I#6-QgrzJEVoPz`4==(Iht3qADha}vDID5@zOg5Z&-8Eo 
zp3z-B)6excI=+s}_YC?u@A9IrERpx^JAl{Teg`kS^%08l(uGcX0+&~|z&N1$C?+{>B(;g@{BL#1XkFT+#U&mz^ou`b8tz}QU#^AeR z^f_ggDopqi%@sHqKSUfSjvqG~b7j(a*)JZzn9KwW&&d#>o*w@DoV@v9kRsq#0J5ZN zVTr=PY52Kg=T0pCYOrHkWB>p_07*naR2$~bC{{KZ_IPlB-neHUZoTSaeCM3A)G~(C z*kvC*Xz#N!voLY|IHaeh;nw^AfoUTP(9)ro`nWY5YWZo^JF8G#QzHX|2myR(KSFq! zI;$Q!P$oN|>eiPdqB!4{1G~IU>mWsFmQMXM*(yGB#x&{RdZy#u!q~=0l_7*HsZj6a`DY1b~q&5$ANUy-e9>*05W1D zl~7}!C9$CA>|F+=`liC5ppn4DCZ9LCs8IDEmy`i8WL9ase13P5V6FJ-fu zh|F$f`ynctR0FdwG8C0jEtV8hBtBZ8w>&%k%KpUYNad4%ucTjb7M)TTDSIt^?jbi*B zDfMCN6rK67OqHUMnloz}rcRlpW|_h0N2RmCJ={=>#iMObo_9R{`_>1n6l+i+_!3kd z2n3BX)2@?t8El5os+3kyRe8-n{XxLeT5>sTlJNEK-}V3=mt;;o>4eU<;7h*OpAtKN z!}95~?52xH`7xtM;jo!A@VI!q{$|^56pA*8ZC&%zuD{o{||?m5-GYVu2+lMfCGAX4FVUE0?^S z1D&AltF95|Chd{*N(6sUOI+*Z$N;t6>m&iu_sD)Z#$#A+rkW+II*@1nD3>!n1O3gu z%l4ijv;LXmi?C&Ti5%$>r;b%2U92T;5~!ESL1Uu`Km*Otd>^z3u)4WqG_;~1}-yW=-xj2svuWpE1VU=twNonvKJfjj>$qI41AM<{g- zhgTsWV_`NeGFu_WU0K>#j$Et(H^i~GLyg!IV|PK0=Cs4XT`ccESN1%_+eOk8)@UFc zZrjDyL-AdF9bAvtp17<{*AL3x&(_AeN=Z#A+g;Zab2m5CM=HJ}WCwd&*IImP^b7LxaNSjx8bR2yhNPZrE+e22JuWaqcwV32#K7o#=i6JfOFF1NZpkW!x`IDzp z9-bvfzg#6k5FL_o8H_uwf}P*%=FIw|W|=}84_4*@%ilihH2m!kzry#=J00ug%sbLV ze`H1w8XB$L3j$Dg<`}Y#hqGS0dY7N7v)DZf(5)JULV8T?8*ts*l>6`d64!ky1AXIM z3QK?gyeqtP&hpo2zc;li8m!^^L}t?Yp!Wyheu zoWk}83CF%^{i3ZK?N!_4uc&YsuKNDPxaX%oL~Ua;wwE3dEfM|0d0}|$Pl3%henY@x zBWO=b6znqrPiRmg4+SCV=$0VI4#U47WvsSx5_|Pu=b+^owZRfgq06%Kf0>2ZTRR0y z89gZnmyHw|p;`}8Zt?gemB2c196CIq-^3Z*2$0XvLFTcO3-O1)KZy;SHseP>xC|NT z;xmfI#1gtD!0gq*+lF*Gd9E1mPd|B{awGEkTkqnb|Gb0(ambpS6t8Tc2FKW@s>l3e zhk%y^Aj=4v4ij~wvdbf+Zdv}QD4u<0sdlaI%Z?nRK?=y(mvsb*xUH92TF3@rt`TB< zOV8J#CO>dHEe)XA2fCl36go9RMo+(Ak#g{^c$SlT&}~1s2eZ&YIP^=(HbI-95xE(7 z<=u}_Ar54(77fZ+*=kPzJA5BX;{eO5XcBEuUT!wNcgc4!ed;9q{a;VxwWaHDL{Yx# z+w_MON_2&OOY=Pi0~UbTaOfDVQqEoZXvt^NK%~%`XZpVUUd@5U=1wbi;9J zD&OE1C@B1-CB|Xfjve^u<0Wc2+ZJXDFZ&lnNt*vCyU2-qXQHo@ItMR}Q6x?(N1+<6~v z`O)`r_+c|`!bF=aA%#v4(1HzV;utXNkQo>@tNSeC6o&{36n+@!P&O|+OHMX88&f7v#6SQ2KP(U)JGLN4=Jz~_KDZ~wAp&>6 
zi#cEch!k*qj+B}EoTR;Oa#EZso&XmDZ2Rv8F9x7+Ldw|0^s`))g5n!KS zJK+-i)DhXZ^DqC#7t2>EUpr$)6`@tM>6ZU2%c?I_+6+CMbIUx#PtJ`nFtaldS z505-82SG<7BP9t)^NdIHP-sd z5AE+={npPY{3wl@btH*86HBtN$wZ9h5Xk6fX8Q@1 zpv$VyeVinRW&ix9>oIfsL3rfJ=g`*DCbmH-G9Jh&^V~TplQJ_a2$kBEm8;MxYH*aa zMRo#A$5J4P)tcm#JOjmA!imA<0&zae*Kf> zm^>^CVZwu`i8mp15b&x1q#H@-kng^voT)ela(ViA;W)eUpO*l z&Dp}&xm;bP9NhR@bP^BlI$XMP18N$Y)amRN%b{taw}VyZDpi3!J3UF+x^-(BcG(W* zIw1C>Pt}18aM7|=UOls;-`pAmh(LRLn;bIQj)^04v17+>oOb1{xa&GGkUseYq^6{} z@EH^uxKZ1z&WIEvXr3}Zy0{3l<}bi~k35AO*(H`B2WfMX=6zli8u)}-DZMHH(X*x~ z!{yKYARP(-iDggcp_X0c?V^cyGDOmvmgheh&f z(iZ+a+|@NL9CXv=Bb}xg@Zs#a_768v%4Hw2x=*4Fcpz zY#U9oR2nNn!HkhP_~T>GV8zNcxZ=_aF=6}|#jmtB$l_wbkDF@mwk158K*Yz#;k<93 zg`$xo@TW(fz&G1=W0b5Aw1|LYv_ZjNpI#7v*d(l(VOmC-YV;P_)^o>u)qGTZ>$$6c zo#W`=&T;L#jPpI?_CJ3%+&b|^qZ_b&O1cy@cx@L|Zs0;$vej>bdB z(G+Mb^>bIHcaEcfJIB>|@VWCnpXog9?{!+v`8&oX?lsjl{LpQn?Nu=CM5XYECasqY6Ljf1^J0~Ua26Ixx@ zh>4>Lan~#Hn)+$CZ~|Bu*S-lySW33PDs(Pl}@I zK-yGAWi{@5@L_zjd8hI-)kcS)_DzfTjBNkRlMGCpA&CGNsJX2?BQ;4an_hAIgShhB ztMJ|P&%(rUW7RWGYAhi^LlpedwJtB6=MFE(mt#RL$E<^=#3D&k7 zE_gRIwIC-eOYl!ojKm#%!Ty~1E5lm35^~j)LV#y*D}JTeBc&!M%ZxTn(ZCb6La{HX zJ|KNr^=at`s_lpFoF>?t{E#W1((nki`Z$TXGe#+YLGLd43ctMeQk-!7u}Bo{X(%40 z!Ahhc#I61uKaM@>a1@Fc=?DJ$DBk#NHKvJwBrcy@D+-}IJ`Tpl>IDJF8lgB~>Bdba z8mfWINgR0db91nM<5uMjT3RlfxC2Q%DVWY5{Sho|#VMW3%&e!}b)jrjR9Rg{4El#! 
zd)3<7(yQ+yG8%uo+UE{HcWpaD7$6Nl=@W#rJqiwvEhgyUR|V8<%(8dVzbOBco=~JH zG;kkErDPwXYmd~V1ldsq-1NtP;M-p;!=)FWFWMmP6q5s2xpX8vHdI<1dkVL}lUFy2 zi;fY)3uHy)rIU0D^PWIHDty)Oa>(iU+rmKq&_(YfheK4|P6)JPob^low1CuMKv zMrGBkgN)!vlB-<0=UK31cQJ#5#WKU3^QcA{HM#;VC9A_xa6GEaPqvl$jq=# zm#4543ge0H9afjImirPD6L8_VXUcZ@k@)>z9>t#h6)4C~lihK$B-Yi-ya}mEtB(ly zL;&hz8GZEZ8?E-}c+u+2~hi)LwngU16?P;R$*&qz$@^eo^PEMw19W5h66-R$=|G@z6$;?zU zkSiJ12Ooci&D(b&Ry^A}Yl1{hb&z=6Q!s!61?p2T;~FS3OuoU10Jn{2rOEQWXs@ro z?=f7uaxK1h(YY8mcC_$2(VB>T4MQjhLKW`2EVjs-Q!J|(+jh7aEii3*s z#4bo2Cn}pKOUtFT-}-xV^$!9@0CK>&_hBnamR?qt9KcTxnU2#>ohQ3wWG9#<_B*8H zjUOCjxztlFCv&e6_wqZ%_fSlXoX*{5S5CWIZ{!&qX}BJ7$yB)PxUI4lr=B=hIaX^B zbu(?Ye8E5{^r&>DI5aljxqAo|N=o=_34-rKQ_K~}55PmU3Q`dCLC5Tke$hRjMD8=Gu?U!;w2A=KB;H56?~q`Gp%;iM%`aP^g!V#LVdxa%)Zpimqqa)L|SLVMoth>H2; zO9cF5#p_E70+4o_5?0u_MSUqPmxCi`FZ#-7`j5|boX%%DzJ6!>R=-#A?cQ}719J4| zXgrjIO=7e#68wp!2qo%|INEU2SnA)bo6ct*;IGoB5vJ`ssd&9UV`>zxxA4!`3Z$LmjCS2PR3*R z-i``6)VW?JanaF2k2f+gY4jNZBLMXoG8!E-u*v$V*{W>p^%|-GL-rrafg;XW^!ud> zREE~UsMhU}#oiEZ5bzZO&K!>ZbHYVKGJaS#7Js@NSN-%ic=p9tu>ZgTb;3N$5lVs( zg&;2F(KcwN_zHU9SGOQKCJyD|$IaSY;TLA+tLh8Mbd3Pi=W3H90@(4?0PNgl`<$3( zAE<0>Bd?#b^sHckjHM+J-vdI%rGnWB703!Vc@IGZDBw^VlqU`av*h@jYwmgkKfmh_ z_~Ogu3XP~p9uO^4QqGD(A=r2cm&9fsJPr5X^)pPJI2yb6SE@4(Eh<8UmQcXEmogdw zs26~`zqT5u?}y#)Ih!X20fT@+U??I$I~f{E&pD_FyLRoxw|;yl{{7$QP+n0XXN|=w zpFwoq8SH`MhPAv-yPzhF9bJsuZoUqOA3Pbm#hbi4HygoDSE!cH2tc7)bEez|0fT@+ zV2B~WWicK#GfTFT-~agYxb@C^u<-p4m1D(NIeUv5qhMYZ)5>IBZ4`oXvNCbY^;hGl zSyQoTSDA9Y8!8GdO-HC+jQ|v?HD}6gL%^T2K7ZyF%oS|(pTS%^gS~8R;&GkJViSkw zVC&93xcIhv@w?wYsEni|#Ab+pTowz)-r8ZejBaUyxTClgxpMTv!z2 z+?@>eiVOn!JQ9G0HoJ4(ja)zVagDmlRGtmFZP|7WveQ1thtLyt{&gB&yUNRoz(|86 z^hAiQ#W6}x(FO?;hK%4tfPJx9c2y-M#Nvoi1^Dob6}aM--{93Z=8L1+T5DNMiXCj) zAXmJaJglj?8F@L`xc=(z<1`V1O2sB9n!DYC5Bfn>q8!OkGoJ z`Gs*-W%|5TFvA`)9Tc$JK&1NDtqPd$*dTkr!s~UR3L|a8)*xJA&3EB^p~@GEa)FI1 z)y{~Z(CfAmtoNZvB};C?U_iiXuWfRi20ck7u0DGpcM{V?vi|1Vg%daZJa5h(}z zs%0@PO4yotpb)UJkZ#$Bi!)amM(5>Z;m22Bfr(>BqPDKyZ|6;RE6@;m(4zpv%Hm&- 
ze30)Y7NuA<8+TX3AqJ@B@Q#JtRL>a32v%P>4pXZ+*;ly4fnV)!|?gCHTdrJx8sTbJco+PYPDsa$0w@YV%^$u zu;9aEk1U6YxuOZW@tP~p)NXB?5041iLI(?Y{Vqq30+32oGj(-!?FBbNDy~HuAk}!T z1crW>&d(pRnUSCKud-S?jvRcc2R&UB!oXQal&B|_z^7mM=*qIt$zMWTESiyrD}Zda>1Y=6$qHiUxr`AYkKt)CQ%?5%a~l z8Ti9LpT#eK_eXrbbeWn|(Fq~FO$T!zuWy!dbi%kX`0Z`i;^R$w5J`uu!CdGNE?e&c zkZLe;CKU&r)K~{*NV@qtDLxh(ww7S`p1oqBA8i?+$qS`=452G$2+`6>O3U_P$yaNY zOLlrCwGT?@scuG+xHV^E^=Xh!%`(Nr#$Z@M4oWI&JM+}tm>4IEaI|$t2^wTaL8k#= z2;tt@P6SMz^|f;KI+WKnb<(OaCKI6WnVp*0=?&H|j6twuh!>o#4&az8>nJNF0pD!i zA!_SdRR^6kg-ktIq_Nx_D0c1IgEv=iMxyj>HFH)7SRxV!k@(mswOhq{VtZ*W27v%0 zz@EwN@@=vue)5PMY}mLJm)&$1{v})H_a7)1Upv9NQ;a-Db9E@M(TB}C1P}h?a(uOU z52B)@0*vh7%-*xJz@%QHqN0Ix5rCWo0xdLD$qDJtzWAybab1bzq(mkB_(DTM2JNeK zDLm{GFQfl_^1m{p7k7t_`xELB*hQhe(dL*o`@lLGG@f|+IUJIkh87Vxodf-?#0o=BNlB49>SEf?@yu_7fZq^sYKCmf z)CMId#v>&u0l)p*)A;0zuW;kl-&5KZ@<$3vzUW8&qFIaxWi(NChAb1w2^Sj9J^Kvt z6kUbKo_z!3W!D>NavTBrm7DpQ+mdi=+c7XyG0_ojn7Y8R<$J;g1f)_3Qk+JdB=}ua+8ZP;{28xup%$3Ulzp zGcTibUpdB)9iWtg(8+J+==xb_fkeHa*spaT*diEbm+B|5qbl*O_GJhfV z?W+=jA{wnyCtaOgw2;!CwjQX)+@ohBD}(hzVWcaDeUje3=Q8gh)25)E>S(7tiEy)& zr6@O3F{-NSTFf|TlHio!)nZUUtQGtzq{NB9<*mNO@>9vq?ZxF))p%>c2UxRVvv}W4 z>vrlrwVt*0^_VtkJZP&x0o@r7uM%w%GYIq(0dj^`n*cv!R6cf!R`V*^DR#pZ-@yrU zj*-JjlhGo6g~(*AF6|#);*uQ6+B#jpddrfuOW9*%qjAXv=PC{V+OIdt#5+#L9rjrM znJ`cUJQ9G~WN^yP&c>`mrpXDtub?^?SE=FlI zrc`pvK+9vkyN%^F$Fy za+Nm1*UX>k)4r-`L?LF3FTy5SYD|cycEM?8F2Z1(ctm~n^>=0Y=ykzJz?c8&4P&4y z$n0tqcWGl3Cz`QFse2}`>wln?%k*jImmr4%4$DkY!Vgv0R{DI)JL^|@V+$ru7%Kum zj0nSACbOOovx)z$ z=sDfKy=5IxGfC{5r99s2Tf|uEKoX3R{xvc?O$k42ch2pbwr^kUuE4F=9j)41OKVHF zIDMna#5M@{9)U*DoWu*ykQH6r^!tC{?9Z3s`l~L-=;9)=xoJ|G%r0I+o?`V0-fiIr>f``zXUIm~oS~_s_5r^T4=iidSDh#b% zFVr1R&8EBZMDfcoAt_$echZP#V-qXOQJ8T0*n^)s4bFOj#%Syx%hs{#LKo5y}?G!7}&5vNWYk110o4!mIvtk~vjgMh~fun%(*!+vw<=;8Qc z#n-s)*T2V)ueuBePd`Z6t~SZ?b{9|aIBB0nRIW9`ETd>n`fmEcg}Ct#kK(Y=1+w8t zG*>1J7y*w3Aga2HMvlN)Cme}?zq|lbiVM_$)H(mPG3Itr`*1lU^g{Q+%$^1s&#l?I z8{a?YWE2+_+Lof&t^5-xcpQG{OdLIJ9Cqw15j(2{aK1gO^{rN-ckJr-UA$dG*$EkV`D2nyLMw{n3nR2jR@wQ`8bsW@@rp 
z`f_dsWV9m2xlAfF9y-^BCHTh1NF6NOibwDLmhxOnJaip|23@9R??$!QB&bb8h<{pc4M02%l^?I3P$poy;SquEk-q4i@3le&F@7 z@FVcj28)4{02=sTdD#VMmpZ8v1{y2NcIves+k;gaU;f*@E)wJ8@a~cg_|=Wy!Re>Y zv(2Wv$Ma>WOxgwkZy-Q^%H>^ZG>@M&690L1A#V7^@33Oc8rcObn|&nsbVAifBGmXR z{68fn8CPC@A)>|9p`$~T45s__5y|e+JraQUnQAmr2_p*&@w;E#go$H{v3b`%5sIP$ zJPWjqPTgti=aa7Vs1A*h0dDK=eL{CWZoBasWXr*IEoyn7pUL<0GcGaG7=7aSarnPK z-ibz8j%%oIl=2b(ekSUz&$NW#t@Lc^(B33UguI0xZ@_PVd=bt&=PX3X?+zNl_%Tow z&0qa);m{q`E6-$e@-!}BSFN*AC-)q%@dOMI-T>+i%9VPd)~VzTP25WG%8p5h=6bXr+~) z;K0Gc8-EtCJu!IZW9`i8dw!1JaYhg+jh`ByXc=gmWli|wZ%X8Vy4kq@uAgIAex9PA z%OQaXq?4MT>9WJ<@Av%@DQQVqwXHMM&(w>+3zs z%(7GHp=OagzRD`9v3~afJn_&^@x4oB<|FU8#LBb-79?vf{Y(`h=+Ou6!py1Tv1rXU zwTqB-z&|<&WLxSh2HoRZnD%CmIZ*8F^Q8xL3Z1M2UTf8&q5Gke!jD4^sbI+4y(z)r~aC z{bxBSZE*biD^((&Y8=f$fP zpD9z2tB^+Z#roZ-m1UN(!}IW)pZoylfBST#%eH=Y6b{r3RtoCYZ~y4B(o1r-Yy&^y zutQLooq=^5wqWtbT__cSAu&qu7wi@OQZoIG<5kn&{XL24st1~fy9<0{VL^egD}DR> z?&(|c?DMzot=U*3cF7@tD>id>6M>VCnTcQC_(L3Z_@Syy!LsGr>I1SQnU;oG(g(&5 z%aq-DJF#fhHf*n`LQ*7;k>H>70b5Jy!iSjH)2}gde#YMt+*d$n#i+C$>OiOI?yB?B zS1X=K(r6U>h*HsNE!$LzCb2O%^5Dt1`=+aK%DlO1X51?Mg2G)e2F}{IY}<}^7Jr6x z*@zMBUVnoT!=YLxwiUMPg7joO{Okg>HdJEN$U<>H*LYrk`jC^{zG={-G>r+rq@v=st7~f({THQapfECHshZXpxi`eqiE!AoX+%Z zAG?Wf|JeG(8jd35B+CL>-YUq;7Byor3JZs$Sd6~u>x4UEXxPoXf+Y}JdhF$O`_7%% zvUNK)Z`p>8G7#_By;uCXh}UQ-E=%0k#R{vw1b6wEZUagQA$70= zJ9n3gom_>~f$QUAJfVa6@W=`F0SYJXg0xJu$dXil@X_Beb!tBg^Rh6aAWwWr6{A3| zV~UHBE{-Q@bWVDMsSnuW!Lrdg=E4s?#7}%1Q9f?^ z(G{3Bb&B$V77P9t)i1q!Kl9qV(myw&4gTzebR0Qt z;u!hP*S^?-PZzRMuP{FfbO~_qmxS33bx#z0?7O|XnzpasQzlPnG8RVc1APk$qzJy6 zc%m~;ai(t(XGfael!JbrxZL#V=B@J6-~3hb;dh7(*y>NZ-D2B4gwH#E{4Mx1De`E> zd~;8`roB_<@u)mJQ(n}7BPIUfhWnr-o2Yn9#!Ku-M6Vu@Y)JY()w*Bui_#t3Lwcys~QcmB|XQB{9O-$hz)^G2^rK3)$!c=vhuaQ zRXpn-1Xh*plqAORH@3i9=z5LP;uww}?I|EuxP_ zxAo1!@!AS@`YmcyT;R)rokxDS#-BcK&O$>ICwX}}$Q#DVj?vjlhk01}THn?^SNZdO zGML)BI#u7K&!W+FN>(a8{B0x9Dj(Y_+YRf0ZMIJz)C2xmbX(sY(QivuwL>1KakS*P zay+S@@h?+i(5bWU(JOghOh9NP&+<6uWxZvkZ>6A9X-$#8SqBupER7H8v1sIT6AS{O 
ziGbBhG<7_3f`?(x8P{KK{jYoFkEc$__1EB-A=t)|FZOnyAfKE%d6Il_{tQ{Va)ab$ zrL~!$hGrf4o+|GIpbqjUqhw&?Xw}nn@InWV$e@)52m%h%qdkS4Y4cTzAdEwI;hc5j z(r1Bl?U>{}jL?BD$BSy}VXivftC9 zD(?gm@~tJb>VS7tALu3=T-PTKbUzV`we9u+z$>0jO<4oZ2x{!62B>Wgc6!=AF~1A~ zp@=|R4#%#=$pt6Gb4L!4AKv{pIG=4)$5O;%7bK63>3EFr6GkW)M;}hO(s1fM9kV|1 z&c~9Uo$jX$LpxbloV8mN`=U{>@wG5u&S*KZw+oDgzy<*U$?;3y3)8<^2<$!*OhMzX z1x*FA3rtxn_8n&~qJ-rwpY#E?Yg8zOd}8df3jkfnN9viWhp=pFhG;({z_F(mD-d&W z4(8AP@;J^8YrtX0H^JVlcQrwr9C6|~dE$7vZ0Z=;9v+cc_)F_)7a@Lz-|xu>=5`Lh zlWj8a1OW;->`+RuawUV}o-YP_Ps$Q@ag-dZwLsC>Y@k$a?Lg3z1J@ITc9G4EFh@WM z)EGD^EZaX}4vRru6IJCYHAx(#6nCjbG59h{rF+XdXBOk@c_)_1#j=0>*=Ag*c&*o6SfT4DX@ zgK=HdQMYn7FNYEWY)9uXVA4*Fngbs8%TXc+zLfBsu)Y8&7o{p)a90mJB?+8yRNdWeE8HF~)@*>e8W^W@G4o|egj3yc8d zMI$i+kQZQuQ&e_z4y;^frXj8#f!iXWgd_E=?S1phcL?zK6H5cs#?;>-nHmj{@30QV zlpxm}*nUbIym`L!G-@!&<^oFw2tXKH>R-b($PYL(Nk$ z?0|}L+d2#G^kowPi@+ur1l%G39T1PXu=>1nJV;7L0CEP!hravxNSgP} ztj`57=9WT$6PYMzCK};4hJI}#aLvGe%MaUEa#DiCLrCJp#{_Q>fHPhQk+a7SkYC>W zpse4xNgamVll_r(NE^V+j0_n!vRJAiBd3&OO$Dg=QVbz&Ycn;X~w9rP&SU4bnyc z@&>Iij7vKpGEP_BnfJ7PP#6N!LnNzNIt*FWz~w^Dq=0H^>0H%O4;9m76YvN)S_h?O zTR3;FSyEDxag+)Sps_VxPiSlr%w;cb$pP z`OL^a{;DO-*vQ=6%_ezYKSM1TImllX5!~r^&_UT`;$mYZ70Y1tuoH4Q+X-&!d#3c{ zm5?U0U`+n}E3ZpcRW){R>I0(P^<7^Tj69f5E)(P9Wkg}F@>1=NXZtG8P)WxKK%r8T zexXATuC6pd%uX$jxqIlB@`P8SO2DKI5CtHY(v7tX%t$`r)iQiXEeGc)9JNx|H&05d z>!C^LKw9!$wjr5P6NHcq98mi2W$R?yj-3(}tp%W-AeIx=#H2*&ho#Ia_$D&d=o#up z0P+l{u#Lr`Rk1`jW2~8J__t~Ff$?e|3{Vt?95vCBA!SeH2$6|fPfN!uw2ux zUxA#g#10}eq3HsOc*TJmy)X~XN|DE&eMw4AoRaA1Xtj}~Yl(XP7C8W&F{Y*@;naGg z$@3IBBLI1dRJg@+OawW=x?AMppv(c<1pgtRJVCcCfwdQOlo7N`?U4VlW92qDityHq|}{)ys`uD6i*xJcyFsK=;ix-)3cCw!b+ z$RuY*uaK!qKM=w`%@NkER$O;FKjdtZBm!V$Y8yNRAa{aj6YLK3VW{c_gPTFXD+H{e zGCn>|?PQ`YiP7qM1!23$JTGjTU+f`;>o-9O}P12=$byU5v#}u)djT z>eP7Cmbzc#+_Vr6ZOop%`+KBsk`F{d)0C5)g`KbxL zMKf{7wf%XEZG)>W$)X2KZ|k1p+DgNG zhdKf_ehLAb?I$@oNs9UxNJTXap)qiq&^-ibI*^@`AX~QYl2fP4lyK6zRNcL_-M!6P z&B#c{cK8_8x821v_gfL@VoPt&jLA&dk@(@Ynz?fJ3i}1_ENJ<*u6f`7%(}+&mUnrL zvXX_{x~wX_Wthuvrmxr?>T42w#D0R3$Ld 
zgQ+v6tP{X!f)+7UySMiN8ZdX*{;W31Z|j=v(e~k*J_jVu(p_`{sJnIBWIMLnvii2t zu+p`@dEd&&=n|Yu@ay z=_&9Iy?%%6*u6*fO~oesND%9i_D0HWMr#%OF~{IfI;y)QSi(*uw(3#^N9M{7RaaLJ zX3CihH)`iYK%*SNW;30n%;kT@XNU(reEJAB^Qgf&k`IxWeyTp+e} z&2I*KVvc9XLSp0MluwMaXPecj$!C#2qjErL8`Rylw?gz6>sJTs+wl{SXl(e~0ixqk zD1TY)-A*GRVjH15dVXk(*wEN0^*DjMO`ErxksV4qbk=hw&UTN!q-@ZUo|*#K-*Xfo z8bTKP8xOl|LO+Ozpb@Kjy|su|9#;DHbZqx1u*IPd=sv8|qd=zu61AeXCd`D|-)((v zu7U-DNYwqvVa1Z3#52L{4J2472&{5ie4?(QUbb!9DU&9Q(~c3_3_gA+Gy5qSfC(NU z&_w~L6=3=~U}cw@{{H9_>X9CZ?6Eq&*6~?5J=kPtWk@n+LYWzvGIUUpq@|^*vNyubxnlo{ z4!xwH&V%V9T^*MyaGsJ<;_R;e6n+?_)fgBiJk!oxEGEZ3Kq2HP4no{};DA(DRY@uO z!KqVa5GK0TNG#E!85*8rB_|~bYHfO&wFe6R!*bK)o%ff?uHAc;(60I$BJuPCZ*pc` zM#G3QHz!k)U`UyplOqF*`opL)QDxZ#0qY!@tGxhDzuG)9KY|JY`mgB+XNcM9$*LW> znsf#gK><)$PT&jS6KM1H-Ixs3OJYKTa8hmOA-*i8X2JN|wv+lYi;zpJO9BuXzokY$ zbmXWke(POX{>eJoxc9Ie-d!pZBa)Y&qBS?VR=gf0Iry>@7G$PKd<>btm5{CjmVxTN zBgan4%1yg))B9TIz^;SEOV(}NChPVeld_%Y16eRC%}Rs-=>;qV z(`x4G8p0<8sQ%gMF1OM$@7S1VS&e*OUAjtQ=z3pe=Saj#%abA?zd9U-S$4QmBtBZk z4a<`uMfq~!%xNH)t7@uhgo_U3MKxg8BM?uk5P_|jr03i?))<-x5wr%d3si>%s z7hiir9(?j;0M#U!aDXUT=vjw-hH$z0 zoHW3j?14ii@{{`>mW4|{k(;motjw4?N%bU6%^GyPW%2kUJ_zinu?#wh<~7b+=xEdbHhrwS_m$De*q ze(~UQGHXPk3@*r4ou!GkGxfgmLExD%!vUZHOH^G8)_~ofnR8}6q;HNId~W>iujNNy zzEZyRm0RFyoGN14TC{(sYh?)RxAeg4npVm?90*lSg_-bk;>1b0=YJoRXW#ooCJv+k zu5I5MphXdWdN6RAvf-@N2UKlzJ9oD4IJnlIHnb30*5h*ZH~(9H_meNnE!SNQAt(|8 zp8r(&-7dUIz-t89hGL;bU3>P)u%SczhX$&P zee&?Jqf!G$jA1f7bsJATefJ<44uq6u>tNXV*S|d?zxeZ$GI#tSrR|{rq6SJ!p70>O z&ZGD=F*n(vj3(Q~#3MTI`Yl4q7m9lWd6GW)0Bn#8qb^ zYJ46Zf}n1TtaIA?l%75<|L>Q-lUJ9llIg<+C~Z#@Y=C;A#dhAbd{YY2%1;BBX_T_;C2guA}g|Jy^G+I6%R28R(6fV!4sqpKV z!HFn0UYtBtDzU~lo5wZhu>iyrS($IX^Pb%M@M|)E`bep*Yw&BOdQ62MBeL<Xp^Yd2yl5n54K(t zMhudZXR6eM*Xu;g3vCbpiGq~0ZjtYBpyU>g@5!$<32>hjHwZ!!&4sr+M0yW_XK ze@cNzH7KW7>#?>vb4Z>%^UCWoWBO#t%g(_z{ZLYC<+_+#=7t zxl~3J<|wAdNkL27bO53HKo@s^bp=c3{Jlv-m%go_OIlqqQ1*?HIZ5)yJMYWbF(cIh zniP)w!PLE>!Nl!80<1&UdlC$F^RNT(WOcn1WciWNcE89$lR^}*Ei13Uc+srfWDsY+ z^cm!H4%(f$|8 z~!!>B6QOd_qBYifs6b~~b 
zH0ELmNK2Qml=G)gQQlL-c?QhES5 zvn|}Vr)^Bzwr$(Cr)^K$wr!i!wvB1q*4=Z?z306D!S_7hkGm?fvMMWLS7yYDwc-#; zuz(GJL-gi$TBU9}e}9kV7nYKeGETd4-Eoq>9GVr657N^oZ`gIHF)=ULZv4Ic4ffs4 zO2_7m!^3DLj?yaqqxD7uWZ$69@?LGt^`=ZF_%il>TG+~$s{bGqfhsB2`x=Ksy<7p2 z@I<)By4n z;c6DDbopLD^W;70a!uIxq`F!wFdU2fQ#|jobeVh6cJm{Kg=>wIzfrPT5Zc(m(xZ3? zdv9r<%0-_x!a9-H=~=y~!Dy%TStatGof=YXpXPzX8_)YQl6_R9?W5%9c#MeI=>{S= zAq-WIY-W$$3H0MdyIDGiW0d#4bgR@@x|It|t7z$)D;$cF7K!SO>r_K9_`X}|k>LGo zP$zxZ3)^qVAw(F6s&gO6SPArCqxFGR&<&tm*bn2audQ0?+c-fan1^WzD^>3`NR@YS zy^kl+4w~FILS7htR15mh{lQ%CbJzy6Z$(z1GcDIe#=oM01WEl8Eky^xoE!_=NP!<2 zmuc9SG`MB$y9MbDPT!KnvQSP9p4BfAzMGDM)ODQcd$sG~$pEEoBGSZ<+JOG?8!YmC~qI!ea5_8qS@u|g1KvV(LSu=LH z_Ngy)iJWWRakcVO(n2qhsN=j%C0OC{?8;-5@UO)z{9{Ht2R&57mdY}V1a_8*6DQ^- zd6`7(v%2YXP=u_UR6_sfT@g$oP;=#iAhM+Khv$QbXC%UF2VzCWg|DIJH>&3w`T=FY zmx%}3dQ1ZC+A$<$IPGE@tUA<5@e+Wfu-#%= zhO*YO7ToPIrTrKi*pIPSKs!&(Jn!Z4w~~IfMNG5!U%@5LT#g9K;QdnC;9Tp<7pj_ z1yuFW?raMM7dP)gi_s^!o;BQ(8nphiQf;r&MurPG%>T%($r*>y=J&&tq#IlrIssf8 z2;{E-4ndkA`Qp7l!izPAHAOF;^P>+`(^>JKVUE9PV`?1-3qY_-x2dua!3cGK{Xs`_ zzMxg=$ffUWTXNT`t>#vHdu#o|d6UUEHE11e?Gfxifr$3hA^JPHxEo7T7Nq(?cs(zu z3)!va4p+9Q0^rpjFUF8d4I&?3RJ+onmRB&3IxHu{&4FvBRlKP`C(=*jGPprfhT|xxU5VJT{XR^)?IcWTJ)v)t z7byywR2vG+wY#RAMLJC>#8ilT0#=&NPzgl*j$^=hoN*@$CIhLk7Y2$5Tj)t&AR9bl z#J=obr$EVId5@*q3i~+BPVWkJ)fSIU9!0(wa&6IkB+$)|Z}8$5(3>BTD}*y5inh8s zH3b1i%8tqvtNMt;M=<|Dn8~AD5u3$^H^*Ik8GU0DWc9ELXcNu43=93;|2t|4vaNb~ z4!TLi-tVQ(D4p3cscN%Ll}X7!e?ZPYa0Cje8q=mo@(%>GJ|ql( z{I4D@OxTL!P+^e$y>`dsqlFRFo(gv5&o>cXawWC5n3-rlDmP2j^GJzaIi2GcZ+F9Z~Pyg&JbkE^)Mw@Y*=~uCKc%i%$Sa&h@ zQk-;gAT+QK#ZTe@vE9X`+SY==;Wi8I`kK=klahwWeI^9{Q49`zH*@e}LD^T+5#w$- z0jsCypn16)?{oKoBW-FxhkNhT+5(MFGHVO`TZRcaOe7_Xht|(`_j+`7V*AKZ(TjqZ zo6qyLru)6FnqQ9**gRuV0r|khHG+(b4zyzBlCaG9rkyPWfmK3<*9|5qhw^y;cT#j_ z>zF>iw}vgwF$Z#$`)r9nw|Y!2RpLt;na3;~2r~?JeJdz^2!U6g^Ty<&dY9Zt zju?j?bq^Fo1`wncf zcAuWfzF#@^ZMUwY33&{49?yU3G=;9i>yPQk%M)RZf7_u`g6b0lt3xenaj|^UQM7~> z{B&Xxa5le8qFmkApyb*)KXDVEh973Qce%zZWXgDV7aC=%$>+MoUYJ4BCMO{fe1(*_oD%`;h}X 
zcmQcbsO&jNz&15IIWox9TPFz)SEI-kP7&y?CpTwMgk0-LmqlC8V;|JAtol!TFNYKG zI?)du5`k!K(3D>Hob5WIprxl@^rrGF(ED`fED-Ctymg-*M@w#|8ar7G>F(_zMH7es zCTj2#Y(|i$epmBQ1=BApqu2o1OE9MZWL7+3*oFfF`5ryI0xFF6M$ceG(gv{D* zA>W(KchkQB(ny6*f8L!k;B3R@mq8A7tOmU0x7AH2*eSl#2k}beW)yx3QBg@U27P*8M~$-$ew%)CI3-c!J{6pKU>u zMOVt07Zi1@v-IL+QINJa{t`N{uj`+{SdQo2F>#DDo<9HqP36op07B?YR|aep5ktp2 zf3wLjk>=e5?~V5ZB;?hkk2VRY8b(^l=N+A5ud!bo+v$!F->u&H8Qvd99XS*@b z`OABFA@-g*VQ+)<1)M8V&c0Wh!K&RMSI9gt;GJFgG+YmzS4k`8P~as_38FNEuM%E+ zah=G;&TrbW%I!6#(wdsDH+!M^O7mLNF?)Y)!3_6}AtE{^iO1$LHaOTi)?R*a$>+TN z6#*B_TTz~6n>W(*J2=?A9l)Y>2lX*6BQHPqL$nD=AL|(c!Ah4td?~k#9G|mm)%5$+ z>MYB_3G2QPjr!0*v-qH-nBqRDrUc{a+C>}4D8zyDCw%Gb*G+}A7FdP>xEd=1COn;f zyIf3Cl7Uxa%Qm8#XNQ)Z^=y%WwXPd$wSr&l`DatW_|$Y#C0!`lsSrPHWY$7yJT~l*uS{0L=21BD%kt|?Dg(Wn?TA?u4GPtqhF%5%oJ_O z=@4AmO{| zl|Fp`kw{IZx^9LiuP1Bjm`bl@=65&oTOOYyDty&0t#{$`x51H^iRtclQo8Kr_wCE9 zx>PpI+UjPj9O}f(WV+7rancmX*L1;IQx~VT$_JHp9S)y{&0kxrbTT~z^QcR03V3C__I6pkUWuF;vuST|RJ+5n+ z8pf?zM}+Z#W^lq&<&ntov~Qjy;8w7*I```01ECal10mdybP-O}!Z+9`?YEfC0T1<+ z;aY=qSg%H?;BF)bv+qSHaz)KDoy&@|&3>X0hJa5xr^}>9M#m3|QDr=BDr`iQmdU z2JT+KWI)X3hbypzP*13509p9dwBI>l^3)`=az9Os`nJ!L&nvbXLdlSP;J2d(o>(As z`!BFOfi@AWMsd9bcA%;FX$)4tHk}R|YU~NBTJx%Fv^c+^cKq7Y(R}z4*PyFTP1CS~ zrVVdD9!Ig@RJ88dR|(nl0H8v;lk%sNn}!o>l9Ib)+((Qy-g`bF53l9<=m+no()D%w z=a;zoQ2cdBZLh$YK`xpWK^)G{bN#$Zc|L|3@1DXci@>Zm9)Lj=3Dd^z%Pw%gS(7I>VIFn-(BW@syWpyzCsAd5_DTK(lQCzfXi1MjQuXU@F|`eH%I>38Mq% z0J5ff85@%K_*L-7e&W0Pu17~RH_Bh5gDrTk12_IIRlsIeEW2<;mndYNz|&g~q%(Ji=ca8EGbmZb3>x9&=RPrdDm_4D?0A$oQoR>i%j=pfgPPRAPy&Q3{nuIxsS09 znnzWSC^wf~x#z1``zPMUc024MPDV41Bng@ge z+0Hj)1jlYu_HF14g#Q#I0kyvZ79@T7&5gTMfIqwGQwJx79R+$((6=#N{%>U5Kr>3? 
zT@IR^CWKi}2RC1K3cn!KGxvi0*MivGmlu)ObzA+ouFWjE%QvfR1w zL%}sfA_`u#Cz0_zLoa}k%Pnl45J1qypK1{G4?v-l0qO-Cc)iRWrHT}X&h35`macu4 z02@I61zLOA`}iXVR$YIG;u9A3p!M6Q699IgICmrHXOfH3;#-z%@~dUtB5VhdU;)i@ zP>0nnz3U)*9n9uW@#VOW#1UyKX`^o1oi#O>0u7B73VN#K|Ih>^sOZ)>?1Cr4weIbf z6x8S0uiO5~6xJv(KL7V5#F6+fQL>hmkdabcikN#X(t0Jlv~l3>V$~wt9{9`T^22E% z{UyBcBrNg44apl2vWe~8AGr&n*Q33*-P|xDFG7J1;79d+!sVn450|{_+u~Q%-uB?? zoOg8*9By=o4ybE$9Ta>0v-Lb)s_iB%N0zd%>of#N_wRz@5UynR`-vh%C3VhDgBI?V zpaM!akfCPiu4tuzF$tbHr(^+oN%=O)TIP5*5xdRR1}NoM7;Ow##@ zlGjSHd)sK~7ryB*M$`TpYHfPc2W*?#tuy#Hh4`=BarlSc*BJKe+Lm7mUs)p!zLS-! zcl)0ybwqDMpmvSaqMij=NPsxYXJ(9USG_*7dbbhI;=wCMSDa+FO!-o1fC*rq(&2$zNEy5ND$4cRn5GCd<(9GExw#VPK)Pvry zx1`Z>>=)nPsFJZM9*Lo2qRge^A&n(tdBC7aV!s0wtiS7&=5+}{JZ@(fExLLgw*MgY zH5|73`AVo=tdbw^twuquPO~&G*wo#(SzC0+QsCvW?YZZHlA0fs?tBF5e5OnP1c&r@ zb6JaAWmM=?(^aOm@A~Zd_~;Qg(eQ<^i9}f>Dz){HzcxmSAU#n5YM>hDy|rjNu|&qD zQ2|}<-5NpsATKInU9I(4nUe%Q3lguq)25*971g^_`CObCNuiLoMmTQYmp& zbIOx?c^?~i-Zpy=GeQ~MkQ@R?7j^+Bk3Um4mvzU@B0l`PNyed+7 z>8Qav!|($bf>L^UpOU(IepN?ZLm@~KX%*qO1g$Lt8^XD=LP@v}wrn*h#;S@i; zR~7~T15Q^%Y54Rf3*_q`QGGV|usrnQCh&HB+xNBULyqW3!>gft0ajemE&)0E$1;GOHBCra2Shk1JUav}O z*Zd3#D@8X5Tg0g8#?~rIzZ)-LKYWhurTRoM{z*?QOG`bso8i$xZ#t7NX-L-~k2+53 zgeoq7QE8E6@8^o({h>oP7zs)3NjaqP(0)Nk7g2c(yWTroBYw#|-jUHYVRWo*ade z3{Z9ZqKp2MrExz^grYQ{$pU9BsNoV$h=%lwV3!amyN22b?e+p=nj&qdKNN@+GkIz0 z?yjKKL`Sbf%X4lZHfSP0;neT?LeuBYy*CSvGt9KPlfX-G*>;^u|78=6$?n1eH#IRP zaV9gr-=YN?0vjF%MynE;=<^+IRNekF5I`9r$fwbwv#rx0B{vW&w=;<H`0hreti-|DtL9Y z-rz$Xv*e+P|MHBIu9!rUZdn(t#NEv!Ju!5L2Z9ePAhm>nRu~+>wpJKDEFi0)s~pLC zU?nLxzz7_7Pz@nOCKHCUq64yOij0t$71I<5@Ft5 zx_|E8)pA_x8+6(Tdd2N6lknGAKzhT8(U;jw;`Rj2v&FdTAzYgS3_G~OHJHr|u`3l9 zgBGB->%L74LKJ~c_zTC{Ry=CP9Pa7K{1I!rb)6-5BWNk6+I7lYZIiC`J4NK7K@<+t z1>9oI`K0Y9x&KiLuw-hmX`Y~s4P4Ht_K`gjai3?=lle>z=-pUHbwYfW0r#BlicO?u zP!Jysq)q7p#Ix!~GygL|ZxX!@mu^kx8#9yZgT&3Z2ci5}r+HdUQ^2C`$ z0G;k=@jZR$?X{I=yK7<9W`p?M0!qKzzJV{8A+T-kgoPD1Uyl!-1{ut2hYeRP=!m>9 zp=81gU7LVeEyyb!G0D`$c@t4Vz$*%r4}b47QFs{nAI?FT z_TtsZH*G)~=1 
zkXbbOk{PA75R~!0GxYC@yuS>*kA1E)x0_$Z+h!{2j@t9P!dw2t8fr%~BDmKYmNzM$ zC4V0j@rcm1an-B02rJpH6NtdmFQXpYU&vcu%-6 zS-m*?k^DgiZjNQlPo}i@*#cER*PyFc81Qo^9FatK6oDx~qL_$7A5(K2_)=m#jXKp5YlD zDdrVg|3a&-PYg?!LmOnUFCrMNRjswks0iHmae1kGxHbP;h5N|<&Zmw{T%Q=D_(Z@# zA>VzI!SnD}o2i~czW9%>-j%S|=GF+Sd`AQTRE3m@NX<;b3t-`zRc1jx>hy|OGhl%hva!Qe={)We4pZMOrNCsXjZ&`~Af*IPciQpS# zT9OG{jCXZq`T<`c%IH0RKoiOAtpW*+ zz`1LQHwrGN0o6u%(1BoZdPkZH(A0)SH9eA+HfCrT z4kSV(6f@u^UV~vNpWfZOd}PJHFHFHWlM<3RNp5LOK86>1mQ5&X#%B3e14*9 z(ZFbIxL&AW*RUI0JA(}GC_`XSRL^i?jA%8P7}9yd1x$kzsxZxXQsOg7nB}l5soAYk zopKx(uPHqi3f3!CBAkMS`ptj3AzsVOi z+4T43g(kdjY({L!RVn*M#OI;Xe;w0x>h~}<8SNALv%8dMZ0#Ij{p9)%$!j%Pm%(we zSLF?Bc7NV_nbP%OqAKH6uUWr{iOa7`s||Cv_4h_QTu^H6l`K{HeLk1pY%C(+Q4M0Cm208XH42){w-Ku zRh8fq79%w_wtzHWj{|#J4|k~gs1)0i1{5vF6dX4hWg;oc4Z3b;gPoY_4DZ=q zpXXIk!reAnf5tXm1^n*#^??JqM}`P0X6TxcfkZKUr2>K-p0dbhP{OB5Ye>-vH8_{8 z027sK)2g#Q)=wyJI5Mw50SqR3aQ}l`e!L|6eTnIZhr1gHtE^0;pB-7x%-A)7uikvF z?#~r{S7aHIl5#v_EG)}$->Aonlgmj|qQdtO8z` z?Ub7);Dq9k8CYlF^2+>uBu+tMAPYj5#j1<%{?v?4_Io&gblFyLgTjC>3Xc|=34-}= zAEFxd@-Kzo*5^erGmp&cA{W%0q^)#m(6*}zr2!;2l<+$!N)P!@BXd%eHvXFT`dBHw zF)3=JP*4xJd7T((HBC3O_YjYNMA|j=e4WET_R>X}oHgQgns;Oi6F;vaMSkWHL#NnP}`aL!`#sw21YJ_v_RxlS@r_`6sgvLv@>D`a&H-^ zp8z>x|6fz}^OWhwMj4@OCeSd8nACCBW%N)hx6s2Ht z7#|oI?fMzJBUh&bg-~T7(628IEA}ZQUCE4uMYcKnCi1O(*Dk9|2q~^bBn1NO=1|z$9 zWju9Fy6h6ph65c~I8xB#z{^AAfbO?&_xcrXzOI;?Ob7Q%p{)_KGACfutvY~&jn;21m4%-}?1g_{jVqse1C%xx-K#qXH zC1Yp7*dkWotlEQZHD_DdE{+w~K4aJ?rRsOflSKitiXUgXtZKQ4T*SgXeTzA8qf>Ec z)@-96oEUxTh~4y!J-;|WyfRw1KNW^iP4*@k$fsJ-$}@K^m^Vo!aX9ICqfc@x!H!w$ zNPUdgu+y2W$|loz5S}>OUk)bv3cfgu0=aD*;UGb`OK1C`IhsXYiaBe8;969QSLgi} zsd2q2QnizuXpA^75>Y>F;Jd_dk0b-aN)Wmn(LJ2Xs}Einn-6m*LQs8OFSf~Z4MvU` zFIah{;CK;4P`z68E2sk4jsQ zz4p1UwA#4P&+&uhQ-o3J+`Xg6syy_}f9HHMgIO1ci3I^5UN7fqM-JAGeMvBkK#HdM z1E2cN4h`VRzz9K5aYr)|@#x~Y{hrBj#WD(_yfp1mq2#i-Xh-C;)W~3u>yiq|YrjZ> zv2!3eRDRmahRg!cO+QDZemTT9REH19N#)4pMme{l z7*5I%iCWeR1dBpiI%2#;eWnEL(5}3NzD;p7(H8I`w|1%bL#oNjM&r_Q*64Irg$<5W 
zdm&8ZC?^vyJu(?7v@`_KUS(D{sST}pv=)ueen7i}@u2&wcm)_tK5AimY&-2YC*-Em zKJ(vId3Ri=Y26paW8r{`2c(dVMH24CRivH#%vxI>aPnK(`@sYzW)u5y25A*Zwlsk(`*fM;tZzo-1J?5{PFy~Z_RLVLGAYOC@C3vS{M~p$gmriUtu@94(qLl;72;j zXV7fF-gU{RLiQsv^5D_WTii@0yDFIA+#~LnwVxmw>S+V5lgH^v{SPM~Zq%`uH8#z# zO(}eWkz(>{Rm;6K-x8T;L@o~0EH(lXXR!zK@}$p`4*Ayc`)eN>G|0L`UUtEV`YIM1 zfq*CiT^i0+tdyY3yHiU`DQV%6q6dL&AmH#sXE)*K0;~ayOJ6DJc565x%#+&6+7Y_8 zh1uO@Nm-Zbtf82;pw+%cvvWI}etxn9RFXfMV^%B7ZjSJMuckxM8LUSuo-ygUz|3Rs z@W8cTge?q}zg8LOjqxGz2*TPvq$}yI5TYK?;d0B^wSQ5iGkcM3@*OlG z`v!*5DJ#7V0yZv6(_oi8k%3Sl1fe-7zFF2muPfYH^bM^)`%m|Vl0;6Kn-nCibTl~} z{|58CuLE=HXi`g_sB20(O8JNPt39Jy7@z)BRGbxb+h@Xcv&t^7Q1s5(SH}a(H%@k` zQxDOv491ikTt!37I)NzOX)?4T9xUdD;kI%Hs$o^aGdYmSd&QfDawiAkRN>UBGYqnd zEBFd=+L&U8;`PW0G+Ye`5>FD*g-0XiPUF?MKE?X|30;G=l<2yo*oBa@cHdO9;VWKN z{RI$$Qd+E~{}47Qx1DjEwW2iULr3fuTR4yu%sye}7lr|D0k^){MFt9jL?3%xC3L7N z5-ET5V1ZjYty%9{x>^UC7uHS#Z*uW6(@b<2l7>lD5|1Mk@G_i$C$|T)ZeH`_n37y8h`rzhwUeolO=ZWD&VLG;7+&ZB^y;I=czS2G)h@ ze{rBV7zFXVS-IlJeVuSr<52u+=n2is>JH3iHU zfKZ?iaH}8=oMxff790c)bmSK@JVEKmXar}h3@8O>V-==+%~oDPRR9pBGA@7Mr*H^D zrQW353?UNJWhjXJjDVRFBS1kN4x~E*2oE11VEs=|{l8-2{(^*6NDLqf8mg^C%l{Rt z|L+q3A!ot`g#X*54{!pC8=y&z>Z)o7#rD5_0a5i5`2V>QKxNz&P~}E(QL%|+^B+SX z`^(_`=YSXg23%EbCtCfF0ZD+;+5Y3c1XclR%z1T{PQrBn_45CBLJWw>_J4mt1l0Xg zS2t8Y3^fSy|DR@2Kql+|`wI@lFbt3x8>ktF@Bc6I)BoEac(e)lKROK{1eDbkuKw43 z2h)HcqY&LPDx#pFQMR%4|9ko$V8kT(GvSeEHX7y{&YIy-Ukz~uGF ztu}5b*qzhs$;JZ7tnA+uafuRAQArKQV$xWamph?%^(7)Xe!Mu&7jBOLnT0=Py55-c<1S8sWVtr?_F%KI3L_r^vlfZm&30)N|Z6;GV^)n%*9HLZ)c}@F92KMJH_#2 zz}wjKI{E)a!yL#$>L*<49}E9uFc1Mqf&ddCQi>0S`;WKHBMvy2V{B@K{r|iGV7tWs zY*&P8KF;tzZ1?zI*`{-qjLd)Z76Jp5F^5QqVl(}Rf85{#4tm5Shg<${Z-L*003${! 
zmkZuVQHh-mPCrvrEfnE@p3SR*~{KY9m40T>Hl z;v-m0{==1bIsefZ{Qqh)3wUC!j3Ai)`Jc0P*YDrIU1zFZYI2$*H$mB}?tZ1H0tgMU z@p!LkZ0|P-kt(3Tly(R!o%$1m&^$%e3HJ!*1-vA)07-T*n%RO}aQ1v`dMYL*;>Vmss(OkHSwJWRf$f8M=v~En=lq9&G*&krS0D zon>NOv2NV-&ou+xg0sj8g)V6i*ICSM6%UuxMy7eq_`0G8kL1RR=>?6v`r)HFmd>Gh z@?7q#mWTWO)SXCyJ?pcRe(Si6vGAFixQ5oX`x~Ccs`g<_M1)FL%a12RQT83)V1QM4 zLRsvX4ifzkPAMe+^Jj=6Kd+!d>I?_FxTZ!9`$iU+oQO`&$jJC7DW|4UVd0{WZVXg; zbv2~0jaZgDXQ`M5T-90r*Lmd~;I8O6b-ig{M8w6VRb8qVS5zfUO^dT=c%ye%=N0tn zp~ImQa#E-OAMLi7RDh;AloYb9MLnjKva)j}(jT}g$V;c^D+m#mzG}$$-=n|PfJKU9 zy&wEk$xV(>*=BN@!O)0jv0tP~Cf$)K=p&)~XNbBg%$Es@jNuVd@}!93L2TVHc^8^` zXYK$|rZCO>jz`W~O|wUFQUvb{BKyxxV^|+?y?UFbP%@&H`FDHl%@qA^8qfjzH zDuVHewDw1};Kp9zovECsyZ+0ih-;+F2|c!FI7I-$hN98-Uq+uly=VWznT{L$-=Tu z_T20tS_JPJ-Ea2G%J?$a>jW-P-w^3#PQU@4ya=1;#w+b=V#V3C+ouo6z{R`Zu#JxZ z_I%3pG-#By7?R=0;W4O|eB_10XydZE>E!qHIkcc}&kH=@*SxnE)(z%6g0FwR-}%hK z;tf`*EI)2-6?`_O5)#yc@i9k%0J7}G55Lj@*#KoW?+#5pAFuQ(?FLCEaX_(;p(qO4 z57MyMoFsTJX_4v0iR4t-6mX2`I6weQ@_V3%YH4^_EO~kvkNdEBWhb6~$Eo|k#60kn z-6gZxfgv)$!L{n3jpymWIUzNZatx`DqAuQ8TmXqAb~y+iXvl>(xMhCf5Csj7WJTAf zxTxr73$liWn|JTQ3$({wdnzd!h@O*F!jMp$splEN!f;qr;;p2yrU$3E=RbT3DG9GE z#YK5Q-@tODeb30RcY*U30C}-y!>{&#wY@^#Stx2WwmJku`#=LsqD0rjjk5Dm-Y(R? 
z#*rMc%EC?BHO;JWDJ-0KyYCvnQ7~{S>zq6n&U0zF&kt|rjPD+tYbDgoxPm`Knt~`o zpmUWaY%DC|l*=NeF?47Ei_`L*itVoH`bY4Rske>RZ-1N3CQ;o)y~>)Gu9g-xxzy}< zk)~v8elN}wd5Z-IE#>=#NQ#<-=C#^qlM33N)UNFi@+Gmwll%Y%R)AK z+8-rZ9GWmTD1B~(Z3W#(@_fep10fw|ypR1ugk)@5&HR;BQ6rwse;a7;g?$IIGFGq3 zZXvEK%c>FrK;I_<+cDN|>}g|@OL}|C>&d<{TkKM`{XY2bX1uQB=14+~6SU;-Sm>|H zt!^;l1#@bU{R1mN9{bSHut<}aRPW}kBM<{%c}}WDO7d(~*Rb02N1gquC#Lb3`8wPdfTLgamZ& z8UT$%Bagl4CF0WQ(O6&4#*Z`eo#cGb$e&ygLJ%P7?mvRldEcKV)}u2Ia{PEmy!`Ij z+mGK{M-Njc6E7xZpl}o$Km&Jq-M)qFdcLd}Fecx;QAZuHtgI_d-(#3wMUJl+XL=Iv)*IlRycfjc zeMO0rWq*esr;$oqb&c*_CNSGY=x`cmnV6bW(h{ul{yixc5O2`)qlB2uQb@6~K#Ua{ z^*XA57I`ay5+nN=+rVCr`%>3c+A?0WmMU?Z*MqcNkW|<2#u?|N+0alSHcnk}kiDWt zxCENO@T-G`A^b=zou?>QhwIxYJT|PQ9y+i*xqYSB&Ig!O$+o4XM$GksgVN6r$JSy- zZV?u2AA%<`B5pIwgtNyKE4UD{m_OK$^re|>?vQ%^lP`EU_hXlvSTs>oZY3Wt&+JE^ zT&!I}xifvDBhqddB0&kH+257H01%K6MJs&15BKCMzh_bz7Q3VYUvS>}CGD}Ls1T^S zb&TG}fUb`-DoMOJDs#ueyI<9#;g7_Y$77rKk)*7wf=Y>rgs7sDF0$D?uLoho^)X+Q zPidH>`dyyrEbDXgKs^9K0m{Qe)w_P2%=kgJ_yNg;EF{`+#(1R$%T(|8_ttUJ`(pv1 z2C1S6>;0+lQAGelX4)Jnmob(_1g6DL{K~a5|Eyd zlS8%h{#MQ8awk#@ZO9`^3u6k+yi9X_Qjg-XU;TVA!rvxN*ZTwlAfU5L-Wv=Z|3u^? 
z?)AI1UHh2lCj>eQxMi`R(J?&T;|(Di?^`p4V`0(J;{o8RcwSBtk=mXFN23%$z`#35 z$R`fgCyqyt`MkhcP_R(oZNZueYaEsWgL1Z<*6vQ`P%N1A5)jp^;#nBJ&C2!*2!peD zbcycG<&)3(o;6Q1b4#1>P2P@^I;Mq*xx&%Io#y$9P=sdHCRO}=c7jkqS-TN*o+9I% zpxD1}4{=P7n#!eq@9m1nU~**D^_($M*H%{#8vGJluGt|(737%w)+kUgQPT47|v~=e|?>la^NY zaA!$I(|+=LXiVAZ9j`b3>`ZFWxv3!c^Ma!(z18{kn!&b{zaUvac`ZI95`(+z>i{q|sk&x#&vU3@xo8+MV8ERrp; zQN9P)uObm&uj|jd(MbET56Pe+^u`Wu1`a(A>k{l)Z0s}D_OZP3VKVrssiH_%xJQl9 z@sy3a$!3e&pazS5{MXy;=h4gWN9$MjM={P?Lgj@8;xCt27$!R=*_3Yd0mNGZJsypk z9;1BkrpG)ZBNxnFD#KpXNYYwx=VO7kN%8hfRZTo&hfbj~7DX`_6?4Jk*?!PNX4vDg z3f_@qwvZ=m_?HX1%9K}Ux`g~72&ztKrgvwJP@@yMY)1tlYo${K31d!+Tn5X(U{E^D zCNuGgR{jS}8()Q`2VIPV^}(LVDcefP24YLT%;(_OcRnxzItfyPKx|-5I&M>{cYrvb zC>tI{KhP54nB;AB22$WjP@hWK;cQw~&<(#xP1moSDvfS^ctELh8v9@J=(n z3Kj1z!_15IPJLSLN0atnUJIOLKYxPMl5zT!D2c)2Q2>w!x=q!v^&*@C%>;6BtA1sV z#s12mUw=qDwG1>Pt?xLtkHu*j14`?38xVC{9BQ=YXQsx3cXdgdqo*uZW($ZpD&#Q= z!oHNaJ4`oRHCwM4QU;7n7QRq4V~c(1mXrjT@}B{lCxz>k0CCb?Lfjz16gGapm-j}e zuN$GYw)Z;k^T?I4(LG^CoNBcoR!DVIl}O~k6_wTsR!2CcRGKvEd-EuDAyjSy8=0zz zeMR67WI*B~VOeid%5v;?LbuiL$G`>!EnJ>V$3=|byddat+_Dcs zn@J3FseH2E_bEa-u<)kaZk<*@p1^V8Z8L2%1!Ou_Gt6S&fvGv{;!iM*Lv`g9LQ$W_ zI^5L!v?e<1W8KKd!M=VjWHuURG1LmdL>IA8ep7><3^e5%5=ix1ylal0^^}E5{S4ql zIxA-ii@WJvK~R(Ld}gaS6HYmm70KQYo#J$B_Pl-FNC+4~8*|i>wB;%+<-^brjM29( zQiQ=rwdi>DKNmll?n`?TU5UnKbE2`d%gzMg8Dh*(A-&p?t(VIWp^~$Rv;9~gvQ6^w zso!Z1mY}ft`{pn6qJA!+zrzY3jGskjdo?EZt)M%&x8#Tb*tbbINE|!#>{j z?J}6WxX-%qbm-@EQNf@pC{c|3Zil7~dk&N;&+#$oc`?Ve)1yln5(lc`rfuJ!z2X1_ zN|2ID90|cS2Vw8m)0ETP#XY0JK;DX=zImTUlM_P@r)|XM*AmX*P&)SPyf`hlAJuP!=*2Bkrg>@1XJXGS9?O>&L|ahO&cP$cwWFD-jXG6Q9<`A2wuAEz7*A_|j&$OXfpn-y{ZbqJRnAMl+d! 
z?nQ|3Ff)-}>u8h9o$G15Y_ejmzJzqUn>mr+I!zgO&ZR?DcL18v&$r2Q9AfsQD?jrbdd=X|=k{J@r|4s9RJ z#w&ClhKFXWZ^aV4Z>u+5zdY<;sZ1AV#!$~!_*v&us9;U60(O<|j?f?vQ>5t={^keO z-~SjOhPzm^t70qKZqc*KOtNc@=`r|o-0I7(bcu_{?pdVcwud_FCnSrV9-MiQrSv={ z|5u*t8IOaIz`H^3ZXt1wFT>Qpw}j_?8iv#1CQeK05o6{N9&Il^0Ht*Pm%HgxTvbYO`JO=BE|yaDt{>jb?3sI;8V_ zeNkb})5}lyX}C`Md$+q!(=-=3r6?t^<4Q3KbT61+i8h+DG)XS4+M>cn@$}~#Cvo|W z^BCo;)`cEyrceL@v8+mp2&y11BU&DmR?0+Q&)Z3CJR9`HuyCe1MLGZi2tpc^A{LPkqHJtQ!b@ zzkN#4H98bvR0+x=X){|#8MMjvQ80FapnGf!bzX zWKDl2*xSzp!z~|&Zw~sZMWyCUclGT8w}$Z^80%OKzI3|1ONaf zCc{lkO3GSQ+ellRyPKZ7H*NwWbL6HFRF&?&v zY)RYtFmF#GB^67_L3Xxy3rBOxNFp0`z@nf?cyVt4M#;^g8-p){tkCq;6-ERipmtdd(w@r5>42W--PzTm=k_a6!% z`P+2{1;hjA`V1Fm^XC=xN-<+gLGmHj+>!q*V4rYE$|lQ2`f=vnFE|U*8(RX)nA?1x z*Wgy2vw`9gH&oA4a1jxyA>eXVu>9l|l8JIf_1J!?N9sdSZP#c43q!uK!vr ziK&5SAfe47eu|4#uK4UR5oo`eM0QPy7Wg(EyWK(N+fc_7|&G zI9D9zTmY<&{=p<6C3aPVd9f|;3iVb6kF0)h}s2nhOJN4vMa@781zvQa0_>0o;ob;MY zN$Jt@A3e`eCbfxV)K6LbE3}ut3k@C942@lT6$})_Vbo*c_{ z!*%Y1m`i7pkjN+#B@AYFe2*JO@O9)-eof_|XB7Uv)v~H3aU#&DZ3il{ z?8Fz3-Tsa{4bzCllDE;cL=8NC9u=DIMHNwGM z;&rNtI11s`E9>Zcc9~dl*)nx&*&vBf*oeH2f3g!&35RqjG`TmajjYmu3M&1{I;#wV zE7!vts}HKUhf%$7pXT?f2SjX^BVGzEIxZEfZ6RQJoH0nCi@sgU;tp|YcuCB=T9anQ zZ`>j9;09>JJ-4NObIv~vF;_YZ?9XLq6V^-is>7A&Y4Dld2;m63bWgHwg2q8 zdy)k!A}G9C{*7Lj!yR96wK#gZ;f3XYrd|o4KG4Yaf=@3;WSxsnh+861L#bp)F--sZ zTaHmG#HpWiBuAOUh5i0}XQ{wcgA90DfUJtqa@JDb)H6KHs;eO_GCoiI#)tti;5}LU z`j^7l7jac(E``tXt0WIn9mW)v%z_p0mS}+KMu+l<9vqCHF!}(dZ7OH-%^v+`9titM#1(oZl-3mT#hsu@8hqnGo2QpiJxR#n% zO09PHe^Aw|L8tY#!INbpQ6Dt@JG6A7}WjO(!?JdV0UX zJhoQXR4Q$?-j9s#l?dnGs107P@*1FtVmservN>S*eq4p{?|vZ;aKP=V$G*67;*9N# z>$Y2|lXrBCH{~GqT@Z&Tu&?=5)6OVZpqF&{CyfLkV+Ja&lu4L;kUJ!S@mzj(5AkOm zPHLCiHXLL%TGQmMm@75})cExViZK%}8+cz<2#(kn)wBwh0 z?eaKYh`41u5b|}U>#OwpV@inWM=wYQ0?a7OFL=Y&ZYKdK*kL@_Ela3#8RoLViw^2# zkA9^Kcv#%_V^x^1+B#eQtz{_MwotdRJ*tH$olT06T@d^v)Iy|i0<;Z~Ykjz}g&X`{ygs}#q;=452G**lt62h|zN7Ze8 zE$$1fT8AcoSW9PHm`tbrQiS0fEHtRhM#L5=m`e+)7Jg{@tg#+b|BC(xop{QiDxFU| 
z%YHzbC}>74!*$2Ai6q>j);4|W=F`zEJ@|H^5=|_!Bz?GbCflsfQncjPM@i9=l;ssN zC^1GbZJ_UMk>!c1)AXCqnZ z-;_v>bChi@d0S2qy`kbgoFp>yJ&xn1*t-T9xcw;<3a?+`G1@v%wMjAvxET@nCPIyN;&9xaxKO@sfZe2K2uH;O z0d?Zv0F3y6mrWUs>2hr}wGh4W6B_DOZ>8#)n6w6a0$0Han%>1uqn29rA3Qd9R&n%{8$#8lr>F0A9~HBlI@D61SC-r z%ge_=@1H*S(vn{{m|WKwx}QNGBe-L%hq>c>Hj6Btp;@FQzT)E(X-+CG`M54U#6zL2 z?4ifM4A^yz#aClC5a!^3hb2mqQ5o<1jR#~a<_*akzvd26^NHQ+n6)&*alK8H_8b|{ zjQ>c?0(i7VdxjbV+C?xCHUHRNDqgJA*C1OR7?ED|4)^gAWF*its8?#ht2weduH)Wq z{ktg<(yWuSdwYf=mWpuN&?bJUjfN`7*7RoPB@1MIj)8xt#>*ClR)8bXFAsx0X28_h z2J~_E47(!%pEp|kQHbYjv=u-}!6g#l?+X=EE2W+8OsN#Jk?}-7qmzhJgazw0gI z(^}4J+2ur*JKywa6-V3ZzjP#K#|%M}`s7m9qcRJR6MPd%u3RJM_If~b+c#j5{_&%T zuyBY`kKJ<kteD&rRUNS`R*#r#Oo$7>ygrSz1$Hx*q@; zfZF{@#cS$v%UO($36D7WLnI|K&wasLyE-BXfF`mEEOmss>8vz zr`zyo>*js+!scjYlp*lC+_hplcZ7b9V4WNX3f~Jy9xyD?j+&7$2hp5`{bL|!C00(y zIxwBfg@q^;-1~JLBXk?NGGU*Ue@@)p?4#<@Q&g0MAz?9*6CGd##*^j6|1nJN8%xT} zABC^an3KhUvbyUB4b#g=Jbl@}9Rn^os#$=#~5mG8fD{e{<1BX1` z^emR`Ncb8oZh!Mk6@ApNzeu%Nt`jFH3Gw@D8A457MPerhozGZ|p-(QN_KK1jba!gt zL;3J=hUoST;&c>sFbM0fLPC$iCP{$dk~};)u>FU_PT;2;C6?BgS6!qA2>Ye zp?b<|I+#0RlkTS*GHr^(nD|1Tdey;5;+fN2{p*TK5bYkZcKZ1D3HOHgWP-*@e_n^U zMRZAxU<;hYwv6f^((!!H>tvbP7$m_kkFRSpqxTI7LL;i-sz7NYnyi$L>2$oJsi8iN z=UJj-rkdZM#{xax_=n?&vsj&Nr!4~a=Q+e{AH@zTXd{p5K=!`i-IAf1Jq9zB=Kv=+ zCdg$9*?3q;N@Ra@emnwVjUaRE2N2hW*EOtV!1?og z%I?h5O^qs}HUau7f0^RYHUz>$@S3}JZ~?S&Yrk(`>j5KNq&7VZK!wT-dd~y?8$x_2A%0GKq)jCin|yE?4gO*?=<5 zOEYQ0P4T_t%gkt6^q$Ei7M%)@;d;o{xtDT!DCvfoTc#Xb6M)XrqRY zgrgH%QmkF4IAo}|=|pB(>|nM5)n=Q^R{^{OhRX0_^t4jOl>?KaY1PdJey{^^Mw3m_ z9a&Wpytfa0olD|q*`GO#$xJ>i@9RZ?S?LTRr?k0MKBG>pL{;bUN3$#Sr5f(yJ8d9S ztjL$-KoN2g|I>`n&wDuzl?_sjRd)x9);c+j)m`_$e4h0U8oM5z%bD&TPN zn8GhVkQ*h$1_s6k#tPaPB|LoOv~G@f4$HT2w=Q?E1D(2Gyl%!boy_;g{?{3)Rc+gs z5_VwU4pf}4$9nAWEaNVb+;Q`)lGY7ua`#jhw`ZXl#Pn*_b@&k6d~XrBJ{`Tj&9iv|i$BPwFzfkGT=I-Pfw}M%`ILyv{Ehk~5t~#G z5tAu%`~&cOEd8+W;i(kp_XZg-qYJ79K2Az}wG7)wOmlC;5)FWI-Z$;bq2Qk=ZHJ9;1eN9c;3!et11^Z3v+fccXL?()(!V7s` zB#W!go!$2%EYqF)^)IO45EmzF^xYoCc~Tzh*W{m2C0yA1w_f+-qf|^cu{JubKeBy} 
z9hzPG?^=H3j(q#vu++1K7_1lU#n#F`K|f2X=#o>#r}=}JPEDVsinW3sL!eo}NH9s+ z3_JZ7-7fD_h$tdEhbAt zu2-vaXeT>`6pQ=h{Eo;YuU-3$PHVnuz^K=buNxx43{(>7W-*bsU~fUN4`l1kM-y}X ze7`vY?A;j%Gvlx}jIz+PLyGk_(uHP%4uZdhUC+GajFK6o1^9XaemCaNgoZP1U`f6i za=I|ug5#Pz{0_(DPoNLE3y?I~*lUg@VS zY`r|ob{hSzNq3h{-|ZLYJe4);%5ahX#0HDeL`}xv;12WWFvj6|KYxy)i%M;~868_F z!P7TOal9Z5l35zX`2JLc`JM!OMM7)BK3um{G+#)i zhkizSCJvDz-tRu#CD(2*Q6pW4AC9#0ZP@Y zrVlwvq}wm?OHLTnBvB%qksf4=zg&G%8SLJnWTHi*e)`{(9S3c>hcjClrm8HU`uKrT z#~;CdPf!YAlsKXRK(8Q(PEN(PkmWRGpYc(wOV~P?3Z1aj{|k{{6CtnU^=^_|>Rk38 zj4D7bT~P37g>3-4?r~Hi8r>_nZ(o*l7JhAxA~7+U@P%xAOHW`m&V}ZPV3{r7BGE1( z0njhun$XRtAJbFDK5w|@HMjZV{1SnEY2&INFmYR&Rp%aHsz3_J@oZ9Ru5IDe6?8CJ z<*E1^OG``x^u$(X)oWxxm6(G!s$M8~w!U|zROvwyiIQ5|!qM}Zdt%li5TpSkD>b4W zZYp{|-Q@Tl0fyOq9@|ll3Bqy^Co&Vjp&qheN#22mDIPto^1^_s$k=$R(9xKKJ*pRMVNrhEjtG@;Q)^eW zA3govb{;eeR`3|l`M>Q6BMS+d(cqEkzv`N_*Hcp~)B!M$jeECouFv?Z;~5cbUkp!5 z&dFO8%CjMaV6zo?Qm4A@SuB}lI9c4p#QyLLeo@R2%Xlq-I2sG;sZ%=vKHHUJr>GD0 z?O7Kx4Ft!NdDgh+W6VKD8^Pa&=$X57RjvNoB*rD999^{WjeF+N!%*{?e77S;(u6E^ zI3(Ov;f+PPp@vq4(~w2wOWP)B6eYH-#|Vvom(BLLQfq|}7Ud8tX2EI;Q9LL3I=U6@ zH5Q7=6TGe;M0xP09sY3ARK@WXVj3@a*MB&=7e?$QfSIHSLq9;VN6!=QukX;G0A;dK z9`EbI0n!s(0zcJnk%H<8Pmi!2<3AZ-rBq^9l7>cDW({Mcp|NgZGH&`8xnI7suB>P* zzi$*wxtt{;Ua#c{#|nLtvr7MXK1eN1+9a>qXQO4bO!OSG@dmxOS}xPO9S*w6et07~ zqCJ%sSxblA?rOf%GpX(S)DZ7H+TO8?YkEz$T5%{LW5dCFmN~A9cgLU9P%EtFlGa3M zw>wmfwVX+;w>4?!w&`Tt9&q_6Oc7wrK4O$(*uM!9`A#PSfa_t9(e-48%Oml!me- z{KQhDEleD zGDn471ErJ@+iH{qgQvVsd9!InT;utnQyhnD;uBsyQ3f03+CB%AliCr@cV}a}f2v zu|UclZyX&9no8tVA`bH11$~V0zx6^dQyEa1FvX%uEm(uYS?`AkeF`7^kY~goj+V9} zS4jEW(W3-qjD=$P4iy(twt*?^<384C6%6r+Uia3Tr4HwIDc!P0z106!Yu zi70|qGQ334Xt6Cnhrg!|!e0fCXIM=Hv(Ni7nZEbxWT1yhCv+c4ck){V#g-Y(jXZw5 z+;i(0vhHQvxtVFzhb3hY^#}HLexCfKv4gvs<6qA%$I9f7eyl- zb56;i-ie+#PzdBkLdmx$Fe&lWKh$%Ymj|++FLCr4;m=`ArWkXKmmE6YMq&M1sojd5 zH$@OllhB1CNwgp?d||r=BoPIlUDc8wM={|5v=6p}iyCC>ed(k=XqsGQ&B_?EH6k<; zc`5%qM8UoBN%iA?RfkuertYp-3(`*Upg?34uj#+}8LqQD2^5Vga|=WNoY>$1*JBGj zatnS1K4fuml)!bRcLLgNoJI$)@v!+c9xw{R`#1P8)0_U3WO!L@V1oNLAl4 
zB00tFKI#{#E>f*ZRpeb|NO)=uQh{_{7piSYwM>=CG2OCRUN9m8Et<*QpUU18n1}M( zf+Bpt`6)2NBMi#{cp{E(k3jNh1AUNRQm;#LROa5#Bq!OjL|4U z^XRPmDP;SVHV`&wprH&77UgYX^m_hecR_12*>vd?$RB^Utw_X*-ste{+xk|zQ6_eB zHiWQU!9)C(+gpS)k($$7Jj?~*_s8J1&$(-DB?T;fB+W82csAmVVuD3W_l3bj>F^F! z@kEK9CEgSe;QaTf+bffOdD&*r-Q{>TM8Yn6hHW`J+4dOw`qX;Ai!2Dbj0CRx#3 z1u~}dMB~({y6;#=zvtqKdlN?2=ES4-2ZHC-$gA-xE7b_ERxKro#C^7RG$D~Vv|qLM zl10n7GiYwOj=LSfD|SL`a&!rNb$!%VRA)Ji)Oy7KAT^WgXC=Hi(K@##pXHg+8ivSj%pXY11$2uiHi2W9KNh*o6dtgEX+I`I$?Mdzh z{S+^Tlp%gmNIU#&@FcJG=x~F6&&lTtXPgmheivL<+04zB4+`lHbVVTi?Lb}p8y*bTPys_=5ZO|c@M}+ExjoO{#Jel z&B6$$Grcg6%sS_(bmn<8w z<~Ay3@rMYL(t~Pl|n-$C&)f6e#2b2s%HWd2m#re^WQ8G~13uZcI^||t> z&i(`Pa2q9fd=U=PXpWx!McAWB=CLD+2S|4+0h0vjy5PNC zlB87T&?&STpoj5x4P+=Jc1gvZq3S56|3C~8<;PV(T6~;Aq1Cn!udc|?aH(Sw7?Y3LM zT9ME&D5!t!-KZ1wRF4EBF35(kO-RecDcq#yk{5+pBMR#*Ua}=_Z^-UH`7gmjv~Hy_ z+=f2eH!>_b?!>60^9>lMV0o$-`A&+n0qLhYIFClf0HTu(9=ps(^zm3P?F@Yua0oAU zurX538}mLfaoc~_p(0$RJ*a*ALTigkG9jZ|t?zi^tTB2H(#@Un8UJw#11u@?hmKzy02tIg|!&=ae>GBBb$fls8A{TX)-}bEbjm~G^szzdXL#;hG zLD3qU7jNn1B&#Mb<~czQT8eKDichRRE3#@=%oqy;+ z5)01r*O-h4?fBNo9^9HqqYadmr|bC|6E7XZd~(<-%PIfg1fMqrh)hQcub`;FZ&h$i z61*0o5u#V?X6OEDgcng*H7SooO4Q!s1q93Xa!@uc9S{)mv?MnfJCSd*TgZ`dl1Kh2 zncRA?93v_B*0%|_k#bLi1_sH~lyI`&zgx^UBT5N=dD-A6ZGa49D6ytQ|GHW2-2ef)Qr=c8A<8UF5Zh2u}bs2(GLCFIF zl(9xr8VU+_*^CD5%G(^;udv`X_no`h-{=``awMVy*pYr*xRJ`Yo0mTV%Sa@Aa1h2; zq+QM@V>i2BH8Q*Cu%79^qLCUBo$o`U5n$X*J+_nF52jS>Q!2h6k?_J7_ZD-=QG#C1w&)&VU?@lnXr>COFEkWQx3a^uQe!@N@^G;=O68Fh4;+dN zR9uL)nFvb>6<5+1)&5pr3y$7DXd97t93pc(GF9_1&mg}^gG0J$r(RIjkZDnD$oh27 zKpi%C2OuU$q|OTU>J-=$mBNpyb9zP|td#DWr-YaqPgK#s2~h$Y6)IJkkx@1LPJ~}U zHQ;Uy3g**I(A8I4Llp-O@8duRLhi`P&J|=cORb&+e^+X;4s7+FDpV-M zi&=>7hK98wR924b^D%vLnAZRv$IOGB!x3=)(g2ZFy1AM*Vrry8>J!Y|s{} zH!$bi5e|yGoi-wGe8cJzX?u|9Z-iIQr+G}s@XG^=Y}*;dg$qs2201vclt&A((mpJY zf1tAucBw;B{W8yFbALgDx2b$z^o2Pq+s$)iq1vV$JvaZyElIkPHy8ImJ;b6ffP;kV})9{apB78#aO8u7o$-41AoZ2O$sN zTb8hWTzy@3VTiLk*&oD*8EE}x>lNzUHHtfKD+X)A3CiCY%A*V6@jQ{N`M|!@(IT3D 
zi|?02(h8>xFLl?{yk8X-Y!#D)-}X?=PE9XulfYjz0wU6SP_9h}`h#@zBEV%2Ra(OF z?In?kgJh|&YL47=;*o$ZJR3CmeHm29AP!RW&$T!GyDKuhDjO70)|gjPRt(nnWx}~c ziSq1n>yUgTA|HtF$agynv8l&pGK-@yKLO+;7<(H|E@`p7qWrLJ;{%bAczG`VV{ z@U0B_RBNTtR>N8cs0^G%U6i7a%qtU27pUt-X*I4va-2UJy%Nn!e2ZC33n8~O;+KFW zm(%=B)q^q#CHx=p<6OrsHtWVWm8$=L&Ex%lR)7qBXt@e}^V-n0!tR9g**%%5W z+Ymw_Pa`aAWES$)=CWBXe+zw!dUC@rRE|ehuEW+d-QJ!JiZFf0)4>p;m0qx0VP0OI zDht$#>wk6(Fulnm`rynTQ15V+1^AL zylyX6!YstXC5+|aM>p?xTz_4Aiis^t7&LjIHQ4Yl@ykq_{Ss5qdD{;1oCuN2M0r-a zH*&jK_Zb;+)+vOhscc5mK>bn4B42^?*}2k}9XoHD}EG4%O|x>?0}V{np?b z!M|0Z-8??gc-zgs*ZpZ2{ai#UfVq6UR45EKGBXXmfzfIunpBWRXN&fH99<~xJd5Ao zTwn!N(%rBxCBO}bSg^b#t5g~sp2(R5sKvT zW5dJFk$gl=ekyI>tFQvevm&q7`B*2%Sr^p zgG*0>&~H9bO(TQuP(ebZhWQiJhO#9Md_FK!Bp8-qK{_XUI@g3>I^f|f-``{^IJm6X z_bs5WRQN87*A;F;eEOW1*Ztb|!Vd+p#1~xv8xKKpZDJMKNEqoY51Ho4r8FV_%b;To z)*03)1N|IeIq%d)tKoN%XPsrzEsT$$mAKdrR@Aq+wg+-UkUG<)b0&K5Jl6st1Z_BX zYW{KNnrGU>w5QwIx*}Dpq9V@EXppZE*6Pnf-sT><79}N$>TTnaDi^~K!%ab`6q?V3 zeJm$YlQfS_b!c%D26 z&lc$%mvK|KgEu$BxkV$K^dN{}o!z^K{m~5)$=xfTR{4QQ0_s{p?M%_f@s%Kgn7_Wks;7kzxe#7kAd?FwjxOR8Z25}GF?m8MW@RN02O_j z&Grik%W${~kwyNd5(SBWhSGR4yhtSt74-mdq4Ng0H zjjo*JI-=tysCnpSQlN|wds1kNrAgtVpyZyR3g3URO&XntML@#DXU=1J4`1;YJg^%( z=l=Q-OE{s#=N+F$<&mZQ$Ix%!yXUfu+gA2qgIw391F%lv*vN9~#e}>{-7rKoH>+~XPQN#VP%6#Zo$ACp1%V|uFF-FiIjHqBx-dhPaw z-!VP0M*-iWs0d2$Tda=&1D3|R| zLxOsJG50=)`PRgA3E!kL;f)06gsD^M_$R~jp-a;29(mF-Gy%EhiAvDMiU;;H0R<%F zaz{j)<%1&1_UYh8$68>N4!uAXf&s&RIOZa*v|4koE&0&VdECot+tm^a`(S>~U#Uqu zOg3ZS7h-a>q}@VD!aKe?>%uk@i`HIaTtAH7Igi%zGsWH$!r4MM12mIqJoSA2r~nu5VQ6q67m` z>Ja5*>*;$;Xc!9q#DTj##KdyCs80VRi>)a-eJbyX*SKk%z`1$hsZB&9tO6WgiUaqb zWX*GHAeq^iS8{>*2VUFaua-)(etv%%LIfcmUWQ{+2aWKl0+uHb*e3;Of(;D^+*>3k z`Q-bWosNEPna^NA0>}T(&HU`#G35~Za#cKKluCkp!#Je|VKD6|4}KjTJ`}+33gEAw z|BWPzhA77QFeR%RiZEzxcE4)|d5rfR4fKC2jA8tok(Eo4*)a zHOW%B8Y)7hlBDPmApDQ2#_^vxfWH9>+ikh?@Ago~7=UQS`^6xw-aY$xmUFs6a6 zND;z^GideZf3{Oq-Q+KDUU7y3WIHqZ6Lw@qnx^YqQ|;iH&3~|^LRm5>>u2x8p+UJ%u7KE~4BsJl6knwOwKtmb=Pmkrm 
z#&Mb93?HxAj8yL2QE1jiGy-(yPPn@wO>1weHWx$nZ1xM|Fmdd%W~eIPoKFV5bj_j* zj+(u|aYFtU7oXjgI5_x<-wvc?L=zA?k>p*Q_#2TX}3EBoPw&dj1FIge|!&g%uX26FlT#Ia)>Y;c(xR8*t>>R|?(t3G#2%t8EW zE;1+7BR~)aVjT0Rgx6vd$}(Ynd`lO|#M4F($DP6b4yo|X{5$W?C^t~;$y3;VtzGx{ zr|5Ev^YTnaJd|2T=e>4D^Tj-xhPC&XU1}|BnjfPf>zKwvFueG36yh9m)2-seer{Q> z6U{HJgR5!}+6ze|ZV#Q{n9Y&(Y^{UOM#$;mFuOKC|OAM@N$Xd>XpnmbysFr z*ITvk2^+`~-AEDy^5MT3{{C&eu2h$@Vu@XKf2$MLhJ_`jvGJ`O4)4r)5j&dU_3WG{ zJa1GtwJf6D;F@5=;GDO8)8TQ#OEXZL*aXsn4$mw0XNwZ%K!iX diff --git a/asset/sample_schema.png b/asset/sample_schema.png deleted file mode 100644 index 2ef3db55a0d788e65698f824830912b816d0af6d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 88228 zcmeFY_g52L)G!(znn+VoP?4e_s8pp%$3l}Py*E)1ks>V+NI*~!=^#alNS7|Xh6qS+ z0)|c!q}R|w5)#NA^nLHT_doc4__7v@IdjhL`|R1X_nC;Ny6Vhlubu^gK+KvNj}1T| z1_B5~FLs6+xTB83=L7#}oF3>r0D&rF8Oipvz?j!T!$1cFf(U>>!S6tzBj8r>DhT8w z4g#&)fk1MpAP|Q~Mw7k*2=pJ-Q*9$v3h={kQ4W?VYlD=DJj!4+rNf&t-$Wr3DM$O1 zrJt0s&y=3Gl-UZ();Q&aOxZ<|NTj8urS0u)2?+@Z1X5pLKRi4f85x5s|WkrA&-chI=VX^OWP$A8r@7ie6isTtxjsla5Ff)HDTA zN2x2Nlx9<6gDD%UTT5G0J=25rL)G!M-Q_(L+ySNg$JmeGyBoWd&yh{(&6M0^0*>$_ zubqhSY-wpB5{cH<*0#2`eSLjlVPTV#lZlCm4h{~Bi;GiJQ)DuEeSMukAmrxeCMPF< z`0$~tt7~_6_t&pqO-)UI{`|?#&USTmt*EFdFE4-d=1mvW`&Z}B5bw~u5BY?()vWhl zE90si9UXgndqYA(rm{aL2l{WcHRt8!kz;~#gCM`cp}QZU6)B&(lH=AtK>BM3x;}<< zwKTiFdiCMmJ3k*EA1|+(nwniAe*JKNeh>R|8QnNPl{nfNHqdI`RA}Ci@BE`8dtzXC zeTjU0Jc(V$lS$yNy0Y1^KW%V-KmQ2u$I_@WGV)jPx8kUdhTx*F4Gj(L!-M3l&7`EH z`rkuuqa!OTE8}yr%Af{|ROy$g zsLwNq6fYfJ1w0F+`S^j6-}L5jcmSv73>N$_xA)5DGc>7Q7eXH!@T+F!I*n8fi{axiupw!iuPzu_<-*?3^(YcWgC5Qo49XGLko(FkjwOa`x1b|L^br zR1I`Bd#z+PgR!j}epqN0se%tVs6s5v7r!rW`;A)+ z5T)R-GqX8MCxLLZkM7Pw>nnN#H?@mXh^TbAeN81Ira#swh!wbZc`FjE^rdx(!;(bW z^lIk;+^k%wiR5-8Tkjo$wKZ=Gh<+;a*g)|>2p$erZVmuwP;WKEEw^Xk0A(ypE^rzz z`d%481r6L?a@rl)N*vyX$xb}CrWmar-W`Q zk-v(iL66L9uNvsHRDW(5=2P43Ka%*6PSWNyz z;I3-WAt~yLEg)o6;2W8%B!eQHYO5XEq8%Io0}x*!{Be8kj25(YtlHe}c!?Ju0aV26 z6F-@rz${nRZ z)5y;zjjMz;>q8vCoW?WD(x0`aeO410fs 
z#tbVbEe9C0(gBoZ@;>Dc{w`L)kr|HR0DqcsX>v7p%6ngEWOf0&*6vQxEdH&dVhva+ zdlgV*+$XX2s%b$B;qAz?^*GXj7t#UXSA05CuQp?eS#$qama|DL4RA+uQY9N(a2xw> z%BUlOAK>MDtKY;2ehf& z7CawB(xc5}1x@&1#r-T}0B??GQt+Se`CmQc2`Ylz0&dU|SJ}J@{Hg6tj#Y3^fg4fR zr{xBwd6yiwIgaoIL)?wU}{F7N63DE37qulf#B@FF0)_**frxHP4qHaq|tSEsPp+7bwG z3UWSsVJsj_P#}Q^0Dt+w=3bE4#dy1DiY_oW8kaSjQh^1;<8^LZ?k0Xq_)9p9!PC}e zypy&UOO2!k=={?5SX8y~grl=kU0tiJ~^Te`j2jK_tNlO=%Z0yg0tk`+#6^pA{RU@7DpK(83Oyp%I+ z6w4>|AhcWlAIW3{-8P)x@nK~Cm(-cQSBOsVUQ5_{fR@1nS>bqESpE~e0axQr^kTQa z*eifXZOj}$*e!+;CdC|L`#n}75%97f&rXC_1$(V@CNLbE%iYfbcmAG&9!dW#0G_5p z*p)nF*#W4JAMsA{L2%5lwlAWxb0y~h8X58tx(Bbg$VyKLmVi#hBW>xlHYKYYFs89I z2#9O8MVWM(Dp9lyQZ`*)Qxf@N4OVM`WvfMHvR$2-dmfn%U{xdDTN9RalTU#(U-Wc#|O-uKwxD;t?i z5|nBHuVPPGY)U6A`|9>=bj>c)B53Oi>Wy9@Id$_k{H_cSLgy^XP~Ki&f=bxP86-g& zq)?qOSsW>N39#y;lh0gO0l(e`L%E->s)7^97S6~a{_TW)eXZhsStxH(6UiQ#&gypa zh#8SZkU6#GJJt)^o42AEVDX-ISe`A_mC!Ir#D?g)OEZs%Tw_J%4mdDH#2VOXNlM!G zy)y)iV3Yr`#TUyRM$5p=G~3&RAEa9kZ24^$PPgX|`rD;`oXW@<5jEwRsPePI^qFwo zoelmIMZ-NHlt>?mN4svR(IFnn2oPnd-1U#Z9xI>fps?6eC;vW7@4Wd+Mvj0y=CWi@ zpLfcv4MbYpx^;kTSBv9^YMy$rnP5@~Iu*mguk0izW7cX~#^mXycblMpiv3VCA0u;E ziZapJoS(JvTnSXCMdV*>#fl}mzW`#=uQW6|?VzpJE!GW<3Sb<$V^?(kxpI9(?z9Zj z(rb7%X*wYBkh>cU=)ak;RThzD$yv>y1-2Wlt6fAAlh4}hM5d4Gh!CJdyIg>;i8>?U zcc3~=u=5_g(~T=NvOYiFmxkItsXg7FI`Bl3sgkQ->NdRiY#J%B&T>Sm`(&NS(AMC? 
zt1Zt~3X7~b0dvV?3y?ESzirW8jIcJ{fqV~QR)Cp%|4gO1#U(Q)CGCF{7OwvIWzS!&meJZQGI!=wWk~rPWOA6JuQ#yXWXDk+ zt+g51Dhs!bOZQ5VRY02#Eb~>F zjpdHC+Zti4ep}L`-bD1G#h}{#o102@*+#-O>!W42>69rml>7N5fXZL;*N(HOR|aV< z*S#Ea^^@_I8&!f)v%d7JZgZwX437!1aEJM0ebEN(gy z1Zdma(|yaHRe+p6|4pXY#<;>F^7WR%t?_0Ux7DX%{H=*%S9fl+S&Isj5}YOiRim_U zzyf$}IJj!6r~LOv1l;USk^?qtPGaY#B`@jkV9Jd7gfi2b=*#V3+5`%QH9L3$)<0%;vK-k-h?OEVG z0e~0-lmE!V>4@(b+<_P!kTp7v*|D%vGEoF}Q1C(EPe;*maOy7LWd186yCiL#_@{jt zZ~gz7{c%A#w!Q}&vu+QN$O~&(Af?g(aGo(&ba}i`KnP#iZg{i8hV1zDL;c!cvRhuh zcNw0NfE`8b>XV+gHj)1i6l}$}b;c%WKczSv5Hl`8?=zaouNnn5W(DlP%Vj&@?WW%N zI~TGV(cCcw7?pR}@vMB=$J~6#rY0$l&Iy2V!VZl7w)6@`=d4$3pc^l}sH~OsB(Am^wsYgmn z3*J2*xo!MPDqhku5@|EEVZsrr^8&)L$^6Zlky9be%HyK+X$bI_e#Ga}U4rvDnvq|> zgnn3w5vDqD7X_2kOH}K%S5d35r=ISI+r}klwB~PiIy8?u1jv|n1eo^!2{~b=<|w@+ z;2n+up2?mA(Q;Uw{WF4Sk%N=^kqMMOYobXm#t7Y#p3e2gtNt| zaJN$3)6E|I_Qg=fCvEOMwB2^}r}}oV)#mXjhGB%%XXGi$) z3qj|23Az_g%<&XhmPl=I6}?;$=oordjoehM<77Xd87s%AwPaGHziaq1+s38G5Dw+o z)IBUa4`diBF*-ax7SHGrzIpK6b)6o$ISv}=_%IK8v5N22$+!%zK=oYoXu zD@xl!TLjO&>fBvMcfFK->LC9-1MJznwTMN6%TdZlRLf;wP=9-aAT8oL$FL zro%PY7C~s zN$#UItS@`Z%4onJwcKXFPl(vU&0aKP3rWVTI`qe5Op7OqpLHj{{r>}9jsIglxvKwR zeBsFhrA@VV%exoIk8g@siP)wt<+SOYoXtP{k;&Iq!4=fDdj?3wc@YInu+Fxq<$7Bt zI>ZwXFDt*ucTGRBn&t=z2uH<@9Uph}#D+53fsa(mqv97duPqC`Y zfo1vjZSGYf>7;X~jsnxEkIhPNd~#1eEHe4-S0v2 zDZPj8YaP9(kp@w47LfS1(VHeUezvBuDz|no$7h{tgEpk{02_6F0#a!2JlFeYcUxp< zXvwIDW?5!4*5O<}+*(np6ILE4b|;vbn!WT9;@TCH5gZS*jyv7-r825K zE|b02ooQv76gjpmifVo{`q=w<={dWINo(OyuC*FFjIo~k%WpUWG{3hjj_;(+b1-oW zjng;TR>F;hKfQwgHf7;ntM&FyepNgEi!nw5M|D~dp(A?lVK}ym&j`~*UySw$Pq9El z~n$wF9$vZy%wp8urvOBK9uTHn5{?DFvF=Ub)$T$hr-!rt1sEgGIHL& zYCn(=Vw;M(x=j~=f|5dP7bRnK6+PdW*4}bG59eKKeb~rGgD{a{g26a;e$c8nYyEF9 zmIrW{mzi)pbHmPA;n@Gwrq!aXlPysEWex6?i?f?UKRzm$v9VrAsW6@+PQ9|WiQ$00 z{#9gIEazM?=CP95A2}Bz^Zn05v7_3Olh@w24@3B%3cHCw46)-D+h|GLG1hwI`;>c< zUcA_hd$)J>u~(&W>W>~qmNW%gz|B(yGp7a%YR*UINxez${QB&9_^Nl{z1Rmn5?PXio06Yv-$4fpsPeHJb@e>(7V7B4rL&ZAwF@p@rm~|j8_fMW2_u29oQ>-`a 
z$VyH+{Nq(^3I3=ho#&g7KZIEx0iJWYr~dQ>8gDeYD?R76?tfE;`Uojo&oQY3L$L_u z-xi%(f6ODlA-tP6iMRp+Em}SsOGow$`NywWJBK5RM9m`=eA@>Jj@{Cy`bS^bcrNlp z{=OExxzj?h#oxY9t1mXwF~LvTCVYRTCM=NY-kZp(KGs+5~yZX^$hR6$stXHn#^s?&CPLTv^=GFx7nwP_X1qqfFdTA;CoD& z+n#GEgggiBZA!2cWm|5-u-~}p;!2m2j*+1qxdD7!+~sB5iJ^37X!UXV@c1=x*SA))9{k+JjBl}#aY?rFQWUJL%w<%L7;>^?!=6?cQRUJyOtzmXvybC zSsq@Mt))luy|L;uh4m6nO7lmar1*6#jf$wBJP{hVDixW3Uj&I>_o!*$r60nxE)NHK z5_0!s>fwoFQ;g6z^RazA!T#{X_At@>CmCt$?mh!vJ4HYyRxRJ!a3$#v3#(X$5-RCe z(>^w|qeGvzJs0VPM5pv}Zz)|)`ZF{nnyG{ew`_m3f-ZD&%02t8zM0iH$QHzehwUND zYeH>{OtfkOQ`L}cJp)}w-wn_s^Xn&leGTCi*DA`nSbC#nT%IF7D_k>KZ&sX{o0^`! zw!c$WmYG?7y>a*G(Vsv{^H3fMxcJ!K z`VaE!d&dnzC86_J4{Kb#%e%qEx5TOEM{jssZCTzSX13l)bb-z-RR8qS>uam{@_I+> zI`O|XdG6Fdij#WZMeD1^5tM!8)fb4n+|1g$N|T!3Me#2K>?kW^fQckP`z?ut%1e(z z(FSEELRu72{0V#!Y9FME63nr?GRMN#>TyA>% z$#oNbka85mM!C^X-A~WjCH2v4Yq1pZ8=T;$>gHuf#DuaT(K4U~KRC0k0Jtr?tz!Oh z-DFToa}GnEWwNND&?mW*txSkVu2^<4T5#Z?UA(_!Bni4!Ys*vAuh3R3+AV z7>PH?(rhHe3@P*U~AJUbU$g4idtPJ-i9Y29SyDg$eu97L{0 zLRqU;8z(o@&6I$GUaHk-VrB=B%_m}iBCfU}GK>R~n(&i#e&0Fhn{8UXda*r&HGFDUIk*zQ7E_ zj!aWMdi~f!PW0ln!b(x>j%Q*ZXXV=)B3w>qW5y<%qH6rV!IVIkZ_HPjZpORXytGWH zvf}%;Z=5G-wiA-s5M|b5%JJeSf2i$fr!KF^hQX#`T|Vk#OZ9#>PQB5!msZBua(yea z@1}Z;C$)G4E1cjZlieQ;KTkHaEy?~?d* zOeR%x&}m4m<3mv?b9;^mn3FJ$TKp~R1reg`l``v%1@&55{W+ID!=}Bt&~fytL7Pzs zIz>}PlG=H);TQj*?Bc4^zJap;o2>P&j~WLKMLfZ~2_nR3xvDbJWe8_ms|YUj$WE5nKAeaO6U8c z%sqnC@sE053*VFwS8FvJf%h2oHrDCHK4{ujR#l~=%ZT0`{90X5-=UPMGKlR-)hUE* zcU>*q1m)(3@0;xB=OMR##n8`b`bcrH<5e?xEX)rAyi1UOTE7h%egs_Qz}!=QI+_I* zxozdHs8HH5HK>yohht&Fg`LO58x-y9Vu$qX-n2!m>t zWex=@^XFDEroJl>j=b*^>@(y>i^>#D1p<_CTLk`Vx^eYu!?dYl^7XuzkgwLN_n6b| z={w8Iy^8E5p>b1~;C`rzw={8O72DceN5Ei*C9*gk+5k=rc<1 zddc2I6x&jSz#)kN^r2a%>{cHZf11x}mbWwTi~2uD$OXiXB)> zKO_HC)QI(_aeB(~fzCUwTdW5$-$0-Cnyi#F%XcSlmtNuKHrIg0ZvAuLqX9IfnT1wvT2~YvNgwA-g-32q=;}g z3+U|AwXdNJk&mdp3TgcpzaGrURR89=W8!Al`v`xZhc_Nv8Ru_yeP%-&cRT1T&|z56 zm=H7xOc76bxdOlERy+I3COx3T*Ji<_Uy+IZXk?a=7o~2)e{{fyq}Cj>mS0a3A0Z~a 
z=_fsWW}6at6ER5NC4yy)fLd$7K!1pd@T|p#IV8I2bA-*R`l6Bqs|s6(vZNVZOdMy31IPzhN*(893g)`yd65BI3gZDaD{DuDGAHpOZ7K zzNSadF1B8~iKuQ_R7&6p9%&6!E@bA;TJ&e<6kyX@@Qfq1FFFyfa2FDLp$2<~iswKU zNB6UY=9PND=(7V!6{l!(5t&v?0HLMD8LGJN!+V$Gm4;ldvvu{WK26{{LjS5)%w ze&F0aupdVCoX?CYrErl$+iq-*&Q#Mk(@O9Y!OU(U5uJmTlH-Z)F8C%vXZc8_UX`o-y7X>|SV<5(HY%T@h z2Tb+u>-C4?21)YX;kq0Muk#MLsPS%T>XlEOzDBJ4%x$7l zjY5w9X*^a+uoZXG*$(v$p@o$c(C z4?OSfeV7qb;sYq2TxftI7kV%Ol8)yW%2Pgy8pra5V(%74UZxrlG2?T>*gNsS9&U%W zn&^~-4c;(YcG58R>i_s^GIZ8difTZW9|2}|*@o{svp@{vELR+}9Oa9kiGN(hN7DDl zUh|lrcc&WI=8~ZPbf(l1s}|a_C-@*1AmJKLr}L7O__Y~OLmd~RKn>LZ-+khrV?BhC z;-?7cb#_6R+IDEz;XXW3s|Q{3>)jAaT^8MY(_76+EI9t>vqrqK2@nCH2N8{0Z1$nN zEK7ko6ZX146|0{C|Mh;-it~+dyYzTtQyxT*_BcK_!?Y(v*mP%K?;g!GyEn2i*}Gfm z4NBPEB`s^?!|o>e+N?Tv<|T*wi;zf0CEVSY(Nqns!o%MJ<|umWGS0fP z-4-_{vtJEPQ*~X10YgC>Lp%*AIC7CX)2Ue&aKZ1a;%Vg z!0|)7ea%KE5@ygC>umF~KkLZKR551r+ABnUq%zLG7;Dc$`Ju7poI~MR@t8d)h9!}CezJ4tyF_u8R{A*+L4GVaXDbDODkWhYkIsk0M zN_Y7cGHy#J!8_Yura$@pOnQbHa_~PpM}rZpV&-xHBJRdo_q6B(n%BaFHQT-F?-=!7 z)*pF!?{MYPNwprt>q|-}5IHqz-M0xYeQ*87(1-g2`JJ%yvo!%Bo^s(7fkoMyFfxXybb{ zc6Y+r>kc5vwR3fl?yh8;;WF156YA7{4)D?{-fcX+Iq;2X4D?{Hoo2=6NY+G={K@ zypta8zR_e$oli37k5Z+m#R|M~Pr)X>GbQUB$&NS3kd3)ezS<*iB}UXqS`WNkmCG3H zj8jG6m2}}F3z@AIg#7hFP-@)7^qc&pF9POJeACwIpL*ZdMcm<|*e_hxAZ@;;aqKZ( znW)gY30?_p6+6sdcjrk>Es&}xeQA!ZX<7Dh_JXth7-;=aGf|Loj9SxzVuRGQ=$Wcs zFFCtj7nzZFb5$r4c^p6=d(IhMazdbjM)o!?h&6Ni$`^g;M!3K3*6l3zqbue_Pi}6n1RLU>%nK|ZY$EINUc+UAt?Kx?D_+VXTg7o3D%`$}Q^yGyy$vfn+jOHqdz$M^k ze}Ah$7OBWEtqqnKy#0lyq_CNaf3-tQ`Ac4gOz|y0JWDp~^31Q&$^5Qjn-|0M7n{$% z7wP)~O7LCTXGLXR--iQ|fhTjX$i69^*^|E6@;=72IDCHQxqo_xhk0(uiR|qs6I_AoR{_F%)N%Q*jB#gz5B#;rEP(ABUqrJLiCSbM)<sN4V<+??F6DCit@)O!c~l--LZ}^#!If z*B816a5pITD+cQ;^uYU9R%-pZN9LG#qQ>~($6?Xlv|#v-oM>CTRqhF-eU#U)XC7+{q6(<#gRdOnXlEpRtG zoa?Q1lRW7868e5yY@^L$=EO{84&<`j{a^Wmof}(|K>LvE_1N-%ynNf*VgS2}*x8sD z$d1$pMJ6Rl2`yixJN1A&Y{s^&Ne685^dGZ8ReL=Ce^F&fGtLy4lo;Kq87QxppHyq(V{{EpDU*tHW>Vb6qzrl84c*8tD}8~F z*t)Oxy57RNw0@MZx7+4kc~yPhiwdac#qy9&9FTleqcsUnl1fb0#x$;H^K+AHieM}i 
zo=HbpldsNtRSj`CGtF9*{nXqkN$z_iGd9l1%Sn+w1EZMWG=e4btINxzQ_rPEuqqNU z;C%HR&dTyKNy1N2ffHP&*gdm8Em=bCpH|1~_iLgt^t`SV&2!`vwo{smw20$(73pPofd{q zE}dkaUAno#Hlj*dI(k-SBAUlR9-ZsKlDd`2gt67;O%~hn@vPqUOXs^k!dFW1QS2%0 zIs^-ym58#G_UC2?S(7Gn$B!4MMOGqQ*$bFqL&JCW*zN>S@6vAuX6Kt6RBuz{l&3K; zIA;KsxhX%DjumK{#?d+#q%_*3_8-%p%&+K66?k7YCrOx3^$LpY78%&zWfI1@{kru_ zv^Q4lQr`pRx86k%547jmHthXG%!J|b(!_bNY`OyBQv1rt%)5Nl`QayqH4fhEdkZON zJ83CwjQ0G(;GfXK(-+Ph2lZ0scuWnY$BOten_bR3fZo>=O2%QB=8d?iqn#q5pX6iM+<^QTT?TF0M`$ z<|X@R)o+3+DA@kcJ2dNeM$6^d`}~AUlWkX2=$brtHj25K#rf7I2;sJWRyf9{svDFO zjXP4jR}5i&9?#yTEH*{fzV|f)d14nYzMp1{M7ee5@Awwl`c;D4h8_Y<&^fLQL<;N{ zx|4pddOm<1?^M+(uHXE*oG&B`e+|qD+sEd33$FB*j2F8WrA{-!n1;sYQbh^KoJ{#> zOo<3&qSU1Mk~7WFi;TyUZjvnxD;GmJ=1AVf$aT!JBz_8Da~%e`wmU{Pr7`+&a>LZ2 z%JxaP@c}Ku)3#s(-X}xOhuJGutS$!Y>dzJ#t~KPBdYUettJnNV`97n}kP*BT6`IA? z&8-c5NXQX#ql zL;BuF+9utJ8){cV^l{q0@SeRu2IXxlN1iyJ0Oz{sQwJAd)Gqx7#cxzYY#Xl&GB_vK zJKff%v3Y*$J@}Q@SfY(_iKvbR`%EuA;?sK8d2}o#dp1j)P|GfFZ?j*hml7ffH(&r0 zN_BVvefdHD>~yxg)@WS0`RfnwOp~0xWb(WW*x`)4&qtur zu$DOje{Qt7y#6K%eR91!>SAbC^V|K0;v*Bco@AY6rSV!OvFyb^w4t(BXNJjsrg5-T?SDKH;FmQ}j$L#H3CO*s| z8o9xP$)Cg-w}*deeKVHDbEp;w5~!Z87M>vb25{}#u?w10!;CNv&7k=czL`9>7cQRB zV#T6=)OCuV6Z_lG!nDuT*NvWqt$mp~JdWe(TflaPD9fzPb}2Pcdg^>7j*5R? 
zat=p`LWb=#3r#2L#JAOunfQ;kf+za(kwGFCs6#|!4EQA)aENQPi0&L}r=N5J)J`~G zc8k5jzRK8Fs-am8)(V3o#*j!uI%Pcl;!2m}cSpl_eIDM?n0t}&k$|PY9cJMvvM0mF zv1bwqM-w~<%GO!ctp|;KI`^ymd!5zcnikFyS@O{k>tzc-1&R+{ExKj8fK( z9;Pg^NG(qzgUE8C1fLMW@z`flYlcv;m<;^Nl*UepTKO`$JWFzJ4cVf;+k{u7}T% z$PKX_48ct7&jhU?E?*(H7k+Kv_i12yT$sO&VY4jO=->VM=;+?U4n}6Pr!K~?g>Xwi z(1c#>(GK<3w?R^b6oPFLH(M9+NZx0y*Gid8Mi1T~Tq3g%KN~1~vX$zAaXpmrsBIlc zCDRQ-Xb{*FAdzGUwQWO}S6tgK@|4<+5K;XiAxqF?b=Bn}Kh6}}++WqcMqaN(r>X$a zu5KM;`)8eOs#<&tuQRMT#MfDb(hv~(c=)#MnN9v^88R*3popCTmjAW{2Xy6MwPDc( zgqm0u*tS2nLjfc=D26>m@aEBNclG5uFJAYe8cFM&^wvM7EPJRBGpEZEVzb;WOqY~F z>&F`n%0N=HzGW$fKVDdIH;R2M=3roZy&1z%P}5U5J7hKT8L-5Qnl3$aFLflg?#mrQ z#8ZyrPi!^vTGX*0Sz$~5g^;;9i?cRC?(!`G63bs`ltE2I(>Fo!pEr={;2p)H3Uumk zQ;jX}={AgD`{PAJx3-G=9z58dy0;}sRSd9*EURmGg$bG~#ZmV&eZ_eqaQ^XnjdRU| zZR(x$i1xfeu1BF+6l%8gIws26z-96?JyE(~ej^9UtrG4_YQ!lKJXDeLeotTIHJ+z04_LBZ+2(%RprgbzOa|OA z_A{QmH8i1ng=~TH$y7g@>6c7kh83M?gC<2zwi?#;?Pfzzb==F7NdR^7s)O;(4uChrY z`s6dSmQ;{>V*%mQ2N$>N9wVCcWi~Hge()COCp0F;S>w62;8F1gJ6hz$KOcArnuFJl z)eb6~-ruD}lr^hoen@lXA8Q<2-gb5?>qOCS**$@3?0C5pp{DM@Qzx%nB_}_j4$;HI zU|T)pD$6~<%S&_86*=c$+~B||ebh^TA1}y3t&(#F7DhAU=<-BM z$4z$j7CTIG#WW+}dNOd!wT8kUJ{)YUu#L6XZfv{oiGjPXoZC||6BQO0v(6?a;4;tSYzCdotmAkQZ?FDh5 z!EPMfW8^OW^DU-c6$mevLb9%BVDRwu2dirnyWrgr^pkem`xnS^NIUzWo!kCIbV!gO zzAk?#6=V9XUNtUiTQh}5jKC#Ge*XAa5;k*PjIi|+dgm&+7@6<6Xt=khOmX_y0Sh$nCKJW29b61ZJ#2<=rIun9cs7jra4C5dV8MB5?=|pgl>wqJW!OZ{2Ib>lt z`Y_*6nhG}v@9!6Ls5t(+&lg#?5f$I24`;LAXga5k`>%(oZTK7w!rxCFd<}T1WUpap zLyd5y1MjW055d}q_O`E$Y{gq8?r!zXfUJQp`_SmNXnj1kq7ygoJd}+9BfbD|bNd_3dm$LX9 zdPf&HsS$kXABRh3DKT_Q(%#PNBCkhB&iSbriv7RDs-)kjOSz^&LhEe)u;{=+RgeD? 
zQ!;JR_9!kG$;01d%itD(Q$7FiS3`j<$zeV~bp-n~OPZr1f8Gcu{(B~SkXFU_AE|C3 z?=nDU2>#$U6!O0O^kp4&C0`FTb)9e}1S6AmX=vLcvw659dfnb{+sik6V;ahRc*-i* zKlt7fZ&#oM^@`M#F*Cn&$C?#*1Fu!jn4#i*M5FxZ9C_LM=MClX`b1nSfWX&$ab?H1 zs?`WM4V%pSmq4L$<`e7nmo-1d%of2V!0UhIPga%Wf4H4h3p-u=u5I9iXPw@pPxegj zIZ^$u5&KJgfXZN)g$o3!2)CH11BwN>xF*41;q!m3HStzSo}SXa8gyW|XP-(tMD;sLXJ0bT_s{;>W)%1;75 z0UWdaarK`#zIkK10k0=&$1(-&?`7_F{9RC&&p#y-5`Uiz>wQ1&X+BR$c zY;$+QWox_hWP5Pn;D3FU=sG**!qHsHwK_hw;b(^z^_@8YCNKZ>E}EC01MlTJZPzLg z&AtRs-BLOA5HYH56ZfV1b=W>o#2M+%+NkjsLWp3SgYtmyFoYleUBVzE%`@Yh>*U^F zG}v92IgOh6k2m12+zA*Cc+D&Kp1!7nC0)d7Z9e4MolSG=ZJMuie@XHaEQ!vw*_tLS zKGqmN7i^e-yswxC%~zVgvyZLJnsfUG-irf^M78W>H~C6V1+Q9X3Q zcXTuS$kFTib_yZ3565d25I0=IEwBy?DP&=J#Ix={(AS`E`?+jnn33YyDD@q!zUVq@ zmNoN{CJe_X^5(kdPJqQN;Q2ZZ2FU6DBX@h_YQ1cpDmEnR7SSBJtkT9Bd=`DOOoKqT z8Y{9ysqSb++3t-=)85p>|6THX<#fpfw6gE{TL%D%fy%?Xx15jE=M*=d#X~gJ zn+{1?hi72DyMju!^0m?;C3MY`$*${0{S?a~xPQD8x=#txS7eWRu%i{D zyk9h5Sz9#OaPMnoXBM%g_Jvl81mlc3kgX@C6Sye{i6muZavntCas(#jD`Sc88zu2S z&5D(;I96OsS}lLza57KVyRpS6e(do3_F)u|2?AQUlbFwy!2n~hkj|z{zKCcoI!;Ei z3Kzd6;&e9ZO2go=3)$4g$A?M#Y{YdLpi=)H8$`U*De?3SOw*#a5aH0VvDr{~{823k znSSY*D+PszeNVlHo;x;7SVUM9!t;aSv4bPl3)b(35h{r_dRV&>6(RxSiE*#0K z9uOGHD$>`WlD#9;dDE4=<3;cy5`i?WH^O%G&e3`Dg{9#j?jmmW^&zY?B1wxkl`?Sr z9J?%H+4w_%a>j-yh`@aZieHd{GAwPl!^pb0Z`$l+M^i(KO+fYIZ|}bU-blj+PXqh< z)rs7GXFWPamG0+CgUlLBKSzC$xF+O&se^UR>I)`CI4Cx;Mep#44Gw%CaQCx>kU+q3 z1j-V*0J=BNbuM}O`Q@+4^>NoT=e`5ll9zU3@%BB5s%s5DP$RB@7Xw&+X90C68?*LI z7LqaUR&Y(=mS~dpi>ee%N)2`HM|%l|zO?RQ`g^y7s_+lK)XVPnrvc3@s^@=74*ISK zv(M)EN<|B?krUYSZj|ZWOOFLh&%vdT3#qf_+j{r*hYJF}{#C1;{PhuF%okP61KkUa--PNfO9d7Mn)d%tmkY~ZnYGnX1@qQ4Ik z&2*L@-#k*RAX+Wse*y5-}^u`IH)uh2B)InG<$8!2GE#r4g# zPxri9OqqYhV(2-~k)tkbE~iwCqrPTO{F*Bo-kY4}lss65fM0!OZDW<@C49d5a!0(3 z;aLK9O#`Rmcu~@Cv54Vepk6?+`hUQ)^%MnDwVSVxa9885@ql_`Nle6SO357oah8AJ zj>=BG?h9Sym?U+)S29_2f4cI&sK+~@KyuR6*kioMD7_;gi+?Ylwa8D*Od#0_hVZ-w z0=3?k(|+{8>s98ZJ4D*X*f5*Puz$q7Kcz3G09|;u^@Pwx_3hBwe}bxT`fI{~-nzZj zJtbm1$xiGa9(~;ewC9d%X%V;Pg}r#PbZ31wgkJ>+B&W*JXw-nUDT%5Du_Y+*BkXuM 
zauE!6CN#>E!5Aljcrr`W+_9*8quvu(*ES5GH1`kc=?&l{0CxAywx?|>S$hsoIN!+X z%SULV1M>ZdMX9hI9qIBqjKNdKqdiE>RU%g*(mC}Hz_X8(RWJkvB4~Z=u3QQ(_*6u~z zeT$DR=~wpK9skKGJ;G-{^bjG%*0C=6IQE?^ynopld#~B|?rd7>@?iH}*GKP82Nw4S0~{ag(i%MaXJ^Lr zfC_JHywuBYUKa-8c;Ne-o9d5f4b=Y5%6<43o zgJ1pSGKGHShQ4#lgTAh`>qN;b={NM(fI;1L;>8}Czv3>ncfCE#z9Wpc-YVhNlJ?!+ z;4W_WL?2rxxUJgo=`R-E!EBF+KImxBpf?y0(yQ*<`^Wu@-%oq%HVwIL#=r zRtpTNZ~yfJ#I^X3u&aZ3QQOt48r8g)jwREubF(8-asS;pPqw2Du`Sn7MQ|$m_RYkF zKR0h22D-TA;DZk^!HG|#;LHCc7V8hUACfc5s+KnC=!D`;%7pO#(%_C9^j5_B$iXw) zh6e1(%_Sw?e?H}34a?U#i2T|5h??b-R{uCPG$-pC$&LB`Q*593kt2G!$Y7f9u=Gnbfqf!>$FxkMu}vrL=0#mp0A5u13-CoU=eJoz5Io&~GeO=Ti&l0>=D4i#4{a@s z?`B2G>)eS-t2Z@}H@3gWKYwc3wspJ;=g#c2P49%NT>HCQ`^z*An!%~aDD2*=gNESZsh1*j-v0!aWsioLa$Lz+tb=*PPRcw1)*%Kk{PksHF3)(x z8UCTGeF>nePFt_p*zGi4?0*q2IXK2~T&~BHPegKWq10MuKl?h-Q6K$vWJHE#L}@+n zZT?H;{LP81qkA@{FB9NBkY*Cz9cMzE+si&3`koEy+0 zJypR<&BGg9b#8N5`_YYK?!!VQ+RUH9=KuE5f1D&4=oEI{PO%a)+qbTdMq;_lTCMl_ zb#o+IhlDqMpS?$@&!ili26^Ok%eDn~{R3kWI=R zd$5e^v8}S($&0`tdF&T!bgT|k8vwoWHv%*RplZ_KVR$GKnUr)MhuBYAK-PBTUS}1= z4Hl;yKjTZU7RSDJTA3gJaD+yUj}Hw^O~r*L9t9VjJKYXhK6I8IB#t$%!_qL(jG$Op zkN%(N_GJJ24JffD4Q#0sSbfsxMDgf)eXq<}YFYKaPGus;V?#)v9#r+@YL}kjJiW%f z0IvTZ;=cQ@iRb$|NK+9I5$T|ypdv*&gr*2cSClSAkX}OX1O*Wh>C!<|r1xG!jM97W zBtd#js0k#%chS$==TCTk;k8MIDR=J9?%X-&+}Hnm@EP$DDD`*WHFG~S`^J%xj_hMS za=sAwy@UU>n+;@PNi?<9n;5OMe$PY)0t@?i?CR=Cj}t!1iphdk8reK3qjG_d$ZE zXQfiCQH-Epf9`rtieh|J2(1ASU(Di1DV_!yc2M2~j`DyfC8B3?~$#w{J9-c&jN4&O%V{0${bErTDt5!`7ux0KuTIS

x`g`8olmu*@%Kj&opDa-B7?A#ly7&$8Es0xf%Jh zGgDH53J2;f+e(h6jT4!EVa)&?W+JJTbq)H2UvA%oT?_u8yRGZQXhUrxyn++h`-zFy zTE-a(?)H}R?T#HRn#VOEtzf8yK0%83sa^zIqwdwj=y_9yXr7`l(47f`XhOz|v|Q5F z0C{KEme%dMc7V@l@O#cqV@3TNyF`f;9&wPfh8aK4N+1<9xllddCPr!2)40zs6rb!Z zp)y=M-!>s>%1RWp_1p_mIP3hx%xb@NZKLWIV#k(HAy3HAIbgN1GXH?PK6qym!PuG> z*ahvF@;kkX>AhOaG|x{f5F^XPa2N3P9Y`sB!n-De__Z;OR-0f1?bmu9G;g#XB-O+s zuPO$6WMLGdbvyR;po*mhJ1sCQt(n`__(*2mAZRO^G@N9W+|CdB2Yw zU7;EsHL@Qr>pm_lq)FaZQtl&sm{M8wWX`}@0XQ^~X5jx0y{<$C<_k8j8>{)i6!%!G z=!%2C>#WD`j~~;7Lpy$`?eVwQwR*d+o2fCG&7Q@-E8{zcn%8F+Y+xa_aLtZV+l_9Q&e^PvPlAKm*NP8JimhrF z1f&<$M``Uov^8Inpf`#t`Ns6|Z2El}45z&VQ)8o~HzZO&Ua$ryT`OO!@Oy6h(iI&Q4jdO~iwo|5G#FYEs67z#eZ1Jr8Y!-oSa{>= zu~4n9ETJ{~a{=G7{8>tCh7c5hC7_QB3C^4a96cbr{7hZG<332$y%7BOq!zJJ%Qu||)q(!c-e0XP)dQY2 zJHH1>`E7g^b!2stM#EOAa^W7Sz>A$=xJnhyLYXy88V~u%)bVpuCmIIWG z@-lcb?8%VvJg#X(IF(1sPje!$uCq~ zc6Y6P6yIJmcz8A8& zg!Pw@B8$pOzDV`>fu|ENk38Ho>#w$pAny0YIwkuKOYB!`aB>53*?lh$JMzYp%V6GS z`%lcTR6<>UdRt0V36ltoviRroeWqr`*~o z=E{QvJswcb)YWc6&m@!Y`qcL;i}%KQ)1;{*Zm95ly8ZPH)?0bGBq|Tf9(CcC8i)yd zdnFzwo^NP@dC+Vua}e3{gsXVy_Zw9p9H2LLpd6^+#QGY+L0&D=+vrOO-5p*Uxx?%I za)mD#6_Vy|xK+`Sx1dt^mT8UPIrH@R>$nW&ao!u^>3wGIr5%B7#avfzvBhz= z(Lv~Yz&nfhu9gonpgp(D&NeQC>5ejv{Bn3HIJbmX1)0+LM2LkPvn%`Dq|t6 z{Lz;2Gub%i@SjcPr(e&NX~z&sYM)2HvQDd4hb}Dxi1;@8`hGMjmTLF(8M!%QETPAK zIdV#-TYm@mzUNTTjKg#^HYb4iE(VzPTa5Lkyc1)K8v6=yG0`(gN#ma1Z{d%HV5GK@ z3zR?De1UELPPZZi^HR<&^AqusqlWSn(DBMZ)e0@3dmmKlI|F^ypo~B{Uq2_6p`E?P z2f+95&AC6x`~mVq-vQJovu4xT*zb{PGRM|rofw6E1(5kC*dP3TN&|nSjl6cfpaxW8 z33?IR?ii&FJ4IgTa6p|0|kxUw@Jdah#RrF>DKbxX@0B*;z$- zl}(GyGLH=D;sL1HMXzXg(tj}9NqyM8#E+K_>A!TA^vLEx&R){LGmW9?Do$2CLUF3JYlZquJ>P#Bxh-}VQ z8vwYw*xBY$^L*H;#&$A}Jans0<*bzKb24eP5rcod_Ij{}xH`ZOG^jK%M?;=?;-*PEFxx?O+6@2dSl$QCmF9%C==Qf!a46?1UFPRf_ z`tU(nI3*Q|-|O>fWZ>8xC(;3pvUXQFM!avZ6?7U1BbMt@B~l!A6H9!Qs>Yv|06)-7 z43ob$?c0I<=${wd6|t_s$t(j=G>457q_#*31@~ zm+D%x$fQHQJJ`ONPvhX+z~g~I2joH9Gii}kP;?;c#T(??Kl}NM<@ zWxXh9)g=Bkv=udV4-*;_oV+&AFsN431gDFKNjbkHG5^R9=J8{nUUHldFmHWS8lnHl 
ze{%PPCwW6wN0tV3Tc+&Qw{cf%HC)B%UDrH6wt&*d!=bn6VNqiK$ZnE>=^AQw*c+9c zJ@1)`fk9R@!=Rp?)d{tKK^njZ^Y&WsUALYrVLoOj)T0^U%3o19350lG zku~|3v666`i9YAuvA5~0H*Ww@>#hb}DMpfM=vJ8C;tNpfl!(=q$Q@pu9>>zDn(6f$ z@#f+U0=7>&&xIM9Ii&jTRu!1cL-pHY3GrO$nD!2y8(d#la$V0ewI*}h~_@po`1K*=%P^~MiTlW9Y8nu^0KfC5i!F12Q_zDweR)IH0H>G! zR^F3K*91TX_-7fwd5mI)G4--vaVc3*$%yjqThy2HB1;Mb3)C!bvGtQK3Z6`4d+)4VP^6oExQ zk5;^OyyS;-Lbmj~Yuvec>Rg1Z>}X|W0~uQ8NF=Y1=9ea&p}_gkj5SWPl^vdr1bb6Z z3b-1Yqu;Y%z@cm#l7jql_2)GkDdm^O#9C*_sod)xlj(pL#%R6Dwk1!~%XdNz&@n%~ z2FAN@hyn27$Ni+;!Zn44lkpb%Zz^m~w&}gF0<;P>URB1(oS%xl*Q8DF6P}${yOB5^ zvhy*uBqTHnRNFBGJ^L zua?@|MPe@jWZRCdQsqT~9odyZJt3y;2GPj}_aJuLx`$8T+p2b>H?s<`oP1K%O&}$M-T6=2y zQa>%yDevvuCPT<*mI6!E-gI$=Y2oPefZ0It8j_e;6&6p?yqtcwb}F2%xyimzm5buM zjk6uoT492kQtNnj1@(14q%T8~<(BT+u~^Z;tls);;)1{Bvf$R2jZ7-`hc0VW&~)C$ z5pUx$?OK6;^5=$H3o|r_v?`5L2KK#yVhsJQ6y1I#uU=K$)#nH{%YV+{9xdw>jMAJ+ zxEvCUx;xt6q?Xeqh7HSti7t;?`%K{B0`s;8$7C_uVEST*2r6iVH933-?JcB`wjY%b zKQF0$po4NUqLd#4FAex{$xkKAvyDy`LMzGPswWK}T|43^K$FQu@nQma-Dl^akw)+q z0kcfB_HHo`0wT4)FXt*RqZxv_NT^@X6;440(mz|XJ2){Fl{OtLmFyqQ2^zT>WYT7? 
zO8WX^x@J|Hx*nob^wjE|VLDp7Qh-qZ9X>D(z#j{6Yz>cZu0J|;x$5Aw%;@X-xlr>o z@2mp|pnc~0&v_jTj6@co>uY2|se-&zP_v+YO}}}MqVLVsArBCc5Ok7$=hb?O6PG%7 zcx58dnhHAmJ(yFS1@zsy!pdnr4cyfyqh36TTdwhTsGiDGUH{yTyu0F8tcPhtmZ8V* z?(?T_nS2|$2>hu6@YS&4>>)JoN~PHF~5 z12}BEt>5xIcm@Z=?zr>YuMHEIwJo{vZ$8Z|`;_ab))4s`x$uk6h2V%#!ZwN!Btc@bJvQeLb)aN=Iu2 z4|ohdL8#aKw@{;AGn}1YpTV3~=g7U;J+t|hg|OZ;$5bY?5VK3pz>M!6bf6i(Hx~t) zrLH%rP{Prn{6|@Q-r1~;(7}FcsOKF^UQ7%+=~?H98)Z-uV=zh?7yUrjk@m?)qN>sP zlP9Y1iHyX$DRb=ZsqUwf8PAANK>p%vj-7tWMyOEA<7mxl%d4u;&6%dR-`3U?jv8@a zGDj9iln31mqkv~)Q)#yKXJHz-IP#`-v7g=dq-w|QkuK?Pb-XDDOQ^Mk+GSYF5!z~j ztCa9PRw5vESb^KkA9X;gr+T+15*)n>sUiDKW-Xo}X1pP2l}5{xo0T@M0j=6(@~4|G zvHfcPFY}QuS*GLgMgn2RWaGHO`|j*k9eO!|vM|E*v@D*;LtGbDJ}M@H(AaN%t96YD zKh1BwN9)I3bXI>bvg7QDy3DBN*=(Qi`0z2p~xs5W{@#3Cz_TCIY; zcNc4zykB^*?>Js+L))%(2V&j7dA0}rY~=Ek_~~InwF_%(P)$7&+nIdL()lO@fm$q) z8J*6rI+oFkLvoJ&GOF2oSx?_QY{+(nc&!B`Rx4ymT_OW`eJSX4vv%h7D^2-4D?Z<(JmR$-py5Ob?c<7Ij&@cC2cA)4JltIulf29#v*0JXd}F>K?DD|5ZamY18tw!JP= zZ&lq?gSv(J*I)5<)WABZ1&yyE(gqfSIc;i49vfdll=7U)@{hsvB$w%i1PWldHg;Ty zy+PVKedC)-LP~?ocq4lGqu$=bSh3092jB86dYOp7bG0bhv>B` ze_t`a$b(Ve6>wq^JZ6Y~!u3_x=@?NudJTdrh!rb{O$m88z&&;)h?|WHQ2BbOv_@q| zIDIUDFHdApnBoB!cmodKHX6WXH_+|8Wm2Gc`lyX#atWGq`?ytr0y3BMjnTfV5^Un? 
z4#xY-4rVU(+G1Jbj&`mPmGy|y$K_@8fse+ucd|#Qpk(kSwOjaV79Hw_yI~DR9>p%d zf-qN(6?HfP<*>PegwHy&(>tL+e3ng>3~n*wXqI(lQs}{pFGkEYg(q0_vS|+-DtrFZ zpcC0I-S>mV;f2r0<$c}L=JEwg0WC8bRJEGMPQ-}P&x;Osb{-~Q3^voc#Y80L2P==B z@^N|kDd~N^7fS1Jlo|0^$`lYQQ;WqVkO0A3_nUM?VuxPj26|e;b^1Kgse8Ts+l-D$ zbu=#D8lM~R;Io_Id*~Z`mlVH+Zz5!HNDxQt(a-^e^HlH?nLOQQ)5&aKaFuo?f#~^N z3SYDLa6;AndkqCp?s%Jk8acjV{W~bd0J~1#BlZZ%esJ8EM7%K9tmxQA1~=M3(f#Sc z6=KUtZUQ-7y>v)Wq>=zD^5^{xY?xMzz=y#_uqFJ*Jp?9bf0tIxj0PS=gzK>qn>zhf z8tF!k)$T7&T*qfOw)A%?YQRsFJa2acJdV;E$g)SlW{tv^iFZTaoL+rL2LIBY1X$MN z85#UU-jQmKbM$xIduWCdrqo$OE14V5PvQBq0sk#N@J&iEs`T`Uk^P~mW~-p%+dEgg zsOe@Jk#l<0$;;qjw2f{nCaqviBQbm-I5YXK!Hc+{Vuzg#K%Ny0sAk}fbFYt8(3Ik- zoecnfjCs@@pcj}L=LKzZ-*WzC5?rYxs-;*`k^AIPHy{ndrh$^Q6VXWg)%+s>(Qi1P zD@?FrARd{*)pW4y`GW6PmT$^0la~%l(3*Y5`({K0BKkkLq2_Ofpkn0N#15YYGQfA9 zGr-Yi>In#TO(5EA3sJBbtUHT*h!_fiw=8t`O9cnS42dIZ^d)?R@d z2&JI)nKF*u24@CwN7bTO5oZCv&>7{IU&4*9<8NV9aNZlWX3a;c`;UhbhP(vg9%Ca6 z=a-4ilmx{?VB+v7@3VPJ2QYQt?iVsMZZ_3`azBlv+)mOwNC{2?rw)|}m5UW7cWW>b zkF*6&mXhqXLQob~U!=CtOS|)mV+8CiNP%x)*qPGzZjz`y88)Ds!BA{nD9^n{QY@yPr1=;jZ6^L^i zCEO(`sb;3tbni3*IK2{T|BmR7^xg4tLOyK@@%A`Lmkwfb*v|YchL_6RB-`$}aZ zZ^F31OZsISZ4Ing$&|BuGqgdn5CARRTsCRL6;Q@TY6P3%YxDU_1%dU;Dk2tF;!*6sebT#m1lBWcYQEL%zDCd@Bws z-HMGxFxu&JwV$sK^5i)J9;XlKQ6QnVrSFK+m5il{?i?c5KPV17s3g6KrMjnkcjrFWU_7`fdM3^)Uv#H*H<@Eu zh>*=jZ1Q?H{~Z2qyk^j(C`sKD?mr&n3>$?a4+68jZW#iZ}&IZf7)*>^BCa2LF$=I3NhDBuWoT-rWlsiqeT|=6Q z9|p>MxGsDa09T&La|a{X(~fOiJ%UxoD{v`V)oX!h?YASaAo5(Bqd?NE5VK3$`>>#{ z-1oj_U)F;RPc;eVt)&e0#5{~EaY_8-LNj2-Q4e^>!&4D{>m5c|kT)aIsa}1DR9g2` zPte5XGI7IbZ|mMRA#ClztD7m0YLSyAsH(kJkSV3cMnM*HDBC42HX-(OhI?lEe9SgG@psN>Y zd1#<8`828r&sXkMab;eJ_BDt(B@?6wl5UMHbti+jK0d!a(!$78p;@>mSSmDm=c9|b zDt7%)dQr>(nRXiD0e_6Jn?L_~(b;1?Lr;_V-C;=EhR%h$Wx z$_5*}JL*vFXJ?*Ut5<1ySKDpQuv$$$UH||ns1?V0V?Td{iH3n=FC`JK0veUOjFk_} z>Z_g?w#rmy`?>d2+HS>vB~1&?O!6EPIVo856Il|}A!f!RgUGkxn|-M2l_0TV*E4&3 z$)WYW# z;wk3U1cE)qa*FrWr2ETf*xjFf3Bbk}QNh6jy69vt0?Yp_H;hcwghahEywy^G_Fg=3 zlmHmll`hMsBsIZC@&?A@cE~wAKn$jv*=gjusYImN^{SBxo 
zTs0uxEZ@v8yNfTbm!ART0tOHdDB4BOz3v7VTNne|^Dj@ghbs`rz z8qYBI)tqs0McS1u z?8@*of-Tv_(fC$N3EKOCcUagJ`PY{+>?{28(ZefyXN{>@bU)DXG&1YaN$sPlxaf0*cee#yr?i>ih-Et;hPt5 zu49Bhg1G0;5@onxvpj5Why|O+a%z*l?(xko0c5YAXjJ~p4a_bvIAD2$6{DN%$8&%-Jn~J#sIp^zR^Wo`fGh%r z75w-^;{@Z`@iNHtwkT+%u^L)3p1C?Y-y}|(efjiH#8BAE1Pd-JarJ*48tMcp*ff$K zU+>*B1@t}0++pI)*Nqs7W5q!s?#?e?vc9&D_E>l9NS7G)FYsfc8|&Ynk{ot0!|r+) zRpF`oQqOAWnzgS(xM!$Uj9kZO4aGi=Fu4r+nQK zzGXY)gUx6g0xR1zJP2fOYq;GtQ0yfOi=txXGyQ7GYU9U$30eb3)R#dj)j-mc!!D7= zUNw6!=6#%TT8xx>Kp&Zi8_D9U@8cvYkfx0-@EColw#!P}U|U616o9Bsn*@k|CFAwbH(zA6N7pctSqL1GnJw?)pIb+= z@?iJ;rz^AbTal6NENsR`pt}aFM7^Sq3Sn0P_G-32g<8mgo%d|pB@auh<8~`MY^5;i zWQrFMZKr*32C`Q?;ebkqnNTUlB)lBAallsIC!1vQBC%Z>YnTUnc?jaeuf7`GW@o)- zZGspB!V%Xayf)-&gx1duL;|t(r?OE!8>?nmDlhT!Jb^O%bq;YFhY-}E>~zh>0SYy- z%P9_G2SV)YnrBB3`6uM^_b*uY-rwrh_i}zYUoH+fcG|@7k7oPrU<%L?HX1XqdzH!o zZUVWofvg9#ffjyLfG%4>u)8>MDIM9UqUKq?2u4xRKu5}jTV*YHaxHf4)r7;+_lci= z=oO{yM3hP3$!`g%>s3x2pHRG(J7_`!JQqpfa?7r`Hn9j(y5l~Yv}=bd25iJ&P8dT3 zl8BQ4G|EUoZEwFG9%iuD9+le8)K&0aDmFx}*#ggcmIw4Tk3`aTv2>5{lh71^A;=(lD1`YHsT|o=pADo1DkJ2070b2eFo## z0hHRrXTZkmO_sdvbW|W{q2);0<*N@blEI0mEDNXXY#@3-2`MEL{sS{r1nHNb5h?8^ z0-|1tj1P-wp`pisrW?4%yq5w*>LiE#HkiNm=KD`UTNZCF*Jd+eK-xnigu8&CRL7Eve{SF~2 zPkKddc#{U6Iy>BeT852+>j9qLOGwqVxxT>psEFH}OkRVfq(-C1LL}CQ z=iuiM{Rz{xBoBJ!G7H5($$f$!XH%JCl(D46Un%49D#i=^*U8_A_czvRzDFg3UT%qd zZZkG5dv_!4E)!*zf#df>LKJ9Wz^g9~l_k?-x?Ccc_={zU?L4&E+*DLh>P|=CVJeIw z2Ao^327Rz4(qwjPx+Tk3bS(vSz&dIP_IAucn&kfkRm zjN*qf*WoP7Bs1NHsyi&(?`HUw(%Z`7vL6lw-OR`krJ0YR$|d#&w{RpvagU)C*10Yl z$~j|nEo?(yeJAZ?nHpEA(^R?f)OA9V;1n4zkk|C30&85YF0GYXwDWE%VWA+!ZrHp~ zS)XtC5tg;edJPg`(B`KE^q(V^s1<3n;(f(&e#cyU|A~$Z)N(;I*>-s`y@rGg&Saq* zVz_>;g;nI)IPbZ#nS(%5?oYPsLe+v+m!9g>)fV{Ojt9$IoH*PLHcR)U=l=A*P=bP* z1$6V9kJs^&{UTFoGkL;R%IBfwkG6Y~c@83}M7lqLu3ylS zZ43hR%bxIXX|2}*3W>MxT%r0>qW7HE>dM^u4bK=!GPsWgF4Lb+eOJjqoyLb!K0%$poH+bXZE-Koe0J@Bi zBEnAESvM%Tvx_YO7VDEa!RRfF%TID9>rZne=47t;$z%26>033_%r;!&G_C6&$M!=P zUDB&O0U|1^mg9^3!xN=;{kA50(%0K9%|6ooU$(+aXyy;t(<3Js0XToNpcvs@lQW+Z 
zmi0zJSeEOc;62>aF^U-4v?Q+Q8Fmdr5ET#4WZ$-#OFu2%mngx@X4>6wbmyVmp%@pB zz}?Gs882HL~ez* z)%$jBY%{Rf=IbyI1AvH{RIyOJ8}ej}H4H8B`T5?kEo|jXI1Y5kGdkuBh@ZDrGaT$7 z;Z?;m%K$Z@;qARUA9c1^qtOx<@>jg83zYVZo9k5SX%&#F{y8g`j}Nr6=O(Hfw6A@h z2WSzYM=P})eb8C(6M42{IwDU+`Lz>MyRwo8CHA`q6bezYO`|+Vo#mKjIrsjH9fIKn zSpfGyL(*2Alnj2Mm`xRKY@dZ{TVsGgKn51i?023pA9I!WH{?V?cPzLQ&PWLQMSiK5 z5D<$Yu3&!&LKja!(s+rFKj{yy*bYzsZ{J61Jnuw5s6DoN9y3MdKd9a5FxUotb;+pc zmoNze;mQba@vdPz2K1>i znGg_`!HI-GPya=Qz&*0p8>(q}l7u5GL$~Sa`p6coG)A0~(ir<}W9OdOogFtHOw45I zoy)|WN;eQRECp<8ai~i+gFJ5`sM{G)a;1U_! zPALo3Yp;B`PW=60g7Sy;;EMOD=<)IM120H;V3rc^3lU#UnW(FTs~W;-QD_O+8Cjw3 zv!AcO)X~;E;2+dttD&&#HH6!(f;F|rXt{Do)5=>6%wHv6T)yN{#C46GzSn4ph?U>L z{`!n|&2tk5m{a=}IZ)!1bmZG|LT%&fJ>0r>U;*y(h!N`HcT<*W?PaSq{5K4h$-vNk z(qNQhA_3{~J>mH1nOR#mKq(gSZyX=l^0MRMbJSVtBboZZ%Rtjdts1+_CHe` zEnyfd98yFdQBQ8}m`H5hueCzyVmK`^C zj;4O3rTMF@;$lFr?|&l{;W^Ts`x8$9)OWfGpjK6AV(Dh#8-#6LN9tm9U2D>7DuU>| zVb7CBjR8xRo?PQT)$BWQ%ut5m#3^0J=Vz%wkDNZYSiwz=8fSbZeTmc4QDBXw zp_9Hx&R4t6K`g=woAQh?0al)#GXykW?stuIJw1P+`pFlECnvZ16t9@K9cZ=bowU`% z(#yrjezgc1XLb!54H;)oMgg2pfWm>(>|^-a@5i_9V96_Y6yz{wy>a{JD-PKiW=1^4 zIrJ*cwjf}v+{$eDbY(`^y*2gro$a+xL4A)%UX97Aetd{kZ#_KvxC%L_@m^3OQc%!0 zNTD?qFLGpTL>V)P&v*(xjX4n>?Ow^X8IM1C&5^#D{$yEw7C@q}&9wwgmCy+4MH`|& zUkNtzOVrmJZxfiOGApOm$MIrFlm(pFcq@38Y$N+>7aTWVXtCW;9*Nh|zRhd389QAA z{;uKg@8z{7dK}kTSzTW7yK0jrAH=}{B#J00jlmBAl;_#Qu7=7dd?)0`Ygp_L!))yY zT0YChkeo|F1@NzIK2?EIRzFzjPaZQDNWvsZ{3JYFn=2}I;D@La496)%Pw%!rK(|cF zN2X;YB($FrPJhiaun6CnWP`FmA#lSa9q!;VW9{ zL1v#L(=7uO+HYSclrugAILzm}FB2=q{ku2;IY7}08IQ{0Um+K)WfS$s^abA)-;v}Z z^uC&iy}T7%wpr}T_fx!T~54#w~9uG4FI_rJj$gPN7~=H z?@>((UQpCQZ7z;DWg7FAFT;Vf3~v})dgzE1Dp*DHV{@Ur-hhG1`2blU7sGp)HgBz` zG?1S`a2d$YfO+{s9lJ@>%K^|?azO||fN)>eN7?VA^_^3~i*uW5?30!S#Ks&do&udI z&Ol(jy}lhylaI`k-v?rn8+1f;mHS>;W#P^Kn^R0OvPagL6(}ZJ${ZiTTKb}HYK}Yp zF!f2=n!m++q|^;LPYe96KC$=REP#(B0m>Q6$K8~r0tTBBGfM|fLHU@Mzs+7jfE)_9 zj46LPlfi*82;(XN8vIk)i?=@&r_m1FM4e~#E)Y1yG36f6)tFtL96W;#Oao*Cxyu{m()wR9e8NSLI8_Us$o 
zVIc_5SXmv%tVlc(J+P|l?i#x@NPGQkc9N<&n*f=<($K$o2@<)}+S&f*C18e>RWMJ= zETvXM%jgh-fC)bW_^O`$DH}uQ)jFfzW`vieE^=;B=uq5Z*HPI>T8n`#*Od#%MUWQ2 zdjAXfPY;+0Qs;plOj%j#hVk!@Ev|UKJZ^^a{>?X#2;>`JR;fpb|Fzu#Ac6tJ@UOhG zJ64Nl@N(O0oYFt8x?R1m0DOvFsqNO0p9n=KNd=qLv}j=F+ffA^RKo7iJk>MYe*f=m zXj*P+i(DAnvB4T;7zbE4OfFUh?m5nbw5ddz@27d;d!VP_I5n zJNCcIFlU7zC&jDPb%Ja7wytlP&-I620yM}hkq^+aOMk3?Ps!SkQ~tzg-zTPImzx~` z;SY)KMVc;l>^Yj+OLene%>8PI2?cafQULc_EwsfF)PSM|E|_Lk6K~DfZ>t4s<*$Cn z0>b`Cy}!c#`6fzN9<%+eJdlwZipuAp)v2QUJW!KkS>Mkn?#pH`)Ail)e+vFGSC!z# z{1I^qaetf0g8&MMNEs>zyQxG11w|@u6zbq@u)0^`9xf3OS8aD`Gu>0Ca0*ScTadcO@a%T;tNV;I!RrNz4`b*wR{mb{10Hq zb+&+B0S|Y-fHW7Snf^}_e_mD?_KL?yM&q8rIm^E*NLMY2EWL^MWV?9H^@h}gt;Qij zOwH52-9+M}t>LpvaaCxC$X}EA_h5YG!e?ysz>w#CXDa&E)w{~E#2A~V>(;<6ARBBZ zx5$N4oz_mP*SLQdK;&O`mJiGU6!U-kJm>-Z%AXAWqDyrv9@sfkF)03lZO=1l9 zzv1_7EPuZszBaL9>Cd@Xalxw}@(lU~7{A-U@dp^*m%c$=>+JG*w#V+_MRA@U{m-GZ z|9*aP;pqB~YfFk_?@U63jj!zQjh9<`0zdu!wNS#Af+W8`^CllPYsXZ`sR0lx#XpD|~TB8MzV=I-#nQL?77ay1XfGF)N=+IRi zY!z~JJO?6YKnmf%a`>hN9Oq*${TsHt_(u)pc(Ax@3d*oK{^`I}x3K&I=|8J+hFSN8 zYV~Dy0?DzFehjGtX_eUsU5wgki-*yyReu-&e>5LBaE z4j{R4<(~nX3=VkxB2&+2Y@dnWXTiywje#I5o@2NEJzdS*e-G}R-vd?l6ZfKe3+%Th zo={UI9xm1uT=2&RCj+4S>2fJ!)XcJ2r7Lj?f+8lpII37Hj-C7 zndn_ZBtPwjn}&(eG1j;LYvN}lI}iX3s?a~RT-D-FS@BF?#2ZK(g#L5Z5Z!nqj-~3Y zz{)cvl(s}oep>$%xl!Or-V^?>5DEOeSC$_evy|vJ_%Fd=!-_4ZupGOvFF?XKkm{oc z&rfI;&i$JRQpR^q4l=n`^w&f`8vp|L#bQ(}u@3)DLFQrNE>sx;7b6Yca_Z|}*~AT0 ze2GZ^{oi4+U}!$$qM(jM1c;H{|4Rf!*`}!Pdth89Gz$C zLUpx9_0dG5P_Dp~{*d`^66NRFSt?+van7O`>&bE#|M+Kx4Tdq#{`F+_-aWkCf3o%U zKmM~>N*^D7)<74~1&;cwSr3G0V~b7Hph6t~jnSr#0p?()2c3o@`fH|=vQgr0|12sp z^JXl&bdSw`W}?x_Znay`!>BTp+U@;+#uQ0|hoPE*!2g!PJdD^ROBB$h<#tcoSfm`@ z{?Bi$8_NK3diSi&l2P6>>G`HMqyqr5w{H8dkIcQOu89&rioT&{S})UZVrYG`(s|l` zf%;z$s{#d&4GgeUYEXxQu>0uPBq)a#q0XXmEKjd6q8gx>ntPdcVt^x0P~|_{e_RU9 zqyo+mEp&$sodIEbt;0OGtY;TISaPi4{0=ttvGo6J*Qr;PvDNeA-DhwEJ}p&4V%G3Q z-Xq-zOOB_ddff%^Q6}1nL2~7P%bvzkwHI3f$GaiS34r}S@+oE?f3|6-<4nkIUbCoP z51injUuQ{BQ{AtX-fOf`VNCw;&s=}B>C+tyG3j5IbPqlRWG50zf5)+sf{SCLSJ#+{ 
z7b{1=Bu@WsIf`8t<5J*wnVEeWe2^nwT)a}URj4XaM-PvTr7f^!A=+YI5B~Fz5OzDJ zfQqWpx%^v=!<3l?&rlILb?@Sd3!O-dJJS2cz(e}jn>p(_OYKJ&p6GkB)nrotZr_w{ z`RaK<-ssch0CtZCkg(sp!!fu~h?7d^G6&mkI`R8a!!yX`*AfbKZN%%08K^mmQ~lMm zVN9wKy{)EsMFLW_Qmn1-r<0oeadNk|)}hS?>w(#878uL=;yT=70Tq1^m{t--+IA>6 zCRh+=Mfqn*SsE%wClr%zdR3-;MKAWOdmR-*)L#ZWToNTD{$y>sJ@Kg~I6uPSY9k_Y z_Tiv!S&;vL+LcufFL@Mp=nLWf!0!oPo5=%>#!1lKW2|If|NpC4x?(RGIndxW=1TYOTs|d}KrCC*Lmq`>vQhITd?{OAmXn|tS3_9(r&L@)A_<)G=F;J%TQ(1tPt}WW zZ8qC24<011hq^+efS0xcjzmBO09fRi9Spm3#x$I4B_;#Gp3e%?e!UBtqPC2*77VMN zNsP`8u1qamc-xR2SF6IT-Eziu0C@qwat&DT1e70My=ZYbIU6{`z>?9?_j?#9~a$Zk7^+_61!+OW@Z zAAOkIJ0qnZ)Le5{%)9E>Pm}vi{!hDJMlouGGU;X^i3>{Al(CV?mV$ zoLO9HcY*eTMoj{rD*8u{4!C)v&gVqJyNl_>vNjO)y9G>e=xTZQQq2W{}t7Qgm57r zwy$5!w7G5<;trF~=ZND!+xTtWNAy2OS_`xTGKCZMdbwl1Y4sag`^jw+`6Jd&4>md^ zn27256Ix;mrTu&=ETS{7m#^&|`Mq~QsQpj_GJ;*D|Ji%zV%(eceC=Y=tE|cE#eG4V z8bE)`%>b;d#v){?;;SE9!R-H15d6yL;Qls#x!C?yQu7a+I9{yxGFLE`HDXgAC@Uu(ffoieS8T#klW~cv^&LC+-2Ty*9%rN zWueq(mkm%E@*ZWqxb9Zz&n|%vk0i)g;FjOuC%i6(?=u^LFCs{IhCL-n?Uk1Nf}Y8; zL|i7e5mv)exJ=Vg(0eX=rTIYS1|UnCB5C2&pUe%78Aq;EJ9c&)yyKom$5f!gazrW1 zENzY{4dj|ghdLL2lv{6IWE%qXsdo=)kWBh`$*1^7j$Yo->Mx9X1V_s2pe~~X^f<1e8fy19=R>8gF9S4hc z6?$Ti$74tsL@{kED#Nh0boYu0aQt3y%^^c5C%oF%nPuaBV1mQY1vY2jfqu7JuIIwQ zjnkg-W`onG^>;a#XBs(7tz3Ri@IRs{kxTC&Z2+66XC|-e0-B#~l$j#hiim@ zdpwq3PgEG7`?jW*CX35f?vvxm{_;*|rw+&UvaB6NOd8)VOYfKf?DC4~h$%IFH#&&+jz@5)*d?s9PFNPY#?Yr##hik)JE&%ge_TK2; z`R$hrVX*7lxWhg?OsE7O@VyJpUYX1tjOQF@bo69&Gt9MQFeO*iM2qscTpk^1>Eq~^ zmUx#Gj(97F5gS1MhlSh+ES+m#7HbkD4Y&XRM$REeaj2A4ynIjD&u#>ck<0o9)|}zR z7K2u>UXTzpdv#*&QQ~$sWk&5ZdB|P)JStAEYXxX8{pePjK8OA$_V-SXC8P9ao# zS5Lhn z9=bt@;q`IWlI5BRqvYy}tV_4XzaQ1oe&4w&i9UactnMC9;)S{22aM#b3G#@=49Yae zlzO3&<8a+L*j`7uIMahLkkbf6=>YQI<-4Mu&6UDi(;|i+t89G42p8<|Z)gYeBnDCx zgK!$4+-Zd1Zpppj<(^q=kYL(UE0-muASpt$NLqzuLph{MkSTx zjAZNwJ$`<}zKG~Q6buB%AlU#w6x3gJcWE}QlQ?C{J?-5y-8LSPG~!|`Xv6j;X7qBA z;jU}`hgoc`_R~FT)MDZDHOe@4x^WF#@}jR*lp2g$_^1yZm^-!2h47l$S{}OnHs`|@ zFNbi#S>0GRSpdFKq9I3zcn|ZEP$^*jPk;$K-hw_FeC6STn{`jiwVsrt+c4 
zXAhPZ>m9ov7~{oH#8 z%Ge;VRlT;Z>I34lkxWk(V4Ki5xy%nDE*YpLyA^e^=Qh7Sfk;S>fN}{w8%!TqO{sfn zKkorwPqRPsSC8Ov?6->l-S!-KpTxI|+b+%U$8Fo(M-D|M(u;hEEixcDF%2$vV7upP ztS*z8xoBF7KI#Us(Yf z5M((M@0*epK03qF4YMX#ASmOra5RZW{|;KYTyx{-)R< z+rIqHe#$+g?;r!|*haq)vJS^7pWK_e8EEp_-3yskQIeOIiTrSdz@v@B-DNmsU>Cka zSysb;$D}Kr9Jj{;>eEChFlq+pVbd=mqDLn?L;Abm3m$CDnfGm?}v0 z`ow2(t_dbP6>1j>ddvM&S;!Q02cm-sLslgKSs%)##iu?;kXgi{Px1BgSX zfx|PV?13WcNVywp=iLcNz3XIEna4O&JOixxxw`B#30Yp@AROd0>NF?}@qUJ`yN*0R zhVXc$G}{b@U!}yT5ZDB|XH4&s_l87qNWJ?vdR_Puee+S`66U@k?w-s}+LZfSsaS|Z z>CAFu{wfY$BgtpE7L4#nr!z!f1_pP5nA^*5-nE?c?Jj9)Q>aEM2H3+0OwG_)eSW<~ zZ?0i~5w;)A50%BVuP)U1wJ16?1;g&>$juPU9UL`G2qZz(R)*lY@bRS;`k zEsqvX{@^|#AI81YJ{pGm67~X2K$1@HMZMnol-fD$J#tVXD85Xu|9mMnJXh@gekiws zY?}0=#@O%|IiTs55qvr(@aSd3xrqdQYepx!!`KZO8DOZXLGn#BnEKrp_Mo(3;kA9D zzHD4&4E#bd$x=~-1W?mSnVLn{sWFdm4{q&vi$o*uRfl?LQg#fVRKDAV?lAEdGNxKo z$Vzs70%l`s#=Fr$;@sf?$S%}Lp8`AP9~1QjCOkdNn`9@pe=-hD7y}^l60&Bs3lOAf z_(F-RB|*s8rqRsv>~b=RHLXqgPBddGH;}j@Lc5`EO0A~@WG9?FsgIoGE}SvVZ_4Y2 z&w5@ae;Z%74>qm=A<)U9-xYn2MxMu$*W=dkLoffgGM%+`Glb$=m9L1qpt&Obm1vr` zIrT->pv?;Ij4U%N!SEEd2B+|s2H{;7L}^c1>0w*f-18*XWUDV9NHW2O5;;540pz-M zp~yW}(lQQn7tgjbJ#kVKjP0H3_pKa` z^j$;5hL;ms5N~W>6OcWFnu-_sSyXaHdaW;9Ta3}Q)2HnS?G-rg2E-eH0slqZXU)J5 zheyw*{sZR#SJM^e7yC2W2;&xXU!J26+^;HT5WCXy3KS82xtjI;(BzXCg13qPpyw&2YEN z_Zbvd^hY)GxA#@YXQGzW)3R=29#%ux zmOicwuY^s=se$Gm^Epo@#N zzi-?s>rvKY*AMmumY92S9|IrtjC6Yj;Q;evt-7$DUz9kTwcR=x>q!_S6lTapiclq# zHKITfm?$={t!^@}7DMJ5zqGUW5P5=4Px>w@_!SIDE52&sL{|dQ^mF#;LmOrpeF+AO zUZ!;@WgFxA3F^FO)RM}j04%Z6SNx9VbqUaR2AG8=d@%#S(U})^bc^LoV@`@zObXX1 zpXC@YcXgtB57Kc4DipIlSI@1HLcZylSsF};Cg|objlNOh65jc{HKLC9sQcZYVPSfQ zJPZQsR|8;*wVZc_W?BG%;^`H~(EO*>X0Z%vIY4SZYu9-S8hPEpg9V?w?`E9i?b(TO z=t>HTbx(35tb@)yn6#`F&|MnI=^P}}q-S;sNk#Hl?rRIsoaI)v#ZB<1kBvv0}kD;z)5~fY0Tgao?3A%yf2hYkYLbV zJ;-@Z!0%O*?b5 zTSM}pn*iPGIQ2u>yxtEFRHGZxFU>A*!Ia`SC$XuQV*$Aywzb(47|x55^v^RA>~??I zL4Ru~+!-D3F~Y_xkF1Ng4*-)3k8X<7{!hD|tz;rT4$iD^U66fsCeI`Z?IjhS<;Nf0 zKkU^Q3|5pFnV^@jmM5E4fx;rIKEEz@vv8=}e>b#%47?b*rWS9l6f5IsQkgOBbojnX 
z=BAz$Tg2aw;0%5x5&$AxnoLVZmp^r%p!x1!UK`a_ut`(%{yFeBI!&;*4?#$M$uD@b zgxkWoHnfJrl20MWEW7Dz5a+QBw{~%M-M*#6wBMl-YX|t=ehNV0_%qNoI*{{H5GY2y zdy{ojr{oT}aLMajZ%solu0?d}B!xewXU}eX@dZU<|NrzU+izkM@Nt=s?zBw6deTjd+PfYI^by-;-MWfH;K5&;A4;DM?!fM5l z-Txc#!IR3Vo>u=tJPOM-#t1$Oo8*YUJa7)?JReffrmWpPUJu(b2kk2x!T&D2mx?3+ z4;?w)lH=Wm`_;gfJ1uEFaC~Fw%Eofc-n17$ijzmP6^-xmxeN87J%eZxa@sZ>=s?~c z%IfLwkAJKT=-+`RD`a7y7yhoS^y6Qvd2*7=a^JXgc#Jq!G=Dg{rI^#XCFz!Rlp>mK zyyPxnjv8!*N2(?Drq#MoL-9r`E&?PkZWdih(o~Ipv-M7|`0dyfxpJ*%1{^u=oGck7q4L@{f{Q>*>w zM;cZfy+%=LkbfN=t_YRSU+o`cE8pj1hpML!O&lx(rUZO2>jd4dCIaBcQ0c45OX?d8 zzxN(_Z2h_fkPqyGXSG0626-mH*6cF*cB+#kHZNH{c6!7?=S}1V7}iyIzfHY+Dlv2x zJCiVHB#LI&z^xOqBT;hQJ0B_Z3^C*PK!cb5PKQU<+Kr_EYy+zQ4~fDv77mmk3@?IW zaZSaTP}|X0K!&M(UU@ZD49I!~^GVMY5`SiXYifn;<4p+6T{#?_QaovMV zZeYvfY|)ysx{o&m#1B)ssIvkL+;Jwl>?aL;Q8GHji@>%g6wnOi+u1{Zl;V|`01?3K zxYqneWFNM8>%es|M>j=iTT`TShS(O!;d-@R@OIs`#x>Gc(Mu~crk*G_voCtK7N530T8Xw=@lDO3x~thWhVY^nl9d^k zvw?7WSo+_QyEOhh8qqn?eq^~Xrod$IY(YkjrnO!?ht5(`4=`|t6mbme3 zT5ovb9QZon!{c4nKFQ18844qy?}C5ZLD!z4mm~7Z(|nHQ-i~(VpcEgey0ua*-@B)% zhdr-nS@%CC`D)t|E@OtP4Oc+ft9fpnOHDzl2VS9n2ZF9WUp;(c&}?AMY*g!oo5e=7 z%iM2i{AlI#tDpn`-*i7-t6Pmo^$H{fBy4Ye>}6mWI(nIv)C53U@2Ei-lb0AjtxmJ> zU;ueIus!|Vd#WlI6>waGscv}HV+8#o2u`-~w2;2j(JHt*!Qg9;LQ427@@L)K7bX|xd0X6U?ESQSO`X+jR|LTLq$Hr$Sf30 zKPrp$B-8>BnQXO%dKUN(j*hQqPs0~v{;e=CS)5nqAWQB*Mqq>H4qHvRV@ZQlU=Jq# zh{LF8BtOl&;;Co{6!}<$m5j7Rnjk9{&C?3tw#NS55ZM`Zu*LM*ab4 z08d7-_lifX-HgA&Fj+G&?}wen-05g({x-1WUDTI zaYtmdx9@G|zW=+VUZw-G*Vd#5*fy+E3A60#FTS$~h=XL3PT8~nz4k7z4-@w}99RO0 z1}p1Qokjb7EUTCD+Gn=t`%)h5FaW0&F4CO_#JVCtrhfG=*$oOfezDv6MTaRYD8+zp z{atNT* ze{h5{{~hVyy+VHMz_sEPmgIQ(lgxy;O-6o`1o`L~)L{qUv*`a~8tWkd#3fY;Di|+D znfvW+ABz}nofMJ_CVww+~jzItfbMYNlGm9PhGU**I zDP`3T+GR7cTRSVG}qP-*}RQ((E(!wWyMn{P$eV zZ|@dKD;aaZ%IrajXUi_&?D(Y|Lzeyo_%c44%|nn2AcVhtY6OVjvBp5w*=Wr$+n7&E zaw>o_Zy^UEa@fXhUxzPOo$L!Ua)|sbwTYk&Gw<>J-h!AN_To&rD%4vnTjs;bmOW2Y z=nbJQ8-?d_e6soXDbFU5JvZDndh>^AFyHPbt$MKVZ^3F6JzO1I{&I1O0hYO7|MWaiST;@dN(GP$ 
zwPe;4yQ%u`F(=*Q8(+oFW^SqcW9JC?ulByDWB~ezorFV00KXIW@3~F{NdeG!blqYF zVpRG8<{pllkso3wG)BPH)-%iJ;t$6Z#AXydK%0i4@bAW{ZUQw&Ecp0KA_ z+qVWfncU~{(l1E*Z=c^Ef#e90_*ZBP-UHnia3p!|am!0f5F_B17`eAO9_tnzE#s}T zCr9bnUge4X_gO!oke8DID-2igX`$tRzz=_DEmK*?&Wk}W6)OOT#aaD>`k(t<*8e)R z2vtaYLZ1>2>O=b5hb;F;K6)nBr)^IcGiRS6|{%`J1hOx(@DXRFIH8-6m zz(EB6_ouuyx7#3cPb5Deeu!Q+AwvHi$6vgc@>qIkzufzJ-CqMTsKU1N@Z_e;#Z@&M zYl~t0479Wk_?V>rmHS9+%t>9`SsG@xtFd! zDY7enA0j=0*g*GeN?_R8?8>4Gmk+$El`5(h{=OGN`XYA%-vWS{zXY7%R9bHMJMi?e z=@;JP^DyrO^1pVDmx8X>6#L!IBlPA-y0j+KSxeLyDMlKRmirfBt*kHGg-Gb9il3d) zrKtZYR($xx`D^Pd*s`fzYTpDt8W90N>v(8Vm;eHfroYXyGBcz=_z+k|jzmuv!IVV| zpjv*t*-&0N66$g*3H7Vj!ySZjDbBq|P?;;ZM;f!-0YZMN%}loJixt@U-%T{I^r<>& zpN*ftm_m;M2OaQ}on!5NF+hmCPTXma1*IziA-3vy3rh zK2JgjSlrTIYgj-#ax8d+&wu-xG>>-tf<}4&F>dTl?`8mUTYh!&!$BwO)g#N29f)3p z;z=;or!SZkaF4nF$wzAyXzoSt-A+>{$nW1N%TsyL4^DqaVvM6~>&y#HIFH(g+j?94 zU#xDkQ-k<-6a$X~oM+Z|mktL^F-VRFdzbF}Bo0%Hw-d_&$(`hRR)w8G>dOK8zy&gO z_0C|TB3Up_%WH*Sn;YbGgi60R5sZt)occ++EX69o0q1N%aazk8R_x(rt^wC~d7wij zkE;u89#ylH_>+eG9^?;0Eh?ZE0vW~Tcq?S7?3C>lCK$yUMte;@_LUpK|FE?zZ#}@x zn!GQQ1Dar8xObR_lf>A*k_S*eyVAw%LQfpq9-+#E!4v0TX_{MRf^jENd;kvFAWwGe z?mesN*G`1v69=vg(F<@RN&)jm0Fc+m;yh#TzIF94=ZA)k6n*^V9zN&Iw+gAnndPSP z^mELMagF7s=Ot&p*^tJ@n;Q4nl3gq+Ahtkj);2$JL@xK9?y&kG6wdKc;84nJ`H>P_ z$;O)0Yk!Th5rpDt0kj#n7gNwC6xG#f5@)$Bt2wjX&2t2l* z%og44vh7!NmVu4`#j8n%*1I6E zvxZFNqVBT}*W1@dEC^T3s^RP}-0lPfpq_mK7_>S?b<)2>Js3U!*XFxQ+_K7Vz6tw` zT0+y!E|abIuOCkIf|EriJCytXkZj)%<&gi%=C3hqLdt| zx|UGE;ut;^_H|_vUXC2fM`?lwYKP9fWZ0RSTp1QPV*0{m*;o6}R+$IWobO?n3vF)> z8f}FrcO+AuC3BGgITQa&Z+|8MqFR$XIPdpv7jTJ6ro$xYgZFak2Wv#FDSZhiiQ5$s zfIwA!>n%&_L-)9l{lGKPWY>tvGorHZ7ATQx(YHfB=B7tudy`xWeX4te`zxlH)-#`& z{_tul)U#aALAgp=CYdsPw@k>~WY@#~T%XKa(YsmOEO+?u;%TEF&#$hNkjnt7iCFcA6T{qci+?_0b!pXhZ@G{DWflQUHS$t*I1UAM!5d2A?p}`8|6% ze_I2_LMyy?)rH*-(JSAj_l3-{@qfu&TOwfdJ-=G~M;Veya$J_4)75qnoasMUlH zKNg~n#R|&pwav4%k1Nq)_8GXM&==R$ZaQDifq*T5#bCR=-~B@x>s6 zL;bxY^(Md>@-@pj5P;YAa(gPiukcI0m{`4rQ2*$DlWgsDwJKeM+xNu)+xdzAMUJ5* zoXqAt8+cwlqS4jab=P4t*d6qb_AT= 
zRnqiFC;bckqY9K&K49xib4k1_C%M+Lg3-nSUT$6L{G(8{^PWY1guAO;L)d_4*THW6 zGpK7#&4eo49js{L^`h1SvkPR02`&s7a21R<_1#$K{0%m7x#Cd4Sn^wccXc4}X{v(Q zH(9_2?buES;W`yI$K%4T(ff%P=6Ye>sp9VAU~tA0XX$ak0(OQDQzfJZfiz#EMGK4? z@D}j_eKDs5rp#W!0=12r*eOqo|BUhqC+{$mfiNShpuu#3uZ=-UE5nh0VTs6`mx&@Jc1E28e@}EJcx!jS@9%a$>q%rjhg)7q993g5ePr@!mD}El<-VMp!D#TLmD#s zW%8-R^sj@8;c6|d?>f6&R*(?}nBv!3eU0 zx6wM?S>DZEgCJDnfz}mBy~qHwmbLtZHThBhAWXdbQV`gzOY*)^AH4x)~==#wF~q57EQt0b*B z!jGx+wIECB z8k*#8MSmq{_gkdL*p3bV>uq7JZONUPNVeq$IqMtbwYb-Ty%P`lT$$wvAu=qvNBl8K zb<`Ms(fguA--b+_ht+^U43P4^#lCj=B!2OZnaS2J0MoPh1JnB&qFT+g%9i5Idmq0UkfU>Ys=0kJirweF|<|DygR0o0!g2U$w_v5MknSVUWY-LQ>3N!57mn~N}YU-2E+xFE0$Oa|Q? z_6%vdi+Rq!L6z4^2ZE}7Gir^keaPB3gOdFzsS7IRvelL1W5VM15$fHDk7fP6K&461 z(po00K%PGS2`(b1?6%9-g8N5mHn_riHf?VEbbV%T^BnWS;m=8Tg-84mKjMy{twIM| zxO>-It-P!H4GOQFxaatx?@vjE<>c7OiosFKmno&|zbkJ65MZ@0{m%IM7Y$0+xPt^^ z?)+g6KKaWWbp2oEAmptt>H#TK%syb$yK9Z(QuQ!js@h)EyVb%=y#pHm(C-TP!86@L?FtDv0Qi2|KIcejoSm^xPC zKyow{Skn&Vz>}+=Y+Y~fHR;>;CT_83fBowWuzo(G^tu@YdP)i2{8|czuXXxZ-I?)q z#3G~E$a3LVCwIGneO69=D&Sn}hihfZrxVW-BPI{4G)vKsf*Ry0&vfW{!u@gqe??ql7}ZU*~RKoWX=<{TQyP>XU(ZvrxS!3VYV z3YrBRsEXZh_%)&^fWOFBK;0(kx{}Ja2SkTjV}Pj$fv*R0f{+}|IzWEh_cT+v1UnlZ z5OQC?N$vxtV%~RTX8#ecZ^p~eN+Xfm0$?!$EJ%I*C4j|P;o%Mc%VMMk0>kYe71%K9 zhG3;2!~l@OYcZ6SsOG#SBx;i8K^Y%XH^hFiI|uuK!+jw;SJuHLzW_qfTLnd3HwH`& zMCU}R)Kyv+r1fbf$4}OQtaizk^2hwOOg17uTAZR^cQP;9)B8*VmayQ{?TPs_OLQ1S z8f5-4P(}MLc9K7h&nnN8b#Y8oDJ3ZgocLT57B2@54|PQU^aeZ;x<%m2tw_4ZB`oH) zVNi4b3Gt`hn($hF3&VTufS17o#c=d*0mHf{o7KYnmAVTKa6-6lAdu4wh7(6eha*vE ztBZMCm1K^FBspR|toJW4aUaeG2uqhfVAut6<%y<%^RbBukTQM|@=+muPzH(K9Bo_L zs)kmVl~qV(Qi!kDXM48Bg8zUZl^FmtA0ATR$q+OuL|mQ}cpS%K4>wT!Vs>Gv-mX>G zfAfQRnTZ7<6UU=C$x0u?nTr~8{z-#^M^Der&SD)`z7g?_eM}E@>Yk2@1PfQ4gzxqE zc|XeS!8`^nC^zB1*-%DH8yJaP%ezMRNd7eF9N2lW*zv%A1wZKe}Y`1 z<&Oy{JZeu@yA(^g0C68CdSy=Ca%B{=^e`2Xa;hK&9(3%R5MCh5%Z*qy!khaITxN#d z^Q=uHkYQtSeS<<8k#5eXx4EmQS|UtnzEFw>H~lJXsHXa=!j~$&&-ok@aH6xOUPuQ-5YN0q5HG?4`)QvEBtK82IMcz%0G_ zodt)d{Muf$TJCe-;%#0c5^le##zHhFEx&FJ?>l1SB-JRA1T&0z27hq{7 
z1RCwaNRkmx9__#g90`MEB*euHSySCgr<#h4x}G0k=2u=Z#qhTA>de{P`T0A>N-GgF z{Nb+45>=40=XTgGPY4~`oiH~VP${4RYP&h>I){X&Ri8#*}PD46lkps(ueb9JNOU4qk@XHt(J) z9e@(OV(oeCJBebXw;uw;Xt4UyKcjH^Dgeh&{~gA5C%QHj;=>stf4Oeu`@KKM4rp0y zqg>oa5C5rdhq#UykLaHP&vwGSd#pNKM|bl-Y)zpBtWhCNy}^LGUM<88xfZ|(+&`1U zYz_TVMK#Kuvm>KnV(za*sLrJ+^my7{9wrJeEw0uznrNTYNa^ke= z+H{5B5P_=}Qy%=@GzQ|;88AJ?lFcyO7}cD^GWnkk>8=1Nl=A!Wc;)%6>n2X)&dWgPxz{6#{y#Jl)r;=`{_}HO{{(E%d^~ z!|Ie&BM{HLL2(Z~Q@EZIOq}jkrVemrhu{Q_JENbKmbrW*>WC0c7;r9V#&d zAcneUw<+N#eFTh^s0W@7;DeG-?`os)%Nusde7`S7Hct` zcUV!P0Z)hsU^kvCOOm~F*B*$_IecvKg9ih4#e53r@? zLkh*|iAPufi9CIx+-eo^Q0&1&f|Kgn`y34d11Bd7IVfz&W2pCj`Mx$@v=?|ep84ft zP4G`9c^O`W4FO6aP%BXue!R;BcvvWMmeRU96NHBI*&RxhZ;%(d6PmAcVT;*LBw$8D z(p;T4GO@+@5v42ShOigT1cqU;(iF~*Z|rX+uG5I+f+Q{i5m>N=dNou2p zxnV^$P-M!TVbJ7ELW>R^6_T6|SSdoacHjGF2y4Fjs%KU68Kpj5c4IQphHxyj&41Rz zPbq-8{OUu|f6$~xRYTAc2nBn;26UvV zk;0d(lD3p;(j13CvQAHHKJS{>R;^5JbsdP9)KUUjS|e=*;kozm5r<^O%xir3AGh*- zL!4AW4_pYbf&-Qt2?^ZMx-X|wX;ja}Eejp8YhLlg_FwF2 zto5tinY2{&8FDdd;4Tbf-1gg~#mrEBcmtjM8U!}4HM13gN2H!{VTvb*xttOk4>vEb zt7-QwT`bYW5rC*$Ky(aTdHa`N0D%E6&ccNi9Uo#lovTueTrmL1D7sE3#yBLh?(kRv}xpEHH7f7B9M&O!v)b~9u zZU*oIUj^XY@pjf=yiN3=O`0gF=1??pGU}UV8OB%b_vCh-`mj-G!qnFw9Q6e0j)(Ov z&h*wPaE0ED><+7F!a~;A$AUq`3yvufvLmR-+IXssh$7rA}+00w296K;JEOkW*OQ>Z>A_U#T#Cd)Id1A>(5oM1F|c<>)h0Z%wbLo)H^A zO>`&tH+T4*kkZ&8>O2`HGCuZqyo2`On2zbf3N=)57>NEo5<$2)vsb&pfY0~{TF zt$F?XfcpT4V&~e{JJj7V+HvnZP&Oj5t^|LTG&LFhIIpUnBevmdftu4v0}PUuDzhj? 
z#r;19Bn;1$^apk$&wy!v8`uWt5)1xX1y!p`u=PeU2n{Qxah&9*jJ@Sr^9 zu3TUeCsVONrP+Yl$FB9zbvFBm} zc*j=MtqR6Q4~qg$7<^Rf+T1%$=|gPkU!D*tH(gzpaQ%6HbT4U!FhFE+0*a>2z6=+S zGCjX;y<5QLGQ_&2{va8e%)l(U2kuQp^qf3WFWibA4@2i{E$DD zI~rQw9dg?5e`)wK#}2K~$Id@r$bYP|f0b&su$w2FAN<|G&gm$%%xRQ{`?$$zm4(RcBp9z~l0MHMR# zK;8pZuT<^ZV0)LTmPoXuXKagugvaYRt*fr0Nw@qq`$7OfvNC+kUxB%H(eZA?Zr1V> zq>*4n0p_j;`V#8ikf>sb`E~PB zNsIZQ^(x(d_QRsZ%}geMC=o*jp3CQ;!#xN(+EqZS>k-zCkU-?3!hF-Oy;#y?CqIa`ijg_WAaI+$YD1pSebR<14Dw4$r1ys*mE2P zy>x^(1O>W}bb$6;UM%%Piy{`L7pV{b!g8}FOfTjw0uqEOfDk#T-}+Q z7hqoYtkNW-k))V?<^`!#=gNiYX5k&Fx?1_&I(C`T>$O}wfUMlr&v{S8)RR8|z4t5) z8iByXB6LrcZK(eii6q~1e6xSdXEvJ~x9owRV__i!MZvgm*UjBg#TFnC1@5nuQR|3)e{dI z=vUrQ`mPb3Xc^AP?~K>YRn4$yMm4Y5bfdi}GYL(Oj~D*XPIkx~Bc^}kMl#X|;!b$Y zD-NwzZKgZ-hFGl=)T{!u-q&ap7R}Jq$8+vCSRieK!2B^Bi;ugBRzCQq>Fhn#MzKG0 zV>iW8IP{Ueu(Pf7FWi>?;#dHSfHCb*jy?_1=|K0KDf-E?Fb+bcEFY*oq?(_6Q}))+8B;b6GHvUZhlH#`79ip4 z?R}C(OcYQh}rcL5P;F`Y<)ZEcL8B-+0d} zagwR7+zI#5j|Z5D5pw(^fPJLVF3G@*In=(Hv@+Bis|q^?G&8OZLgDbZ;9o4&zN54~ zQ#0UhjqtWd73N+XDZr+PrzAMm{J^Kv36 zpysj9OAF=c^DUdV4vGe&U5n_5c`Zfcl#6VK%73a22aO$bVJ1S*uQE+7+V7X@-6EUo z=`c9@FY0|6n%rB;t+c7-67<_7e2%n{xd7WK65E`acf^^9m-*-sRNeyBOHp3p5x!PS z#IR87Kttp5STv{ckyr~OwBif&|;Sdnl-LlVp`SmTIFJ(e2||8EdvMpb1-_~ zEtS0z1z&>7fZ%i(ka5)65(B_-{+<4Z7kWLfwXyFY`)!plZ1<+!o>sRW;ao|HcC{ju zp#7Z^YWZhu2g)X;ed=DlLk!29g8{B1MjSiUiTZKt6+krebhzIj>nW1un4{lu*iS}= zu6R&zU!;@=vS4{{$g!edAq}l>_LT}JgPtUwK2@ZrdT?N`M|3-Yx-L~1nOYM%CyJ*E zWD8h6BUF;PF9v#xJ@XgolrKs&R?9ag#;??LilqHi3>SI&qJ&T>YKj*U_br`_Z`~hflkxlJDa}$O_V73$~`;s-<3ggP<`i$ zL12>vm~g?uKx2VkW#Itp(bItVNeDSM_iV4{o3 z<_gaLj}+}FbCIpm=ZqLlbjk%q!rTz3J#gtZslD#i^Zdu}n(6{rt;UF46}m*qijYvf zPseZ%6(64*H|4%t#+x@(p{pkX@ji2u0(TGdu2hO9p)+!|1(2llkOETYFtaiK6OKHeCF!*=`VJBxKG ztf=bwo~YLwovtZyKoa4l6RNI9H;Oiv^tZrPv%1apf&TL@?R zw*pV;tRy97h%Mt=aC1z;)x9^84X}hpj2RrGoI4u8h@TuNQsC?s^mqn z%;g>E_Q(NH{B6ipdin4n9}i0dO~Sx^YFkze$dvQ=R&?n+8!V4XXRhLzpv^DO;>4vz zAL}w6akN=sQ_6gdRuFwl;dHM$(B772r|p32;MOKTk#$5qpNy^a}Q3MLE> 
z-?LU}1bbL<^JDQjM7GpzQ?($9PloOvzfpZ6W|r#y<>9_`7ym_|wp*Pa2*e7JgD( zOx2gL8KMlPQJnz?r|jwn@^LIkh1~yqHu2%85Ynknl&dj|1`U%VsmLNyhFMsG{1lo>(vC{oi@chm%k6W^O7I zhbm>%8Fx|d9yP&A|2YnuIX&|j<$C>1P_ss?NIS68H;~Ti=I9bj=^EUVhwT?0{+~%8 z1K#u+P^evv89$gy+=B1mjdoiY6o?ZUbFJzoj!)Q3=y-R`+YD2@Q&Z3kHXIeqBo@&( zpl2G+#$)g9ENeG-Xv{(C*@xj8PX!x#DIt=@3hl$g0VvIhGgZUGRLhlv2n3JZ36NRf zSZg!7e!aB{6U@egT9`@!6P5GoJD+PhB5tt=obXUcT`g=lejHiNm-Xbe;WA*e|MZby zHLr>w(G#37^sT;UIQpZmT&ehH`=S02omOy7V!G9oEABcKgiw91jH30(7*1-=iP}Kd z6NbIq+hh^w1675d2i7^G+JiQ!is- zBB%FRUO@FeIS~249M0{h6+GzqYx~*266s;=n$IWFIibC01b28WG0mdUYWn=DM+np`KmSt7NoN87V5hb{TAU*J$;vd$mZ?qu=j<{h>tJ%i2Mv35 z2l59%>V4gP9_9b{%<7TERBcgYZC)^dq$u+b+L(ysK?<9E`xR|R5Nq!cs=D*-`)xao z&rZVRUzd{q=roQWFBL}E7eap423+`eMig-4gZ=Mp=Pm_fM)A!NjDOFvjzXY!2`r~W z7p0^Npijj>B>VM}9q%3ium5t5r-LOtFy;PRP(+{3ncps_Oj&)a|B4!4ou&U z%}qf6Qq7{+u;|ERWkJs6wj@z@<+W9LxLT9`#4!Bg;`qFH&{a_PKz8a`9Fn)g+X~q9*jm&^BJhudm>3vAshp$U$3ZM zx=y@cxR27leC6cx7%Xsrw;F%E*JLa~5K~!B+T%Sb$tUb|N|o>1?{?h#Se3Fy{=G7@3boz$P+j{1dek0 z*9aRvzpQM;ms{fl`tvsivA*lQzqfZ1c~;WZMk3Z#Zmx+2XJvhHT67xf0D|?jvFP`B zFlydM&TFakuRi8j91?J*x=-;niRWxc#eeqL5$TN?4iBgmIOTj;=`c6CAoo@D0o+o4 zbk5kD-slRllJ_3^9sbn6u91Na8$1*Mmw!_vM5>IW{)j~*>T@tNy^?dMMEL1>+L1Y? zj%Y62J{KK*BtegOb7ET_-{PQ+Py57!^mC94eDr%x9S^`LtCRcD{~C8sEfMUSvAbuS zc~s9O;5`<}XLTLst(QuZSsmfO#PuiI%`MuQkT_*n4)1PjQum`8c zbA7%8t`nA5J_hWa^qZ@sg(Bl??*&4UE&2)~-EE|AdCT#&!|?z{BRI?4)7L6yfi!}1 za7{i~vPW1rlr*4dsYqQ8fPnAeAueuwUe1LaH+2w@Z$WJ7_T3Ep#_YoZ`r19U@<2%e zdwMqJdVU!-=Pi`KE@F53C+uZLhGW4}tr)R3`Um>B%cLmJ&T&^d=5L^DGOhQ!$X_S) zB}dtv&^knMZTNzBOJRhNmnx;8ds5wR6TYe`NdzwV8YNQxWMq!^7P7u@26jB-LpooP zf0>6n*h`9!<$IxDHIZm-_~=2e(+)?DP8zu5%6j;n3>mN9J!eOXj7wnUSJV$W(xnIW zOE$#D-k3o=DBAV)lBfTm1*RWnoGuN^+Br(@%M5_AO=Wk~S~n#rK~?Zm`$vvUCooo! 
zKAMM52vHrFf!g0(Sy|rvriMiFA5CPLn>g+q$K1H2i)DE+R>DbPQYQRhQYZ{<3DZh5e5rghj81ZeW` zq@Qq5txIz0pnf6aKQmypzMlaWbk+UE7b<_U5ob^u_excCAhfrybZ&Ms1~xWHwu$^o zfm(0&E3c$#)i+a8x_6-)#cgozM7<^!{CBJ`7Fl+Q`S^IOckmOEaaZI>_DlM`3fyC_ ze&8mW@b2@XSjmt)!Xvw9>+{HnQ8E*UG|9Qh25PK4GTRQ5Dd%a?0b*AXH!rsoesLumYAE5&_;40|ZV?;k-qu!wIy zlZX96f-iY}^Y9G!CsxsKoi7dZxhP}$Qmy~1^wTcpe`PEO!9JX-M;g%@15%U~ZOi(* z`Ug|zD*j5oy|CX@5wqabhMaIQqSKq=H;yy;jq_8SnzW#lrnPI2&dS)y89sX(uv>6x z#q+L=sFkeiA?Um5cMF!6us3q?lRp|JNX>UPB2qqC%AKe6IO5)e+vLEd!XJj683u&t zvbr)nO2-Nl(g~i7H$g1?ex5R`NXC@YKm(ns#L?ffQMun}WpP_@xvaSKnyUd6u%c@n z8P-ZdhIZ919@F;4BKeC2F4=rc6Aaylm$dkbjM-!D1d@dwB)X5}XIM-a>Z%`gv-zb4 zqu~3tDub7Q&$jXMWu>S4L+A?&2)h1q%_sfR$U}TxAu?cvu`8@fl0vlb#O0k+Nzx0v zj7LeR<#-q52Dxd^(cr*(!Wa z0rJ|YNfAIU|Dr;-=5qR#s?g*%gLTHZNyL>UTR|} zq5irg$V%VtKIZ+YtXFhm$}=NMl)Rwk-V3Fl4iY>M#&18TMwX4}&&N!O-h2&{4@}>e z7QX5L{s{D z%E-ty#NO+ug>|!aovVl*tnx4&VsEFgU0?jCFFGC43=_;ea3@HeVFH}U$_ag zPk0TfZML~(W3iAL)IZEcURx*XE+3_s0dz@H0QuHCUa4tX&~yfDaOPo+!ENuXR_H!o zkxl8>e4ny>w`mhhvNVx?g01Iz)rX-CQ&c0WaUp5r+>?zO*`I4 zIemxp9Fu4p7#rhZ-P?7?suj8DEzitaEA+o>h$iPGU`{R%xn^=}7!1lZdv!PLR(X<1 zz9CthSldw%kd>XvL))AYYD6C{Y}Nh=-TG|*?MPY64*z$$AUbTBr2`Knpn!$EG>f_$ z5oGE1Q|<=&g%|E$yQIq8(gGX(F~Ww!4)nXMAVFqJWg6a5F^l-}(AXEbff#83OCN80 zc08Sza@=6&;IH1XZlRqUrA&`*^_$QJfA}*kcAKHcRleqUYllN?l@9R?0s>*@lQij$ zi(ZWtlp30k&r}j2-|4KDZ#}gSDJup`{c`o&rnA_SA6ZC~CDPmOl_?vz>OJH9uG~@W zqcB-|i^Tm%Nga@{p+Ssrn~}i47ItsS>3Tt3WN?Tg<#!mBqRj=Kn&74n0`$wODbBYqWkB;SBjlokm}0Rck5>LzFU0O zJquIPUT0r8l{F`?t{(WUDJbq8yTsSkx@*-f0I&hzb z2BDSXoFr3z&nG*{PnhlYJaWQrJWup3kW0<8dq}K$CxL0EGAc&<$I zBmG#n;E_LB3|M$KPa9-^V)>oi=ds50H}t&F$+CBEwAvEu(_Mi~>m)q9$bYP|lFEY@ z`aes$x(BDv@k9*(`Xjie96e!b*YlLUGsZV{=Fx~B&$23JqbT9Q{)=dyXZYf_{cDKO zFu!i(AR4RxJt#j3HWdAh4`j7&sCuvAJGSEo`iFy~_q~wp`ANhFu|8>rsIfmg*MG(} zRldi+jAY(7t0k!)fv8$Z0wi4lx z$+pufC95Z+^C{j^zo*q@!|^V!Os2BecS4n+8f@{lBX&qTJW`z18Rrre8ufYSasG1w zNZl@Qpk9n$sdTI_=>{}?L|#`u*gfZIAslr>KRH8o?uQmjgTv~ z@*dKOf;e~_6>Ut3qUV{@~Zv^BvQF^{OqD3Y7hN{LlNK1c4Xzj!Xo) 
zs7lKls~w-@YU9nbjZ_aj;v|2LSywLPyYbPto-R1Wv&o)3wdDjozdS*a7N|GBv>;UE zKVkyQE(#0}*ycGZwL`U8jh#jr;%tVV-o(U%e^!Ov;Bo z-dRvcl>qrou2DNsLfkM21=wK$8EoWn3H1Gu5HdYiPp8h&A$Gz9W+u6m^y1dxntrt& z{$=AXMd>&AqE085*=mGD*55Wdkm^{$N>cT4m18+Nv*>`b2|k7L!t)b$Qex>PhcioR zT^T?i4Hg|le=rof+FE8=4(o58ZC~n9te}PrQ-f2!f#Nd>y6c5PeveQHa?gAp)Nt_nY(!k^C-! z@?-e2^oxtCE8x}taqPnu@_oXpKT7Z-W{d@ zZmq=8WnJ01lV9+;ykwCMBs{!6R;{G=E61!eB^QSqdYon1C3*KLLM?7}_N=na;Srt* zPChn*cF(1|vP`&$757udaNkY3%xJ`OgNR6EN?M?}aJvK&c;Rp>E8x%bPft+A#ygXu zTz*G)5Iy3{&agWUvQ7c1-xsD*?+zvD>Fp{o%0D7IAaJ6_6EE%&-h2p^OIcX0EPF${hYPCyOmPUwd;a<1B)& z9C-hc+7;PpIbC&c5AXx$ymurQZ$+_{(v=+*bCD-gHfcb=sW$Sy)*GtRo=WHI0gSRj zG>K=LQ>d02Pi%&FH|ezohwqys@|TtRnv1z|7yM~5yptOQdU%glrS$l+4OHKqPAB(` zUIgW_B-V&pKYbQ~BlD zP9xK)ywhu#zFk$F^qG}dby`;XC#h&ya#x&FwqN+XDtAtYEyPk@lTj*6l>WrY^&>>v zQC(izsBB?`j{nWI+GGNriGKN7FNRf&oNR)YVWioO zHI%;FOm?gr3JkY_7SS7K6n~U=+Wh|TlY}I6%x8rU!(Hc%V9u|q(!SUMD~s-bj) zeCT{zZ8=rb`7`~BBXko70vUd)tmvPym;FV`NPmed4ha^g?}vkd*L3pVd&7P3mc}$5 z7V@muvvG`~(Rn(jh)aQHi2K)z)cSo13eeYB>9n$IBfF8V3Vg*6$Wi_(Y+~-ET;zeT ztSBaiRjT7M*u5!*@F26{24#j%I}LI5%VlUeAHDV^ArNTJ>iaYh=zE7Pep4=eA1onX z>9mS-6c;ju&6qbzp8sWY0Qcs={oz~LoGm-~R{+Y^uF52r&CNZOL5z7(t7L<{G++cT z>%1>4OG>HSO&9!gc3ili2^VehGaLF>`^Rj#yb0GbX^wxHpbQZphBE3HUgoio^&_*AGWBxyvox{ zTltj_lwH+k!h{2PwP)Tfh#+d z)NjW$FEO~|puK=a!^@t34VC#s+V!JY^7SAC3(D=ljm`pwoD0Wd=T7O%@%ysU9bk9_ z6RS&MylgPE=1#wGYX8)+hAoU4A-?5hkkyMdusK8BW%xkho`#ZAkI^uqT~Y%teKPm~ zdq2(w_fzswA=a+`l@b}FmXWunv;(Oo1e_m=-xwIugE$r72`8vi#=iJKG22a5$KgFr z2iC|q)K|IDG!OM|MOXcD3@-LvkT{K&W9u!kjqueo&9?H%HTPEg{8gf*e*ri9vA^9c zPqT%-F^$Hp^iQ#wUdxX1eB_*jAE%*~H$>$rKt(utg`0o?=lR~-!I7eJksrI*xvMrU zwFmYY1!I|W2>vg)O_O@vRW`lx(jSk}e-sT_E^-vJ(jT!?Lp*ODTmc+(rS67LKnwNq6D%hr;CV3qn+(THTgIibwh&z#HGrXSB2bFCv$uP!XVV z?&0Q_>6Lo0OSd;UJ`0qb6tYp({+LRe#^ifZq+!a z;9NryV0NPErbZC*+fxU%T=o0JSAd9Za^SWsCiz41l%n#`Zw8A-fD;*Is8yf&J-`*@ zwY8leyIwqMuE~c+&W(Z5PXY7qC~fh{c8SA-^;7Z3!$rNBsd$?hSGU z8xGahJWy=L_P6aZsQKu)pTM|%SH)m+&S=A-_8fc0JAubsoL{`CU6_(n^v_?OA3N&0 
zDHH0oLvw@tJiwU2a^7MSU?osJ;10Z(cs>+aM`3rE1WCy7%6=t9?6ds=*K3x=FRi9k z-!yPFsaXJYNLOtm0^a;d+Ms?g3Ix_M`qd0)2>F1EO`o|_H*zDkghGuaB+I6BDk)!A zt(0!8&owG~AuOU`$W+

N|||D{AGE_XLd8t*yp%ma|Tq9c2*#aLRZ=k&e3 z7h_`ZGyPrxZ0BD2o#V5;tiK4_Y(qDNq+6yPuf+*TaZe(0gLQ~kuRsiE#H;^#iv$Q0 zI!gK7Tat44K#PHydF2<4pVrTMkwclp@{1S8{aPhO&0`GQy?u~Z<<*+Cl3kaF3d%(F zqYdWaI=nvZ$yCIg7>8#0Z4Z}A^elQcr|wsWMXCHQo*x&7{U~ZL=LQ;@Oz=r2Z2nb5 zuloEU0AkLWg7a>6mXmpDRaD5|`t?Hvpo;RR*ARhM-7$gpPP;n~!wYN{P^z!G8+V0Y z-jFOKy^=ne5jXcWYpkMz4C^Mru3df3C_sq}Oi?Z+#M&Z#k_%i@3IcO-JaBMV$?Y3j zEP4oyU?}Dwr;wXx_#kI1dskfH%K~*5(8z_+D2)uOjkoDb0W*t_M%#Kp&h#kSZ?d8| zy|@Lrr#g;mQx^UJGV4nMGx@xW4*B;-?8IT4U*sG}CTSeoX0D|i{uvH67da)xtK<89 zqmsT&&vNK5-yr<+0ea*T!N=b^v7fSFCoe$!5h1~9o}@2;%&mf|zKB~$NqNGN83U*GytG>$K|BoH;pZs3kjiy`^B?Qhh~DaY$S%qwfmjV9|l9g zo&tqIYBEyCroQdFz>q!erZbV>v~U#U%#ADsP%;GEoVX@hmyQ9b!+p{36RZ6Iy2xfM zddJ7Iq3BfI%Ts-jCup6%GbKpoLWc-h(+HnE z$9DTRgX8TN1AYfp0sPi~?o7>9N#=422j&Fl1fh*X-)*6aR;+fb7KECVmT-*asH)UQ z{;lbeuo{O?SN{!su>SDip2s^p?K)+Xrnu{M=fi2%VX!?V>AUjTM=wc4&ecA~QvgaE zYVTB8|IA07QUmN|F6QM?Q>0US>}WyAH{WaqL zMJd?_{oL-2*Ol^WHu>j*!F%sTN@URTCjxl!IL3NrD7x6SRs9kWRa{(LCbnsN zWPS0hUiUPmXfz`}P?_8r%xnFdho7wu=$vRdT{bcmM+xWcDVWeNmXwq%D4AGhFN>lqmDbDDO>zs5Jb+DbiRnyx_B zz&U=ICCXa6_KTSdrP`=Q{(E_^GgxAV+<`o>@N!|pbnTqnxAtFDM0L-Fjv_n*#`+Ym!cBjm3}s@#lcy;U!vWoiWb&;#krhB zI|_R___TVMf5%9OaE_5oT??fv=F}DYOz%`k0$lSCpRYv#;o_AC8)xauLXlbPV1$uX zVaSoORibQX74(b!D|}?XXG%y`X0cG`?6_Z>I^KQhf!wI{*tyv&p+Ec|e5Kkt&kv!w%osK)b%bIKTPS7XB3> zh2r{N1O9n$_u&E?D|MLV0FH+0n7W-Y)p7f*EhT=(6Q5hi#!?eBUKfOIur5vozi2b# zAbV(O!4dKgImo}Z=y;k(-$NIr6z>;ey%~i8NTu#%ql)9&+dhCiuXs-(lb6??`b|<^ z`vq7L}2qnehJm4hP5$@3JkP4(-NyG2^llxVmm<{3D*f21 zG!#^Ct%2tU3Sl7b(Ym6;D zBDcWS=00-e>3!m68^)Ua_Mm+@GKO2A=Vw2H@bL}};)_$&zXX7w4w^~!pf5qDE%PKs zwMxC}kWmTkgy&9}3@)(4nwc{ZTR?^#JvtY3?ZX>9m+60&DNcUee{ymwnHCYCz>V$N zX{B8wIyoO-doB^o_hwg4zE)hEc+?SxauG9<7?e89Fq+#NsAW>G;yR*!B&UbilcaJj z$L@3eX_@j)-<*DeSkyYiN)p1?ORGB))3HQXyIt_t?h%sN&O@7+EVA=0O~3fLqCrYz z#kd?RpM=dp%4ix`Q{3v}2KhR?zFiw(-wwl)M(|GZ%rY@5-{8kfwC}2zg#Kj_#)y`$ zIleFIVbc^QC)z8e_s#whB5}-qpXCJ1)-kM)3VC9 z*$x^l+J`B3R6Le!DXH5Pi{mc~+#$B(t=;3vFG8};e|n@BVjk|^uTyFKI*OvAo z%)MI!f=H1_uFdqtpf 
zA5OYS?qX(=)iqJx?cyO^&WCVvP9QU}+S}kq`twQ`)|6O^V7_N|TxqC8R&vR3pWXu8 zZ?H|CIN1A8TGMe|Md2sc*h{J;L&p1&?-Z*d_=+@Rj@s_|lf@NA;d$68>wDM9rZ#8E z@(n^0htPT9(v+d(`mL{ie8(*9jK2m$GgsU7cZa3-l=RnI4})gELt5%N$#01P$XWus z@KcFSV~=L^Q4IQ9P}WoRI(#HCuE*O6x$T>)%3G>(pBpkiY!LYIS;>f*cruT)97w6o zoA{NFf9Lms`SPdz6dt6UtDXMPFP>@W6-aWMvtzQmJw9HhyjgEa0;Y{;YThpzcD+Hq zi4^hhFhA&kz?>Vnrc2=Wn@}Pi_|@hKhn;EOUfx!Vr)4Czxh_nss^<-|)k#HRB|5KN z-?4ph03~b6O};F>su}QA5_@I-x6pCYplkz(OOVZ~4PN;B2isdatvn+b`c5q+jCJwZ z&1(54c(vC5jMNlPS3e-qmm+(Fw#w|cO48q-N83C*aA>AgnUKSW4?dAB(#fkQM>iVd z=1b>QTFsaxM!tEb(L-Ky-zEM)(;zDRQRAxX0E{0|u~j%ipsR15 z7uXoqxAKm=r=9a{{jY`Pe@T3{GovHxLDdsJ)r^ZyPzrML@vDEbiqrmnYH3(pZ6it* z!>{tPiGz2}HQnWzeC0oxcE%%3e8^7!=VMVbPI6Dr+pU;Nn$ABB%3}C%J@A=`68pBk zJNztkMnrHw9(d2k!lDjy{^rQO#-RgJZ4}zzBQ$${TxYnewN|R1on+GwdvB7T1Nw>6 zDp`>$2ezmKF?xih(j~ilTwb;(&evlvMSd3=H7ARR)fuHmNWyy>U;q>cFl%E3)2vlC zxQ%-?`7l~E30G8Ma@@W9_;?=hRLhkEKUYC1E0nK5?)qZ}ULbTrvZfu~Z`NdNV%NgW z(=`UxnS9IGmy|d9T&IvFPTIwggr3mE7um~$a;>a0FCpuGZwbVIf*R4pZQ=4WJ^3X@oGf{ zmGue=8(;_GCD2yvLYT*HT5Vn5+%HNCkSd^6@Ku77M4tS59y9if-iZRl3FrV&s>s@F zC+QD5yt@Q|sM{GM|E%f`p%Vjmz}FMx(tvU)P<9Xv!sM>$=&IVxR3!!9>%Wwse?TFL z7_)2>;@+uZ9gZi12lVmXEu!_0_n?~NkXBva3=rr#xnjEc9o;d)@dX!c<%unF zJv`-Or-2!YYXG-XGuy5{FTGj7)my#WA;Z#8RZzK{o?s3F(F1{@r?BBk&g&K?d$j!X z<3yDxmw>$qVdu*nUQuK6Ic-9eTRl(CX-{j84>drbW7>AcVvW*rO0!!4Jk#E!F%r2G zCM`+Mk&&JY?0w|VnQu2Q{l1iqJYM{U5&}}S0rph;spmIL$HJ7+p6)d@lrZ^m)V>~f zKI%9+0oqY+7hWNETE?TPSueyWYP4h49CH=)n-}2Jk9^Ow{uo{_?u^?owP{@zreaXX z#k-%X@0I84GtuM!zOiN|T%&^2ex;2DR+{QI@iuL3mgJ+a2jw;$X zi8}yVRG>f5$&>TdYP^Vn(d9@=;O0L$8^r+nsH}!p6D+E&SKZe^HL88Ct9PV&I?W29Us{n=7;?WsUjH*69dH0-GB%%@br9pQ0HQ3dXwz8w@}Iu6tlJWIn56x z^`+E4&2tQhA`fjG1?T}FD>U=J%Wu-=$3&!0Gv@?jc*tL1Ds&fy3=6m{rx@e2}E}UP&RA240_H4 zSjY((I~@oEo;6MilU!YJAL56z#JvB~wg2@^lb?7y%lkvYq0OYSWFi$rx@Rut-1te^ zauOo-P5UnRzizy@#-G+_Kt$VNv76d%d~NT*R=#*zhk!?OCb{Lx4~;xC1He2#Gk!e* zFvm~-8`2zd=+1PO*LlemWWcX@XwMpYwOw+)GMB4BgFL>xTxmJ%8FqFuMx3;4)%&%5 zZsc{)xBIBMXT1i-alvY4X= zsY!eG*aac#u#lCLQU3{zzXQr5Akg?fhTI0pgBwtHj@}Mf9t7N$$Z{kwgtD 
zrn3rbjJ&R?yNDP{3(h~F_)xl7mlR>v>iAdmE(|>0-UuDV@d?m|n0&(z@vE4k# zc|HU-gFKJ-*9?X#cfDM{XLcCbAF!S>nRyA6eSA(n%{bYHT$}*<$F(;Y37G){4Y9p? z9IGmyeGMF@GWmE_;KJOzXrxy)cKgzEr@)i3*>DN1<5P}k8-I&e5s|G|Qz zLb@E*;#>2l-;U7VrUZI*oOIE!BZ9d2buIG3I~W?>rGdN$WE>qDp5xRkELzWrBW{v> zEfS}9Em8(I^LswwAt`d5&%C!E9iq7YKR2`h9!WU!?mZ#}#a;ELu(BL$Tq zO8&_-p^RmT0j;1^tRyh~euUCHBg0Vvjw7G_nn(jh8x~o7gRQ<7K^*63DKu@idY~Y2 zB2F4@W_ox^?)ws8XPd+uwBzIS{gf92H5qJv_YTR$<&S`A1~LIog;Tj}-Lv4CmsE3z zn;^4;i)2dVMJYD&_pg-b8oPCjH)ip4Vj$B_z#LRTOITK_&#@!&Q=( z(+EeoH(G{Na9(`%HZtCs8KeD?sZD%#jdWZdT_Ss~=PPVI;;{2~{6u<`V|zKqGr<}8 zJ8}pRku;>tl*Jt7_4yB~4xVMb1Dw-~2L+kSFJ%G(PHT$WCx@Xb*H8b^C=!&19>R9F ztoK&x?-P$YCBqQ))nujie^1qx?QzByc?)v*v&L*4JVoMZ;9xK&P~`MZ1&`O^obJpn z@l>IbanJ>F@w3$d79ohFY?ox6+-oWx8pRE+{;))A2fez6IQy;x6dm`U0i~m!?_wJ> zGmseEw|g*5AejI7$8F1EafSVw%`L$NJB2JKTRg2Dv9#d0c9zLtbz8Ud{>-hf@t1`t z%kCQE=a#sU27mLq+{qo^!PYT+Ky`9)W4RY44m&qlg_&alA7upCEvk%9PZanN1lLms z7;{#V+z-&t+F=vg>Z560SO0LZ35TvSAWrnMBI^%d7`55`_n!1&*Zuz2066kQ8ndik zx{D=$(z6^lWnQpc%Ph@j82HEVW?CK36q3zq|5K>ndwP=8;(cJLQEkn5MAm6a_Oe+&`U|;cX z{6^d+%63k@)xZI{M}p*b7_uDNHk?6Bm$n>7PrSEJT-RVgh{Y-oxDV`?S>}d4`gAKd z+@{1PvO!#7tW=K9U3VXQ=7WPu#ptal>?LeGXP2ax(T>&vI!C->vkbrSbdE$G$)oPE z|6E#^k7M6-m)x1FuzWQ==%_hZMn*FYgGWjwdIUDPEMVKkZz#a(c!wyUMrJKNP)R|e zWuSjAKNhp5e-ihGk8^>fnC3G7y>QPq)5eVw8PoVKGDd&-BslB-leZmZPqkgAM;rTS z5HUwe_8Q1gwbsRMcIv3{TKWN@@o>HLo{cBCLM{N9iRa4QZsdt}4bxx0JIp;AB=_Lw zMi?hB4S&`p`G#NkpZe@eK%;Rx%|QLdsNT^cty%St8)#;WFRvRikx0PtsSfkgh;nbP zhP};6i3p>zr-bAc^Rd1td{TFbv3V?;O9&}X>>lRd#Ywib`SMRa{h{XB@Z}p5lwo;` zBmB;{(0T$zV2NeeE_^1;FMj6XmrRIKH&#Q>@6kUlu3`2+`GnPYcF&hqyEn+GwvUc! zmob~C@yjf$$l=~zA)$Oe)^zLm1FE2>&4Te}L01u@YOcgiZmkmY-!orKS{pnBKDgNg zeBR3$BXnC8<-CZKA5)KV|N98#)lY|Ll@|_Q=NQWJw#ILp7>^lfMYafJ8BCs2kscH+ z3I9#uKl^rH&U<=hGBP4J(_o!RTCS@dFp+C(o=rJkIs?XuQkswi&a=##_+JM$?J*fl$+1am z&ToIGkRQc@w=T;9d~QGMvs5yjhBmLFxZi&-4ozH=Y3&1RNC8qJud8P|r1T8@? 
z!o(Ct!G?CD{m6o`tr}aDXwEo!=ys0mokzSfvXXY}6UUeN?f<5&n#z$(qwLyty_!; zXyFgcivvBGS9xIeejMy-<&VDmsb6}QbZJgc3mPsz+?6@Go|n))4@7z&%RZbp=x&Hh z5a@onnG(pq{nKgt#r;;hcXb&BFMTGj*E&D++F_H?BGDbJ< zi*+3>X&sNdRM>_74Q?O)Q}^=h)g6Yk_Th%N4pet5)&VuNHr_ym6GW2|2qAxl*k4UX zTRg~c{M%aB|MD#(BIEm=G>mC$P>LQ@kwu|UtDt68%qNp_N=|F|E;B`m#TmYalx^WE z%1I{qnJ%ge&8nM6zayWBur1Ym^aDcqB95!dm91dQ+ELnnw|x6OMVwb08T)w2tdv!S z4j#3?*b8ZXz8vui$$HPQSs+OwEzZ#*#DTKb@YrgH@H=OUS0LkD00?8h5Bx~7+j_`i z&W#G$wNYf6W;>%^`-qv2T9Kuc0kQmj{k2QL0qH}DcgZ?Bl#wrwjt(YFEOZ3Bc?!Yq z5F77Dk-~pi+6YsKZkw}bHiAaZvQ&`P}8f%LX|tQ zCAh~#j}owGExz|9d|T&+_=DUq|Dz|{=_db-o zybO!3g$-r$t!o;JP6Cr*+8Gq}N~-joGRGEhxre#Jg_5Z1h~(~&tbsbUVMFm{8?Ig@TAGEl@eCt2r2cZ(WaVR*ewhFUYz?bE~QX-zDaF2`bX6$~ zkyKUbx}c{iwj4GKutlh<5Hvo(tB*PAs%_JRyLEHKCNkMaobOHwj5IpmdZC|=>X^5I zomxAdZWqL%8U|Q-afRy~P4;G&BpDIbqukMh%81VRSa44@73BH8t=sz5kie#ju#s(Kn$K6Kkh* z>;=t0?_UF`%^M331GSUDq{m;zK{YQvp@|}P!9ZFEyut32pxy^Z!e!n>0;zEOX=oF&1cu!xr2IN z<~+sG>sp#yuI_2*@i{Uxp9MPcZDH!+n1x5Yozg#Z4*_wa##K2jyhDmj!`8rF0O|K}4cHv7L@HYA)*d#N?D8*c)xMt)Q=-KoSqq0%8vPOx#m1M&}im@S&c9iv{$<%GA^ynRWsmAnRT@~ z5Y;~b=u>VX?KGCt%sa}hDqV_6&RgHaJV$l)-T+~C-~S9i9~ek_@4sv;ccW-^s%$x^ z!S+uJ+97Hm1@ol>tX?2Wb%#A=zm#GR!Y#!u>zO1NZ4Q+Y?Cj;Mg!RGF57T#uKf(bs z&%5Xo$YpjaD{D5M&lUwh_57xS6=uspmU7~R8{~hgz(E#Pf3?f2JixwOh7>atn7?#` z{P~ z7*KZhw&F>vIiN8B8ILVpcq{?p3;{Z-p&VGaJfBNF%%*}}TTCOm4g)cfzyHl*hjUM8 zi&m_qN<-GotumC4qlzG0lm_Go1W7@YE+}DADBM>G+!Cn!5N_V;wS#ZB*+vfp+yY8< zfG(NzPEf~I`MCp*8|6Qb|1JyfUQgMg7&l%>g8mPG^%XO>?)0963_SdioI z13R56%<}fY4#jf-4+NTya**Tw71}m#2?YsL01uotw20uX%AL)`8d;CD%!76BhBTmzF>$>%??#g{>#-#!1=-g~w+@r7-_u>dLp zDk>dRL{Nkvy~l>AGy&-)B28+f1`-GYDjfu*Hv#D#q!S{&_fCM&TY%64fdqKQ|9v0( zIQEOZKf%Krl1bK@Yt|%_S=Tzx-<1(UJoTJlq5^spc+G-80TR=ffp-~f!-MyMH;6WQ zefZ4;0{yys%JvFK8-wEKFy{YlV2J=SBq|e37yq?<6HGj0+(Ei3Ln86|Ud)^h@n zZ9D$AHOr|SHyYSLH}_sXBHOBu5>@F+*L}_9Si1CX+q$xTQaFzb+inkSU<}{)6 zZ*u{e!2DlV-~XS3O8%cPNHYI>a9pUfLvtu_;2_s|!dc-4Kk(?UDrFk-)AL_o^)Oar?`PLT->zt+U)EMz{rH&>vY_kEN}YnEZT-yN=!@2dZ0EfCSooDu1q(_ 
zo!P;Gyjs0FNrW`*7jvkP>k6^FiG~3u+d1FUMB~>7jqz*%&YL$s6GQS$$4`=Nkv-Q` zIz`7xr-(xynfP#r$NEbjeUXs>dHx*e%Th-{)mZ!)G$=Nqq>t*Dd1;=(bB+iGi!1+`;^87eag(}7zq%N>{x!sGPW{fjHUf-Sq9Uasc?tbuTSl3A832R*aPtQ1z^>XLZ(e)Xf%CW?F z_?%;>hENf`?y`BGAsPbBK<>>RBc5y$PkvU+N^~E-XsVOzxkhgOK>(Ccx1NT`y2t1D zqN_aJMUFN)(MMt?_9Bb5iiL8qT+wJuoQJCj4@J(jovCNXy>JqaK-}|`RS7*qrS~!Y zwdlS@-eR|PeR|MXfORs*1dC9s7v>#Cc5Lj-S#wwvh?AWA?Ds2zn9 zy`}oFAOt)NmqKtK=!<(uW(Dy9sz7}yL0HKRIQYmcf5Xs9f@|X*Q6XZ2z``$+zFgl* zTa}pNVRX7JZm4~MZA zo4A$}dd29?mJgA{Bz!zuHyBu06IV|(Wop&A(%5AAIueYGeq}`!E2aS;}tUDP^mC}Wr z@uqct=8J`S+9Ep_Zo{8#dORM^O~pPdeScZxB4+2dFV1&#^|a!lBboio99u?J8*!T}7N_4f!So zkT#+iiEacr5#sjBtKOh)kvJeb2guJ{`i>fYhCX57Kb zt}^lflqKg?80vuEu)#l$_gwt!pn=~XO37H-_Z`Lp20Vo#-Xm3I2_{M-oOhFlCWpXM zOsgZP_2A%J_bR;E4c1XOldZf#8#h2;(L8y_rH+sRoE4qH1Z{bMW*$m;6x+W1)!1N~ zIl$*Fj;)Kp>*+%8ZswK7CCaQ2<{-yE^T*SD*H{q(-!G1y9>H=d=*+pwEJyXKPDfUT z4c!|BL>pZ(g;FEnSWk%I4nlt@wa)Z6><=#B{$8OFw`zn=zdF-umk82geiZCJrKDE= z*L7ONNBgJ*?0od?`pd5RSPSH7>Dv;pSvL{sj@}45C%$MTYFs<*yE!{XXW+&i`HY`u zD_CRFT;bh%FwysXS#3EJM91qBu11LDGEY+j6cHLc_>fO{y*8oCOv#)tpD04qP>6pJ zJ0s-=?mz*}ThG9#HDRkxTIEFdEgpuPW0RFaW_I%w3g5 zWK>oO3M%crp?gIwp{L0um?nBua=WcMUH({nkj!R)&3XkfT>Uz~$2-WC&}5J8_i`ie zGvi5%Kx*tJyWE0wnqpAy{JUhhER@-RnbPPUXpcR0Ev68^UUp+Q^2T+tQC?QDwHy)` z+3pIyt78~oRGS=C(l)8Yop)U~PAq;@?h{yDaa92m!Ms%OS8M#&JT9S+?>N=Cepxq? zr)32;G3s67i<4wEi1N-Z;7Z*ME@}!CaW3GlEqf|XJP}d}`21kGJ76f7W>+`dF0%}F zj4lauvd|c6D3TkUP^90GK9+7R!!|Lh^_(}d-C>9HJ-WVr{?`uTf&FgMwF`qBJ2&Pd z&5{;Yst`g_qR}wRpPaRGO^Nn~l{dk9C;_Fy;De7b!H&3~E_0}IV~fnqlegx8UPB&C z{&=Ghox18L9Zvhii{GQp_Od*5WZZ@Z<7VcXZR7H4>iW^kTR9#{{u|sh%aXPpta}zv zttntq;zqdtHYn}=@*4hg+W_C{aIaQ)O9QyL6~Q}@@z_&tXjKe5AmZetNa}Q%D}>#8 zmkxj1JpYFB{UuB_OFALv*ImKZGP0o7N5_=S=)za^Rayt-qG9S=wdw&Ko)vDnxGp36 zd*^Li+~<1fAL>)wwOWL(Q(D(up$uBA4s8-ZjZeXTMf#CV@3@Z4W6}C>I)&Nzia~(W zFOs|@9x9x674_&{#e%oV3%8npNTL2P9UaMcVCM<^Kmm)~3H{O))H43o%4#3F%}#D! 
zR-86s4&HRTn4iYsO~Kprx)NH4XS=RczE%Y-J#}GuqMdBDEQEzt$KG+LHpkzUvsW=c z5|`jL^XdV!S^O#v_eJJ=#{ekfZhF;`iV3Vx)qX?>=LejMozc09xy}Jj z<+PxzJM|x{;C}Y4Hiug9j75CS>S1R5#Lf2A`-j?gZ(vKhgg`|epOnHsQqmSI4!lpu z-9c&bp1&1%W!eJmoZyx7{|r6%&J(Zou@(@4TuQyeANr6kB^CmM&=FrY7J839j|Lq}i ze1~rw2EyiyH%c_aCqpYySGZO6p705fGL|0Q`kDW0eqVOs?tEc=0xyZ>D93b$QxQE( z?{5y6hvuxUM{wHp$!QI0SMTF5f1hrywN9E$z|i|}x!x&EN9QhY$Pax>M`x>ywvPBA zMSYI-!`?t19GSp8b>V?2@r%IunQ-7`p-_~c<&1DS#bgJ~oz2MG?<=sURgc5QdDYrXCLooT&j zhsh@wKbqC$DTXrk zw670890Sw}qjK&gu%YGo%K=&jcEzrRISoT-JszIqA5~zTtM}iFkus=m$nXU?bj{px z(5aU~y+W}9a>5hytb}xgc~;G7^d|8pATP9rDrTa?cVdrtS;3Ge#dL_lsh|@UDYT;{ zH1qyeH5n<|o!2bTu9XsT={Ht)+hBmJ^f8HNp?A1E0dW6e1D6-6_Tc99C~lcSsUY8! zvJGda#)~Yj8p`{^`~%cAUC=((h9WQ{^w;-j1FHtLO(O}rbG6QTuz?Durn*iZbn52` z9SuuAvRHSXY`*}fIrQ?oW~+(<*O;TKEq09A)SJy2J072iSjj9lorUW&CZkJ+9>{Bz zMS~Wwo>|@-(8ROS`eeY8wI9#>%FxxwX^xGkQ7eeA{Lq%XW%Fgwo7ihiHHuEVFdR}^ zbUzDEX3;(OP}AB#P}8-Wk)n~NGG8LVVzIGwun4#Xbldi7o&jIiwcorU@89i>(V@Kg zc;d4GvCTNrl2hXA=cb%!J;Kj1{_Sr)&gz_j^U|7pW!t*Nq6}aN z`YBy=?3RKxbrBH;yay|qnRH{(?!3L*b}i`u7Y01b9LZVf+Qt=|${gnte;%8VW>mU; z4fj|Uk|q%r`di(*vu^$8D-Sq6CKX}^0c(m!*@lwd-timMxP)UeMF#vyu84I*TJbzaoG?}Yyb z$$HpfRQBfT`U4ec;hS;%cbg%K__=m?gI{CJjEEv<9O-ER!cRo$h zDtT;f*P2Lpe6P^O!Gu{mTuu{74Sieu-? zqMsa38=?=wBzJm$=r!#7EK>{5(e&R*BU{nK@#rg>rGG}$YfZnx|M0e^s=H|f5hO7*1kWte#$b@{ zghOqNmb9PifYAa#Hv;k9Yt*=AN}qxw61u%zv@%iiLii%4q51L87Oau-@m|=iFddUe z`3nkRF^BuZsmb)OQTp4ALsN!Mx_`>U0b8C<+rfv;&M{LDCL`@oK)d}JJ8D`t`qXY< zndQ*gnVtRcHUsYkyask}3o9)6##%2%2bl!T%Xm-?Hun1g95i?3q(Dtt&=ejHuvR$Q z9EJtiM2-&*HrTY_)`usBT&s_MG6u``Nl2^pv~? 
zKfig81A`Aa4xnLJ#NNI}E>=*pi6GA}R`zj^-rCCt!kcmby6AUuqa1#dZ6oRen%_XC z*gU}Q_241+oNK0hb=YF11A}!WIe1a$;2Wsg1I#%Sn%-YE+RLSDQ1u|S{2_Y-Lxyy= z`c_aRPJMz&gTbZKbLRmk(M{}Aa9?RjDG(Kzcy$my!vcnBH!;~F(E7c zpLib~Eyb@KZh0h4c#r#63>_-$+2d*^$`9^RyS}k=G)S3Z;>Da`9jiVXWAdRAU*Dp} zI_t093Cor^+-U0Dnu>iJq(JsAxomA77w%bBZbo0MWPJ{UDDZMjSfYr}ttT8W)obH- zIAPMk%g6ZQ=aQ6nVeqD>1MvTP}&x zt6Fw*TORMoRb6Dr0ke>J@3LiO3UWq!-hC>FbJ>-VF}l4n zQ$JP`MnwU|3JyzhEQ1-0q~dVjy&CAr6B=`V@(9x_VOQN1f$?Uk!sUnXmHhg#_s#N3KQozC^y zOpd1-7VJC~U+-DP>TYqDkwLoe1#7epzS;If#`9tYG^(@f$p(f1dvHW-BCS;1`c zo~P7p1a<$_f!C{>rA4Me_9Mv0$e`pAyQ~U#zX4rWAhCAyGsV+tg3!c7?SAh5w9RwA z8Y5icQbCO|;olPTsB?on@=zYV1wY-PZIK7<6~5%JHhfHe?iMwYPL06JT}6~#FS_A! za70>M$hm%|s-`w`{TTEcIT@@Un`;ATNXbijBGx8-EE@`P9`f??54O+gK3ZQzYaY1R zPV-13ootlvSik;D1)9~;JT}rI2zNUwJ<0flM6!D<0qvB)t&4(U`q@zZ;z|HZ)u6=)c@`G8nsY^BjT{pFQ; zYIfP7%D)`q-vh(^v%#KBgC3T#nJ+d(Xw8IkdoLgftdY6#Y*oSZm}9GbOsNA*J2A0S zI;tcgaYQ^1VqI|@dh|j4+nyDmeP!5^C^&ymCsptmNOK}as-$3*dV@UapB~0Y0J36w zIkPuRpr=U+R2X3L(?)OfYcXcPBvM-D_gGK-@FmcvcpbWlvL@$%2z*dH?`p_gq z|4(A>IgpBT^&6#XzJUnApA_v+9~#uahOhr|FcgbetHWE)@n9au>u#qn(txr{Ufrhf z9ho~}0;nKHyXhrNpm&g-$0YULu0<7YT2$_0n?4HkWwLs_+b2cvNX&Al@jR#^*rc@ zUOI1$m@C5cm5$SUG=-0-Fwuu2mfOpc}p_kB&AptE^VQ zkp;jNT%7!~H1(*}9$@@{3|xj+SDoSxLd`iXr!i>|`m&jEHmcB}Fl(4Gq=Barn;bdDmK+-!{!?Q^R1%8AXLY4TUMlX?HRnL%v@~?~o0sJ5R_90J zrmi)t>~yx3H{HNV3c;>-6y;OnRKg1c`T#c1oh;nxb`uPBpR(PAi}rj1dh}{*60iq% z@%1pe5_C7rTLX2#%I>b*MeRN>PV6k#!w{$s_Zyyx@)S)UUJ!dA8+&Yo8{lp(Q~b)y zL7FQFSVQ(sUEY*J+3`x-AzkM1=dJ{V+@Z!qBEtAi&RyzI zpxDQFvhdWjXrO}3IMx!sKeH~32}qYCd-6~ za~KOXdE3x;$24|@?C80YQzzDKGUVWf)aky)-pwh#75eg-AnU!`-nQfXF|^Vah*ArLr+$LWeVOe z5pq`h_fUfzt%xC`uB!Mq*w@g9)DB5%SFxey83NrVrW2PQcYa>T?q3OZ=}^;NHv65} z+JaV1oLmgwb$8wtd6j&@VMdY^lJ2lr!NgU0(-b{U8~2fGfZ@yfn%(Z}B367x5xh4f zIEWli(L@aGFJ(9@WH7G{!Dd8Mjw+u7>afk% zdiJs5y=TMe-RdVU%ZlrN;Xt4NWfQg3np-HjTjv&Ak(Cz$<0c16-+{h%Isd76Z|S5dQDE@eUiaWL@j{lA_?my0d;P!<8-bR=!G@k>8D?gI zh7!Oz3RT5kL7C#K9y6VMIYFpfLC(x&{mBU#8FgI2Ur7n2U?e<|Jw@&5g?2lB`p){` 
z2qRn4G7qgRWBAJPI*mmxNy?jRUB5vsW=huS+$VkjPdUWw-5VFksJiTf8{T@tnE)UN zT|Wjp%=p7OVFn!Y^8bpy%KNo(boZZJit3Qg-ci9)eMR|Z_Ti?u*wUiy^ zkIuKuW?K3Xn9_V7b2BcB)NwBLn+)7Px8a@VyCN1!-Rjwb0OG`s21s3`mJU~K5*#^* z1|#&ld$$_K^(k#V=Ow9dhX|Nw8cnjChNJ=*bHR8R~f6 zG&^}Vk(3x-@N;$r;7({M?*y@je~GkT)~&9RQ+;?u*XZd+BMg*jFF@NifQ|o3aXwFe zh4GffcP!T;Hv5hmB4%Uk>_ZY4;dX~-m%n77jI(E$G&*2MqL||GvCG)`6|BtU8vjCh z5|v)=g5(bLyKgyG-?5h#={8LYFEE|_ZcSWwHE(Foo)vC;wa~PlUZnz zj(~cQQAPH`2L-cCDFVoCA=T7W=l?KAAL zO|Hld?4om)FUeAO2KPk%;&E=?M@cc&2~{caI;q%~VVvY$#mP@UbPO?Z2ZykxfbQ%m_C)9Ti1M6}JV$U8gLcz-q9Ct~dq zg9gJq*_aBSlxinMcFI04mM`~C;I^cw{d?S+(^rYmbmoa5FNK?QsIk?pr?)zKL!suz zZ7m11aTJs;)Z!*B>dP`_G^y)-tkJo9{ZYhNms~_ z&)!g+SmhAoj7G&}vJ>&NC|cFkY{u_2nS{rGgh)L4Bm0%!f#vfbhf}KO)ke1`WZkJ2 zT=`+!j7@iTq_cfjFK7naeqBja&R=B0n2mXKJbktA`pFr7zhfpeL61xD!e}vGX0w9P zIi!7*>ADc5bZ+~zu&>rwE9 z25r&5@5dE#<{K?nLiC<=f!{&E){b`fyYDrrI=AVbF{?PM3kt#Ly>2$e-d`adN#|Y- z)<=^DLarc89rCk^{t{al;2)H^#Vu*Tk|sJSLV<6je4uTlCSgSNT;46lxFy~nkkIt_ zDrfkV4($DYsg5n9W#3L&t5AM*cbSu4imBaoP+LdL)Dx)ikLzd-)@hD6?n9wcln)Hg z+Zlv*#wZ)&N0qfPVkGx8iQkqd4*a~6{)3Ghdm0aV!`(wyVNN^YxJuV8zMB+IO9G7A zb%}|;%{nINRUni2!Fp65woq!}9%WABLeeusHA2-+Sg}JHW#_%-{7W|tp4>Ivj4W^U zzIEoNMk8(?s+}>QI*uyyIB)QG8}r?+p{Xa%At&7Ab$8E&do(+V3Vr!CE$)Ik+q6)F zDZtN=Ki|EcSfJl7#L?zE`|d3^TF9sA~v&_x{0(e-;PBywxK) zj!t2~ky_q-)M}D22|fCyaKWPAQ)$hzi8cD!qath+*WN9omC@E z7t%dB)J~7Gp7};f4f1<%f%t3NLasw#tM$h9*AT+E)(P|#Q0wEl=aXI_-|li_e*}NU z5{#&}%cJ6^2qu3>gVazL*0dWos4?XF?eDj*G8ilfY-V5hMyc`Y7IB&21v?zZG@S?LchnvffAADCw zu*SzdGWL4ok;TP+=Em00=Tz`r*X23?lZw`@v{?y7XhXlOWSxKW%V#*TW661M6CKac zC0dllFngS3DC8!2efRqs;qDLh)Z(Equj(-5iAS6L-wGdS!!<)FRa58!DL|p}Yeo6- z$Q}H#=76OU_A&(}-$_GTYKO-_q` zf%O11&gsU##f%qXZg{;5f9+h{57-1aTXeyWKN<+y$uHQEl6^_GrsFPgc2pQZ-iY-z zR6!fu@VZ&l&{h9_{rr(1+S`t--cX)EhXIUjK8xEnKM^+qUM(e8oRm$v^vq@n$I9uw zyIq;6y}Ke?UFCp{JDFG|H~pNAku`Dd`*3u5fgh0Y2HW43>zppP2|Bhbwh5&I$OOoO zh4?N_9)z`YfTNkSQO4DJ6X+7`)AmT8Ulz)`wHADAM-w7g;inZlrY%ZxVTYd8vYT-Y z8ojiiJjk3nJYP(48?k2vRabpw7Fl& zG716Q6va45)5*{N8{Hf>-I@5Xp&LN;v_aL?zNBKp@>SG;vCBZJ@Q;v{6f55bFn#O` 
zx%vc7N+#gtKMIfuaEO$%W{#-`^;w+kIy{ zTsm@@H_{V;7${UC2eMGgGIJ3+%AKbbeQrqG7b=tSmnUnTM`Ji5b;eCV)g>?TR7cls zPjX3j2XDU0-1k#_IKi>Y53(jV7ScpEYi3@HThKRt6aPN*UhY1{RItwF z#uizqJYM1WV~8>StG8^bG!njXG--zTqJ$Hj(`S)BID*$18FJDE5kR)% z-|mY5r~lw#ikEzxPMeKc z_!^zj0M6ZK$y=P@Z+asqLf0Xt9bQLCbFYhJtVAj6 zV$m&O*)s-0?6*c!_^hEiJBjGZ0ya-ZSkos=tj??u6|8lZIS4}ml`Ass;!Pg=&qs(ieMA9PHF$-b;x1d$&e35Hv>pYS$ni17p=7SE%hQ7h55M^I_3? zt+dj@r&Ol!RhgvI`==kSXFP>Z(I%dLVul37nhCQLEqOQAOsMzTl;uq@02+O&#JKyl zY^qy?cioSC8C{%>*>=K?Mj6g2E!2C~uTyU@E_|fX$_IU6>c4N5qgTszBHW^1aHOLrdQTz^!bf2}ix z53rB5@&+WsSiRgfn2&bh3b?>nF30PY<8$lyfo2 z3rMD#CcZmGw6jqfV`Mhtc`B2ob|(ig`pdphC8LH2oZQ0(u_E5#(e8)Y`r zyCX+ZNs?OMT4h;yCnn?=tDdTRjWXHgmoP{jy7bMppl#JMvH9%Ht5-cumG~cj-%|a{ zU&!YSGeCE&gK5jg7;)6ojYPkr<#pgcwEhy^Pr zC1N6M*0QG60-c=*Ev(jq{n3JT9``i;>FcSFH+v8SGc8$s4nECi7=%@L!TcMeWHye{ z1JoNe+XCrQWlw6K*=s!6L4I`B^N?7}O_Kl4xbutjHQBw9Pc)=CwKcH+`>+6Z-X>&} zU0KiokEqAp`u6=(R5pOZX_bTx5W4@&^5VYM6Jw1@h1r(DsG<((wkcpj5dkJqZ4J)Q z2hEI6+AEj3iVZ_d3vDXwGZf27rY}!oCk(zFZUPlDVs_emW|;3Mk6jJ*~>JA2hy6(cdhLE8X`ibI(r?<7f%n9~tzS zlT(MLNO(2QP%cmZ(!5U!81y;e@0(P7X{QO{qIB!2y8EOgj@m<7pFkClk-A|n&Ltv8 zzR*#QYx9oDVuq0fs=yO)?pB%IO72q3@C6E}^o2r3kR+^*j%~nPqo?bztW;mxf~#&rA4o z-as|8Hl;522uH-ytkE1zCqu?UazkB$*R_!cG9MQ62+~7$YJ@D;$tS(LjINpJLWo_L zO>7Fyn1e=K%z~s7Ib3=ue1#n98SlCd2W*+XRGQs1Rx!h6s9U+p)AST$a4fRyIEQez z_-*|x0K=H9`#ZD9OOHBEIR#P|DJ_>!n&htl(WTr&!$YQ_=ojYn>=KF^$kgyz6Zpi( z`DqYivG@3n*2u5Frqn4&I1p+lVxR?)uJe^q#4g71ABjr0Yn9GeS`|L`?LEL#L7w!a zrmIROCw;D}ojrt`TNdtqGva(tQ3kM(>NjUlhO_-#Y5k;1s!%(&mRu9r#*Z@_<|)jg za9c3hc2_^ZfEfTqRou1QHu$5HdLNm0{%Aa^sAWovPc*tYMcN-BnN_V$d%|I;s9wUw7Y z-6E#@uQhmgi~*&Y7h+|;%_c~XVu8F^T|YrrM~F1^z1)7mqu`6Kj!fzF#PqrwzNV-M zMy7lMheV5o7gWqQ)yp|LS=?6hn(B&LV~vHs{G1$L3E)HMqgA&o z!A?)eLs1!_A9e-xU`vy{3Jt|+_N&tyDYY7kstd_U8tm9VQETkD!(j0xe?sX)usA`x zSEcj&Lmmjgd+m}PE#r*iC$+e16fCU7K~Cpk-BEaw-96Iu%%+!51jR&Q-Np_~YEk+U zmwO5qkb(b7L$UKefE?RSb<=S!}kzj;jkVtnPU31qYcK| z44yU^_)b|}?GW{zS5QEbO=E&=nKH+?5n6ggfF9+rsoHaYScWmT?7mORjFiTzUIX}) 
zuakokU!y~SMlei_0hE%8>-Q$|m~Lfc0+kfnW7b!W{c*QEHS~rU%ZfeI;bQ?Y)%Yd5 zHKUbrG%u8Vf2Za$YMh$IMbI4b)c(0C1%I&%Q4l*rB|B+hX3*JQ)QFXjzNv6{Ya@F zNb$fFPgAuS)%~##2GjMQSLnG(3SJw3h!x|lzn>}EAIVkpU2lHToU}vj1MOuALvAwg zYd~#~wL^hxDX76#?M>_2ixAN+m{&piRG5we26Hk{G;Hdn^*NF0uY4D-cQ$+4w5ERY zx9Q`vVmrN!CF=Le#>svbhB6(23gMc! z#;JKln5y3Sa(qpFMKzgQ8e#7jAC|g4RPTa)*+b~LO{ohld7~%_{Gum)&R1nlX_B-U zG=`9jZV*~qO3xBDUNTco_rYg>HtrT3ija=5mr+1vqwHeLkyhd=>KKp95bUH#U`PQ+ zzn-7efew=WrQuE-g9kmH!>z5|Gs*Q&-lcYTNw@U{!G}X7tTtGy!SbosIbC-5s0)a` zICbFmJ#d)mhWbiRYYJk{D||6JdDJ2r0J+uUDhYIFn=YBCv9M@52H4Fr`os_kH(B70 zxifgqSpASZ0YeE5;utXGmaqacFu+JC2uaX7on_jJpJpW1rH+*`5uIv2W{Jm9(agP^ zo&XU4XZuBj&sXwLZjwn_9&>>%derE-$MY^(tygsvH-@8uah5gVWpxi)LZ_w% zR_Lg9e?2zau6;M-6%u$|ePy6ktyYK>VuT#X6*}xX`_Y;L(%chOtt)`4squx_l5)9V zD?K3bCBGDk=o&AC{={+XAm!BGk*i-<7%%(q=#ls?a?${aY-3W;_#D!a@8mL+WyDND z6qa|q9nZL~;O>NY)+edr_2KG49$O_NrQ)ZLchAwZ$G#G=45(dvK05###XLRLPFS0p z#L5I7&MR~f6u#M7IXeN4>Z(!IDDSsUo>K7;5%Eb((@^k(vOl=JrA-qfiEUZpc#rYE z){e34%s%p*?JxmVElfOFXKI%JHDJb#FNi2g`v9!M!m`Vf7cw^<#B@bcZ;nTI&|P;; zh8_P2vNY0Hat#Wf$*Ivht>p06l^DqyVEe9WXhfKPUS8CqjUH!$oV651x70%cV+-xY zccpd63OmIH+=!>?Vi80f`|66-V5nW3t<_o$U=NmPLtw3WUYl0GI>NbUbLAU!(jau* zlo|-~2E>n03Po|v1(QPAfW=PI+&O!W!oJ&wmuH`S?Mh;(DXcwUy{n?%3TCW!X)9An z5qCLf89qU?tP6`hY}OWdF*Z&sG*dBEc^ZkV(XC3FZ9DE6=Z8?1nINHLH?Q4=l#^AO z=UiX*@Y0bP_3^>?t|K1J1a^$<=~PT`+ee5t<&EX zoxb%hcbZW5w$8ogEB=97zIAG}$-J4|%;u>7)^w`Q|%!|~O1GuwKe zB^S+n5>v8*{kJ|I>koK+sp!j0+}8N>lK3CLBcf;jW=U)ueK3i=H;$4OvXPOo58 zc%-0i`e*o@s!iK149$E}RsGH$P>RB|+cxO^#|qPO!|BfM3c$(B6tbTC=FVq+=8{t}KIJyAL zCy-~}xy^ZDOl3M47SlkWCcA<_)<9C8@|F8wGnV&o%dL7MQV=lE_Oj7+yL%>!b@Z`> z9zV#0}k^!=ia63;T7+B!sscqZsvEsBf3D&$XZW)?8 z_)W$oTfyjlNd!)mk}Ghq`2(&}kQpXP`bO2bW1)mPY^mLLYa#$(GV?pTU;QyT71!N0 z4%fBVT-!4efI2UYPwl%_h!5t^+DdP)KE8xP&3mvFhmdGNPoPR{o%nJ6C13>t>U#Z(5sNeG=1hFzT-s&Ux&H4Cr zc+Cv4`ZrFR4bX@Ch5#-;F-Px`nNPoCS`3%#GE>DSF=pvn0PnujDQm|Rtu|g{cU_}T ztQ26ZY16YpfcjROO1E=*bzyp(qNg0dIi)p42k#3u>k_(vMSGw(?luGia2^H?&3?n5 zNR1-VfC55gcnCYYNDInNFrb4r9X*so2X&Op0MeINR>=cH!1lOL`IR|cOo~dnB7+H! 
z^qbtv$i%I%)g=g+EDdN!60sqI?j1fWK>!dtms7lE(PezwhZpoKbW5xTy>N@XFX)6B z*yMnXXZ=&P6tPatIFtA#}!$<&M1i0xU7d22|)`>Ah^F36TST%a#ZV=#_ zv|fGgL}p{942lFEv&C0M82zznD+hplpf5d$$?;o8@#IdCNb{mLAlS>vVpZdpLD^+N zDg{VDtYe`H>&`B5DWV>e1`ZrP+}}7hMBQiKtz1zd|ClW7s%k$(F5Tz?cD=x=zneZA z({f_*5BLWaA;$p?4s!v*UsnE7go20?ieWqxh}|G2L3cOk3Q7%-_~hD{b%X(i&tJXq zy8jvvBv4mW%KrvjpV=kZi}1V+{CFzwO^R?0uI_Zy<|g0?EqBdwpernmI-;V&jHE?C zeLDfz90)|bEp}iG;PzaXIR(&haxnc9JZi{T2=vP#lvR9TQ)}UEqK*HR4}fwil%x8% z)VqxJ9H{4pio49eHjBU9BU#6;skkGx4_r3dLzY#nFM>pSGad-6-4IxE%mSidVu=6q zy9=)vVpx3*)c;t=SoG7j2qgrzxjjc`6nv#V(%1k zP6wKM<9pn506bak&`;smNA}{Gk>(d>L^H15z9sPw82ApZ?$wqz$#5`}S;k$NjahKm z-0OcK4blx>m_KIdnVjVb9Mjr{;$(6 zN!*8jxWM~5)=+v)1r=(9Q|)o)){>Ccu-U)u*VHXxc4N)~+K#E|mHvQUYye*hHd9KA zGUSv4^nbudH_CvRybSltNErL9i?>4Qq+dn>Uyp0RC6iZwaZ6P|XAe2~Gvi9-qDj)q zU%5}68s9*m=HYlj7sDQ~enVK4VW5Uh&2S0d?Sk3E(!vW9Akc4(Av&N zkR`C)sjDcu3i^`q-e?1${Fj=j84p)RSBngYVkQd~C^fC+${9dx)m)lwlSU&Fp72Rf%SRVkez4ghWI{w_la_s_ixraIkWcHg?cKtm&@~vr`MfS*+ z_3%lq6BOMU`|p*^rV-vFxq`LkmK$}2M!U$TrZk{m6KP33){&I=TX_UyXBp=+b#%mk za67jcn!wKhrcZLZk+@1<#Pt)a2BOGu7}62?>qtq z+h5u&Q@ZZ^+uUasHOHUNpn&H$Azfo=W@|3>0DtGuE?E^tuGuKjuZ)|b0e$(i(h)XQ zsq*M(bL~4*QHLi8bU%BsGRD9-o9=bHCpI+_CokxkFMj9l;W^N?+TRhl3_zq`+4bm< zBb;3Ef(N5G)>R)dHGFSaOgEew^o33X={AK%TkfnqHIgK|*!HA`sv)(-Yo7s}ygN~< zDENU@8ZHXESkKY_t*j0Bw*6#xmLa4!l>$YUzvK>RE*VeplQl}Adr6+jiB*cpc{z?? zKymso4vk88#L26w0=2uk=5LmR8BYWWCfd`Q4uO*~>k}?64amvRtDwKpMpA=KXr|8k z=*Ka6$Z+W9(CU8jVAq@+qKcsjt&_4gVC4wd$Jd;l2tfejSl&HDFxqt{slNw}6q-V+ zF!7~q>OrmmKQzB-)cA4JdA06`79S9VwcBBx@WXZc88k~QP@d=J&P&YOz65M!SALvZ z8ZN=c9NJadHJm;O&hv-^3NbyEbt-IBz*J3DGL>Do;ZBx1TzzF8ghGrooalQ1qvY7! 
zguYdf+47Gu2Uj`llt*ax#G!UJfT!m`?TweT6-W59jRrXQwCFO+Z-=GJBec;zlYq(o z;G-xj=*y!3KMexsd)qpRLPc#izM9^`)+9!7p0;M~nXqa`6h|NsqGNCxF7HvnhU1RE zBXlizU>GP{WgHX~jx3(P3K04}-aH8v1O$5FvMz}*{%FcIG0J=QCMF;rZ) zzm(pQ-n?qrQZI1`I6Q&RuuTk(0USqnjMzosTMkGmS@Jot&zU1}=OA4g_D8jtlag{#L}VjSe5z zd8X47flZh2t7Y3kBThnYkvq49fSUxtV7tL)e}a7{imB(x%v*adKkbiyhy&@CwR*}+ z@j&Hzc>o&FUU)q1x#NsYDrch4|Hw5`f2H>5FZ;IV{%&i9j17#l{_ep!P_atDD*SLH zW78Z%Te&sGbFg_GS%`D>^MDz9*~*P6{fi0vp|1+7eUPB!>Abt;op-o-Zz^5FAcu@r z{avru{5u2q-4!T7+P1_J&>@+nK`}Sx91lI4`Bo>(6a(HJ%pSecoY-Rv_*_kv{|N90 zc%%*mgDqmj%j$5E28!?%N1j^ejqt^(+PF5q1icx;f0YH0EQ4GZI+w6}Pd2xJaxdrC zV9`hRn72~cBm+_Bjf}iQ&5x7m5`YDN3EV$j=Std8>73w%W{uN;{?cbyVv3dFWB>%9 z990tpIQTiyujMZ1z>22plA})=Cvm<_D`Uc1XN5*G9^2dI>u+8Im!6*=qE7}*6a4M} z^Uy=<+y+!X>sVAy$w*c?JO9~kuT}hAe*!0-mhF!}NW(q`{i8Apgr6(wwRcBJ(t0p3 zaUQL*A~3>wxsuWUU)PkxROdb>|)T~JKew?J3>0CyptI$L&p6f^=6rvdGmsymzQ zISv3{^<>!6T)zPN#oAan#f1M_pH8gtQ8zKs+3Iq6j%#A;y9`A4zWW<;?WDR3Eh$~q z{n4`=?i78xJ9auFD_e)z4DmV#dg`FI#UU%#8F6OJJ}qBW7nhMz!doZ@@%ZuZ4DC*u z-9EL5M*WlK!k6Y-y3&yFR}ANa2+5r6Q*Xt|Y3hZ>wXSE|01)5@>qXSV`dSA!w-fye zDky+1_`iSucLe@_Mj(FejN$sFjlsmk%f7(dc$^h Br7i#f diff --git a/cert.yaml b/cert.yaml new file mode 100644 index 0000000..6b10578 --- /dev/null +++ b/cert.yaml @@ -0,0 +1,7 @@ +apiVersion: networking.gke.io/v1 +kind: ManagedCertificate +metadata: + name: google-cert-for-analytics +spec: + domains: + - app.opsbeach.com \ No newline at end of file diff --git a/connect/build-connect-trigger.yaml b/connect/build-connect-trigger.yaml new file mode 100644 index 0000000..45c40d7 --- /dev/null +++ b/connect/build-connect-trigger.yaml @@ -0,0 +1,42 @@ +steps: + - id: "tests" + name: gcr.io/cloud-builders/git + entrypoint: /bin/bash + args: + - -c + - | + echo "here we run all our unit tests" + waitFor: ['-'] + - id: "replace tag" + name: gcr.io/cloud-builders/git + entrypoint: /bin/bash + args: + - -c + - | + sed -e "s/tag/"$SHORT_SHA"/g" ./connect/k8s_connect.yaml > ./connect/k8s_connect_latest.yaml + waitFor: ['tests'] + - id: 
"build-connect" + name: "gcr.io/cloud-builders/docker" + args: ["build", "-f", "Dockerfile.connect", "-t", "us-central1-docker.pkg.dev/prodenv1/schematalabs-connect-app/connect:$SHORT_SHA", "."] + waitFor: ["replace tag"] + - id: "push-connect" + name: "gcr.io/cloud-builders/docker" + args: ["push", "us-central1-docker.pkg.dev/prodenv1/schematalabs-connect-app/connect:$SHORT_SHA"] + waitFor: ["build-connect"] + - id: "terminate-connect" + name: "gcr.io/cloud-builders/kubectl" + args: [ "-n", "schematalabs", "scale", "deployment", "connect-app-deployment", "--replicas=0" ] + env: + - "CLOUDSDK_COMPUTE_REGION=us-central1" + - "CLOUDSDK_CONTAINER_CLUSTER=schematalabs" + waitFor: ["push-connect"] + # deploy container image to GKE. + - id: "deploy-connect" + name: "gcr.io/cloud-builders/gke-deploy" + args: + - run + - --filename=./connect/k8s_connect_latest.yaml + - --image=us-central1-docker.pkg.dev/prodenv1/schematalabs-connect-app/connect:$SHORT_SHA + - --location=us-central1 + - --cluster=schematalabs + waitFor: ["terminate-connect", "push-connect"] \ No newline at end of file diff --git a/connect/k8s_connect.yaml b/connect/k8s_connect.yaml new file mode 100644 index 0000000..99d6b67 --- /dev/null +++ b/connect/k8s_connect.yaml @@ -0,0 +1,179 @@ +# Namespace +apiVersion: v1 +kind: Namespace +metadata: + name: schematalabs +--- +# Secrets +apiVersion: v1 +kind: Secret +metadata: + name: connect-app-secret + namespace: schematalabs +type: Opaque +data: + POSTGRES_USER: cG9zdGdyZXM= + POSTGRES_PASS: T3BzQmVAY2gxQDMk + NEO4J_USER: bmVvNGo= + NEO4J_PASS: bmVvNGox + GH_CL_ID: SXYxLjE5ZTZmMzBhNmZkNTViMTM= + GH_CL_SECRET: NjU0MzY3NWEwN2NjNzQ0MTM1YTY2MTk3Mjk4MWM1MWVhY2E3MjVjNQ== + CONNECT_ACCESS_TOKEN: 
QmVhcmVyIGV5SmxibU1pT2lKQk1USTRRMEpETFVoVE1qVTJJaXdpWVd4bklqb2lVbE5CTFU5QlJWQXRNalUySW4wLkYySldVNVktWkpXdmNfbDVDRDVMbFpGUm0yanFEQnFNX0s1Z2JGdU9hdmhNTXEtbXB2cjBHYUtNR3FEbThHblZsUmFXY2lsVG9CZDNGVDllWllVYlRPR1RLRWw3alJ1QkRmMk83YmJaVEt1Z3dkQkEzX0M1QUhzbUlBTVIwdWdZMEFCNWdmTmE0bkpRaDVPN250ZkVNdDRsd0c0dFpNMXB6eTZPU0RSbnhrQ08xWkVxWlRkcENEVXpqNGhTQ1NrQ1ZCNEpJM0Q5TWVsNWlEcVEwaXJacUJ2ZFlxZjFoQlpFVTk3M01JaWhYVlNmdmNZTHByRTlVVGNvTm90a0pTR21pTWRNdkx0TTNXWmgtV2NpSjBPMHFvRzZFb1B6VHJYZDZSY3dnZmxuRWdRMkhfTDY4UnpPUnlPYmJUbmotMXNEa2dkQmFzQjQ1QUtIMllpWVQzWFFKZy5Pa1FQeGd5c1pIdG5ZQ1I4Vy1MVENnLnVBdmw2cTNOS0NzOXJ1YXlmZWRmRXpXQ0RqRkZJSEw5anJiczc2clRhT21WZmFiYmhYMzh1NnJsUlBjVFZDZ2VMUEVEQ0lWdlNVV0RpT0xKVW5ZSk4wMlhaR3BNS2dOQVR1SC10a0gtQjVvZ1FDbE01N0dSTHNDS2lwTDE4YV9GaXk5c09jYTRtbnIzQkl1aFZoenJfQS5mb2pmN3h5S1JVQ0dLR1A3RmdfSnd3 + SMTP_USER_NAME: c2NoZW1hdGFsYWJzQGdtYWlsLmNvbQ== + SMTP_PASSWORD: cGl2cWh6c2J6b2hyb3V4cA== +--- +#pvc +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + name: connect-disk + namespace: schematalabs +spec: + accessModes: + - ReadWriteOnce + resources: + requests: + storage: 5Gi +--- +# Deployment.yaml +apiVersion: apps/v1 +kind: Deployment +metadata: + name: connect-app-deployment + namespace: schematalabs +spec: + replicas: 1 + selector: + matchLabels: + app: dev + template: + metadata: + labels: + app: dev + spec: + containers: + - name: connect-app + image: us-central1-docker.pkg.dev/prodenv1/schematalabs-connect-app/connect:tag + ports: + - containerPort: 7081 + resources: + limits: + cpu: 500m + memory: 2Gi + requests: + cpu: 500m + memory: 2Gi + volumeMounts: + - name: bootstrap + mountPath: /connect/bootstrap + - name: cloud-task-key + mountPath: /var/secrets/google/cloud-task + - name: cluster-key + mountPath: /var/secrets/google/cluster + - name: compute-key + mountPath: /var/secrets/google/compute + - name: bucket-key + mountPath: /var/secrets/google/bucket + env: + - name: POSTGRES_USER + valueFrom: + secretKeyRef: + name: connect-app-secret + key: 
POSTGRES_USER + - name: POSTGRES_PASS + valueFrom: + secretKeyRef: + name: connect-app-secret + key: POSTGRES_PASS + - name: NEO4J_USER + valueFrom: + secretKeyRef: + name: connect-app-secret + key: NEO4J_USER + - name: NEO4J_PASS + valueFrom: + secretKeyRef: + name: connect-app-secret + key: NEO4J_PASS + - name: GH_CL_ID + valueFrom: + secretKeyRef: + name: connect-app-secret + key: GH_CL_ID + - name: GH_CL_SECRET + valueFrom: + secretKeyRef: + name: connect-app-secret + key: GH_CL_SECRET + - name: CONNECT_ACCESS_TOKEN + valueFrom: + secretKeyRef: + name: connect-app-secret + key: CONNECT_ACCESS_TOKEN + - name: GOOGLE_APPLICATION_CREDENTIALS + value: /var/secrets/google/cloud-task/cloud-task-sa-key.json + - name: BUCKET_SA_CREDENTIALS + value: /var/secrets/google/bucket/bucket-sa-key.json + - name: SMTP_USER_NAME + valueFrom: + secretKeyRef: + name: connect-app-secret + key: SMTP_USER_NAME + - name: SMTP_PASSWORD + valueFrom: + secretKeyRef: + name: connect-app-secret + key: SMTP_PASSWORD + volumes: + - name: bootstrap + persistentVolumeClaim: + claimName: connect-disk + - name: cloud-task-key + secret: + secretName: cloud-task-sa-key + - name: cluster-key + secret: + secretName: cluster-sa-key + - name: compute-key + secret: + secretName: compute-sa-key + - name: bucket-key + secret: + secretName: bucket-sa-key + +--- +# BackendConfig to do health check so above deployment will become ready +apiVersion: cloud.google.com/v1 +kind: BackendConfig +metadata: + name: health-check-config-connect + namespace: schematalabs +spec: + healthCheck: + checkIntervalSec: 30 + port: 7081 + type: HTTP #case-sensitive + requestPath: /connect/actuator/health +--- +# Service.yaml +apiVersion: v1 +kind: Service +metadata: + name: connect-app-service + namespace: schematalabs + annotations: + cloud.google.com/backend-config: '{"default": "health-check-config-connect"}' +spec: + type: ClusterIP + selector: + app: dev + ports: + - name: http + port: 7081 + targetPort: 7081 + protocol: 
TCP + - name: https + port: 443 + targetPort: 7081 + protocol: TCP +--- \ No newline at end of file diff --git a/connect/pom.xml b/connect/pom.xml new file mode 100644 index 0000000..249cd64 --- /dev/null +++ b/connect/pom.xml @@ -0,0 +1,251 @@ + + + + opsbeach + com.opsbeach + 1.0-SNAPSHOT + + 4.0.0 + connect + + + + + com.opsbeach + shared-lib + + + + org.springframework.boot + spring-boot-starter-actuator + + + org.postgresql + postgresql + + + org.springframework.boot + spring-boot-starter-data-jpa + + + org.springframework.boot + spring-boot-starter-json + + + org.springframework.boot + spring-boot-starter-web-services + + + org.springframework.boot + spring-boot-starter-web + + + org.springframework.boot + spring-boot-starter-data-neo4j + + + org.neo4j + neo4j-ogm-core + + + org.springframework.boot + spring-boot-starter-data-rest + + + org.springframework.boot + spring-boot-starter-test + test + + + org.junit.vintage + junit-vintage-engine + + + + + javax.validation + validation-api + + + + net.kaczmarzyk + specification-arg-resolver + + + + io.springfox + springfox-swagger2 + + + io.springfox + springfox-swagger-ui + + + io.springfox + springfox-data-rest + + + + org.flywaydb + flyway-core + + + + org.projectlombok + lombok + + + + io.micrometer + micrometer-registry-jmx + + + + org.junit.jupiter + junit-jupiter-api + + + + org.apache.avro + avro-compiler + + + org.jgrapht + jgrapht-core + + + + org.apache.commons + commons-collections4 + + + com.github.os72 + protoc-jar + + + com.google.protobuf + protobuf-java + + + com.google.cloud + libraries-bom + pom + + + + com.google.cloud + google-cloud-storage + + + + org.junit.jupiter + junit-jupiter-engine + + + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + + + org.apache.maven.plugins + maven-surefire-plugin + + + + org.jacoco + jacoco-maven-plugin + ${jacoco.version} + + + + prepare-agent + + + + report + prepare-package + + report + + + + + + com/opsbeach/connect/**/entity/** + 
com/opsbeach/connect/**/dto/** + com/opsbeach/connect/**/repository/** + com/opsbeach/connect/**/controller/** + com/opsbeach/connect/**/enums/** + com/opsbeach/connect/core/** + com/opsbeach/connect/config/** + org/schemata/schema/** + com/opsbeach/connect/schemata/graph/WeightedSchemaEdge.class + com/opsbeach/connect/schemata/validate/** + + + + + + org.springframework.boot + spring-boot-maven-plugin + ${org.springframework.boot} + + + + repackage + + + + + + + com.github.os72 + protoc-jar-maven-plugin + ${protoc-jar-maven-plugin.version} + + + generate-proto-java-source + generate-sources + + run + + + ${protoc.version} + true + + src/main/resources/schemata/protobuf + + + + + + + + + + + + org.jacoco + jacoco-maven-plugin + ${jacoco.version} + + + + + report + + + + + + + \ No newline at end of file diff --git a/connect/src/main/java/com/opsbeach/connect/ConnectApplication.java b/connect/src/main/java/com/opsbeach/connect/ConnectApplication.java new file mode 100644 index 0000000..9b30d8c --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/ConnectApplication.java @@ -0,0 +1,23 @@ +package com.opsbeach.connect; + +import com.opsbeach.connect.core.BaseRepositoryImpl; +import lombok.extern.slf4j.Slf4j; +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.data.jpa.repository.config.EnableJpaRepositories; + +/** + *

+ * Main class. + *

+ */ +@Slf4j +@SpringBootApplication(scanBasePackages = "com.opsbeach") +@EnableJpaRepositories(repositoryBaseClass = BaseRepositoryImpl.class) +public class ConnectApplication { + + public static void main(String[] args) { + SpringApplication.run(ConnectApplication.class, args); + log.info("OpsBeach Connect has been started."); + } +} \ No newline at end of file diff --git a/connect/src/main/java/com/opsbeach/connect/config/Neo4JConfig.java b/connect/src/main/java/com/opsbeach/connect/config/Neo4JConfig.java new file mode 100644 index 0000000..abf5690 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/config/Neo4JConfig.java @@ -0,0 +1,22 @@ +package com.opsbeach.connect.config; + +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.data.neo4j.core.DatabaseSelection; +import org.springframework.data.neo4j.core.DatabaseSelectionProvider; + +@Configuration +public class Neo4JConfig { + + @Bean + DatabaseSelectionProvider databaseSelectionProvider(@Value("${spring.data.neo4j.database:}") String database) { + return () -> { + String neo4jVersion = System.getenv("NEO4J_VERSION"); + if (neo4jVersion == null || neo4jVersion.startsWith("4")) { + return DatabaseSelection.byName(database); + } + return DatabaseSelection.undecided(); + }; + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/config/SchedulerConfig.java b/connect/src/main/java/com/opsbeach/connect/config/SchedulerConfig.java new file mode 100644 index 0000000..eb69d11 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/config/SchedulerConfig.java @@ -0,0 +1,38 @@ +package com.opsbeach.connect.config; + +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.scheduling.TaskScheduler; +import 
org.springframework.scheduling.annotation.EnableScheduling; +import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler; + +import com.opsbeach.connect.scheduler.SyncScheduler; + + +@Configuration +@EnableScheduling +public class SchedulerConfig { + + // Number of threads allocated for OS related tasks + public static final int OS_THREAD_RESERVE = 2; + // Minimal acceptable thread pool size + public static final int MIN_THREAD_THRESHOLD = 1; + + @Bean + public SyncScheduler syncScheduler() { + return new SyncScheduler(); + } + + @Bean + public TaskScheduler getPoolScheduler() { + ThreadPoolTaskScheduler scheduler = new ThreadPoolTaskScheduler(); + scheduler.setThreadNamePrefix("ThreadPoolTaskScheduler"); + scheduler.setPoolSize(getThreadPoolSize()); + scheduler.initialize(); + return scheduler; + } + + public int getThreadPoolSize() { + return Math.max((Runtime.getRuntime().availableProcessors() - OS_THREAD_RESERVE), MIN_THREAD_THRESHOLD); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/core/App2AppConfig.java b/connect/src/main/java/com/opsbeach/connect/core/App2AppConfig.java new file mode 100644 index 0000000..a433e99 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/core/App2AppConfig.java @@ -0,0 +1,37 @@ +package com.opsbeach.connect.core; + +import com.opsbeach.connect.core.utils.Constants; +import com.opsbeach.sharedlib.utils.YamlPropertySourceFactory; +import lombok.Getter; +import lombok.Setter; +import org.springframework.boot.context.properties.ConfigurationProperties; +import org.springframework.boot.context.properties.EnableConfigurationProperties; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.PropertySource; + +import java.util.HashMap; +import java.util.Map; + +/** + *

+ * Fetches value from Application-url file. + *

+ */ +@Configuration +@EnableConfigurationProperties +@Getter +@Setter +@ConfigurationProperties(prefix = "application") +@PropertySource(value = "classpath:application-url.yml", factory = YamlPropertySourceFactory.class) +public class App2AppConfig { + private Map connect = new HashMap<>(); + private Map analytics = new HashMap<>(); + + public String getIntegrationBaseUrl() { + return this.getConnect().get(Constants.BASE_URL); + } + + public String getTransactionBaseUrl() { + return this.getAnalytics().get(Constants.BASE_URL); + } +} \ No newline at end of file diff --git a/connect/src/main/java/com/opsbeach/connect/core/BaseDto.java b/connect/src/main/java/com/opsbeach/connect/core/BaseDto.java new file mode 100644 index 0000000..78691a3 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/core/BaseDto.java @@ -0,0 +1,37 @@ +package com.opsbeach.connect.core; + +import com.fasterxml.jackson.annotation.JsonIgnore; + +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; +import lombok.experimental.SuperBuilder; + +import java.time.LocalDateTime; + +/** + *

+ * Basic fields of a table which every table extends. + *

+ */ +@Getter +@Setter +@SuperBuilder +@NoArgsConstructor +@AllArgsConstructor +public abstract class BaseDto { + private Long id; + @JsonIgnore + private LocalDateTime createdAt; + @JsonIgnore + private LocalDateTime updatedAt; + @JsonIgnore + private Boolean isDeleted; + @JsonIgnore + private long createdBy; + @JsonIgnore + private long updatedBy; + @JsonIgnore + private Long clientId; +} \ No newline at end of file diff --git a/connect/src/main/java/com/opsbeach/connect/core/BaseModel.java b/connect/src/main/java/com/opsbeach/connect/core/BaseModel.java new file mode 100644 index 0000000..ba3c10a --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/core/BaseModel.java @@ -0,0 +1,58 @@ +package com.opsbeach.connect.core; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import jakarta.persistence.Column; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.MappedSuperclass; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; +import lombok.Builder.Default; +import lombok.experimental.SuperBuilder; +import org.hibernate.annotations.CreationTimestamp; +import org.hibernate.annotations.UpdateTimestamp; + +import java.time.LocalDateTime; + +/** + *

+ * Basic fields of a table which every table extends. + *

+ */ +@MappedSuperclass +@Getter +@Setter +@SuperBuilder +@NoArgsConstructor +@AllArgsConstructor +public abstract class BaseModel { + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + @Column(updatable = false, nullable = false) + @JsonIgnore + private Long id; + @JsonIgnore + @Default + @Column(name = "is_deleted") + private Boolean isDeleted = Boolean.FALSE; + @JsonIgnore + @CreationTimestamp + @Column(name = "created_at", updatable = false, nullable = false) + private LocalDateTime createdAt; + @JsonIgnore + @UpdateTimestamp + @Column(name = "updated_at") + private LocalDateTime updatedAt; + @JsonIgnore + @Column(name = "created_by") + private long createdBy; + @JsonIgnore + @Column(name = "updated_by") + private long updatedBy; + @JsonIgnore + @Column(name = "client_id") + private Long clientId; +} \ No newline at end of file diff --git a/connect/src/main/java/com/opsbeach/connect/core/BaseRepository.java b/connect/src/main/java/com/opsbeach/connect/core/BaseRepository.java new file mode 100644 index 0000000..3a22fba --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/core/BaseRepository.java @@ -0,0 +1,19 @@ +package com.opsbeach.connect.core; + +import org.springframework.data.jpa.repository.JpaRepository; +import org.springframework.data.jpa.repository.JpaSpecificationExecutor; +import org.springframework.data.repository.NoRepositoryBean; +import org.springframework.data.rest.core.annotation.RepositoryRestResource; +import org.springframework.transaction.annotation.Transactional; + +/** + *

+ * Base repository of a table which every table extends. + *

+ */ +@Transactional +@NoRepositoryBean +@RepositoryRestResource(exported = false) +public interface BaseRepository extends JpaRepository, JpaSpecificationExecutor { + void refresh(T t); +} \ No newline at end of file diff --git a/connect/src/main/java/com/opsbeach/connect/core/BaseRepositoryImpl.java b/connect/src/main/java/com/opsbeach/connect/core/BaseRepositoryImpl.java new file mode 100644 index 0000000..5504892 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/core/BaseRepositoryImpl.java @@ -0,0 +1,110 @@ +package com.opsbeach.connect.core; + +import jakarta.persistence.EntityManager; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; +import org.springframework.data.jpa.domain.Specification; +import org.springframework.data.jpa.repository.support.JpaEntityInformation; +import org.springframework.data.jpa.repository.support.SimpleJpaRepository; +import org.springframework.lang.Nullable; +import org.springframework.transaction.annotation.Transactional; +import org.springframework.util.ObjectUtils; + +import com.opsbeach.connect.core.utils.Constants; +import com.opsbeach.sharedlib.security.SecurityUtil; + +import java.util.List; +import java.util.Objects; +import java.util.Optional; + +/** + *

+ * Base repository implementation of a table which every table extends. + *

+ */ +public class BaseRepositoryImpl extends SimpleJpaRepository implements BaseRepository { + + private final EntityManager entityManager; + private final Specification notDeleted = (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(Constants.IS_DELETED), Boolean.FALSE); + + public BaseRepositoryImpl(JpaEntityInformation entityInformation, EntityManager entityManager) { + super(entityInformation, entityManager); + this.entityManager = entityManager; + } + + private boolean isClientIdEmpty() { + return ObjectUtils.isEmpty(SecurityUtil.getClientId()); + } + + private Specification clientSpec() { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(Constants.CLIENT_ID), SecurityUtil.getClientId()); + } + + @Override + public Optional findOne(@Nullable Specification specification) { + Specification baseSpec = Objects.requireNonNull(specification).and(notDeleted); + baseSpec = isClientIdEmpty() ? baseSpec : baseSpec.and(clientSpec()); + return super.findOne(baseSpec); + } + + @Override + public List findAll() { + Specification baseSpec = isClientIdEmpty() ? notDeleted : notDeleted.and(clientSpec()); + return super.findAll(baseSpec); + } + + @Override + public List findAll(@Nullable Specification specification) { + Specification baseSpec = Objects.requireNonNull(specification).and(notDeleted); + baseSpec = isClientIdEmpty() ? baseSpec : baseSpec.and(clientSpec()); + return super.findAll(baseSpec); + } + + @Override + public Page findAll(Pageable pageable) { + Specification baseSpec = isClientIdEmpty() ? notDeleted : notDeleted.and(clientSpec()); + return super.findAll(baseSpec, pageable); + } + + @Override + public Page findAll(@Nullable Specification specification, Pageable pageable) { + Specification baseSpec = notDeleted.and(specification); + baseSpec = isClientIdEmpty() ? 
baseSpec : baseSpec.and(clientSpec()); + return super.findAll(baseSpec, Objects.requireNonNull(pageable)); + } + + @Override + public S save(S entity) { + if (Boolean.FALSE.equals(isClientIdEmpty())) { + entity.setClientId(SecurityUtil.getClientId()); + if (ObjectUtils.isEmpty(entity.getId())) entity.setCreatedBy(SecurityUtil.getUserDetails().getId()); + entity.setUpdatedBy(SecurityUtil.getUserDetails().getId()); + } + return super.save(entity); + } + + @Override + public List saveAll(Iterable entityList) { + // no need to set client Id in this method becauese saveAll() calls save() method to each and every object. + // In save() method we set client Id. + return super.saveAll(entityList); + } + + @Override + public long count() { + Specification baseSpec = isClientIdEmpty() ? notDeleted : notDeleted.and(clientSpec()); + return super.count(baseSpec); + } + + @Override + public long count(@Nullable Specification spec) { + Specification baseSpec = isClientIdEmpty() ? notDeleted : notDeleted.and(clientSpec()); + return super.count(baseSpec.and(spec)); + } + + @Override + @Transactional + public void refresh(T t) { + entityManager.refresh(t); + } +} \ No newline at end of file diff --git a/connect/src/main/java/com/opsbeach/connect/core/enums/AuthType.java b/connect/src/main/java/com/opsbeach/connect/core/enums/AuthType.java new file mode 100644 index 0000000..28a63f1 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/core/enums/AuthType.java @@ -0,0 +1,19 @@ +package com.opsbeach.connect.core.enums; + +import com.opsbeach.connect.core.utils.Constants; + +public enum AuthType { + BASIC(Constants.BASIC), + TOKEN(Constants.TOKEN), + BEARER(Constants.BEARER); + + private final String key; + + AuthType(String key) { + this.key = key; + } + + public String getKey() { + return this.key; + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/core/enums/ServiceType.java b/connect/src/main/java/com/opsbeach/connect/core/enums/ServiceType.java new file mode 
100644 index 0000000..396a587 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/core/enums/ServiceType.java @@ -0,0 +1,28 @@ +package com.opsbeach.connect.core.enums; + +import com.opsbeach.connect.core.utils.Constants; + +public enum ServiceType { + SLACK(Constants.SLACK), + PAGER_DUTY(Constants.PAGER_DUTY), + TEAMS(Constants.TEAMS), + JIRA(Constants.JIRA), + WORK_DAY(Constants.WORK_DAY), + SERVICE_NOW(Constants.SERVICE_NOW), + SALES_FORCE(Constants.SALES_FORCE), + FRESH_DESK(Constants.FRESH_DESK), + ZOHO(Constants.ZOHO), + ZENDESK(Constants.ZENDESK), + METRICS(Constants.METRICS), + GITHUB(Constants.GITHUB); + + private final String key; + + ServiceType(String key) { + this.key = key; + } + + public String getKey() { + return this.key; + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/core/enums/TaskType.java b/connect/src/main/java/com/opsbeach/connect/core/enums/TaskType.java new file mode 100644 index 0000000..851858a --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/core/enums/TaskType.java @@ -0,0 +1,26 @@ +package com.opsbeach.connect.core.enums; + +import com.opsbeach.connect.core.utils.Constants; + +public enum TaskType { + INCIDENTS(Constants.INCIDENTS), + INCIDENT_METRICS(Constants.INCIDENT_METRICS), + SERVICES(Constants.SERVICES), + INCIDENT_LOG_ENTRY(Constants.INCIDENT_LOG_ENTRY), + PILLAR(Constants.PILLAR), + CREATE_TICKET(Constants.CREATE_TICKET), + GET_TICKETS(Constants.GET_TICKETS), + TICKET_METRICS(Constants.TICKET_METRICS), + POST_MESSAGE(Constants.POST_MESSAGE), + RENEWAL_ACCESS_TOKEN(Constants.RENEWAL_ACCESS_TOKEN); + + private final String key; + + TaskType(String key) { + this.key = key; + } + + public String getKey() { + return this.key; + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/core/hibernate/HibernateConfig.java b/connect/src/main/java/com/opsbeach/connect/core/hibernate/HibernateConfig.java new file mode 100644 index 0000000..f236d91 --- /dev/null +++ 
b/connect/src/main/java/com/opsbeach/connect/core/hibernate/HibernateConfig.java @@ -0,0 +1,41 @@ +package com.opsbeach.connect.core.hibernate; + +import org.hibernate.cfg.AvailableSettings; +import org.springframework.boot.autoconfigure.orm.jpa.JpaProperties; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.orm.jpa.JpaVendorAdapter; +import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean; +import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter; +import org.springframework.transaction.annotation.EnableTransactionManagement; + +import javax.sql.DataSource; +import java.util.HashMap; +import java.util.Map; + +@Configuration +@EnableTransactionManagement +public class HibernateConfig { + private final JpaProperties jpaProperties; + + public HibernateConfig(JpaProperties jpaProperties) { + this.jpaProperties = jpaProperties; + } + + @Bean + JpaVendorAdapter jpaVendorAdapter() { + return new HibernateJpaVendorAdapter(); + } + + @Bean + LocalContainerEntityManagerFactoryBean entityManagerFactory(DataSource dataSource) { + Map jpaPropertiesMap = new HashMap<>(jpaProperties.getProperties()); + jpaPropertiesMap.put(AvailableSettings.FORMAT_SQL, Boolean.TRUE); + var localContainerEntityManagerFactoryBean = new LocalContainerEntityManagerFactoryBean(); + localContainerEntityManagerFactoryBean.setDataSource(dataSource); + localContainerEntityManagerFactoryBean.setPackagesToScan("com.opsbeach*"); + localContainerEntityManagerFactoryBean.setJpaVendorAdapter(this.jpaVendorAdapter()); + localContainerEntityManagerFactoryBean.setJpaPropertyMap(jpaPropertiesMap); + return localContainerEntityManagerFactoryBean; + } +} \ No newline at end of file diff --git a/connect/src/main/java/com/opsbeach/connect/core/specification/IdSpecifications.java b/connect/src/main/java/com/opsbeach/connect/core/specification/IdSpecifications.java new file mode 100644 index 
0000000..54e7fa0 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/core/specification/IdSpecifications.java @@ -0,0 +1,236 @@ +package com.opsbeach.connect.core.specification; + +import java.time.LocalDateTime; +import java.util.List; + +import com.opsbeach.connect.core.enums.ServiceType; +import com.opsbeach.connect.core.enums.TaskType; +import com.opsbeach.connect.github.entity.ClientRepo; +import com.opsbeach.connect.github.entity.EventAudit; +import com.opsbeach.connect.github.entity.PullRequest; +import com.opsbeach.sharedlib.security.SecurityUtil; + +import org.springframework.data.jpa.domain.Specification; +import org.springframework.stereotype.Component; + +/** + *

+ * Specification for querying database. + *

+ */ +@Component +public class IdSpecifications { + + private static final String CLIENT_ID = "clientId"; + private static final String INCIDENT_CREATED_AT = "incidentCreatedAt"; + private static final String METRICS_CREATED_AT = "metricsCreatedAt"; + private static final String RESOLVED_AT = "resolvedAt"; + private static final String STATUS = "status"; + private static final String ID = "id"; + private static final String NAME = "name"; + private static final String NAME_SPACE = "nameSpace"; + private static final String DELETED = "isDeleted"; + private static final String SERVICE_TYPE = "serviceType"; + private static final String TASK_TYPE = "taskType"; + private static final String KEY = "key"; + private static final String CHANNEL_ORIGIN = "channelOrigin"; + private static final String MESSAGE_TS = "messageTs"; + private static final String FROM_MESSAGE_ID = "fromMessageId"; + private static final String SOURCE_ID = "sourceId"; + private static final String TICKET_ID = "ticketId"; + private static final String UPDATED_AT = "updatedAt"; + private static final String INCIDENT_ID = "incidentId"; + private static final String CANONICAL_ID = "canonicalId"; + private static final String DOMAIN_ID = "domainId"; + private static final String WORKFLOW_ID = "workflowId"; + private static final String CLIENT_REPO_ID = "clientRepoId"; + private static final String FULL_NAME = "fullName"; + private static final String PATH = "path"; + private static final String NUMBER = "number"; + private static final String FILE_TYPE = "fileType"; + private static final String FILE_NAME = "fileName"; + private static final String TYPE = "type"; + private static final String NODE_ID = "nodeId"; + private static final String SCHEMA_FILE_AUDIT_ID = "schemaFileAuditId"; + private static final String CREATED_AT = "createdAt"; + private static final String PULL_REQUEST_ID = "pullRequestId"; + + public Specification findById(long id) { + return (root, criteriaQuery, criteriaBuilder) -> 
criteriaBuilder.equal(root.get(ID), id); + } + + public Specification findByClientId() { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(CLIENT_ID), SecurityUtil.getClientId()); + } + + public Specification findByClientId(Long id) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(CLIENT_ID), id); + } + + public Specification greaterThanIncidentCreatedAt(LocalDateTime date) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.greaterThanOrEqualTo(root.get(INCIDENT_CREATED_AT), date); + } + + public Specification greaterThanIncidentMetricsCreatedAt(LocalDateTime date) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.greaterThanOrEqualTo(root.get(METRICS_CREATED_AT), date); + } + + public Specification statusNotEqualTo(String name) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.notEqual(root.get(STATUS), name); + } + + public Specification statusEqualTo(String name) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(STATUS), name); + } + + public Specification resolvedIsNull() { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.isNull(root.get(RESOLVED_AT)); + } + + public Specification resolvedIsNotNull() { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.isNotNull(root.get(RESOLVED_AT)); + } + + public Specification findByName(String name) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(NAME), name); + } + + public Specification findByNameSpace(String nameSpace) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(NAME_SPACE), nameSpace); + } + + public Specification findByDeleted(Boolean isDeleted) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(DELETED), isDeleted); + } + + public Specification findByServiceType(ServiceType serviceType) { + return (root, criteriaQuery, 
criteriaBuilder) -> criteriaBuilder.equal(root.get(SERVICE_TYPE), serviceType); + } + + public Specification findByServiceTypes(List serviceTypes) { + return (root, criteriaQuery, criteriaBuilder) -> root.get(SERVICE_TYPE).in(serviceTypes); + } + + public Specification findByTaskType(TaskType taskType) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(TASK_TYPE), taskType); + } + + public Specification findByKey(String key) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(KEY), key); + } + + public Specification findByChannelOrigin(String channelOrigin) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(CHANNEL_ORIGIN), channelOrigin); + } + + public Specification findByMessageTs(String messageTs) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(MESSAGE_TS), messageTs); + } + + public Specification findByFromMessageId(Long id) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(FROM_MESSAGE_ID), id); + } + + public Specification findMetricsBySourceId(String id) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(SOURCE_ID), id); + } + + public Specification findTicketAuditByListOfTicketId(List ids) { + return (root, criteriaQuery, criteriaBuilder) -> root.get(TICKET_ID).in(ids); + } + + public Specification greaterThanUpdatedAt(LocalDateTime updatedAt) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.greaterThan(root.get(UPDATED_AT), updatedAt); + } + + public Specification findIncidentLogEntryByIncidentId(List ids) { + return (root, criteriaQuery, criteriaBuilder) -> root.get(INCIDENT_ID).in(ids); + } + + public Specification findByTicketId(String id) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(TICKET_ID), id); + } + + public Specification findTicketByCanonicalId(String id) { + return (root, criteriaQuery, 
criteriaBuilder) -> criteriaBuilder.equal(root.get(CANONICAL_ID), id); + } + + public Specification findByDomainId(Long id) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(DOMAIN_ID), id); + } + + public Specification findByWorkflowId(Long id) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(WORKFLOW_ID), id); + } + + public Specification findByClientRepoId(Long id) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(CLIENT_REPO_ID), id); + } + + public Specification findByClientRepoIds(List ids) { + return (root, criteriaQuery, criteriaBuilder) -> root.get(CLIENT_REPO_ID).in(ids); + } + + public Specification findByClientRepoStatus(ClientRepo.Status status) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(STATUS), status); + } + + public Specification findByFullName(String name) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(FULL_NAME), name); + } + + public Specification findByPath(String path) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(PATH), path); + } + + public Specification findByNumber(String number) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(NUMBER), number); + } + + public Specification findByFileType(String fileType) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(FILE_TYPE), fileType); + } + + public Specification findByFileName(String fileName) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(FILE_NAME), fileName); + } + + public Specification findByType(EventAudit.Type type) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(TYPE), type); + } + + public Specification findByNodeId(Long nodeId) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(NODE_ID), nodeId); + } 
+ + public Specification findByPullRequestStatus(PullRequest.Status status) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(STATUS), status); + } + + public Specification findBySchemaFileAudit(Long schemaFileAuditId) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(SCHEMA_FILE_AUDIT_ID), schemaFileAuditId); + } + + public Specification greaterThanCreatedAt(LocalDateTime dateTime) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.greaterThan(root.get(CREATED_AT), dateTime); + } + + public Specification lessThanCreatedAt(LocalDateTime dateTime) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.lessThanOrEqualTo(root.get(CREATED_AT), dateTime); + } + + public Specification findBySlaType(ServiceType serviceType) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(TYPE), serviceType); + } + + public Specification findByPullRequest(Long prId) { + if (prId == null) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.isNull(root.get(PULL_REQUEST_ID)); + } + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(PULL_REQUEST_ID), prId); + } + + public Specification workflowIsNotNull() { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.isNotNull(root.get(WORKFLOW_ID)); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/core/utils/Constants.java b/connect/src/main/java/com/opsbeach/connect/core/utils/Constants.java new file mode 100644 index 0000000..7543d1a --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/core/utils/Constants.java @@ -0,0 +1,114 @@ +package com.opsbeach.connect.core.utils; + +import lombok.AccessLevel; +import lombok.NoArgsConstructor; + +/** + *

+ * Application constant values. + *

+ */ +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public class Constants { + // Common + public static final String BASE_URL = "base-url"; + public static final String IS_DELETED = "isDeleted"; + public static final String CLIENT_ID = "clientId"; + public static final String UTC = "UTC"; + public static final String CREATED = "CREATED"; + public static final String SLACK = "slack"; + public static final String PAGER_DUTY= "pagerDuty"; + public static final String JIRA = "jira"; + public static final String TEAMS = "teams"; + public static final String WORK_DAY = "workDay"; + public static final String SERVICE_NOW = "Service Now"; + public static final String SALES_FORCE = "Sales Force"; + public static final String FRESH_DESK = "Fresh Desk"; + public static final String ZOHO = "Zoho"; + public static final String ZENDESK = "Zendesk"; + public static final String SLA = "Sla"; + public static final String METRICS = "Metrics"; + public static final String GITHUB = "GitHub"; + + //Enums + public static final String BASIC = "Basic"; + public static final String TOKEN = "Token token="; + public static final String BEARER = "Bearer"; + public static final String INCIDENTS = "Incidents"; + public static final String SERVICES = "Services"; + public static final String INCIDENT_METRICS = "Incident Metrics"; + public static final String INCIDENT_LOG_ENTRY = "Incident Log Entry"; + public static final String TICKET = "Ticket"; + public static final String CREATE_TICKET = "Create Ticket"; + public static final String GET_TICKETS = "Get Tickets"; + public static final String ALERTS = "Alerts"; + public static final String ANALYTICS = "Analytics"; + public static final String EMAIL = "Email"; + public static final String SMS = "SMS"; + public static final String POST_MESSAGE = "Post Message"; + public static final String RENEWAL_ACCESS_TOKEN = "Renewal Access Token"; + + // pagerduty headers + public static final String X_EARLY_ACCESS = "X-EARLY-ACCESS"; + public static 
final String ANALYTICS_V2 = "analytics-v2"; + public static final String PAGER_DUTY_ACCEPT = "application/vnd.pagerduty+json;version=2"; + public static final String PAGER_DUTY_CONTENT_TYPE = "application/json"; + + // Jira + public static final String STORY = "Story"; + public static final String TASK = "Task"; + public static final String SUB_TASK = "Sub-task"; + public static final String BUG = "Bug"; + public static final String EPIC = "Epic"; + public static final String IN_PROGRESS = "IN PROGRESS"; + + //AskobArchitecture + public static final String ASKOB_MESSAGE = "Askob Message"; + public static final String ASKOB_ROUTING = "Askob Routing"; + public static final String ASKOB_WORKSPACE = "Askob Workspace"; + public static final String MESSAGE_ROUTING = "Message Routing"; + public static final String TICKET_ACTION = "Ticket Action"; + public static final String TICKET_LOG_ENTRY = "Ticket Log Entry"; + public static final String NEW = "New"; + public static final String OPEN = "Open"; + public static final String PENDING = "Pending"; + public static final String SOLVED = "Solved"; + public static final String CLOSED = "Closed"; + public static final String TICKET_METRICS = "Ticket Metrics"; + + //Employee + public static final String EMPLOYEE = "Employee"; + public static final String PILLAR = "Pillar"; + public static final String TEAM = "Team"; + + //Schemata + public static final String ORGANIZATION = "Organization"; + public static final String DOMIN = "Domin"; + public static final String TABLE = "Table"; + public static final String FIELD = "Field"; + + //token + public static final String AES_KEY = "1De@s21t1@3$5^"; + public static final String CONNECT = "Connect"; + public static final String HIGH = "High"; + public static final String MEDIUM = "Medium"; + public static final String LOW = "Low"; + + //headers + public static final String ACCEPT = "application/json"; + public static final String CONTENT_TYPE = "application/json"; + + //github + public 
static final String ACTIVITY = "Activity"; + public static final String EVENT_AUDIT = "Event Audit"; + public static final String DOMAIN = "Domain"; + public static final String MODEL = "Model"; + public static final String PULL_REQUEST = "Pull Request"; + public static final String SCHEMA_FILE_AUDIT = "Schema File Audit"; + public static final String WORKFLOW = "Workflow"; + public static final String COMMENT = "Comment"; + public static final String CLIENT_REPO = "Client Repo"; + public static final String CSV_UPLOAD_ROOT_FILE_PATH = "src/schemas"; + public static final String PROTOBUF_SCHEMA_ROOT_FILE_PATH = "src/main/schema"; + +} diff --git a/connect/src/main/java/com/opsbeach/connect/core/utils/TaskUtils.java b/connect/src/main/java/com/opsbeach/connect/core/utils/TaskUtils.java new file mode 100644 index 0000000..fbe2a00 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/core/utils/TaskUtils.java @@ -0,0 +1,40 @@ +package com.opsbeach.connect.core.utils; + +import org.springframework.scheduling.TriggerContext; + +import com.opsbeach.connect.core.enums.ServiceType; +import com.opsbeach.connect.core.enums.TaskType; +import com.opsbeach.connect.task.entity.Task; + +import java.time.Instant; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +public class TaskUtils { + + static final Map> serviceTaskTypes = new HashMap<>(); + // Add serviceType with its respective taskType of task which are needed to run when application starts. + static { + serviceTaskTypes.put(ServiceType.GITHUB, List.of(TaskType.RENEWAL_ACCESS_TOKEN)); + } + + public static Date findNextExecutionTime(TriggerContext triggerContext, Task task) { + var instant = triggerContext.lastCompletion(); + Optional lastCompletionTime = instant == null ? 
Optional.empty() : Optional.of(Date.from(instant)); + Instant nextExecutionTime = nextExecutionTime(lastCompletionTime, task); + return Date.from(nextExecutionTime); + } + + private static Instant nextExecutionTime(Optional lastCompletionTime, Task task) { + if (lastCompletionTime.isPresent()) { + return lastCompletionTime.get().toInstant().plusMillis(task.getExecutionInterval()); // set range based on user info + } + if (serviceTaskTypes.containsKey(task.getServiceType()) && serviceTaskTypes.get(task.getServiceType()).contains(task.getTaskType())) { + return new Date().toInstant(); + } + return new Date().toInstant().plusMillis(task.getExecutionInterval()); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/controller/ActivityController.java b/connect/src/main/java/com/opsbeach/connect/github/controller/ActivityController.java new file mode 100644 index 0000000..d0e5894 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/controller/ActivityController.java @@ -0,0 +1,40 @@ +package com.opsbeach.connect.github.controller; + +import java.util.List; + +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +import com.opsbeach.connect.github.dto.ActivityDto; +import com.opsbeach.connect.github.service.ActivityService; +import com.opsbeach.sharedlib.response.SuccessResponse; + +import lombok.RequiredArgsConstructor; + +@RestController +@RequiredArgsConstructor +@RequestMapping("/v1/activity") +public class ActivityController { + + private final ActivityService activityService; + + @PostMapping + public SuccessResponse add(@RequestBody ActivityDto workflowDto) 
{ + return SuccessResponse.statusCreated(activityService.add(workflowDto)); + } + + @GetMapping("/{id}") + public SuccessResponse get(@PathVariable("id") Long id) { + return SuccessResponse.statusOk(activityService.get(id)); + } + + @GetMapping + public SuccessResponse> findAllByWorkflowId(@RequestParam(name = "workflowId", required = false) Long workflowId) { + return SuccessResponse.statusOk(activityService.findAllByWorkflowId(workflowId)); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/controller/ClientRepoController.java b/connect/src/main/java/com/opsbeach/connect/github/controller/ClientRepoController.java new file mode 100644 index 0000000..8518d68 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/controller/ClientRepoController.java @@ -0,0 +1,70 @@ +package com.opsbeach.connect.github.controller; + +import java.util.List; + +import javax.validation.Valid; + +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.PutMapping; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; +import org.springframework.web.multipart.MultipartFile; + +import com.opsbeach.connect.github.dto.ClientRepoDto; +import com.opsbeach.connect.github.dto.GitHubDto; +import com.opsbeach.connect.github.entity.ClientRepo; +import com.opsbeach.connect.github.entity.ClientRepo.RepoType; +import com.opsbeach.connect.github.service.ClientRepoService; +import com.opsbeach.sharedlib.response.SuccessResponse; + +import lombok.RequiredArgsConstructor; + +@RestController +@RequiredArgsConstructor +@RequestMapping("/v1/client-repo") +public class ClientRepoController { + + private final 
ClientRepoService clientRepoService; + + @PostMapping("/create") + public SuccessResponse create(@RequestBody @Valid GitHubDto gitHubDto) { + return SuccessResponse.statusCreated(clientRepoService.createNewRepo(gitHubDto)); + } + + @PostMapping + public SuccessResponse add(@RequestBody @Valid GitHubDto gitHubDto) { + return SuccessResponse.statusCreated(clientRepoService.add(gitHubDto)); + } + + @GetMapping("{id}") + public SuccessResponse get(@PathVariable("id") Long id) { + return SuccessResponse.statusOk(clientRepoService.get(id)); + } + + @GetMapping("/repo-types") + public SuccessResponse getRepoTypes() { + return SuccessResponse.statusOk(clientRepoService.getRepoTypes()); + } + + @GetMapping + public SuccessResponse> getAll() { + return SuccessResponse.statusOk(clientRepoService.getAll()); + } + + @PutMapping("/{id}") + public SuccessResponse updateStatus(@PathVariable("id") Long id, + @RequestParam("status") ClientRepo.Status status) { + return SuccessResponse.statusOk(clientRepoService.updateStatus(id, status)); + } + + @PostMapping("/upload") + public SuccessResponse uploadRepo(@RequestParam("file") MultipartFile repoMultipartFile, + @RequestParam("repoType") ClientRepo.RepoType repoType) { + return SuccessResponse.statusCreated(clientRepoService.uploadRepo(repoMultipartFile, repoType)); + } + +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/controller/CommentController.java b/connect/src/main/java/com/opsbeach/connect/github/controller/CommentController.java new file mode 100644 index 0000000..3822724 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/controller/CommentController.java @@ -0,0 +1,40 @@ +package com.opsbeach.connect.github.controller; + +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.PutMapping; +import 
org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +import com.opsbeach.connect.github.dto.CommentDto; +import com.opsbeach.connect.github.service.CommentService; +import com.opsbeach.sharedlib.response.SuccessResponse; + +import lombok.RequiredArgsConstructor; + +@RestController +@RequiredArgsConstructor +@RequestMapping("/v1/comment") +public class CommentController { + + private final CommentService commentService; + + @PostMapping + public SuccessResponse add(@RequestBody CommentDto workflowDto) { + return SuccessResponse.statusCreated(commentService.add(workflowDto)); + } + + @GetMapping("/{id}") + public SuccessResponse get(@PathVariable("id") Long id) { + return SuccessResponse.statusOk(commentService.get(id)); + } + + @PutMapping("/{id}") + public SuccessResponse updateStatus(@PathVariable("id") Long id, + @RequestParam("isResolved") Boolean isResolved) { + return SuccessResponse.statusOk(commentService.updateIsResolved(id, isResolved)); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/controller/DomainController.java b/connect/src/main/java/com/opsbeach/connect/github/controller/DomainController.java new file mode 100644 index 0000000..8887590 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/controller/DomainController.java @@ -0,0 +1,33 @@ +package com.opsbeach.connect.github.controller; + +import java.util.List; + +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +import com.opsbeach.connect.github.dto.DomainDto; +import 
com.opsbeach.connect.github.service.DomainService; +import com.opsbeach.sharedlib.response.SuccessResponse; + +import lombok.RequiredArgsConstructor; + +@RestController +@RequestMapping("/v1/domain") +@RequiredArgsConstructor +public class DomainController { + + private final DomainService domainService; + + @GetMapping("/{id}") + public SuccessResponse get(@PathVariable("id") Long id) { + return SuccessResponse.statusOk(domainService.get(id)); + } + + @GetMapping + public SuccessResponse> getAll(@RequestParam(name = "clientRepoId", required = false) Long clientRepoId) { + return SuccessResponse.statusOk(domainService.getAll(clientRepoId)); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/controller/EventAuditController.java b/connect/src/main/java/com/opsbeach/connect/github/controller/EventAuditController.java new file mode 100644 index 0000000..61e793a --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/controller/EventAuditController.java @@ -0,0 +1,41 @@ +package com.opsbeach.connect.github.controller; + +import java.util.List; + +import org.springframework.transaction.annotation.Transactional; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +import com.opsbeach.connect.github.dto.EventAuditDto; +import com.opsbeach.connect.github.service.EventAuditService; +import com.opsbeach.sharedlib.response.SuccessResponse; + +import lombok.RequiredArgsConstructor; + +@RestController +@RequiredArgsConstructor +@RequestMapping("/v1/event-audit") +public class EventAuditController { + + private final EventAuditService eventAuditService; + + @GetMapping("{id}") + public SuccessResponse get(@PathVariable("id") Long 
id) { + return SuccessResponse.statusOk(eventAuditService.get(id)); + } + + @Transactional + @PostMapping("/process") + public SuccessResponse processEventAudit(@RequestParam("eventAuditId") Long id) { + return SuccessResponse.statusOk(eventAuditService.processEventAudit(id)); + } + + @GetMapping("/status") + public SuccessResponse> getInitialLoadingStatus() { + return SuccessResponse.statusOk(eventAuditService.getInitialLoadStatus()); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/controller/GitHubController.java b/connect/src/main/java/com/opsbeach/connect/github/controller/GitHubController.java new file mode 100644 index 0000000..3a535d6 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/controller/GitHubController.java @@ -0,0 +1,72 @@ +package com.opsbeach.connect.github.controller; + +import com.fasterxml.jackson.databind.JsonNode; +import com.opsbeach.connect.github.dto.GitHubDto; +import com.opsbeach.connect.github.dto.GithubActionDto; +import com.opsbeach.connect.github.service.GitHubService; +import com.opsbeach.sharedlib.dto.GenericResponseDto; +import com.opsbeach.sharedlib.response.SuccessResponse; +import com.opsbeach.sharedlib.utils.Constants; + +import lombok.RequiredArgsConstructor; + +import java.util.List; +import java.io.IOException; + +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PutMapping; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; +import org.springframework.web.servlet.mvc.support.RedirectAttributes; +import org.springframework.web.servlet.view.RedirectView; +import org.springframework.web.bind.annotation.PostMapping; + + +@RestController +@RequestMapping("/v1/github") 
+@RequiredArgsConstructor +public class GitHubController { + + private final GitHubService gitHubService; + + @PostMapping("/market-place-events") + public SuccessResponse postMethodName(@RequestBody JsonNode entity) { + System.out.println(entity.toPrettyString()); + return SuccessResponse.statusOk(GenericResponseDto.builder().status(Constants.SUCCESS).build()); + } + + + @GetMapping("/login") + public SuccessResponse logInRedirect(RedirectAttributes attributes) { + return SuccessResponse.statusOk(gitHubService.logInRedirect()); + } + + @GetMapping("/signin/callback") + public RedirectView integrateGithub(@RequestParam("code") String code, @RequestParam("smClientId") Long smClientId, RedirectAttributes attributes) { + return new RedirectView(gitHubService.getToken(code, smClientId)); + } + + @GetMapping("/repos") + public SuccessResponse getRepos(@RequestParam("connectId") Long connectId, + @RequestParam(name = "organizationName", required = false) String orgName) { + return SuccessResponse.statusOk(gitHubService.getRepos(connectId, orgName)); + } + + @GetMapping("/orgs/connect/{connectId}") + public SuccessResponse> logInRedirect(@PathVariable("connectId") Long connectId) { + return SuccessResponse.statusOk(gitHubService.getUserOrganization(connectId)); + } + + @PutMapping("/compute-score") + public SuccessResponse computeScore(@RequestBody GithubActionDto githubActionDto) throws IOException { + return SuccessResponse.statusOk(gitHubService.pullRequestAction(githubActionDto)); + } + + @PutMapping("/validate-schema") + public SuccessResponse validateSchema(@RequestBody GithubActionDto githubActionDto) throws IOException { + return SuccessResponse.statusOk(gitHubService.validateSchema(githubActionDto)); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/controller/ModelController.java b/connect/src/main/java/com/opsbeach/connect/github/controller/ModelController.java new file mode 100644 index 0000000..903112a --- /dev/null +++ 
b/connect/src/main/java/com/opsbeach/connect/github/controller/ModelController.java @@ -0,0 +1,35 @@ +package com.opsbeach.connect.github.controller; + +import java.util.List; + +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +import com.opsbeach.connect.github.dto.AutoCompleteModelDto; +import com.opsbeach.connect.github.entity.Model; +import com.opsbeach.connect.github.service.ModelService; +import com.opsbeach.sharedlib.response.SuccessResponse; + +import lombok.RequiredArgsConstructor; + +@RestController +@RequestMapping("/v1/model") +@RequiredArgsConstructor +public class ModelController { + + private final ModelService modelService; + + @GetMapping + public SuccessResponse> getAll(@RequestParam(name = "domainId", required = false) Long domainId, + @RequestParam(name = "clientRepoId", required = false) Long clientRepoId, + @RequestParam(name = "path", required = false) String path) { + return SuccessResponse.statusOk(modelService.getAll(domainId, clientRepoId, path)); + } + + @GetMapping("/auto-complete") + public SuccessResponse> findByNameLike(@RequestParam("name") String name) { + return SuccessResponse.statusOk(modelService.findByNameLike(name)); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/controller/PullRequestController.java b/connect/src/main/java/com/opsbeach/connect/github/controller/PullRequestController.java new file mode 100644 index 0000000..6aaf12b --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/controller/PullRequestController.java @@ -0,0 +1,48 @@ +package com.opsbeach.connect.github.controller; + +import org.springframework.data.domain.Pageable; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; +import 
org.springframework.web.bind.annotation.PutMapping; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +import com.opsbeach.connect.github.dto.DashboardDto; +import com.opsbeach.connect.github.dto.PullRequestDto; +import com.opsbeach.connect.github.entity.PullRequest.Status; +import com.opsbeach.connect.github.service.PullRequestService; +import com.opsbeach.sharedlib.response.SuccessResponse; + +import lombok.RequiredArgsConstructor; + +import java.util.List; + +@RestController +@RequiredArgsConstructor +@RequestMapping("/v1/pull-request") +public class PullRequestController { + + private final PullRequestService pullRequestService; + + @GetMapping("/{id}") + public SuccessResponse get(@PathVariable("id") Long id) { + return SuccessResponse.statusOk(pullRequestService.get(id)); + } + + @PutMapping("/{id}") + public SuccessResponse updateStatus(@PathVariable("id") Long id, + @RequestParam("status") Status status) { + return SuccessResponse.statusOk(pullRequestService.updateStatus(id, status)); + } + + @GetMapping + public SuccessResponse> getAll(Pageable pageable) { + return SuccessResponse.statusOk(pullRequestService.getAll(pageable)); + } + + @GetMapping("/dashboard") + public SuccessResponse getDashboardMetrics() { + return SuccessResponse.statusOk(pullRequestService.getDashboardMetrics()); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/controller/SchemaFileAuditController.java b/connect/src/main/java/com/opsbeach/connect/github/controller/SchemaFileAuditController.java new file mode 100644 index 0000000..a66102e --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/controller/SchemaFileAuditController.java @@ -0,0 +1,27 @@ +package com.opsbeach.connect.github.controller; + +import java.util.List; + +import org.springframework.web.bind.annotation.GetMapping; +import 
org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +import com.opsbeach.connect.github.dto.SchemaFileAuditDto; +import com.opsbeach.connect.github.service.SchemaFileAuditService; +import com.opsbeach.sharedlib.response.SuccessResponse; + +import lombok.RequiredArgsConstructor; + +@RestController +@RequestMapping("/v1/schema-file-audit") +@RequiredArgsConstructor +public class SchemaFileAuditController { + + private final SchemaFileAuditService schemaFileAuditService; + + @GetMapping + public SuccessResponse> getAll(@RequestParam(name = "clientRepoId", required = false) Long clientRepoId) { + return SuccessResponse.statusOk(schemaFileAuditService.getAll(clientRepoId)); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/controller/WorkflowController.java b/connect/src/main/java/com/opsbeach/connect/github/controller/WorkflowController.java new file mode 100644 index 0000000..0cf58a2 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/controller/WorkflowController.java @@ -0,0 +1,48 @@ +package com.opsbeach.connect.github.controller; + +import java.util.List; + +import javax.validation.Valid; + +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +import com.opsbeach.connect.github.dto.WorkflowDto; +import com.opsbeach.connect.github.service.WorkflowService; +import com.opsbeach.sharedlib.response.SuccessResponse; + +import lombok.RequiredArgsConstructor; + +@RestController +@RequiredArgsConstructor 
+@RequestMapping("/v1/workflow") +public class WorkflowController { + + private final WorkflowService workflowService; + + @PostMapping + public SuccessResponse add(@RequestBody WorkflowDto workflowDto) { + return SuccessResponse.statusCreated(workflowService.add(workflowDto)); + } + + @GetMapping("/{id}") + public SuccessResponse get(@PathVariable("id") Long id) { + return SuccessResponse.statusOk(workflowService.get(id)); + } + + @GetMapping + public SuccessResponse> getAll() { + return SuccessResponse.statusOk(workflowService.getAll()); + } + + @PostMapping("/fields") + public SuccessResponse saveAndRaisePr(@RequestBody @Valid WorkflowDto workflowDto, + @RequestParam(name = "clientRepoId", required = false) Long clientRepoId) { + return SuccessResponse.statusOk(workflowService.saveAndRaisePr(workflowDto, clientRepoId)); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/dto/ActivityDto.java b/connect/src/main/java/com/opsbeach/connect/github/dto/ActivityDto.java new file mode 100644 index 0000000..67c1251 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/dto/ActivityDto.java @@ -0,0 +1,29 @@ +package com.opsbeach.connect.github.dto; + +import com.opsbeach.connect.github.entity.Activity; + +import lombok.Builder; +import lombok.Getter; + +@Builder +@Getter +public class ActivityDto { + + Long id; + Long clientId; + Long workflowId; + Activity.Type type; + Long sourceNodeId; + Long targetNodeId; + + public Activity toDomain(ActivityDto activityDto) { + + return Activity.builder().id(activityDto.id) + .clientId(activityDto.clientId) + .workflowId(activityDto.workflowId) + .type(activityDto.type) + .sourceNodeId(activityDto.sourceNodeId) + .targetNodeId(activityDto.targetNodeId) + .build(); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/dto/AutoCompleteModelDto.java b/connect/src/main/java/com/opsbeach/connect/github/dto/AutoCompleteModelDto.java new file mode 100644 index 0000000..613d51c --- /dev/null 
+++ b/connect/src/main/java/com/opsbeach/connect/github/dto/AutoCompleteModelDto.java @@ -0,0 +1,10 @@ +package com.opsbeach.connect.github.dto; + +public interface AutoCompleteModelDto { + + Long getNodeId(); + + String getName(); + + String getNameSpace(); +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/dto/ClientRepoDto.java b/connect/src/main/java/com/opsbeach/connect/github/dto/ClientRepoDto.java new file mode 100644 index 0000000..3e49fca --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/dto/ClientRepoDto.java @@ -0,0 +1,34 @@ +package com.opsbeach.connect.github.dto; + +import com.opsbeach.connect.github.entity.ClientRepo; + +import lombok.Builder; +import lombok.Getter; + +@Builder +@Getter +public class ClientRepoDto { + + Long id; + String name; + String owner; + String fullName; + ClientRepo.RepoType repoType; + Long connectId; + ClientRepo.Status status; + String defaultBranch; + ClientRepo.RepoSource repositorySource; + + public ClientRepo toDomain(ClientRepoDto clientRepoDto) { + return ClientRepo.builder().id(clientRepoDto.id) + .name(clientRepoDto.name) + .owner(clientRepoDto.owner) + .fullName(clientRepoDto.fullName) + .repoType(clientRepoDto.repoType) + .connectId(clientRepoDto.connectId) + .status(clientRepoDto.status) + .defaultBranch(clientRepoDto.defaultBranch) + .repositorySource(clientRepoDto.getRepositorySource()) + .build(); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/dto/CommentDto.java b/connect/src/main/java/com/opsbeach/connect/github/dto/CommentDto.java new file mode 100644 index 0000000..332f7fd --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/dto/CommentDto.java @@ -0,0 +1,32 @@ +package com.opsbeach.connect.github.dto; + +import com.opsbeach.connect.github.entity.Comment; + +import lombok.Builder; +import lombok.Getter; + +@Builder +@Getter +public class CommentDto { + + Long id; + Long clientId; + Long nodeId; + String comments; + String type; + 
Long pullRequestId; + Long commentableId; + boolean isResolved; + + public Comment toDomain(CommentDto commentDto) { + return Comment.builder().id(commentDto.id) + .clientId(commentDto.clientId) + .nodeId(commentDto.nodeId) + .comments(commentDto.comments) + .type(commentDto.type) + .pullRequestId(commentDto.pullRequestId) + .commentableId(commentDto.commentableId) + .isResolved(commentDto.isResolved) + .build(); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/dto/DashboardDto.java b/connect/src/main/java/com/opsbeach/connect/github/dto/DashboardDto.java new file mode 100644 index 0000000..0150a2e --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/dto/DashboardDto.java @@ -0,0 +1,22 @@ +package com.opsbeach.connect.github.dto; + +import java.util.Map; + +import lombok.Builder; +import lombok.Getter; + +@Builder +@Getter +public class DashboardDto { + + Map openCloseGraph; + Map slaMeanGraph; + int openPrsCount; + int openPrPercent; + int slaMiss; + int slaMissPercent; + int closePrsCount; + int closePrPercent; + int totalPrsCount; + int totalPrPercent; +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/dto/DomainDto.java b/connect/src/main/java/com/opsbeach/connect/github/dto/DomainDto.java new file mode 100644 index 0000000..e5f1fd7 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/dto/DomainDto.java @@ -0,0 +1,28 @@ +package com.opsbeach.connect.github.dto; + +import com.opsbeach.connect.github.entity.Domain; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Getter; + +@Builder +@Getter +@AllArgsConstructor +public class DomainDto { + + Long id; + Long clientId; + Long nodeId; + Long clientRepoId; + String name; + + public Domain toDomain(DomainDto domainDto) { + return Domain.builder().id(domainDto.getId()) + .clientId(domainDto.getClientId()) + .nodeId(domainDto.getNodeId()) + .name(domainDto.getName()) + .clientRepoId(domainDto.getClientRepoId()) + .build(); + 
} +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/dto/EventAuditDto.java b/connect/src/main/java/com/opsbeach/connect/github/dto/EventAuditDto.java new file mode 100644 index 0000000..7ef3aac --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/dto/EventAuditDto.java @@ -0,0 +1,33 @@ +package com.opsbeach.connect.github.dto; + +import com.opsbeach.connect.github.entity.EventAudit; + +import lombok.Builder; +import lombok.Getter; + +@Builder +@Getter +public class EventAuditDto { + + Long id; + Long clientId; + String clientName; + EventAudit.Type type; + Long eventId; + EventAudit.Status status; + String error; + String initiatedBy; + + public EventAudit toDomain(EventAuditDto eventAuditDto) { + + return EventAudit.builder().id(eventAuditDto.getId()) + .clientId(eventAuditDto.getClientId()) + .clientName(eventAuditDto.getClientName()) + .type(eventAuditDto.getType()) + .eventId(eventAuditDto.getEventId()) + .status(eventAuditDto.getStatus()) + .error(eventAuditDto.getError()) + .initiatedBy(eventAuditDto.getInitiatedBy()) + .build(); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/dto/GitHubDto.java b/connect/src/main/java/com/opsbeach/connect/github/dto/GitHubDto.java new file mode 100644 index 0000000..675d659 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/dto/GitHubDto.java @@ -0,0 +1,36 @@ +package com.opsbeach.connect.github.dto; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.opsbeach.connect.github.entity.ClientRepo.RepoType; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Getter; +import lombok.NoArgsConstructor; + +import java.util.List; +import java.util.Map; + +@Getter +@Builder +@AllArgsConstructor +@NoArgsConstructor +@JsonIgnoreProperties(ignoreUnknown = true) +public class GitHubDto { + + private String user; + + private Long connectId; // where credentials of the user get saved. 
+ + // private String[] selectedRepos; + + private Map selectedRepos; + + private List repos; + + private String repoOwner; + + private String loginRedirectURL; + + private String privateRepoName; +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/dto/GithubActionDto.java b/connect/src/main/java/com/opsbeach/connect/github/dto/GithubActionDto.java new file mode 100644 index 0000000..2a8e295 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/dto/GithubActionDto.java @@ -0,0 +1,19 @@ +package com.opsbeach.connect.github.dto; + +import lombok.Builder; +import lombok.Getter; + +@Builder +@Getter +public class GithubActionDto { + String prName; + String prNumber; + String repoName; + String sourceBranch; + String targetBranch; + String sha; + String status; + String raisedBy; + String filesChanged; + String schemaValidationMessage; +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/dto/ModelDto.java b/connect/src/main/java/com/opsbeach/connect/github/dto/ModelDto.java new file mode 100644 index 0000000..799c753 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/dto/ModelDto.java @@ -0,0 +1,40 @@ +package com.opsbeach.connect.github.dto; + +import com.opsbeach.connect.github.entity.Model; + +import lombok.Builder; +import lombok.Getter; + +@Builder +@Getter +public class ModelDto { + + Long id; + Long clientId; + String type; + String name; + Long schemaFileAuditId; + String path; + Long nodeId; + String nameSpace; + Long clientRepoId; + String checksum; + Long domainId; + Long pullRequestId; + + public Model toDomain(ModelDto modelDto) { + return Model.builder().id(modelDto.id) + .clientId(modelDto.clientId) + .type(modelDto.type) + .name(modelDto.name) + .nameSpace(modelDto.nameSpace) + .schemaFileAuditId(modelDto.schemaFileAuditId) + .path(modelDto.path) + .nodeId(modelDto.nodeId) + .domainId(modelDto.domainId) + .checksum(modelDto.checksum) + .clientRepoId(modelDto.clientRepoId) + 
.pullRequestId(modelDto.pullRequestId) + .build(); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/dto/PullRequestDto.java b/connect/src/main/java/com/opsbeach/connect/github/dto/PullRequestDto.java new file mode 100644 index 0000000..a82d434 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/dto/PullRequestDto.java @@ -0,0 +1,40 @@ +package com.opsbeach.connect.github.dto; + +import com.opsbeach.connect.github.entity.PullRequest; + +import lombok.Builder; +import lombok.Getter; + +@Builder +@Getter +public class PullRequestDto { + + Long id; + Long clientId; + String number; + Long workflowId; + Long clientRepoId; + PullRequest.Status status; + String sourceBranch; + String targetBranch; + String sha; + String url; + String errorMessage; + com.opsbeach.connect.schemata.validate.Status validationStatus; + + public PullRequest toDomain(PullRequestDto pullRequestDto) { + return PullRequest.builder().id(pullRequestDto.id) + .clientId(pullRequestDto.clientId) + .number(pullRequestDto.number) + .workflowId(pullRequestDto.workflowId) + .clientRepoId(pullRequestDto.clientRepoId) + .status(pullRequestDto.status) + .sourceBranch(pullRequestDto.sourceBranch) + .targetBranch(pullRequestDto.targetBranch) + .validationStatus(pullRequestDto.validationStatus) + .errorMessage(pullRequestDto.errorMessage) + .sha(pullRequestDto.sha) + .url(pullRequestDto.url) + .build(); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/dto/SchemaFileAuditDto.java b/connect/src/main/java/com/opsbeach/connect/github/dto/SchemaFileAuditDto.java new file mode 100644 index 0000000..12340a7 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/dto/SchemaFileAuditDto.java @@ -0,0 +1,31 @@ +package com.opsbeach.connect.github.dto; + +import com.opsbeach.connect.github.entity.SchemaFileAudit; + +import lombok.Builder; +import lombok.Getter; + +@Builder +@Getter +public class SchemaFileAuditDto { + + Long id; + String name; + Long 
clientRepoId; + String fileType; + String path; + Long rootNodeId; + String checksum; + Long pullRequestId; + + public SchemaFileAudit toDomain(SchemaFileAuditDto schemaFileAuditDto) { + return SchemaFileAudit.builder().id(schemaFileAuditDto.id) + .name(schemaFileAuditDto.name) + .clientRepoId(schemaFileAuditDto.clientRepoId) + .fileType(schemaFileAuditDto.fileType) + .path(schemaFileAuditDto.path) + .checksum(schemaFileAuditDto.checksum) + .pullRequestId(schemaFileAuditDto.pullRequestId) + .build(); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/dto/WorkflowDto.java b/connect/src/main/java/com/opsbeach/connect/github/dto/WorkflowDto.java new file mode 100644 index 0000000..7f2d29c --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/dto/WorkflowDto.java @@ -0,0 +1,90 @@ +package com.opsbeach.connect.github.dto; + +import java.util.List; + +import com.opsbeach.connect.github.entity.Workflow; +import com.opsbeach.connect.schemata.entity.Field; +import com.opsbeach.connect.schemata.entity.Table; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Getter; +import lombok.NoArgsConstructor; +@Builder +@Getter +@NoArgsConstructor +@AllArgsConstructor +public class WorkflowDto { + + Long id; + Long domainId; + Workflow.Status status; + String schemaName; + Long nodeId; + List fields; + TableDto table; + String stackHolders; + String purpose; + String creator; + String additionalReference; + float rank; + String title; + + public Workflow toDomain(WorkflowDto workflowDto) { + return Workflow.builder().id(workflowDto.id) + .domainId(workflowDto.domainId) + .nodeId(workflowDto.nodeId) + .schemaName(workflowDto.schemaName) + .stackHolders(workflowDto.stackHolders) + .purpose(workflowDto.purpose) + .creator(workflowDto.creator) + .additionalReference(workflowDto.additionalReference) + .status(workflowDto.status) + .rank(workflowDto.rank) + .title(workflowDto.title) + .build(); + } + + /* + * This Dto is to Accept 
the incoming changes done in UI. + */ + public record TableDto(Long id, String nameSpace, String name, String type, String description, String owner, String domain, + String email, String complianceOwner, String channel, String[] subscribers, String status, List fields, + String qualityRuleBase, String qualityRuleSql, String qualityRuleCel) { public Table toDomain(TableDto tableDto) { return Table.builder().id(tableDto.id) + .nameSpace(tableDto.nameSpace) + .name(tableDto.name) + .type(tableDto.type) + .description(tableDto.description) + .owner(tableDto.owner) + .domain(tableDto.domain) + .email(tableDto.email) + .complianceOwner(tableDto.complianceOwner) + .channel(tableDto.channel) + .subscribers(tableDto.subscribers) + .status(tableDto.status) + .qualityRuleBase(tableDto.qualityRuleBase) + .qualityRuleSql(tableDto.qualityRuleSql) + .qualityRuleCel(tableDto.qualityRuleCel) + .fields(tableDto.fields().stream().map(tableDto.fields().get(0)::toDomain).toList()) + .build(); + } + } + + /* + * This Dto is to Accept the incoming changes done in UI. 
+ */ + public record FieldDto(Long id, String name, String dataType, String description, Boolean deprecated, Boolean isPii, Boolean isClassified, Long referenceFieldId) { + public Field toDomain(FieldDto fieldDto) { + return Field.builder().id(fieldDto.id) + .name(fieldDto.name) + .dataType(fieldDto.dataType) + .description(fieldDto.description) + .isPii(fieldDto.isPii) + .isClassified(fieldDto.isClassified) + .deprecated(fieldDto.deprecated) + .build(); + } + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/entity/Activity.java b/connect/src/main/java/com/opsbeach/connect/github/entity/Activity.java new file mode 100644 index 0000000..6ab3674 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/entity/Activity.java @@ -0,0 +1,56 @@ +package com.opsbeach.connect.github.entity; + +import com.opsbeach.connect.core.BaseModel; +import com.opsbeach.connect.github.dto.ActivityDto; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.EnumType; +import jakarta.persistence.Enumerated; +import jakarta.persistence.Table; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.experimental.SuperBuilder; + +@Entity +@Table +@Getter +@SuperBuilder +@AllArgsConstructor +@NoArgsConstructor +public class Activity extends BaseModel { + @Column(name = "workflow_id") + private Long workflowId; + + @Enumerated(EnumType.STRING) + private Type type; + @Column(name = "source_node_id") + private Long sourceNodeId; + @Column(name = "target_node_id") + private Long targetNodeId; + + public enum Type { + FIELD(0), TABLE(1); + + private final int key; + + Type(int key) { + this.key = key; + } + + public int getKey() { + return this.key; + } + } + + public ActivityDto toDto(Activity activity) { + + return ActivityDto.builder().id(activity.getId()) + .clientId(activity.getClientId()) + .workflowId(activity.getWorkflowId()) + .type(activity.getType()) + 
.sourceNodeId(activity.getSourceNodeId()) + .targetNodeId(activity.getTargetNodeId()) + .build(); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/entity/ClientRepo.java b/connect/src/main/java/com/opsbeach/connect/github/entity/ClientRepo.java new file mode 100644 index 0000000..18ec7da --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/entity/ClientRepo.java @@ -0,0 +1,107 @@ +package com.opsbeach.connect.github.entity; + +import com.opsbeach.connect.core.BaseModel; +import com.opsbeach.connect.github.dto.ClientRepoDto; + +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.EnumType; +import jakarta.persistence.Enumerated; +import jakarta.persistence.Table; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; +import lombok.experimental.SuperBuilder; + +@Table(name = "client_repo") +@Entity +@Getter +@SuperBuilder +@AllArgsConstructor +@NoArgsConstructor +public class ClientRepo extends BaseModel { + + private String name; + + private String owner; + + @Column(name = "full_name") + private String fullName; + + @Column(name = "repo_type") + private RepoType repoType; + + @Column(name = "connect_id") + private Long connectId; + + @Setter + @Enumerated(EnumType.STRING) + private Status status; + + @Column(name = "default_branch") + private String defaultBranch; + + @Column(name = "repository_source") + @Enumerated(EnumType.STRING) + private RepoSource repositorySource; + + @Setter + @Column(name = "folder_path") + private String folderPath; // same will be the zip file path till initial loading. 
+ + public ClientRepoDto toDto(ClientRepo clientRepo) { + return ClientRepoDto.builder().id(clientRepo.getId()) + .name(clientRepo.getName()) + .owner(clientRepo.getOwner()) + .fullName(clientRepo.getFullName()) + .repoType(clientRepo.getRepoType()) + .connectId(clientRepo.getConnectId()) + .status(clientRepo.getStatus()) + .defaultBranch(clientRepo.getDefaultBranch()) + .repositorySource(clientRepo.getRepositorySource()) + .build(); + } + + public enum RepoSource { + GITHUB(1), LOCAL(2); + + private final int key; + + RepoSource(int key) { + this.key = key; + } + + public int getKey() { + return this.key; + } + } + + public enum Status { + ACTIVE(1), DEACTIVE(2); + + private final int key; + + Status(int key) { + this.key = key; + } + + public int getKey() { + return this.key; + } + } + + public enum RepoType { + AVRO(1), JSON(2), PROTOBUF(3), YAML(4); + + private final int key; + + RepoType(int key) { + this.key = key; + } + + public int getKey() { + return this.key; + } + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/entity/Comment.java b/connect/src/main/java/com/opsbeach/connect/github/entity/Comment.java new file mode 100644 index 0000000..18d4403 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/entity/Comment.java @@ -0,0 +1,46 @@ +package com.opsbeach.connect.github.entity; + +import com.opsbeach.connect.core.BaseModel; +import com.opsbeach.connect.github.dto.CommentDto; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.Table; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; +import lombok.experimental.SuperBuilder; + +@Entity +@Table +@Getter +@SuperBuilder +@AllArgsConstructor +@NoArgsConstructor +public class Comment extends BaseModel { + @Column(name = "node_id") + private Long nodeId; + + private String comments; + + private String type; + @Column(name = "pull_request_id") + private Long 
pullRequestId; + @Column(name = "commentable_id") + private Long commentableId; + @Column(name = "is_resolved") + @Setter + private Boolean isResolved; + + public CommentDto toDto(Comment comment) { + return CommentDto.builder().id(comment.getId()) + .clientId(comment.getClientId()) + .nodeId(comment.getNodeId()) + .comments(comment.getComments()) + .type(comment.getType()) + .pullRequestId(comment.getPullRequestId()) + .commentableId(comment.getCommentableId()) + .isResolved(comment.getIsResolved()) + .build(); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/entity/Domain.java b/connect/src/main/java/com/opsbeach/connect/github/entity/Domain.java new file mode 100644 index 0000000..4eae0ad --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/entity/Domain.java @@ -0,0 +1,28 @@ +package com.opsbeach.connect.github.entity; + +import com.opsbeach.connect.core.BaseModel; +import com.opsbeach.connect.github.dto.DomainDto; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.Table; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.experimental.SuperBuilder; + +@Entity +@Table +@Getter +@SuperBuilder +@AllArgsConstructor +@NoArgsConstructor +public class Domain extends BaseModel { + private String name; + @Column(name = "node_id") + private Long nodeId; + @Column(name = "client_repo_id") + private Long clientRepoId; + public DomainDto toDto(Domain domain) { + return new DomainDto(domain.getId(), domain.getClientId(), domain.getNodeId(), domain.getClientRepoId(), domain.getName()); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/entity/EventAudit.java b/connect/src/main/java/com/opsbeach/connect/github/entity/EventAudit.java new file mode 100644 index 0000000..202949b --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/entity/EventAudit.java @@ -0,0 +1,75 @@ +package 
com.opsbeach.connect.github.entity; + +import com.opsbeach.connect.core.BaseModel; +import com.opsbeach.connect.github.dto.EventAuditDto; + +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.EnumType; +import jakarta.persistence.Enumerated; +import jakarta.persistence.Table; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; +import lombok.experimental.SuperBuilder; + +@SuperBuilder +@Entity +@Table(name = "event_audit") +@Getter +@AllArgsConstructor +@NoArgsConstructor +public class EventAudit extends BaseModel { + @Enumerated(EnumType.STRING) + private Type type; + @Column(name = "client_name") + private String clientName; + @Setter + @Enumerated(EnumType.STRING) + private Status status; + @Column(name = "event_id") + private Long eventId; + @Setter + private String error; + @Column(name = "initiated_by") + private String initiatedBy; + + public enum Status { + PENDING(1), IN_PROGRESS(2), COMPLETED(3), ERROR(4); + + private final int key; + + Status(int key) { + this.key = key; + } + public int getKey() { + return this.key; + } + } + + public enum Type { + REPOSITORY_INITIAL_PULL(1), CSV_FILE_UPLOAD(2); + + private final int key; + + Type(int key) { + this.key = key; + } + public int getKey() { + return this.key; + } + } + + public EventAuditDto toDto(EventAudit eventAudit) { + + return EventAuditDto.builder().id(eventAudit.getId()) + .clientId(eventAudit.getClientId()) + .clientName(eventAudit.getClientName()) + .type(eventAudit.getType()) + .status(eventAudit.getStatus()) + .error(eventAudit.getError()) + .initiatedBy(eventAudit.getInitiatedBy()) + .build(); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/entity/Model.java b/connect/src/main/java/com/opsbeach/connect/github/entity/Model.java new file mode 100644 index 0000000..e5f3a42 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/entity/Model.java @@ -0,0 
+1,52 @@ +package com.opsbeach.connect.github.entity; + +import com.opsbeach.connect.core.BaseModel; +import com.opsbeach.connect.github.dto.ModelDto; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.Table; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; +import lombok.experimental.SuperBuilder; + +@Getter +@Entity +@Table +@SuperBuilder +@AllArgsConstructor +@NoArgsConstructor +public class Model extends BaseModel { + private String type; + private String name; + @Column(name = "name_space") + private String nameSpace; + @Column(name = "schema_file_audit_id") + private Long schemaFileAuditId; + private String path; + @Column(name = "node_id") + private Long nodeId; + @Column(name = "domain_id") + private Long domainId; + private String checksum; + @Column(name = "client_repo_id") + private Long clientRepoId; + @Setter + @Column(name = "pull_request_id") + private Long pullRequestId; + public ModelDto toDto(Model model) { + return ModelDto.builder().id(model.getId()) + .clientId(model.getClientId()) + .type(model.getType()) + .name(model.getName()) + .nameSpace(model.getNameSpace()) + .schemaFileAuditId(model.getSchemaFileAuditId()) + .path(model.getPath()) + .nodeId(model.getNodeId()) + .domainId(model.getDomainId()) + .checksum(model.getChecksum()) + .clientRepoId(model.getClientRepoId()) + .build(); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/entity/PullRequest.java b/connect/src/main/java/com/opsbeach/connect/github/entity/PullRequest.java new file mode 100644 index 0000000..47addd9 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/entity/PullRequest.java @@ -0,0 +1,69 @@ +package com.opsbeach.connect.github.entity; + +import com.opsbeach.connect.core.BaseModel; +import com.opsbeach.connect.github.dto.PullRequestDto; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import 
jakarta.persistence.EnumType; +import jakarta.persistence.Enumerated; +import jakarta.persistence.Table; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; +import lombok.experimental.SuperBuilder; + +@Entity +@Table(name = "pull_request") +@Getter +@Setter +@SuperBuilder +@AllArgsConstructor +@NoArgsConstructor +public class PullRequest extends BaseModel { + private String number; + @Column(name = "workflow_id") + private Long workflowId; + @Column(name = "client_repo_id") + private Long clientRepoId; + @Enumerated(EnumType.STRING) + private Status status; + @Column(name = "source_branch") + private String sourceBranch; + @Column(name = "target_branch") + private String targetBranch; + @Column(name = "validation_status") + private com.opsbeach.connect.schemata.validate.Status validationStatus; + @Column(name = "error_message") + private String errorMessage; + private String sha; + private String url; + @Column(name = "issue_comment_id") + private Long issueCommentId; + public enum Status { + OPEN(0), CLOSED(1), MERGED(2), REOPENED(3); + private final int key; + Status(int key) { + this.key = key; + } + public int getKey() { + return this.key; + } + } + + public PullRequestDto toDto(PullRequest pullRequest) { + return PullRequestDto.builder().id(pullRequest.getId()) + .clientId(pullRequest.getClientId()) + .number(pullRequest.getNumber()) + .workflowId(pullRequest.getWorkflowId()) + .clientRepoId(pullRequest.getClientRepoId()) + .status(pullRequest.getStatus()) + .sourceBranch(pullRequest.getSourceBranch()) + .targetBranch(pullRequest.getTargetBranch()) + .validationStatus(pullRequest.getValidationStatus()) + .errorMessage(pullRequest.getErrorMessage()) + .sha(pullRequest.getSha()) + .url(pullRequest.getUrl()) + .build(); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/entity/SchemaFileAudit.java b/connect/src/main/java/com/opsbeach/connect/github/entity/SchemaFileAudit.java new 
file mode 100644 index 0000000..fd288f1 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/entity/SchemaFileAudit.java @@ -0,0 +1,43 @@ +package com.opsbeach.connect.github.entity; + +import com.opsbeach.connect.core.BaseModel; +import com.opsbeach.connect.github.dto.SchemaFileAuditDto; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.Table; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; +import lombok.experimental.SuperBuilder; + +@Entity +@Getter +@Setter +@Table(name = "schema_file_audit") +@SuperBuilder +@AllArgsConstructor +@NoArgsConstructor +public class SchemaFileAudit extends BaseModel { + private String name; + @Column(name = "client_repo_id") + private Long clientRepoId; + @Column(name = "file_type") + private String fileType; + private String path; + private String checksum; + @Column(name = "root_node_id") + private Long rootNodeId; + @Column(name = "pull_request_id") + private Long pullRequestId; + + public SchemaFileAuditDto toDto(SchemaFileAudit schemaFileAudit) { + return SchemaFileAuditDto.builder().id(schemaFileAudit.getId()) + .name(schemaFileAudit.getName()) + .clientRepoId(schemaFileAudit.getClientRepoId()) + .fileType(schemaFileAudit.getFileType()) + .path(schemaFileAudit.getPath()) + .checksum(schemaFileAudit.getChecksum()) + .build(); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/entity/Workflow.java b/connect/src/main/java/com/opsbeach/connect/github/entity/Workflow.java new file mode 100644 index 0000000..eaa3e16 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/entity/Workflow.java @@ -0,0 +1,67 @@ +package com.opsbeach.connect.github.entity; + +import com.opsbeach.connect.core.BaseModel; +import com.opsbeach.connect.github.dto.WorkflowDto; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.EnumType; +import 
jakarta.persistence.Enumerated; +import jakarta.persistence.Table; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; +import lombok.experimental.SuperBuilder; + +@Entity +@Table +@Getter +@Setter +@SuperBuilder +@AllArgsConstructor +@NoArgsConstructor +public class Workflow extends BaseModel { + @Setter + @Column(name = "domain_id") + private Long domainId; + @Column(name = "node_id") + private Long nodeId; + @Column(name = "schema_name") + private String schemaName; + @Column(name = "stack_holders") + private String stackHolders; + private String purpose; + private String creator; + @Column(name = "additional_reference") + private String additionalReference; + @Enumerated(EnumType.STRING) + private Status status; + private float rank; + private String title; + + public enum Status { + NEW(0), PR_RAISED(1), PR_MERGED(3), PR_CLOSED(4); + private final int key; + Status(int key) { + this.key = key; + } + public int getKey() { + return this.key; + } + } + + public WorkflowDto toDto(Workflow workflow) { + return WorkflowDto.builder().id(workflow.getId()) + .domainId(workflow.getDomainId()) + .nodeId(workflow.getNodeId()) + .schemaName(workflow.getSchemaName()) + .stackHolders(workflow.getStackHolders()) + .purpose(workflow.getPurpose()) + .creator(workflow.getCreator()) + .additionalReference(workflow.getAdditionalReference()) + .status(workflow.getStatus()) + .rank(workflow.getRank()) + .title(workflow.getTitle()) + .build(); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/repository/ActivityRepository.java b/connect/src/main/java/com/opsbeach/connect/github/repository/ActivityRepository.java new file mode 100644 index 0000000..ee54f02 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/repository/ActivityRepository.java @@ -0,0 +1,8 @@ +package com.opsbeach.connect.github.repository; + +import com.opsbeach.connect.core.BaseRepository; +import 
com.opsbeach.connect.github.entity.Activity; + +public interface ActivityRepository extends BaseRepository { + +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/repository/ClientRepoRepository.java b/connect/src/main/java/com/opsbeach/connect/github/repository/ClientRepoRepository.java new file mode 100644 index 0000000..b6d018c --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/repository/ClientRepoRepository.java @@ -0,0 +1,8 @@ +package com.opsbeach.connect.github.repository; + +import com.opsbeach.connect.core.BaseRepository; +import com.opsbeach.connect.github.entity.ClientRepo; + +public interface ClientRepoRepository extends BaseRepository { + +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/repository/CommentRepository.java b/connect/src/main/java/com/opsbeach/connect/github/repository/CommentRepository.java new file mode 100644 index 0000000..6c69851 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/repository/CommentRepository.java @@ -0,0 +1,8 @@ +package com.opsbeach.connect.github.repository; + +import com.opsbeach.connect.core.BaseRepository; +import com.opsbeach.connect.github.entity.Comment; + +public interface CommentRepository extends BaseRepository { + +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/repository/DomainRepository.java b/connect/src/main/java/com/opsbeach/connect/github/repository/DomainRepository.java new file mode 100644 index 0000000..fed1c0d --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/repository/DomainRepository.java @@ -0,0 +1,9 @@ +package com.opsbeach.connect.github.repository; + +import com.opsbeach.connect.core.BaseRepository; +import com.opsbeach.connect.github.entity.Domain; + +public interface DomainRepository extends BaseRepository { + + void deleteByClientRepoId(Long clientRepoId); +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/repository/EventAuditRepository.java 
b/connect/src/main/java/com/opsbeach/connect/github/repository/EventAuditRepository.java new file mode 100644 index 0000000..f2e78c4 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/repository/EventAuditRepository.java @@ -0,0 +1,8 @@ +package com.opsbeach.connect.github.repository; + +import com.opsbeach.connect.core.BaseRepository; +import com.opsbeach.connect.github.entity.EventAudit; + +public interface EventAuditRepository extends BaseRepository { + +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/repository/ModelRepository.java b/connect/src/main/java/com/opsbeach/connect/github/repository/ModelRepository.java new file mode 100644 index 0000000..aae2221 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/repository/ModelRepository.java @@ -0,0 +1,23 @@ +package com.opsbeach.connect.github.repository; + +import java.util.List; + +import org.springframework.data.jpa.repository.Query; +import org.springframework.data.repository.query.Param; + +import com.opsbeach.connect.core.BaseRepository; +import com.opsbeach.connect.github.dto.AutoCompleteModelDto; +import com.opsbeach.connect.github.entity.Model; + +public interface ModelRepository extends BaseRepository { + + @Query(value = """ + SELECT m.name, m.name_space nameSpace, m.node_id nodeId FROM analytics.model m + INNER JOIN analytics.client_repo c ON m.client_repo_id = c.id WHERE + m.name ilike :name AND m.client_id = :clientId AND m.is_deleted = false AND c.status = :status + GROUP BY m.name, m.name_space, m.node_id + """, nativeQuery = true) + List findByNameLike(@Param("name") String name, @Param("clientId") Long clientId, @Param("status") String status); + + void deleteAllByClientRepoId(Long clientRepoId); +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/repository/PullRequestRepository.java b/connect/src/main/java/com/opsbeach/connect/github/repository/PullRequestRepository.java new file mode 100644 index 0000000..17151a0 --- 
/dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/repository/PullRequestRepository.java @@ -0,0 +1,8 @@ +package com.opsbeach.connect.github.repository; + +import com.opsbeach.connect.core.BaseRepository; +import com.opsbeach.connect.github.entity.PullRequest; + +public interface PullRequestRepository extends BaseRepository { + +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/repository/SchemaFileAuditRepository.java b/connect/src/main/java/com/opsbeach/connect/github/repository/SchemaFileAuditRepository.java new file mode 100644 index 0000000..2cc3b8d --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/repository/SchemaFileAuditRepository.java @@ -0,0 +1,20 @@ +package com.opsbeach.connect.github.repository; + +import java.util.List; + +import org.springframework.data.jpa.repository.Query; +import org.springframework.data.repository.query.Param; + +import com.opsbeach.connect.core.BaseRepository; +import com.opsbeach.connect.github.entity.SchemaFileAudit; + +public interface SchemaFileAuditRepository extends BaseRepository { + + @Query(value = "SELECT s.* FROM analytics.schema_file_audit s inner join analytics.model m on m.path = s.path where m.node_id = :nodeId", nativeQuery = true) + List findByModelNodeId(@Param("nodeId") Long nodeId); + + @Query(value = "SELECT s.* FROM analytics.schema_file_audit s inner join analytics.model m on m.path = s.path where m.node_id in :nodeIds", nativeQuery = true) + List findByModelNodeIds(@Param("nodeIds") List nodeIds); + + void deleteAllByClientRepoId(Long clientRepoId); +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/repository/WorkflowRepository.java b/connect/src/main/java/com/opsbeach/connect/github/repository/WorkflowRepository.java new file mode 100644 index 0000000..3d8f60a --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/repository/WorkflowRepository.java @@ -0,0 +1,8 @@ +package com.opsbeach.connect.github.repository; + +import 
com.opsbeach.connect.core.BaseRepository; +import com.opsbeach.connect.github.entity.Workflow; + +public interface WorkflowRepository extends BaseRepository { + +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/service/ActivityService.java b/connect/src/main/java/com/opsbeach/connect/github/service/ActivityService.java new file mode 100644 index 0000000..4eb399e --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/service/ActivityService.java @@ -0,0 +1,51 @@ +package com.opsbeach.connect.github.service; + +import java.util.List; + +import org.springframework.data.jpa.domain.Specification; +import org.springframework.stereotype.Service; +import org.springframework.util.ObjectUtils; + +import com.opsbeach.connect.core.specification.IdSpecifications; +import com.opsbeach.connect.core.utils.Constants; +import com.opsbeach.connect.github.dto.ActivityDto; +import com.opsbeach.connect.github.entity.Activity; +import com.opsbeach.connect.github.repository.ActivityRepository; +import com.opsbeach.sharedlib.exception.ErrorCode; +import com.opsbeach.sharedlib.exception.RecordNotFoundException; +import com.opsbeach.sharedlib.response.ResponseMessage; + +import lombok.RequiredArgsConstructor; + +@Service +@RequiredArgsConstructor +public class ActivityService { + + private final ActivityRepository activityRepository; + + private final WorkflowService workflowService; + + private final ResponseMessage responseMessage; + + private final IdSpecifications activitySpecifications; + + public ActivityDto add(ActivityDto activityDto) { + workflowService.get(activityDto.getWorkflowId()); + var activity = activityRepository.save(activityDto.toDomain(activityDto)); + return activity.toDto(activity); + } + + public ActivityDto get(Long id) { + var activity = activityRepository.findById(id).orElseThrow(() -> new RecordNotFoundException(ErrorCode.RECORD_NOT_FOUND_ID, responseMessage.getErrorMessage(ErrorCode.RECORD_NOT_FOUND_ID, id.toString(), 
Constants.ACTIVITY))); + return activity.toDto(activity); + } + + public List findAllByWorkflowId(Long workflowId) { + Specification specification = Specification.where(null); + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(workflowId))) { + specification = specification.and(activitySpecifications.findByWorkflowId(workflowId)); + } + var activities = activityRepository.findAll(specification); + return activities.isEmpty() ? List.of() : activities.stream().map(activities.get(0)::toDto).toList(); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/service/ClientRepoService.java b/connect/src/main/java/com/opsbeach/connect/github/service/ClientRepoService.java new file mode 100644 index 0000000..0e9cc69 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/service/ClientRepoService.java @@ -0,0 +1,368 @@ +package com.opsbeach.connect.github.service; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; + +import org.apache.avro.SchemaParseException; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Lazy; +import org.springframework.http.HttpHeaders; +import org.springframework.security.config.annotation.AlreadyBuiltException; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; +import org.springframework.web.multipart.MultipartFile; + +import com.fasterxml.jackson.databind.JsonNode; +import com.opsbeach.connect.core.enums.ServiceType; +import com.opsbeach.connect.core.specification.IdSpecifications; +import com.opsbeach.connect.core.utils.Constants; +import com.opsbeach.connect.github.dto.ClientRepoDto; +import 
com.opsbeach.connect.github.dto.GitHubDto; +import com.opsbeach.connect.github.entity.ClientRepo; +import com.opsbeach.connect.github.entity.EventAudit; +import com.opsbeach.connect.github.entity.ClientRepo.RepoSource; +import com.opsbeach.connect.github.entity.ClientRepo.RepoType; +import com.opsbeach.connect.github.repository.ClientRepoRepository; +import com.opsbeach.connect.schemata.processor.avro.AvroSchema; +import com.opsbeach.connect.schemata.processor.json.JsonSchema; +import com.opsbeach.connect.schemata.processor.protobuf.ProtoSchema; +import com.opsbeach.connect.schemata.service.DomainNodeService; +import com.opsbeach.connect.task.service.ConnectService; +import com.opsbeach.sharedlib.dto.ClientDto; +import com.opsbeach.sharedlib.exception.AlreadyExistException; +import com.opsbeach.sharedlib.exception.ErrorCode; +import com.opsbeach.sharedlib.exception.RecordNotFoundException; +import com.opsbeach.sharedlib.response.ResponseMessage; +import com.opsbeach.sharedlib.security.ApplicationConfig; +import com.opsbeach.sharedlib.security.SecurityUtil; +import com.opsbeach.sharedlib.service.App2AppService; +import com.opsbeach.sharedlib.service.GoogleCloudService; +import com.opsbeach.sharedlib.utils.FileUtil; +import com.opsbeach.sharedlib.utils.StringUtil; + +import jakarta.persistence.EntityManager; +import jakarta.persistence.criteria.CriteriaBuilder; +import jakarta.persistence.criteria.CriteriaQuery; +import jakarta.persistence.criteria.Root; +import lombok.extern.slf4j.Slf4j; + + +@Slf4j +@Service +public class ClientRepoService { + + @Autowired + private ClientRepoRepository clientRepoRepository; + @Autowired + private ResponseMessage responseMessage; + @Autowired + private EventAuditService eventAuditService; + @Autowired + private App2AppService app2AppService; + @Autowired + private IdSpecifications clientRepoSpecifications; + @Autowired + private DomainService domainService; + @Autowired + private EntityManager entityManager; + @Autowired + private 
ConnectService connectService; + @Autowired + private GoogleCloudService googleCloudService; + @Autowired + private ApplicationConfig applicationConfig; + @Lazy @Autowired + private GitHubService gitHubService; + @Lazy @Autowired + private AvroSchema avroSchema; + @Lazy @Autowired + private JsonSchema jsonSchema; + @Lazy @Autowired + private ProtoSchema protoSchema; + @Lazy @Autowired + private DomainNodeService domainNodeService; + @Lazy @Autowired + private SchemaFileAuditService schemaFileAuditService; + @Lazy @Autowired + private ModelService modelService; + + private static final String SCHEMATA = "schemata"; + + @Value("${application.user.get-client-url}") + private String getClientUrl; + + @Value("${server.home-path}") + private String homePath; + + @Value("${server.repo-storage-path}") + private String repoStoragePath; + + @Value("${application.user.update-onboard-status}") + private String updateOnboardStatusUrl; + + public ClientRepo addModel(ClientRepo clientRepo) { + return clientRepoRepository.save(clientRepo); + } + + public ClientRepoDto get(Long id) { + var clientRepo = getModel(id); + return clientRepo.toDto(clientRepo); + } + + public ClientRepo getModel(Long id) { + return clientRepoRepository.findById(id).orElseThrow(() -> new RecordNotFoundException(ErrorCode.RECORD_NOT_FOUND_ID, responseMessage.getErrorMessage(ErrorCode.RECORD_NOT_FOUND_ID, id.toString(), Constants.CLIENT_REPO))); + } + + public List getAll() { + var clientRepos = getAllModels(); + return clientRepos.isEmpty() ? List.of() : clientRepos.stream().map(clientRepos.get(0)::toDto).toList(); + } + + public List getAllModels() { + return clientRepoRepository.findAll(); + } + + public RepoType[] getRepoTypes() { + return RepoType.values(); + } + + public List getActiveRepoIds() { + var specification = clientRepoSpecifications.findByClientRepoStatus(ClientRepo.Status.ACTIVE); + var clientRepos = clientRepoRepository.findAll(specification); + return clientRepos.isEmpty() ? 
List.of() : clientRepos.stream().map(ClientRepo::getId).toList(); + } + + public ClientRepoDto updateStatus(Long id, ClientRepo.Status status) { + var clientRepo = getModel(id); + clientRepo.setStatus(status); + clientRepo = addModel(clientRepo); + return clientRepo.toDto(clientRepo); + } + + @Transactional + public ClientRepoDto createNewRepo(GitHubDto gitHubDto) { + boolean isRepoExists = true; + try { + gitHubService.getRepoDetails(gitHubDto.getRepoOwner(), gitHubDto.getPrivateRepoName(), gitHubDto.getConnectId()); + } catch (Exception e) { + isRepoExists = false; + } + if (Boolean.TRUE.equals(isRepoExists)) { + var repoFullName = gitHubDto.getRepoOwner() + "/" + gitHubDto.getPrivateRepoName(); + throw new AlreadyBuiltException(responseMessage.getErrorMessage(ErrorCode.ALREADY_EXISTS, repoFullName)); + } + var response = gitHubService.createNewRepo(gitHubDto.getRepoOwner(), gitHubDto.getPrivateRepoName(), gitHubDto.getConnectId()); + var clientRepo = createClientRepo(response, gitHubDto.getConnectId()); + return clientRepo.toDto(clientRepo); + } + + private ClientRepo createClientRepo(JsonNode response, Long connectId) { + var clientRepo = ClientRepo.builder().owner(response.get("owner").get("login").asText()) + .name(response.get("name").asText()).fullName(response.get("full_name").asText()) + .connectId(connectId).status(ClientRepo.Status.ACTIVE).repoType(RepoType.JSON) + .defaultBranch(response.get("default_branch").asText()).build(); + clientRepo = addModel(clientRepo); + var clientDto = getClient(); + var domainNode = domainNodeService.addDomainNode(clientRepo.getFullName(), clientDto.getId(), clientRepo.getId()); + domainService.addDomain(clientRepo, domainNode.getId()); + return clientRepo; + } + + @Transactional + public String add(GitHubDto gitHubDto) { + List eventAudits = new LinkedList<>(); + var selectedRepos = gitHubDto.getSelectedRepos().keySet(); + for (String repo : selectedRepos) { + if 
(findByFullName(gitHubDto.getRepoOwner()+"/"+repo).isEmpty()) { + eventAudits.add(createEventAudit(repo, gitHubDto)); + } else { + throw new AlreadyExistException(ErrorCode.ALREADY_EXISTS, responseMessage.getErrorMessage(ErrorCode.ALREADY_EXISTS, gitHubDto.getRepoOwner()+"/"+repo)); + } + } + eventAudits = eventAuditService.addAll(eventAudits); + var ids = eventAudits.stream().map(EventAudit::getId).toList(); + eventAuditService.processEventAuditsAsync(ids, SecurityUtil.getUserDetails()); + return "SUCCESS"; + } + + private EventAudit createEventAudit(String repo, GitHubDto gitHubDto) { + var repoType = gitHubDto.getSelectedRepos().get(repo); + var repoFullName = gitHubDto.getRepoOwner()+"/"+repo; + var defaultBranch = gitHubService.getDefaultBranch(gitHubDto.getRepoOwner(), repo, gitHubDto.getConnectId()); + var clientRepo = ClientRepo.builder().owner(gitHubDto.getRepoOwner()).name(repo).fullName(repoFullName) + .connectId(gitHubDto.getConnectId()).status(ClientRepo.Status.ACTIVE) + .repoType(repoType).defaultBranch(defaultBranch).repositorySource(RepoSource.GITHUB).build(); + clientRepo = addModel(clientRepo); + return EventAudit.builder().eventId(clientRepo.getId()).type(EventAudit.Type.REPOSITORY_INITIAL_PULL).clientName(getClient().getName()) + .initiatedBy(gitHubDto.getRepoOwner()).status(EventAudit.Status.PENDING).build(); + } + + public String uploadRepo(MultipartFile repoMultipartFile, RepoType repoType) { + var clientDto = getClient(); + var repoFullName = FileUtil.getBaseFileName(repoMultipartFile.getOriginalFilename()); + var storagePath = repoStoragePath+"/"+clientDto.getName()+"/"+repoFullName; + File file = new File(storagePath); + if (!file.exists() || !file.isDirectory()) { + file.mkdirs(); + } + try { + // Save file to disk + Path filepath = Paths.get(storagePath, repoMultipartFile.getOriginalFilename()); + Files.write(filepath, repoMultipartFile.getBytes()); + log.info("Repo File loaded successfully: " + filepath.toString()); + } catch 
(IOException e) { + e.printStackTrace(); + throw new InternalError("File not uploaded"); + } + var filepath = StringUtil.constructStringEmptySeparator(storagePath,"/",repoMultipartFile.getOriginalFilename()); + if (findByFullName(repoFullName).isEmpty() == false) { + FileUtil.deleteFile(filepath); + throw new AlreadyExistException(ErrorCode.ALREADY_EXISTS, responseMessage.getErrorMessage(ErrorCode.ALREADY_EXISTS, repoFullName)); + } + var clientRepo = ClientRepo.builder().name(repoFullName).fullName(repoFullName).status(ClientRepo.Status.ACTIVE) + .repoType(repoType).repositorySource(RepoSource.LOCAL).folderPath(filepath).build(); + clientRepo = addModel(clientRepo); + var eventAudit = EventAudit.builder().eventId(clientRepo.getId()).type(EventAudit.Type.REPOSITORY_INITIAL_PULL).clientName(clientDto.getName()) + .status(EventAudit.Status.PENDING).build(); + eventAuditService.addModel(eventAudit); + eventAuditService.processEventAuditsAsync(List.of(eventAudit.getId()), SecurityUtil.getUserDetails()); + return "SUCCESS"; + } + + private static final String FULL_NAME = "fullName"; + + private Optional findByFullName(String fullName) { + CriteriaBuilder builder = entityManager.getCriteriaBuilder(); + CriteriaQuery query = builder.createQuery(ClientRepo.class); + Root root = query.from(ClientRepo.class); + + query.select(root).where(builder.equal(root.get(FULL_NAME), fullName)); + var typedQuery = entityManager.createQuery(query); + return typedQuery.getResultList().size() == 0 ? 
Optional.empty() : Optional.of(typedQuery.getSingleResult()); + } + + public ClientDto getClient() { + return getClient(SecurityUtil.getClientId()); + } + + public ClientDto getClient(Long id) { + var url = getClientUrl.replace("{id}", id.toString()); + var entity = app2AppService.setHeaders(App2AppService.authorizationHeader("Bearer " + SecurityUtil.getAccessToken()), null); + var res = app2AppService.httpGet(url, entity, ClientDto.class); + return res; + } + + public ClientRepo getByFullName(String fullName) { + var clientRepo = findByFullName(fullName); + return clientRepo.orElseThrow(() -> new RecordNotFoundException(ErrorCode.RECORD_NOT_FOUND, responseMessage.getErrorMessage(ErrorCode.RECORD_NOT_FOUND, Constants.CLIENT_REPO))); + } + + public Optional getSchemataRepo() { + var clientRepo = clientRepoRepository.findOne(clientRepoSpecifications.findByName(SCHEMATA)); + if (clientRepo.isEmpty()) { + var connectDto = connectService.get(ServiceType.GITHUB); + if (Objects.isNull(connectDto.getRepoOrganization())) throw new RecordNotFoundException(ErrorCode.REPO_ORGANIZATION_NOT_FOUND, responseMessage.getErrorMessage(ErrorCode.REPO_ORGANIZATION_NOT_FOUND)); + JsonNode response; + try { + response = gitHubService.getRepoDetails(connectDto.getRepoOrganization(), SCHEMATA, connectDto); + } catch (Exception e) { + response = gitHubService.createNewRepo(connectDto.getRepoOrganization(), SCHEMATA, connectDto); + } + clientRepo = Optional.of(createClientRepo(response, connectDto.getId())); + } + return clientRepo; + } + + public EventAudit initialLoading(EventAudit eventAudit) { + var clientRepo = getModel(eventAudit.getEventId()); + switch (clientRepo.getRepositorySource()) { + case GITHUB -> githubInitialLoading(eventAudit, clientRepo); + case LOCAL -> localInitialLoading(eventAudit, clientRepo); + } + return eventAudit; + } + + private EventAudit githubInitialLoading(EventAudit eventAudit, ClientRepo clientRepo) { + var connectDto = 
connectService.get(clientRepo.getConnectId()); + gitHubService.downloadTarball(clientRepo, connectDto); + var repoFolderPath = StringUtil.constructStringEmptySeparator(homePath, eventAudit.getInitiatedBy(), "-", clientRepo.getName()); + new File(repoFolderPath).mkdir(); + String objectName = StringUtil.constructStringEmptySeparator(eventAudit.getClientName(), "/", clientRepo.getFullName(), "/", clientRepo.getDefaultBranch()); + pullFilesFromBucket(applicationConfig.getGcloud().get("repo-bucket"), objectName, repoFolderPath); + // NOW SEND THE ROOT FOLDER PATH TO RESPECTIVE SCHEMA FOR PARSING AND SAVING. + try { + switch (clientRepo.getRepoType()) { + case AVRO -> avroSchema.parseFolder(repoFolderPath, clientRepo); + case JSON -> jsonSchema.parseFolder(repoFolderPath, clientRepo); + case PROTOBUF -> protoSchema.parseFolder(repoFolderPath, clientRepo); + case YAML -> log.info("IN PROGRESS"); + } + } catch (Exception e) { + log.info("Error Occured While Parsing"); + FileUtil.deleteDirectory(repoFolderPath); + throw new SchemaParseException(e.getMessage()); + } + eventAuditService.updateStatus(eventAudit, EventAudit.Status.COMPLETED); + log.info("Cleaning folder downloaded from BUCKET"); + FileUtil.deleteDirectory(repoFolderPath); + updateOnboardStatus(); + return eventAudit; + } + + private void localInitialLoading(EventAudit eventAudit, ClientRepo clientRepo) { + var zipFilePath = clientRepo.getFolderPath(); + var repoFolderPath = zipFilePath.substring(0, zipFilePath.lastIndexOf('/')); + log.info("UnZip the downloaded tar.gz file"); + FileUtil.unzip(zipFilePath, repoFolderPath); + FileUtil.deleteFile(zipFilePath); + clientRepo.setFolderPath(repoFolderPath); + addModel(clientRepo); + // NOW SEND THE ROOT FOLDER PATH TO RESPECTIVE SCHEMA FOR PARSING AND SAVING. 
+ try { + switch (clientRepo.getRepoType()) { + case AVRO -> avroSchema.parseFolder(repoFolderPath, clientRepo); + case JSON -> jsonSchema.parseFolder(repoFolderPath, clientRepo); + case PROTOBUF -> protoSchema.parseFolder(repoFolderPath, clientRepo); + case YAML -> log.info("IN PROGRESS"); + } + } catch (Exception e) { + log.info("Error Occured While Parsing"); + throw new SchemaParseException(e.getMessage()); + } + eventAuditService.updateStatus(eventAudit, EventAudit.Status.COMPLETED); + log.info("Cleaning folder downloaded from BUCKET"); + updateOnboardStatus(); + } + + private void updateOnboardStatus() { + var url = updateOnboardStatusUrl.replace("{id}", SecurityUtil.getClientId().toString()) + .replace("{isOnboarded}", Boolean.TRUE.toString()); + app2AppService.httpPut(url, app2AppService.setHeaders(Map.of(HttpHeaders.AUTHORIZATION, "Bearer "+SecurityUtil.getAccessToken()), null), JsonNode.class); + } + + private boolean pullFilesFromBucket(String bucketName, String objectName, String repoFolderPath) { + String destFilePath = repoFolderPath + '/' + bucketName + ".tar.gz"; + log.info("pulling file from bucket"); + googleCloudService.downloadFile(bucketName, objectName, destFilePath); + log.info("UnZip the downloaded tar.gz file"); + FileUtil.uncompressTarGZ(repoFolderPath, destFilePath); + FileUtil.deleteFile(destFilePath); + return true; + } + + public void rollBackRecordsCreatedById(Long clientRepoId) { + modelService.deleteAllByClientRepoId(clientRepoId); + schemaFileAuditService.deleteAllByClientRepoId(clientRepoId); + domainService.deleteByClientRepoId(clientRepoId); + domainNodeService.deleteByClientRepoId(clientRepoId); + } +} \ No newline at end of file diff --git a/connect/src/main/java/com/opsbeach/connect/github/service/CommentService.java b/connect/src/main/java/com/opsbeach/connect/github/service/CommentService.java new file mode 100644 index 0000000..b144135 --- /dev/null +++ 
b/connect/src/main/java/com/opsbeach/connect/github/service/CommentService.java @@ -0,0 +1,48 @@ +package com.opsbeach.connect.github.service; + +import org.springframework.stereotype.Service; +import org.springframework.util.ObjectUtils; + +import com.opsbeach.connect.core.utils.Constants; +import com.opsbeach.connect.github.dto.CommentDto; +import com.opsbeach.connect.github.entity.Comment; +import com.opsbeach.connect.github.repository.CommentRepository; +import com.opsbeach.sharedlib.exception.ErrorCode; +import com.opsbeach.sharedlib.exception.RecordNotFoundException; +import com.opsbeach.sharedlib.response.ResponseMessage; + +import lombok.RequiredArgsConstructor; + +@Service +@RequiredArgsConstructor +public class CommentService { + + private final CommentRepository commentRepository; + + private final PullRequestService pullRequestService; + + private final ResponseMessage responseMessage; + + public CommentDto add(CommentDto commentDto) { + if (!ObjectUtils.isEmpty(commentDto.getCommentableId())) get(commentDto.getCommentableId()); + pullRequestService.get(commentDto.getPullRequestId()); + var comment = commentRepository.save(commentDto.toDomain(commentDto)); + return comment.toDto(comment); + } + + public CommentDto get(Long id) { + var comment = getModel(id); + return comment.toDto(comment); + } + + public Comment getModel(Long id) { + return commentRepository.findById(id).orElseThrow(() -> new RecordNotFoundException(ErrorCode.RECORD_NOT_FOUND_ID, responseMessage.getErrorMessage(ErrorCode.RECORD_NOT_FOUND_ID, id.toString(), Constants.COMMENT))); + } + + public CommentDto updateIsResolved(Long id, boolean isResolved) { + var comment = getModel(id); + comment.setIsResolved(isResolved); + comment = commentRepository.save(comment); + return comment.toDto(comment); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/service/DomainService.java b/connect/src/main/java/com/opsbeach/connect/github/service/DomainService.java new file mode 100644 
index 0000000..a66f16b --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/service/DomainService.java @@ -0,0 +1,75 @@ +package com.opsbeach.connect.github.service; + +import java.util.List; + +import org.springframework.data.jpa.domain.Specification; +import org.springframework.stereotype.Service; +import org.springframework.util.ObjectUtils; + +import com.opsbeach.connect.core.specification.IdSpecifications; +import com.opsbeach.connect.core.utils.Constants; +import com.opsbeach.connect.github.dto.DomainDto; +import com.opsbeach.connect.github.entity.ClientRepo; +import com.opsbeach.connect.github.entity.Domain; +import com.opsbeach.connect.github.repository.DomainRepository; + import com.opsbeach.sharedlib.exception.ErrorCode; +import com.opsbeach.sharedlib.exception.RecordNotFoundException; +import com.opsbeach.sharedlib.response.ResponseMessage; + +import lombok.RequiredArgsConstructor; + +@Service +@RequiredArgsConstructor +public class DomainService { + + private final DomainRepository domainRepository; + + private final ResponseMessage responseMessage; + + private final IdSpecifications domainSpecifications; + + public DomainDto add(DomainDto domainDto) { + var domain = domainDto.toDomain(domainDto); + domain = addModel(domain); + return domain.toDto(domain); + } + + public Domain addModel(Domain domain) { + return domainRepository.save(domain); + } + + public Domain addDomain(ClientRepo clientRepo, Long nodeId) { + var domain = Domain.builder().clientId(clientRepo.getClientId()).nodeId(nodeId).clientRepoId(clientRepo.getId()) + .name(clientRepo.getFullName()).build(); + return addModel(domain); + } + + public DomainDto get(Long id) { + var domain = getModel(id); + return domain.toDto(domain); + } + + public Domain getModel(Long id) { + return domainRepository.findById(id).orElseThrow(() -> new RecordNotFoundException(ErrorCode.RECORD_NOT_FOUND_ID, responseMessage.getErrorMessage(ErrorCode.RECORD_NOT_FOUND_ID, id.toString(), 
Constants.DOMAIN))); + } + + public Domain getDefaultDomain(String clientRepoFullName) { + return domainRepository.findOne(domainSpecifications.findByName(clientRepoFullName)).orElse(null); + } + + public List getAll(Long clientRepoId) { + Specification specification = Specification.where(null); + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(clientRepoId))) { + specification = specification.and(domainSpecifications.findByClientRepoId(clientRepoId)); + } + return toDtos(domainRepository.findAll(specification)); + } + + private List toDtos(List domains) { + return domains.isEmpty() ? List.of() : domains.stream().map(domains.get(0)::toDto).toList(); + } + + public void deleteByClientRepoId(Long clientRepoId) { + domainRepository.deleteByClientRepoId(clientRepoId); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/service/EventAuditService.java b/connect/src/main/java/com/opsbeach/connect/github/service/EventAuditService.java new file mode 100644 index 0000000..1cde00c --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/service/EventAuditService.java @@ -0,0 +1,131 @@ +package com.opsbeach.connect.github.service; + +import java.time.LocalDateTime; +import java.time.temporal.ChronoUnit; +import java.util.List; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Lazy; +import org.springframework.scheduling.annotation.Async; +import org.springframework.security.config.annotation.AlreadyBuiltException; +import org.springframework.stereotype.Service; + +import com.opsbeach.connect.core.specification.IdSpecifications; +import com.opsbeach.connect.core.utils.Constants; +import com.opsbeach.connect.github.dto.EventAuditDto; +import com.opsbeach.connect.github.entity.EventAudit; +import com.opsbeach.connect.github.repository.EventAuditRepository; +import com.opsbeach.connect.schemata.processor.protobuf.ProtoSchema; +import com.opsbeach.sharedlib.dto.UserDto; +import 
com.opsbeach.sharedlib.exception.ErrorCode; +import com.opsbeach.sharedlib.exception.RecordNotFoundException; +import com.opsbeach.sharedlib.response.ResponseMessage; +import com.opsbeach.sharedlib.security.SecurityUtil; + +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +@Service +@RequiredArgsConstructor +public class EventAuditService { + + private final EventAuditRepository eventAuditRepository; + + private final ResponseMessage responseMessage; + + private final IdSpecifications eventAuditSpecifications; + + @Lazy + @Autowired + private ProtoSchema protoSchema; + + @Lazy + @Autowired + private ClientRepoService clientRepoService; + + @Lazy + @Autowired + private GitHubService gitHubService; + + public EventAuditDto add(EventAuditDto eventAuditDto) { + var eventAudit = addModel(eventAuditDto.toDomain(eventAuditDto)); + return eventAudit.toDto(eventAudit); + } + + public EventAudit addModel(EventAudit eventAudit) { + return eventAuditRepository.save(eventAudit); + + } + + public EventAuditDto get(Long id) { + var eventAudit = getModel(id); + return eventAudit.toDto(eventAudit); + } + + public EventAudit getModel(Long id) { + return eventAuditRepository.findById(id).orElseThrow(() -> new RecordNotFoundException(ErrorCode.RECORD_NOT_FOUND_ID, responseMessage.getErrorMessage(ErrorCode.RECORD_NOT_FOUND_ID, id.toString(), Constants.EVENT_AUDIT))); + } + + public List getAll() { + var eventAudits = eventAuditRepository.findAll(); + return eventAudits.isEmpty() ? List.of() : eventAudits.stream().map(eventAudits.get(0)::toDto).toList(); + } + + public List getInitialLoadStatus() { + var eventAudits = eventAuditRepository.findAll(eventAuditSpecifications.findByType(EventAudit.Type.REPOSITORY_INITIAL_PULL)); + return eventAudits.isEmpty() ? 
List.of() : eventAudits.stream().map(eventAudits.get(0)::toDto).toList(); + } + + public List addAll(List eventAudits) { + return eventAuditRepository.saveAll(eventAudits); + } + + public EventAuditDto updateStatus(Long id, EventAudit.Status status) { + var eventAudit = getModel(id); + eventAudit = updateStatus(eventAudit, status); + return eventAudit.toDto(eventAudit); + } + + public EventAudit updateStatus(EventAudit eventAudit, EventAudit.Status status) { + eventAudit.setStatus(status); + if (EventAudit.Status.COMPLETED.equals(status)) eventAudit.setError(null); + return eventAuditRepository.save(eventAudit); + } + + @Async + public void processEventAuditsAsync(List eventAuditIds, UserDto userDto) { + SecurityUtil.setCurrentLoggedInUser(userDto); + for (Long eventAuditId : eventAuditIds) { + processEventAudit(eventAuditId); + } + } + + public boolean processEventAudit(Long eventAuditId) { + var eventAudit = getModel(eventAuditId); + SecurityUtil.setClientId(eventAudit.getClientId()); + log.info("CALLED SUCCESSFULLY FROM TASK OF EVENT ID ="+eventAuditId.toString()); + SecurityUtil.setClientId(eventAudit.getClientId()); + if (eventAudit.getStatus().equals(EventAudit.Status.COMPLETED)) throw new AlreadyBuiltException("EVENT_AUDIT ALREADY PROCESSED"); + updateStatus(eventAudit, EventAudit.Status.IN_PROGRESS); + LocalDateTime localDateTime = LocalDateTime.now(); + log.info("START TIME FOR PROCESSING EVENT OF ID - "+eventAuditId.toString()+" is = " +localDateTime.toString()); + try { + switch (eventAudit.getType()) { + case REPOSITORY_INITIAL_PULL -> clientRepoService.initialLoading(eventAudit); + case CSV_FILE_UPLOAD -> log.info("IN PROGRESS"); + } + } catch (Exception e) { + eventAudit.setError(e.getMessage()); + eventAudit.setStatus(EventAudit.Status.ERROR); + eventAuditRepository.save(eventAudit); + // if (eventAudit.getType().equals(EventAudit.Type.REPOSITORY_INITIAL_PULL)) + clientRepoService.rollBackRecordsCreatedById(eventAudit.getEventId()); + 
log.info("OOPS.. An error occured while processign EventAudit ID -"+eventAuditId); + e.printStackTrace(); + return true; + } + log.info("PROCESSING OF EVENT OF ID - "+eventAuditId+" IS COMPLETED, TIME TAKEN = "+ ChronoUnit.SECONDS.between(localDateTime, LocalDateTime.now())); + return true; + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/service/GitHubService.java b/connect/src/main/java/com/opsbeach/connect/github/service/GitHubService.java new file mode 100644 index 0000000..fd71cab --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/service/GitHubService.java @@ -0,0 +1,656 @@ +package com.opsbeach.connect.github.service; + + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.JsonNodeFactory; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.opsbeach.connect.core.enums.AuthType; +import com.opsbeach.connect.core.enums.ServiceType; +import com.opsbeach.connect.core.enums.TaskType; +import com.opsbeach.connect.github.dto.ClientRepoDto; +import com.opsbeach.connect.github.dto.GitHubDto; +import com.opsbeach.connect.github.dto.GithubActionDto; +import com.opsbeach.connect.github.entity.ClientRepo; +import com.opsbeach.connect.github.entity.PullRequest; +import com.opsbeach.connect.github.entity.SchemaFileAudit; +import com.opsbeach.connect.github.entity.Workflow; +import com.opsbeach.connect.github.entity.ClientRepo.RepoType; +import com.opsbeach.connect.schemata.dto.SchemaValidationDto; +import com.opsbeach.connect.schemata.entity.Table; +import com.opsbeach.connect.schemata.service.TableService; +import com.opsbeach.connect.schemata.validate.Status; +import com.opsbeach.connect.task.dto.ConnectDto; +import com.opsbeach.connect.task.dto.TaskDto; +import com.opsbeach.connect.task.service.ConnectService; +import com.opsbeach.connect.task.service.TaskService; +import com.opsbeach.sharedlib.security.ApplicationConfig; +import com.opsbeach.sharedlib.security.SecurityUtil; 
+import com.opsbeach.sharedlib.service.App2AppService; +import com.opsbeach.sharedlib.service.GoogleCloudService; +import com.opsbeach.sharedlib.utils.Constants; +import com.opsbeach.sharedlib.utils.DateUtil; +import com.opsbeach.sharedlib.utils.StringUtil; + +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Lazy; +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpMethod; +import org.springframework.http.MediaType; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; +import org.springframework.util.ObjectUtils; +import org.springframework.web.util.UriComponentsBuilder; + +import java.io.IOException; +import java.time.LocalDateTime; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Base64; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +@Slf4j +@Service +@RequiredArgsConstructor +public class GitHubService { + private final App2AppService app2AppService; + private final ConnectService connectService; + private final GoogleCloudService googleCloudService; + private final SchemaFileAuditService schemaFileAuditService; + private final TaskService taskService; + private final ClientRepoService clientRepoService; + private final ApplicationConfig applicationConfig; + private final TableService tableService; + private final PullRequestService pullRequestService; + @Value("${github.access_token}") + private String accessTokenUrl; + @Value("${github.user}") + private String userUrl; + @Value("${github.user_orgs}") + private String userOrgUrl; + @Value("${github.user_repos}") + private String clientReposUrl; + @Value("${github.organization_repos}") + private String orgReposUrl; + 
@Value("${github.tarball}") + private String tarballUrl; + @Value("${application.web.baseURL}") + private String webBaseUrl; + @Value("${github.refresh-token}") + private String githubRefreshTokenUrl; + @Value("${github.redirect-url}") + private String githubRedirectURI; + @Value("${github.repos-redirect-url}") + private String reposRedirectURI; + @Value("${github.login-url}") + private String loginUrl; + @Value("${github.download_file_url}") + private String downloadFileUrl; + @Value("${github.create_branch_url}") + private String createBranchUrl; + @Value("${github.get_branch_info_url}") + private String getBranchInfoUrl; + @Value("${github.create_tree_object_url}") + private String createTreeObjectUrl; + @Value("${github.create_commit_url}") + private String createCommitUrl; + @Value("${github.create_pr_url}") + private String createPrUrl; + @Value("${github.repo-details}") + private String repoDetailsUrl; + @Value("${github.create-repo-authenticated-user}") + private String createRepoForAuthenticatedUser; + @Value("${github.get-user-details}") + private String getUserDetailsUrl; + @Value("${github.construct-pr-url}") + private String constructPrUrl; + @Value("${github.push-commit-to-branch}") + private String pushCommitToBranch; + @Value("${github.create-pr-comment}") + private String createPrComment; + @Value("${github.delete-comment}") + private String deleteComment; + + @Lazy + @Autowired + private WorkflowService workflowService; + + private static final String ORGANIZATION = "Organization"; + + + public GitHubDto logInRedirect() { + String clientID = applicationConfig.getGithub().get(Constants.CLIENT_ID); + String redirectURI = githubRedirectURI.replace("{smClientId}", SecurityUtil.getClientId().toString()); + String updatedLoginUrl = loginUrl.replace("{clientID}", clientID).replace("{redirectURI}", redirectURI); + return GitHubDto.builder().loginRedirectURL(updatedLoginUrl).build(); + } + + @Transactional + public String getToken(String code, Long 
smClientId) { + var connect = connectService.getModel(ServiceType.GITHUB, smClientId); + if (connect.isPresent()) return reposRedirectURI + "?success=true" + "&connect_id=" + connect.get().getId(); + + JsonNodeFactory jnf = JsonNodeFactory.instance; + ObjectNode payload = jnf.objectNode(); + payload.put("client_id", applicationConfig.getGithub().get(Constants.CLIENT_ID)); + payload.put("client_secret", applicationConfig.getGithub().get(Constants.CLIENT_SECRET)); + payload.put("code", code); + + var response = app2AppService.getHttpResponse(accessTokenUrl, HttpMethod.POST, app2AppService.setHeaders(payload)); + String message = response.split("&")[0].split("=")[1]; + if (message.equals("bad_verification_code")) { + return reposRedirectURI + "?success=false"; + } + var credentials = getCredentials(response); + var userName = getUserDetails(credentials.get("access_token")).get("login").asText(); + var connectDto = createConnect(credentials, userName, smClientId); + createRefreshTokenTask(connectDto, Long.parseLong(credentials.get("expires_in"))*1000); + return reposRedirectURI + "?success=true" + "&connect_id=" + connectDto.getId(); + } + + private Map getCredentials(String response) { + Map credentials = new HashMap<>(); + var stringArray = response.split("&"); + for(String val : stringArray) { + var arr = val.split("="); + if (arr.length > 1) { + credentials.put(arr[0], arr[1]); + } + } + return credentials; + } + + // need to change execution interval from (SECONDS) to (MILLI_SECONDS) + public void createRefreshTokenTask(ConnectDto connectDto, long executionInterval) { + taskService.add(TaskDto.builder().taskType(TaskType.RENEWAL_ACCESS_TOKEN).serviceType(ServiceType.GITHUB).connectId(connectDto.getId()) + .executionInterval(executionInterval).url(githubRefreshTokenUrl).clientId(connectDto.getClientId()).build()); + log.info("Task saved"); + } + + private Map getHeaders(ConnectDto connectDto) { + return Map.of(HttpHeaders.AUTHORIZATION, "Bearer " + 
connectDto.getAuthToken(), + HttpHeaders.ACCEPT, "application/vnd.github+json"); + } + + public JsonNode getRepoDetails(String owner, String repoName, Long connectId) { + var connectDto = connectService.get(connectId); + return getRepoDetails(owner, repoName, connectDto); + } + + public JsonNode getRepoDetails(String owner, String repoName, ConnectDto connectDto) { + owner = ObjectUtils.isEmpty(owner) ? connectDto.getUserName() : owner; + var url = repoDetailsUrl.replace("{owner}", owner).replace("{repo}", repoName); + return app2AppService.httpGet(url, app2AppService.setHeaders(getHeaders(connectDto), null), JsonNode.class); + } + + public String getDefaultBranch(String owner, String repoName, Long connectId) { + return getRepoDetails(owner, repoName, connectId).get("default_branch").asText(); + } + + public void generateNewToken(Long taskId) { + var taskDto = taskService.get(taskId); + if (DateUtil.secondsBetweenDate(taskDto.getCreatedAt(), LocalDateTime.now()) < (taskDto.getExecutionInterval()/1000)) return; + var connectDto = connectService.get(taskDto.getConnectId()); + var url = UriComponentsBuilder.fromUriString(githubRefreshTokenUrl) + .queryParam("refresh_token", connectDto.getRefreshToken()) + .queryParam("client_id", applicationConfig.getGithub().get(Constants.CLIENT_ID)) + .queryParam("client_secret", applicationConfig.getGithub().get(Constants.CLIENT_SECRET)) + .queryParam("grant_type", "refresh_token").buildAndExpand().toUriString(); + var response = app2AppService.getHttpResponse(url, HttpMethod.POST, null); + log.info(response); + var credentials = getCredentials(response); + connectDto.setAuthToken(credentials.get("access_token")); + connectDto.setRefreshToken(credentials.get("refresh_token")); + connectService.update(connectDto); + taskDto.setExecutionInterval(Long.parseLong(credentials.get("expires_in"))*1000); + taskDto.setLastSyncDate(LocalDateTime.now()); + taskService.update(taskDto); + } + + private ConnectDto createConnect(Map credentials, 
String userName, Long clientId) { + var connectDto = ConnectDto.builder().authType(AuthType.BEARER).serviceType(ServiceType.GITHUB) + .authToken(credentials.get("access_token")) + .refreshToken(credentials.get("refresh_token")) + .userName(userName) + .clientId(clientId) + .build(); + return connectService.add(connectDto); + } + + public JsonNode getUserDetails(ConnectDto connectDto, String userName) { + var url = getUserDetailsUrl.replace("{userName}", userName); + return app2AppService.httpGet(url, app2AppService.setHeaders(getHeaders(connectDto), null), JsonNode.class); + } + + public List getUserOrganization(Long connectId) { + var connectDto = connectService.get(connectId); + var nodes = app2AppService.httpGetEntities(userOrgUrl, app2AppService.setHeaders(getHeaders(connectDto), null), JsonNode.class); + List orgNames = new LinkedList<>(); + for(JsonNode node : nodes) { + orgNames.add(node.get("login").asText()); + } + return orgNames; + } + public JsonNode getUserDetails(String token) { + return app2AppService.httpGet(userUrl, app2AppService.setHeaders(Map.of(HttpHeaders.AUTHORIZATION, "Bearer " + token), null), JsonNode.class); + } + + public GitHubDto getRepos(Long connectId, String orgName) { + var connectDto = connectService.get(connectId); + List repoNames = new LinkedList<>(); + String repoOwner = null; + if (ObjectUtils.isEmpty(orgName)) { // pulling user repos + var url = clientReposUrl.replace("{userId}", connectDto.getUserName()); + var nodes = app2AppService.httpGet(url, app2AppService.setHeaders(getHeaders(connectDto), null), JsonNode.class); + nodes.get("items").forEach(node -> { + repoNames.add(node.get("name").asText()); + }); + repoOwner = connectDto.getUserName(); + } else { // pulling org repos + var url = orgReposUrl.replace("{orgName}", orgName); + var nodes = app2AppService.httpGetEntities(url, app2AppService.setHeaders(getHeaders(connectDto), null), JsonNode.class); + for (JsonNode jsonNode: nodes) { + 
repoNames.add(jsonNode.get("name").asText()); + } + repoOwner = orgName; + } + return GitHubDto.builder().connectId(connectId).repos(repoNames).repoOwner(repoOwner).user(connectDto.getUserName()).build(); + } + + public byte[] downloadTarball(ClientRepo clientRepo, ConnectDto connectDto) { + return downloadTarball(clientRepo.getFullName(), clientRepo.getDefaultBranch(), connectDto); + } + + public byte[] downloadTarball(String repoFullName, String sourceBranch, ConnectDto connectDto) { + Map headerMap = new HashMap<>(); + headerMap.put(HttpHeaders.AUTHORIZATION, "Bearer " + connectDto.getAuthToken()); + headerMap.put(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON_VALUE); + log.info("Downloading Tarball for repo = "+repoFullName); + var url = tarballUrl.replace("{FullName}", repoFullName).replace("{branchName}", sourceBranch); + var content = app2AppService.restTemplateExchange(url, HttpMethod.GET, app2AppService.setHeaders(headerMap, null), byte[].class); + pushInGoogleBucket(repoFullName, content, sourceBranch); + return content; + } + + private boolean pushInGoogleBucket(String repoFullName, byte[] content, String branchName) { + var clientDto = clientRepoService.getClient(); + String objectName = StringUtil.constructStringEmptySeparator(clientDto.getName(), "/", repoFullName, "/", branchName); + googleCloudService.publish(applicationConfig.getGcloud().get("repo-bucket"), objectName, null, content); + return true; + } + + // this method is for github action + @Transactional + public Object pullRequestAction(GithubActionDto githubActionDto) throws IOException { + var clientRepo = clientRepoService.getByFullName(githubActionDto.getRepoName()); + var connectDto = connectService.get(clientRepo.getConnectId()); + SecurityUtil.setClientId(clientRepo.getClientId()); + var pullRequest = pullRequestService.findByRepoIdAndNumber(clientRepo.getId(), githubActionDto.getPrNumber()); + if (Objects.isNull(pullRequest)) { + pullRequest = getPullRequest(githubActionDto, 
clientRepo, null, PullRequest.Status.OPEN, null, null, null); + pullRequest.setSha(null); // Sha should be null while creating PR for first time. + pullRequest = pullRequestService.addModel(pullRequest); + } + return switch (githubActionDto.getStatus()) { + case "open" -> pullRequestOpen(githubActionDto, pullRequest, connectDto, clientRepo); + case "merged" -> pullRequestMerged(pullRequest, clientRepo); + case "closed" -> pullRequestClosed(pullRequest, clientRepo); + default -> throw new IllegalArgumentException("Unexpected value: " + githubActionDto.getStatus()); + }; + } + + private void updateWorkflowStatus(PullRequest pullRequest) { + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(pullRequest.getWorkflowId()))) { + workflowService.updateStatus(pullRequest.getWorkflowId(), Workflow.Status.PR_MERGED); + } + } + + private Object pullRequestMerged(PullRequest pullRequest, ClientRepo clientRepo) { + // merge changes + tableService.acceptChanges(pullRequest.getId(), clientRepo.getId()); + pullRequestService.updateStatus(pullRequest.getId(), PullRequest.Status.MERGED); + updateWorkflowStatus(pullRequest); + return "Changes Accepted."; + } + + private Object pullRequestClosed(PullRequest pullRequest, ClientRepo clientRepo) { + //revert changes in neo4j of this PR. + tableService.revertChanges(pullRequest.getId(), clientRepo.getId()); + pullRequestService.updateStatus(pullRequest.getId(), PullRequest.Status.CLOSED); + return Map.of("tables", Map.of("closed", 0.0), "isMerge", true); + } + + private Object pullRequestOpen(GithubActionDto githubActionDto, PullRequest pullRequest, + ConnectDto connectDto, ClientRepo clientRepo) throws IOException { + var validationStatus = Objects.isNull(pullRequest.getValidationStatus()) ? 
Status.SUCCESS : pullRequest.getValidationStatus(); + if (pullRequest.getSha() == null) { + pullRequest = pullRequestService.updateModel(getPullRequest(githubActionDto, clientRepo, pullRequest.getId(), PullRequest.Status.OPEN, + null, pullRequest.getErrorMessage(), validationStatus)); + //save the modified deltas in neo4j. + return fetchContentAndStore(githubActionDto, pullRequest, connectDto, clientRepo); + } + if (pullRequest.getStatus().equals(PullRequest.Status.CLOSED)) { + // if the previous status of this PR is closed, then the incoming PR has been reopened. + pullRequest = pullRequestService.updateModel(getPullRequest(githubActionDto, clientRepo, pullRequest.getId(), PullRequest.Status.REOPENED, + pullRequest.getWorkflowId(), pullRequest.getErrorMessage(), validationStatus)); + return fetchContentAndStore(githubActionDto, pullRequest, connectDto, clientRepo); + } + // if SHA is not equal, then some additional changes done in existing PR. + if (Boolean.FALSE.equals(githubActionDto.getSha().equals(pullRequest.getSha()))) { + //revert changes in neo4j of this PR. + tableService.revertChanges(pullRequest.getId(), clientRepo.getId()); + // update SHA and other details in pull_request table. + pullRequest = pullRequestService.updateModel(getPullRequest(githubActionDto, clientRepo, pullRequest.getId(), pullRequest.getStatus(), + pullRequest.getWorkflowId(), pullRequest.getErrorMessage(), validationStatus)); + //save the new modified deltas in neo4j. + return fetchContentAndStore(githubActionDto, pullRequest, connectDto, clientRepo); + } + // if SHA is equal then leave it. 
+ return Map.of("tables", Map.of("same-pr", 0.0), "isMerge", true); + } + + private Object fetchContentAndStore(GithubActionDto githubActionDto, PullRequest pullRequest, + ConnectDto connectDto, ClientRepo clientRepo) throws IOException { + var paths = githubActionDto.getFilesChanged().split(" "); + if (clientRepo.getRepoType().equals(RepoType.PROTOBUF)) { + downloadTarball(clientRepo.getFullName(), pullRequest.getSourceBranch(), connectDto); + var tables = schemaFileAuditService.saveDeltaForProtoSchema(paths , clientRepo, pullRequest); + return Map.of("tables", tableService.computeScores(tables), "isMerge", true); + } + Map fileContentMap = new HashMap<>(); + for (String path : paths) { + var content = downloadFile(path, githubActionDto.getRepoName(), githubActionDto.getSourceBranch(), connectDto); + fileContentMap.put(path, content); + } + var tables = schemaFileAuditService.saveDelta(fileContentMap, clientRepo, pullRequest.getId()); + return Map.of("tables", tableService.computeScores(tables), "isMerge", true); + } + + private PullRequest getPullRequest(GithubActionDto githubActionDto, ClientRepo clientRepo, Long prId, PullRequest.Status status, + Long workflowId, String errorMessage, com.opsbeach.connect.schemata.validate.Status validationStatus) { + var prUrl = constructPrUrl.replace("{repoFullName}", clientRepo.getFullName()).replace("{prNumber}", githubActionDto.getPrNumber()); + return PullRequest.builder().number(githubActionDto.getPrNumber()).clientRepoId(clientRepo.getId()) + .status(status).sourceBranch(githubActionDto.getSourceBranch()).validationStatus(validationStatus) + .targetBranch(githubActionDto.getTargetBranch()).sha(githubActionDto.getSha()) + .id(prId).workflowId(workflowId).url(prUrl).errorMessage(errorMessage) + .build(); + } + + private byte[] downloadFile(String path, String repoName, String sourceBranch, ConnectDto connectDto) throws IOException { + var url = downloadFileUrl.replace("{owner}", repoName.split("/")[0]) + .replace("{repo}", 
repoName.split("/")[1]) + .replace("{path}", path); + url = UriComponentsBuilder.fromUriString(url).queryParam("ref", sourceBranch).toUriString(); + var fileResponse = app2AppService.httpGet(url, app2AppService.setHeaders(Map.of(HttpHeaders.AUTHORIZATION, "Bearer " + connectDto.getAuthToken()), null), JsonNode.class); + return Base64.getDecoder().decode(fileResponse.get("content").asText().replace("\n", "")); + } + + public Object validateSchema(GithubActionDto githubActionDto) throws IOException { + var clientRepo = clientRepoService.getByFullName(githubActionDto.getRepoName()); + var connectDto = connectService.get(clientRepo.getConnectId()); + SecurityUtil.setClientId(clientRepo.getClientId()); + var pullRequest = pullRequestService.findByRepoIdAndNumber(clientRepo.getId(), githubActionDto.getPrNumber()); + if (Objects.isNull(pullRequest)) { + pullRequest = getPullRequest(githubActionDto, clientRepo, null, PullRequest.Status.OPEN, null, null, null); + pullRequest.setSha(null); // Sha should be null while creating PR for first time. 
+ pullRequest = pullRequestService.addModel(pullRequest); + } + if (githubActionDto.getStatus().equalsIgnoreCase("closed")) { + var schemaValidationDto = SchemaValidationDto.builder().status(true).build(); + return updatePrValidationStatus(schemaValidationDto, pullRequest, connectDto, clientRepo.getOwner(), clientRepo.getName()); + } + if (Objects.nonNull(githubActionDto.getSchemaValidationMessage())) { + var messages = Arrays.asList(githubActionDto.getSchemaValidationMessage().split("Summary")); + var schemaValidationDto = SchemaValidationDto.builder().status(false).errorMessages(messages).build(); + return updatePrValidationStatus(schemaValidationDto, pullRequest, connectDto, clientRepo.getOwner(), clientRepo.getName()); + } + if (clientRepo.getRepoType().equals(RepoType.PROTOBUF)) { + var schemaValidationDto = SchemaValidationDto.builder().status(true).build(); + return updatePrValidationStatus(schemaValidationDto, pullRequest, connectDto, clientRepo.getOwner(), clientRepo.getName()); + } + Map newTableMap = new HashMap<>(); + var paths = githubActionDto.getFilesChanged().split(" "); + for (String path : paths) { + var content = downloadFile(path, githubActionDto.getRepoName(), githubActionDto.getSourceBranch(), connectDto); + try { + var fileType = path.substring(path.lastIndexOf(".") + 1); + var tables = schemaFileAuditService.getTablesFromFileContent(content, Boolean.FALSE, fileType); + newTableMap.put(path, tables.get(tables.size() - 1)); + } catch (Exception e) { + var message = StringUtil.constructStringEmptySeparator("{ ",e.getMessage()," in file - ", path, " }"); + var schemaValidationDto = SchemaValidationDto.builder().status(false).errorMessages(List.of(message)).build(); + return updatePrValidationStatus(schemaValidationDto, pullRequest, connectDto, clientRepo.getOwner(), clientRepo.getName()); + } + } + var validationMessage = tableService.schemaCompare(newTableMap, clientRepo, pullRequest.getId()); + return 
updatePrValidationStatus(validationMessage, pullRequest, connectDto, clientRepo.getOwner(), clientRepo.getName()); + } + + private JsonNode updatePrValidationStatus(SchemaValidationDto schemaValidationDto, PullRequest pullRequest, + ConnectDto connectDto, String owner, String repo) { + var payload = JsonNodeFactory.instance.objectNode(); + if (Boolean.FALSE.equals(schemaValidationDto.getStatus())) { + pullRequest.setValidationStatus(com.opsbeach.connect.schemata.validate.Status.ERROR); + pullRequest.setSha(null); // set sha to null while validation fails. + //revert changes in neo4j of this PR. + tableService.revertChanges(pullRequest.getId(), pullRequest.getClientRepoId()); + var messages = createValidationErrorMessage(schemaValidationDto); + pullRequest.setErrorMessage(messages.get(1)); + var issueCommentId = createPrComment(owner, repo, pullRequest.getNumber(), messages.get(0), connectDto).get("id").asLong(); + if (Objects.nonNull(pullRequest.getIssueCommentId())) deleteComment(owner, repo, pullRequest.getIssueCommentId(), connectDto); + pullRequest.setIssueCommentId(issueCommentId); + payload.put("status", false).put("message", pullRequest.getErrorMessage()); + } else { + pullRequest.setValidationStatus(com.opsbeach.connect.schemata.validate.Status.SUCCESS); + pullRequest.setErrorMessage(null); + if (Objects.nonNull(pullRequest.getIssueCommentId())) deleteComment(owner, repo, pullRequest.getIssueCommentId(), connectDto); + pullRequest.setIssueCommentId(null); + payload.put("status", true).put("message", "SUCCESS"); + } + pullRequestService.updateModel(pullRequest); + return payload; + } + + private List createValidationErrorMessage(SchemaValidationDto schemaValidationDto) { + // StringBuilder message = new StringBuilder("

This PR has some errors:

"); + List messages = new ArrayList<>(); + StringBuilder message = new StringBuilder("# Data Contract violation detected in the PR: \n"); + if (Objects.nonNull(schemaValidationDto.getErrorMap())) { + message.append("| File name | Schema | Details |\n"); + message.append("|-------|-------|-------|\n"); + for (Map.Entry>> entry : schemaValidationDto.getErrorMap().entrySet()) { + var fileName = entry.getKey(); + for (Map.Entry> tableEntry : entry.getValue().entrySet()) { + var tableName = tableEntry.getKey(); + for (String value : tableEntry.getValue()) { + message.append("|**").append(fileName).append("**|**").append(tableName).append("**|**").append(value).append("**|\n"); + messages.add(value); + } + } + } + } + schemaValidationDto.getErrorMessages().forEach(msg -> { + message.append("#### ➡ ").append(msg).append("\n"); + messages.add(msg); + }); + log.info(message.toString()+" \n"+message); + return List.of(message.toString(), String.join(", \n", messages)); + } + + public String commitAndPushInMainBranch(Map fileContentMap, String commitMessage) { + var clientRepoDto = clientRepoService.get(fileContentMap.entrySet().iterator().next().getKey().getClientRepoId()); + var connectDto = connectService.get(clientRepoDto.getConnectId()); + // Get Main branch info for SHA. + log.info("getting main branch info"); + var mainBranchInfo = getBranchInfo(clientRepoDto.getFullName(), connectDto, clientRepoDto.getDefaultBranch()); + // create tree object of files in github. + log.info("Push files to git tree"); + var treeObject = pushFilesToGit(fileContentMap, connectDto, clientRepoDto.getFullName(), + clientRepoDto.getDefaultBranch(), mainBranchInfo.get("object").get("sha").asText()); + // then create the commit of files by using SHA of tree object. 
+ log.info("Create commit using tree sha"); + var commitObject = createCommit(treeObject.get("sha").asText(), mainBranchInfo.get("object").get("sha").asText(), clientRepoDto.getFullName(), connectDto, commitMessage); + // push latest commit to default branch + log.info("Push Commit to main branch"); + pushCommitToBranch(commitObject.get("sha").asText(), clientRepoDto.getDefaultBranch(), clientRepoDto.getFullName(), connectDto); + return commitObject.get("sha").asText(); + } + + public JsonNode pushCommitToBranch(String commitSha, String branchName, String repoFullName, ConnectDto connectDto) { + var url = pushCommitToBranch.replace("{owner}", repoFullName.split("/")[0]) + .replace("{repo}", repoFullName.split("/")[1]) + .replace("{branchName}", branchName); + // var entity = app2AppService.setHeaders(getHeaders(connectDto), Map.of("sha", commitSha)); + var body = JsonNodeFactory.instance.objectNode().put("sha", commitSha).toString(); + return app2AppService.httpPatch(url, body, getHeaders(connectDto), JsonNode.class); + } + + public PullRequest commitAndRaisePr(Map fileContentMap, Workflow workflow) { + var clientRepoDto = clientRepoService.get(fileContentMap.entrySet().iterator().next().getKey().getClientRepoId()); + var connectDto = connectService.get(clientRepoDto.getConnectId()); + // Get Main branch info for SHA. + log.info("getting main branch info"); + var mainBranchInfo = getBranchInfo(clientRepoDto.getFullName(), connectDto, clientRepoDto.getDefaultBranch()); + // create tree object of files in github. + log.info("Push files to git tree"); + var treeObject = pushFilesToGit(fileContentMap, connectDto, clientRepoDto.getFullName(), + clientRepoDto.getDefaultBranch(), mainBranchInfo.get("object").get("sha").asText()); + // then create the commit of files by using SHA of tree object. 
+ log.info("Create commit using tree sha"); + var commitObject = createCommit(treeObject.get("sha").asText(), mainBranchInfo.get("object").get("sha").asText(), clientRepoDto.getFullName(), connectDto, workflow.getPurpose()); + var newBranchName = "schemata-labs-"+workflow.getTitle().replace(" ", "-")+"-"+workflow.hashCode(); + // create the new branch with the commit SHA. + log.info("Creating new branch"); + createBranch(connectDto, newBranchName, clientRepoDto.getFullName(), commitObject.get("sha").asText()); + // raise the PR. + log.info("Raise PR in new Branch"); + var prInfo = raisePr(clientRepoDto.getDefaultBranch(), newBranchName, workflow.getTitle(), workflow.getPurpose(), clientRepoDto.getFullName(), connectDto); + // store the PR info in our DB and return it. + return createPullRequest(prInfo, workflow.getId(), clientRepoDto, newBranchName); + } + + private PullRequest createPullRequest(JsonNode prInfo, Long workflowId, ClientRepoDto clientRepoDto, String sourceBranch) { + var prUrl = constructPrUrl.replace("{repoFullName}", clientRepoDto.getFullName()).replace("{prNumber}", prInfo.get("number").asText()); + var pullRequest = PullRequest.builder().number(prInfo.get("number").asText()) + .status(PullRequest.Status.OPEN) + .clientRepoId(clientRepoDto.getId()) + .workflowId(workflowId) + .sha(prInfo.get("head").get("sha").asText()) + .sourceBranch(sourceBranch) + .targetBranch(clientRepoDto.getDefaultBranch()) + .url(prUrl) + .build(); + return pullRequestService.addModel(pullRequest); + } + + public JsonNode getBranchInfo(String repoFullName, ConnectDto connectDto, String branchName) { + var url = getBranchInfoUrl.replace("{owner}", repoFullName.split("/")[0]) + .replace("{repo}", repoFullName.split("/")[1]) + .replace("{branchName}", branchName); + return app2AppService.httpGet(url, app2AppService.setHeaders(getHeaders(connectDto), null), JsonNode.class); + } + + public JsonNode createBranch(ConnectDto connectDto, String branchName, String repoFullName, 
String commitSha) { + var url = createBranchUrl.replace("{owner}", repoFullName.split("/")[0]) + .replace("{repo}", repoFullName.split("/")[1]); + var body = Map.of("ref", "refs/heads/"+branchName, "sha", commitSha); + var entity = app2AppService.setHeaders(getHeaders(connectDto), body); + return app2AppService.httpPost(url, entity, JsonNode.class); + } + + public JsonNode pushFilesToGit(Map fileContentMap, ConnectDto connectDto, + String repoFullName, String baseBranchName, String baseTreeSha) { + var url = createTreeObjectUrl.replace("{owner}", repoFullName.split("/")[0]) + .replace("{repo}", repoFullName.split("/")[1]); + var body = createTreeObject(fileContentMap, repoFullName, baseBranchName, baseTreeSha); + var entity = app2AppService.setHeaders(getHeaders(connectDto), body); + return app2AppService.httpPost(url, entity, JsonNode.class); + } + + private ObjectNode createTreeObject(Map fileContentMap, + String repoFullName, String baseBranchName, String baseTreeSha) { + JsonNodeFactory jnf = JsonNodeFactory.instance; + ObjectNode payload = jnf.objectNode(); + payload.put("base_tree", baseTreeSha); + var tree = payload.putArray("tree"); + var basePath = StringUtil.constructStringEmptySeparator("https://github.com/", repoFullName, "/tree/", baseBranchName, "/"); + for (Map.Entry entrySet : fileContentMap.entrySet()) { + var path = entrySet.getKey().getPath(); + var blob = tree.addObject(); + blob.put("path", path.substring(basePath.length(), path.length())); + blob.put("mode", "100644"); + blob.put("type", "blob"); + blob.put("content", entrySet.getValue()); + } + System.out.println(payload.toPrettyString()); + return payload; + } + + public JsonNode createCommit(String treeSha, String parentSha, String repoFullName, ConnectDto connectDto, String message) { + var url = createCommitUrl.replace("{owner}", repoFullName.split("/")[0]) + .replace("{repo}", repoFullName.split("/")[1]); + var body = Map.of("message", message, "tree", treeSha, "parents", 
List.of(parentSha)); + var entity = app2AppService.setHeaders(getHeaders(connectDto), body); + return app2AppService.httpPost(url, entity, JsonNode.class); + } + + public JsonNode raisePr(String baseBranch, String headBranch, String title, String message, + String repoFullName, ConnectDto connectDto) { + var url = createPrUrl.replace("{owner}", repoFullName.split("/")[0]) + .replace("{repo}", repoFullName.split("/")[1]); + var body = Map.of("title", title, "body", message, "head", headBranch, "base", baseBranch); + var entity = app2AppService.setHeaders(getHeaders(connectDto), body); + return app2AppService.httpPost(url, entity, JsonNode.class); + } + + protected JsonNode createNewRepo(String owner, String repoName, Long connectId) { + var connectDto = connectService.get(connectId); + return createNewRepo(owner, repoName, connectDto); + } + + protected JsonNode createNewRepo(String owner, String repoName, ConnectDto connectDto) { + ObjectNode payload = JsonNodeFactory.instance.objectNode(); + payload.put("name", repoName); + payload.put("description", "Schemata Labs managed private repo"); + payload.put("private", true); + payload.put("has_issues", true); + payload.put("auto_init", true); + payload.put("default_branch", "main"); + var url = createRepoForAuthenticatedUser; + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(owner))) { + var userDetails = getUserDetails(connectDto, owner); + if (userDetails.get("type").asText().equals(ORGANIZATION)) { + url = orgReposUrl.replace("{orgName}", owner); + } + } + return app2AppService.httpPost(url, app2AppService.setHeaders(getHeaders(connectDto), payload), JsonNode.class); + } + + public JsonNode createPrComment(String owner, String repo, String prNumber, String message, Long connectId) { + var connectDto = connectService.get(connectId); + return createPrComment(owner, repo, prNumber, message, connectDto); + } + + public JsonNode createPrComment(String owner, String repo, String prNumber, String message, ConnectDto 
connectDto) { + var url = createPrComment.replace("{owner}", owner).replace("{repo}", repo).replace("{pr_number}", prNumber); + var body= JsonNodeFactory.instance.objectNode().put("body", message); + var entity = app2AppService.setHeaders(getHeaders(connectDto), body); + return app2AppService.httpPost(url, entity, JsonNode.class); + } + + public void deleteComment(String owner, String repo, Long commentId, Long connectId) { + var connectDto = connectService.get(connectId); + deleteComment(owner, repo, commentId, connectDto); + } + + public void deleteComment(String owner, String repo, Long commentId, ConnectDto connectDto) { + var url = deleteComment.replace("{owner}", owner).replace("{repo}", repo).replace("{comment_id}", commentId.toString()); + app2AppService.httpDelete(url, app2AppService.setHeaders(getHeaders(connectDto), null), JsonNode.class); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/service/ModelService.java b/connect/src/main/java/com/opsbeach/connect/github/service/ModelService.java new file mode 100644 index 0000000..348211b --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/service/ModelService.java @@ -0,0 +1,208 @@ +package com.opsbeach.connect.github.service; + +import java.util.ArrayList; +import java.util.List; +import java.util.Set; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Lazy; +import org.springframework.data.jpa.domain.Specification; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; +import org.springframework.util.ObjectUtils; + +import com.opsbeach.connect.core.specification.IdSpecifications; +import com.opsbeach.connect.core.utils.Constants; +import com.opsbeach.connect.github.dto.AutoCompleteModelDto; +import com.opsbeach.connect.github.dto.ModelDto; +import com.opsbeach.connect.github.entity.ClientRepo; +import com.opsbeach.connect.github.entity.Domain; 
+import com.opsbeach.connect.github.entity.Model; +import com.opsbeach.connect.github.entity.SchemaFileAudit; +import com.opsbeach.connect.github.repository.ModelRepository; +import com.opsbeach.connect.schemata.entity.Table; +import com.opsbeach.connect.schemata.service.TableService; +import com.opsbeach.sharedlib.exception.ErrorCode; +import com.opsbeach.sharedlib.exception.RecordNotFoundException; +import com.opsbeach.sharedlib.response.ResponseMessage; +import com.opsbeach.sharedlib.security.SecurityUtil; + +import jakarta.persistence.EntityManager; +import jakarta.persistence.criteria.CriteriaBuilder; +import jakarta.persistence.criteria.CriteriaDelete; +import jakarta.persistence.criteria.CriteriaQuery; +import jakarta.persistence.criteria.CriteriaUpdate; +import jakarta.persistence.criteria.Expression; +import jakarta.persistence.criteria.Predicate; +import jakarta.persistence.criteria.Root; +import lombok.RequiredArgsConstructor; + +@Service +@RequiredArgsConstructor +public class ModelService { + + private final ModelRepository modelRepository; + private final IdSpecifications modelSpecifications; + private final ResponseMessage responseMessage; + private final ClientRepoService clientRepoService; + private final EntityManager entityManager; + private final DomainService domainService; + @Lazy @Autowired + private TableService tableService; + + private static final String NAME = "name"; + private static final String NAME_SPACE = "nameSpace"; + private static final String CLIENT_ID = "clientId"; + private static final String CLIENT_REPO_ID = "clientRepoId"; + private static final String PULL_REQUEST_ID = "pullRequestId"; + + public List addAll(List models) { + return modelRepository.saveAll(models); + } + + public List getAll(Long domainId, Long clientRepoId, String path) { + return toDtos(getAllModel(domainId, clientRepoId, path)); + } + + public List getAllModel(Long domainId, Long clientRepoId, String path) { + Specification specification = 
Specification.where(null); + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(domainId))) { + specification = specification.and(modelSpecifications.findByDomainId(domainId)); + } + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(clientRepoId))) { + specification = specification.and(modelSpecifications.findByClientRepoIds(List.of(clientRepoId))); + } else { + specification = specification.and(modelSpecifications.findByClientRepoIds(clientRepoService.getActiveRepoIds())); + } + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(path))) { + specification = specification.and(modelSpecifications.findByPath(path)); + } + return modelRepository.findAll(specification); + } + + private List toDtos(List models) { + return models.isEmpty() ? List.of() : models.stream().map(models.get(0)::toDto).toList(); + } + + public List findByFullNames(Set fullNames, Long clientRepoId) { + CriteriaBuilder criteriaBuilder = entityManager.getCriteriaBuilder(); + CriteriaQuery query = criteriaBuilder.createQuery(Model.class); + Root root = query.from(Model.class); + + Expression fullName = criteriaBuilder.concat(criteriaBuilder.concat(root.get(NAME_SPACE), "."), root.get(NAME)); + Predicate clientIdPredicate = criteriaBuilder.equal(root.get(CLIENT_ID), SecurityUtil.getClientId()); + Predicate clientRepoIdPredicate = criteriaBuilder.equal(root.get(CLIENT_REPO_ID), clientRepoId); + Predicate finalPredicate = criteriaBuilder.and(fullName.in(fullNames), criteriaBuilder.and(clientIdPredicate, clientRepoIdPredicate)); + + query.select(root).where(finalPredicate); + + return entityManager.createQuery(query).getResultList(); + } + + public List findByFullNames(List fullNames) { + CriteriaBuilder criteriaBuilder = entityManager.getCriteriaBuilder(); + CriteriaQuery query = criteriaBuilder.createQuery(Model.class); + Root root = query.from(Model.class); + + Expression fullName = criteriaBuilder.concat(criteriaBuilder.concat(root.get(NAME_SPACE), "."), root.get(NAME)); + Predicate clientIdPredicate = 
criteriaBuilder.equal(root.get(CLIENT_ID), SecurityUtil.getClientId()); + Predicate finalPredicate = criteriaBuilder.and(fullName.in(fullNames), clientIdPredicate); + + query.select(root).where(finalPredicate); + + return entityManager.createQuery(query).getResultList(); + } + + @Transactional + public int updateModelSetPrIdToNull(Long prId) { + CriteriaBuilder criteriaBuilder = entityManager.getCriteriaBuilder(); + CriteriaUpdate update = criteriaBuilder.createCriteriaUpdate(Model.class); + Root root = update.from(Model.class); + + update.set(PULL_REQUEST_ID, null).where(criteriaBuilder.equal(root.get(PULL_REQUEST_ID), prId)); + + return entityManager.createQuery(update).executeUpdate(); + } + + @Transactional + public int deleteModelByPrId(Long prId) { + CriteriaBuilder criteriaBuilder = entityManager.getCriteriaBuilder(); + CriteriaDelete delete = criteriaBuilder.createCriteriaDelete(Model.class); + Root root = delete.from(Model.class); + + delete.where(criteriaBuilder.equal(root.get(PULL_REQUEST_ID), prId)); + + return entityManager.createQuery(delete).executeUpdate(); + } + + public List findBySchemaFileAudit(Long schemaFileAuditId) { + return modelRepository.findAll(modelSpecifications.findBySchemaFileAudit(schemaFileAuditId)); + } + + public Model addModel(Model model) { + return modelRepository.save(model); + } + + public Model getModel(Long id) { + return modelRepository.findById(id).orElseThrow(() -> new RecordNotFoundException(ErrorCode.RECORD_NOT_FOUND_ID, responseMessage.getErrorMessage(ErrorCode.RECORD_NOT_FOUND_ID, id.toString(), Constants.MODEL))); + } + + public List findByNameLike(String name) { + return modelRepository.findByNameLike(name+"%", SecurityUtil.getClientId(), ClientRepo.Status.ACTIVE.name()); + } + + public List getNodeIds() { + var activeRepoIds = clientRepoService.getActiveRepoIds(); + var models = modelRepository.findAll(modelSpecifications.findByClientRepoIds(activeRepoIds)); + return models.isEmpty() ? 
List.of() : models.stream().map(Model::getNodeId).toList(); + } + + public ClientRepo getByNodeId(Long nodeId) { + var repoId = findByNodeId(nodeId).get(0).getClientRepoId(); + return clientRepoService.getModel(repoId); + } + + public List findByNodeId(Long nodeId) { + return modelRepository.findAll(modelSpecifications.findByNodeId(nodeId)); + } + + public List findModelByNameAndNameSpace(String nameSpace, String name) { + var specification = modelSpecifications.findByName(name).and(modelSpecifications.findByNameSpace(nameSpace)); + return modelRepository.findAll(specification); + } + + public List createModels(List tables, SchemaFileAudit schemaFileAudit, Domain domain) { + List models = new ArrayList<>(); + tables.forEach(table -> models.add(createModel(table, schemaFileAudit, domain, null))); + return modelRepository.saveAll(models); + } + + public Model createModel(SchemaFileAudit schemaFileAudit, Long clientRepoId, Table table) { + var domain = domainService.getAll(clientRepoId).get(0); + var model = createModel(table, schemaFileAudit, domain.toDomain(domain), null); + return addModel(model); + } + + public Model createModel(Table table, SchemaFileAudit schemaFileAudit, Domain domain, Long prId) { + return Model.builder().clientId(schemaFileAudit.getClientId()) + .type(table.getType()) + .name(table.getName()) + .nameSpace(table.getNameSpace()) + .schemaFileAuditId(schemaFileAudit.getId()) + .path(schemaFileAudit.getPath()) + .nodeId(table.getId()) + .domainId(domain.getId()) + .clientRepoId(domain.getClientRepoId()) + .checksum(schemaFileAudit.getChecksum()) + .pullRequestId(prId) + .build(); + } + + public void deleteAllByClientRepoId(Long clientRepoId) { + var models = modelRepository.findAll(modelSpecifications.findByClientRepoId(clientRepoId)); + if (models.isEmpty()) return; + var tableIds = models.stream().map(Model::getNodeId).toList(); + tableService.deleteByIds(tableIds); + modelRepository.deleteAllByClientRepoId(clientRepoId); + } +} diff --git 
a/connect/src/main/java/com/opsbeach/connect/github/service/PullRequestService.java b/connect/src/main/java/com/opsbeach/connect/github/service/PullRequestService.java new file mode 100644 index 0000000..52b7e12 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/service/PullRequestService.java @@ -0,0 +1,229 @@ +package com.opsbeach.connect.github.service; + +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort; +import org.springframework.stereotype.Service; + +import com.google.common.collect.LinkedHashMultimap; +import com.google.common.collect.Multimap; +import com.opsbeach.connect.core.enums.ServiceType; +import com.opsbeach.connect.core.specification.IdSpecifications; +import com.opsbeach.connect.core.utils.Constants; +import com.opsbeach.connect.github.dto.DashboardDto; +import com.opsbeach.connect.github.dto.PullRequestDto; +import com.opsbeach.connect.github.entity.PullRequest; +import com.opsbeach.connect.github.entity.PullRequest.Status; +import com.opsbeach.connect.github.repository.PullRequestRepository; +import com.opsbeach.connect.metrics.service.SlaService; +import com.opsbeach.sharedlib.exception.ErrorCode; +import com.opsbeach.sharedlib.exception.RecordNotFoundException; +import com.opsbeach.sharedlib.response.ResponseMessage; +import com.opsbeach.sharedlib.security.SecurityUtil; +import com.opsbeach.sharedlib.utils.DateUtil; + +import jakarta.persistence.EntityManager; +import jakarta.persistence.criteria.CriteriaBuilder; +import jakarta.persistence.criteria.CriteriaQuery; +import jakarta.persistence.criteria.Root; +import lombok.RequiredArgsConstructor; + +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.stream.Collectors; + +@Service 
+@RequiredArgsConstructor +public class PullRequestService { + + private final PullRequestRepository pullRequestRepository; + + private final ResponseMessage responseMessage; + + private final IdSpecifications pullRequestSpecifications; + + private final SlaService slaService; + + private final EntityManager entityManager; + + public PullRequest addModel(PullRequest pullRequest) { + return pullRequestRepository.save(pullRequest); + } + + public PullRequestDto get(Long id) { + var pullRequest = getModel(id); + return pullRequest.toDto(pullRequest); + } + + public PullRequest getModel(Long id) { + return pullRequestRepository.findById(id).orElseThrow(() -> new RecordNotFoundException(ErrorCode.RECORD_NOT_FOUND_ID, responseMessage.getErrorMessage(ErrorCode.RECORD_NOT_FOUND_ID, id.toString(), Constants.PULL_REQUEST))); + } + + public PullRequest findByRepoIdAndNumber(Long clientRepoId, String number){ + return pullRequestRepository.findOne(pullRequestSpecifications.findByClientRepoId(clientRepoId).and(pullRequestSpecifications.findByNumber(number))).orElse(null); + } + + public PullRequest updateModel(PullRequest pullRequest) { + get(pullRequest.getId()); + return pullRequestRepository.save(pullRequest); + } + + public PullRequestDto updateStatus(Long id, Status status) { + var pullRequest = getModel(id); + pullRequest.setStatus(status); + pullRequest = pullRequestRepository.save(pullRequest); + return pullRequest.toDto(pullRequest); + } + + public List getAll(Pageable pageable) { + pageable = PageRequest.of(pageable.getPageNumber(), pageable.getPageSize(), Sort.by(Sort.Direction.DESC, CREATED_AT)); + var pullRequests = pullRequestRepository.findAll(pageable); + return pullRequests.getContent().stream().map(m -> m.toDto(m)).collect(Collectors.toList()); + } + + public Long getCountWithWorkflow(Long clientRepoId) { + var specification = pullRequestSpecifications.findByClientRepoId(clientRepoId).and(pullRequestSpecifications.workflowIsNotNull()); + return 
Long.valueOf(pullRequestRepository.count(specification)); + } + + public Long getCount(Status status, LocalDateTime fromDateTime) { + if (status == null) { + return Long.valueOf(pullRequestRepository.count(pullRequestSpecifications.greaterThanCreatedAt(fromDateTime))); + } + var specification = pullRequestSpecifications.findByPullRequestStatus(status); + if (Objects.nonNull(fromDateTime)) specification = specification.and(pullRequestSpecifications.greaterThanCreatedAt(fromDateTime)); + // if (Status.OPEN.equals(status)) specification = specification.or(pullRequestSpecifications.findByPullRequestStatus(Status.REOPENED)); + return Long.valueOf(pullRequestRepository.count(specification)); + } + + public Long prCountBySlaTime(boolean isExceeded) { + var clientId = SecurityUtil.getClientId(); + SecurityUtil.setClientId(0L); // need to set clientId 0 (default value added in migration) untill we get SLA time from user + var sla = slaService.getByType(ServiceType.GITHUB); // then only it will fetch sla time for all the clients. + SecurityUtil.setClientId(clientId); + return prCountBySlaTime(isExceeded, sla.getSlaTime()); + } + + private Long prCountBySlaTime(boolean isExceeded, Long slaTime) { + var date = LocalDateTime.now().minusSeconds(slaTime); + var specification = isExceeded ? 
pullRequestSpecifications.lessThanCreatedAt(date) + : pullRequestSpecifications.greaterThanCreatedAt(date); + specification = specification.and(pullRequestSpecifications.findByPullRequestStatus(Status.OPEN) + .or(pullRequestSpecifications.findByPullRequestStatus(Status.REOPENED))); + return Long.valueOf(pullRequestRepository.count(specification)); + } + + private static final String CREATED_AT = "createdAt"; + + public DashboardDto getDashboardMetrics() { + CriteriaBuilder builder = entityManager.getCriteriaBuilder(); + CriteriaQuery query = builder.createQuery(PullRequest.class); + Root root = query.from(PullRequest.class); + Map openCloseGraph = new HashMap<>(); + Map slaMeanGraph = new HashMap<>(); + + var clientId = SecurityUtil.getClientId(); + SecurityUtil.setClientId(0L); // need to set clientId 0 (default value added in migration) untill we get SLA time from user + var sla = slaService.getByType(ServiceType.GITHUB); // then only it will fetch sla time for all the clients. + SecurityUtil.setClientId(clientId); + + List openPrsCurrentWeek = new ArrayList<>(); // PR's created this week and still OPEN + List openPrsLastWeek = new ArrayList<>(); // PR's created last week and still OPEN + List closePrsCurrentWeek = new ArrayList<>(); // PR's created this week and CLOSED + List closePrsLastWeek = new ArrayList<>(); // PR's created last week and CLOSED + List totalPrsCurrentWeek = new ArrayList<>(); // total PR's created this week + List totalPrsLastWeek = new ArrayList<>(); // total PR's created last week + Multimap openPullRequests = LinkedHashMultimap.create(); // to collect open PR's of Last 7 days. + Multimap closedPullRequests = LinkedHashMultimap.create(); // to collect closed PR's of Last 7 days. 
+ + var dateTimeNow = LocalDateTime.now(); + var oneWeekBeforeDateTime = LocalDate.now().minusDays(7).atStartOfDay(); + var twoWeekBeforeDateTime = oneWeekBeforeDateTime.minusDays(7); + + var predicate = builder.between(root.get(CREATED_AT), twoWeekBeforeDateTime, dateTimeNow); + predicate = builder.and(builder.equal(root.get(Constants.CLIENT_ID), SecurityUtil.getClientId()), predicate); + query.select(root).where(predicate); // get PR's created from starting of LastWeek till now. + var openPrsFromLastTwoWeek = entityManager.createQuery(query).getResultList(); + + openPrsFromLastTwoWeek.forEach(pullRequest -> { + var createdAt = pullRequest.getCreatedAt(); + if (DateUtil.isBefore(createdAt, oneWeekBeforeDateTime)) { + if (pullRequest.getStatus().equals(Status.OPEN) || pullRequest.getStatus().equals(Status.REOPENED)) { + openPrsLastWeek.add(pullRequest); + } else { + closePrsLastWeek.add(pullRequest); + } + totalPrsLastWeek.add(pullRequest); + } else { + if (pullRequest.getStatus().equals(Status.OPEN) || pullRequest.getStatus().equals(Status.REOPENED)) { + openPrsCurrentWeek.add(pullRequest); + openPullRequests.put(createdAt.toLocalDate(), pullRequest); + } else { + closePrsCurrentWeek.add(pullRequest); + closedPullRequests.put(createdAt.toLocalDate(), pullRequest); + } + totalPrsCurrentWeek.add(pullRequest); + } + }); + + for (int i=0; i<7; i++) { + var date = LocalDate.now().minusDays(i); + var GraphMetrics = getGraphMetrics(openPullRequests.get(date), closedPullRequests.get(date), sla.getSlaTime()); + openCloseGraph.put(LocalDate.now().minusDays(i).toString(), GraphMetrics.get("openCloseGraph")); + slaMeanGraph.put(LocalDate.now().minusDays(i).toString(), GraphMetrics.get("slaMeanGraph")); + } + + var openCountCurrentWeek = openPrsCurrentWeek.size(); + var openCountLastWeek = openPrsLastWeek.size(); + var openPrPercent = openCountLastWeek > 0 ? (((openCountCurrentWeek - openCountLastWeek)*100)/openCountLastWeek) + : (openCountCurrentWeek > 0 ? 
100 : 0); + + var closeCountCurrentWeek = closePrsCurrentWeek.size(); + var closeCountLastWeek = closePrsLastWeek.size(); + var closePrPercent = closeCountLastWeek > 0 ? (((closeCountCurrentWeek - closeCountLastWeek)*100)/closeCountLastWeek) + : (closeCountCurrentWeek > 0 ? 100 : 0); + + var totalCountCurrentWeek = totalPrsCurrentWeek.size(); + var totalCountLastWeek = totalPrsLastWeek.size(); + var totalPrPercent = totalCountLastWeek > 0 ? (((totalCountCurrentWeek - totalCountLastWeek)*100)/totalCountLastWeek) + : (totalCountCurrentWeek > 0 ? 100 : 0); + + var slaTimeExcededCurrentWeek = getSlaMeanCount(openPrsCurrentWeek, sla.getSlaTime()).get("slaTimeExceded"); + var slaTimeExcededLastWeek = getSlaMeanCount(openPrsLastWeek, sla.getSlaTime()).get("slaTimeExceded"); + var slaTimeExcededPercent = slaTimeExcededLastWeek > 0 ? (((slaTimeExcededCurrentWeek - slaTimeExcededLastWeek)*100)/slaTimeExcededLastWeek) + : (slaTimeExcededCurrentWeek > 0 ? 100 : 0); + return DashboardDto.builder().openCloseGraph(openCloseGraph).slaMeanGraph(slaMeanGraph) + .openPrsCount(openCountCurrentWeek).openPrPercent(openPrPercent) + .closePrsCount(closeCountCurrentWeek).closePrPercent(closePrPercent) + .totalPrsCount(totalCountCurrentWeek).totalPrPercent(totalPrPercent) + .slaMiss(slaTimeExcededCurrentWeek).slaMissPercent(slaTimeExcededPercent) + .build(); + + } + + public Map getSlaMeanCount(List pullRequests, long slaTime) { + int slaTimeExceded = 0; + int slaTimeNotExceded = 0; + for (var pullRequest : pullRequests) { + if (pullRequest.getStatus().equals(Status.CLOSED) || pullRequest.getStatus().equals(Status.MERGED)) { + if (DateUtil.secondsBetweenDate(pullRequest.getCreatedAt(), pullRequest.getUpdatedAt()) > slaTime) slaTimeExceded++; + else slaTimeNotExceded++; + } + else if (DateUtil.secondsBetweenDate(pullRequest.getCreatedAt(), LocalDateTime.now()) > slaTime) slaTimeExceded++; + else slaTimeNotExceded++; + } + return Map.of("slaTimeExceded", slaTimeExceded, "slaTimeNotExceded", 
slaTimeNotExceded); + } + + private Map getGraphMetrics(Collection openPullRequests, Collection closePullRequests, long slaTime) { + var slaMeanGraph = getSlaMeanCount(new ArrayList<>(openPullRequests), slaTime); + var openCloseGraph = Map.of("open", openPullRequests.size(), "close", closePullRequests.size()); + return Map.of("openCloseGraph", openCloseGraph, "slaMeanGraph", slaMeanGraph); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/service/SchemaFileAuditService.java b/connect/src/main/java/com/opsbeach/connect/github/service/SchemaFileAuditService.java new file mode 100644 index 0000000..0cdea5f --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/github/service/SchemaFileAuditService.java @@ -0,0 +1,395 @@ +package com.opsbeach.connect.github.service; + +import com.opsbeach.connect.core.specification.IdSpecifications; +import com.opsbeach.connect.core.utils.Constants; +import com.opsbeach.connect.github.entity.ClientRepo; +import com.opsbeach.connect.github.entity.PullRequest; +import com.opsbeach.connect.github.entity.ClientRepo.RepoSource; +import com.opsbeach.connect.github.entity.ClientRepo.RepoType; +import com.opsbeach.connect.github.entity.SchemaFileAudit; +import com.opsbeach.connect.github.repository.SchemaFileAuditRepository; +import com.opsbeach.connect.schemata.entity.Field; +import com.opsbeach.connect.schemata.entity.Table; +import com.opsbeach.connect.schemata.processor.avro.AvroSchema; +import com.opsbeach.connect.schemata.processor.json.JsonSchema; +import com.opsbeach.connect.schemata.processor.protobuf.ProtoSchema; +import com.opsbeach.connect.schemata.service.TableService; +import com.opsbeach.sharedlib.exception.ErrorCode; +import com.opsbeach.sharedlib.exception.RecordNotFoundException; +import com.opsbeach.sharedlib.exception.SchemaParserException; +import com.opsbeach.sharedlib.response.ResponseMessage; +import com.opsbeach.sharedlib.utils.FileUtil; +import com.opsbeach.sharedlib.utils.StringUtil; + 
+import jakarta.persistence.EntityManager; +import jakarta.persistence.criteria.CriteriaBuilder; +import jakarta.persistence.criteria.CriteriaDelete; +import jakarta.persistence.criteria.CriteriaUpdate; +import jakarta.persistence.criteria.Root; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Lazy; +import org.springframework.data.jpa.domain.Specification; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; +import org.springframework.util.ObjectUtils; + +import java.io.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.function.Function; +import java.util.stream.Collectors; + +@Slf4j +@Service +@RequiredArgsConstructor +public class SchemaFileAuditService { + + private final SchemaFileAuditRepository schemaFileAuditRepository; + private final DomainService domainService; + private final ModelService modelService; + private final TableService tableService; + private final IdSpecifications scmFileAuditpecifications; + private final EntityManager entityManager; + private final ResponseMessage responseMessage; + @Lazy + @Autowired + private AvroSchema avroSchema; + @Lazy + @Autowired + private JsonSchema jsonSchema; + @Lazy + @Autowired + private ProtoSchema protoSchema; + + @Value("${server.home-path}") + private String homePath; + @Value("${github.construct-file-path}") + private String githubFilePath; + + private static final String PULL_REQUEST_ID = "pullRequestId"; + + public SchemaFileAudit addModel(SchemaFileAudit schemaFileAudit) { + return schemaFileAuditRepository.save(schemaFileAudit); + } + + public SchemaFileAudit createSchemaFileAuditWhileInitialLoading(String filePath, 
ClientRepo clientRepo, Long rootNodeId) { + var name = filePath.substring(filePath.lastIndexOf("/") + 1, filePath.lastIndexOf(".")); + var fileType = filePath.substring(filePath.lastIndexOf(".") + 1); + var schemaFileAudit = SchemaFileAudit.builder().name(name).fileType(fileType).checksum(FileUtil.getChecksum(filePath)) + .path(constructPath(clientRepo, filePath)) + .clientRepoId(clientRepo.getId()) + .rootNodeId(rootNodeId) + .build(); + return addModel(schemaFileAudit); + } + + private String constructPath(ClientRepo clientRepo, String filePath) { + return clientRepo.getRepositorySource().equals(RepoSource.GITHUB) ? getGithubPath(clientRepo.getFullName(), clientRepo.getDefaultBranch(), filePath) : filePath; + } + + public SchemaFileAudit createSchemaFileAuditForNewFile(ClientRepo clientRepo, Table table, String rootFilePath) { + var fileType = ""; + if (clientRepo.getRepoType().equals(RepoType.AVRO)) fileType = "avsc"; + if (clientRepo.getRepoType().equals(RepoType.PROTOBUF)) fileType = "proto"; + if (clientRepo.getRepoType().equals(RepoType.JSON)) { + fileType = "json"; + table = addJsonSchemaId(table); + } + var filePath = StringUtil.constructStringEmptySeparator(table.getNameSpace().replace(".", "/"), "/", table.getName(), ".", fileType); + filePath = Objects.isNull(rootFilePath) ? filePath : rootFilePath.endsWith("/") ? StringUtil.constructStringEmptySeparator(rootFilePath, filePath) + : StringUtil.constructStringEmptySeparator(rootFilePath, "/", filePath); + var githubPath = githubFilePath.replace("{repoFullName}", clientRepo.getFullName()) + .replace("{branch}", clientRepo.getDefaultBranch()) + .replace("{filePath}", filePath); + var schemaFileAudit = createSchemaFileAudit(githubPath, filePath, table.getName(), fileType, null, clientRepo, null); + schemaFileAudit.setRootNodeId(table.getId()); + return addModel(schemaFileAudit); + } + + // this method is to generate JsonSchemaId for newly added table. 
+ private Table addJsonSchemaId(Table table) { + // var folderPath = table.getNameSpace().replace(".", "/"); + // var fileName = table.getName().concat(".json"); + // table.setJsonSchemaId(StringUtil.constructStringEmptySeparator("http://example.com/", folderPath, "/", fileName)); + var id = StringUtil.constructStringEmptySeparator("/", table.getNameSpace(), "/", table.getName()).replace(".", "/"); + table.setJsonSchemaId(id); + return tableService.addTable(table); + } + + private String getGithubPath(String repoFullName, String branchName, String filePath) { + StringBuilder githubPath = new StringBuilder(StringUtil.constructStringEmptySeparator("https://github.com/",repoFullName,"/tree/",branchName)); + String[] folders = filePath.split("/"); + // To get the root folder path of repo. so that we need starting index of downloaded repo. + int folderPathLength = homePath.split("/").length + 2; + for (int i=folderPathLength; i < folders.length; i++) { + githubPath.append('/').append(folders[i]); + } + return githubPath.toString(); + } + + public List
getTablesFromFileContent(byte[] fieldContent, Boolean toSave, String fileType) throws IOException { + switch (fileType) { + case "avsc": return avroSchema.getTables(fieldContent, toSave); + case "json": return jsonSchema.getTables(fieldContent, toSave); + default: return null; + } + } + + @Transactional + public int updateSchemaFileAuditSetPrIdToNull(Long prId) { + CriteriaBuilder criteriaBuilder = entityManager.getCriteriaBuilder(); + CriteriaUpdate update = criteriaBuilder.createCriteriaUpdate(SchemaFileAudit.class); + Root root = update.from(SchemaFileAudit.class); + + update.set(PULL_REQUEST_ID, null).where(criteriaBuilder.equal(root.get(PULL_REQUEST_ID), prId)); + + return entityManager.createQuery(update).executeUpdate(); + } + + @Transactional + public int deleteSchemaFileAuditByPrId(Long prId) { + CriteriaBuilder criteriaBuilder = entityManager.getCriteriaBuilder(); + CriteriaDelete delete = criteriaBuilder.createCriteriaDelete(SchemaFileAudit.class); + Root root = delete.from(SchemaFileAudit.class); + + delete.where(criteriaBuilder.equal(root.get(PULL_REQUEST_ID), prId)); + + return entityManager.createQuery(delete).executeUpdate(); + } + + public List
saveDeltaForProtoSchema(String[] filePaths, ClientRepo clientRepo, PullRequest pullRequest) { + Map> fileTables = protoSchema.getTablesOfFilePaths(filePaths, clientRepo, pullRequest.getSourceBranch()); + return saveDeltaOfTables(fileTables, clientRepo, pullRequest.getId()); + } + + public List
saveDelta(Map fileContentMap, ClientRepo clientRepo, Long prId) throws IOException { + Map> fileTables = new HashMap<>(); + fileContentMap.entrySet().forEach(fileContent -> { + var filePath = fileContent.getKey(); + var fileType = filePath.substring(filePath.lastIndexOf(".") + 1); + List
tables = new ArrayList<>(); + try { + tables = getTablesFromFileContent(fileContent.getValue(), Boolean.FALSE, fileType); + } catch (IOException e) { + log.info(e.getMessage()); + throw new SchemaParserException(e.getMessage()); + } + fileTables.put(filePath, tables); + }); + return saveDeltaOfTables(fileTables, clientRepo, prId); + } + + private List
saveDeltaOfTables(Map> fileTables, ClientRepo clientRepo, Long prId) { + Map> tableWithNewFields = new HashMap<>(); + Map tableIds = new HashMap<>(); + var domain = domainService.getDefaultDomain(clientRepo.getFullName()); + fileTables.entrySet().forEach(fileTable -> { + var tables = fileTable.getValue(); + var filePath = fileTable.getKey(); + log.info("saving delta for - "+filePath); + var githubPath = githubFilePath.replace("{repoFullName}", clientRepo.getFullName()) + .replace("{branch}", clientRepo.getDefaultBranch()) + .replace("{filePath}", filePath); + var fileName = filePath.substring(filePath.lastIndexOf("/") + 1, filePath.lastIndexOf(".")); + var fileType = filePath.substring(filePath.lastIndexOf(".") + 1); + var schemaFileAudit = getSchemaFileAudit(githubPath, null); //send prId is null then only it will fetch file details of main branch + if (schemaFileAudit == null) { + // if schemaFileAudit is not present then the incoming file is new file. + schemaFileAudit = createSchemaFileAudit(githubPath, filePath, fileName, fileType, null, clientRepo, prId); + // schemaFileAudit = createSchemaFileAudit(githubPath, filePath, fileName, fileType, FileUtil.getChecksum(fileContent.getValue()), clientRepo, prId); + } + var models = modelService.findBySchemaFileAudit(schemaFileAudit.getId()); + Map tableMap = new HashMap<>(); + tables.stream().forEach(table -> { + tableMap.put(StringUtil.constructStringEmptySeparator(table.getNameSpace(),".",table.getName()), table); + }); + log.info("tableMap length = "+tableMap.size()); + models.forEach(model -> { + var fullName = StringUtil.constructStringEmptySeparator(model.getNameSpace(),".",model.getName()); + if (ObjectUtils.isEmpty(tableWithNewFields.get(fullName))) { + var tableNew = tableMap.get(fullName); + tableWithNewFields.put(fullName, tableNew.getFields()); + // call tableService and save delta in neo4j; + var table = tableService.findDeltaForTable(tableNew, model.getNodeId(), prId); + tableIds.put(fullName, 
table.getId()); + } + tableMap.remove(fullName); + }); + for (Map.Entry entry : tableMap.entrySet()) { + // if size is greater than 0, that's tell that new schema is added in this file. + // CALL TABLE SERVICE TO ADD THIS TABLE. + // ADD IN tableDeltas list + var fullName = StringUtil.constructStringEmptySeparator(entry.getValue().getNameSpace(),".",entry.getValue().getName()); + var table = entry.getValue(); + if (ObjectUtils.isEmpty(tableWithNewFields.get(fullName))) { + tableWithNewFields.put(fullName, entry.getValue().getFields()); + table = tableService.saveNewTableWithDelta(table, prId); + tableIds.put(fullName, table.getId()); + } + table.setId(tableIds.get(fullName)); + var model = modelService.createModel(table, schemaFileAudit, domain, prId); + modelService.addModel(model); + } + if (ObjectUtils.isEmpty(schemaFileAudit.getRootNodeId())) { + var table = tables.get(tables.size()-1); + var fullName = StringUtil.constructStringEmptySeparator(table.getNameSpace(),".",table.getName()); + schemaFileAudit.setRootNodeId(tableIds.get(fullName)); + addModel(schemaFileAudit); + } + }); + var tableDeltas = tableService.findDeltaForFields(tableWithNewFields, prId, clientRepo.getRepoType()); + return tableDeltas; + } + + public SchemaFileAudit createSchemaFileAudit(String path, String filePath, String fileName, String fileType, String checkSum, ClientRepo clientRepo, Long prId) { + var schemaFileAudit = SchemaFileAudit.builder().name(fileName).fileType(fileType).checksum(checkSum) + .path(path).clientRepoId(clientRepo.getId()).clientId(clientRepo.getClientId()).pullRequestId(prId) + .build(); + return addModel(schemaFileAudit); + } + + public SchemaFileAudit getModel(Long id) { + return schemaFileAuditRepository.findById(id).orElseThrow(() -> new RecordNotFoundException(ErrorCode.RECORD_NOT_FOUND_ID, responseMessage.getErrorMessage(ErrorCode.RECORD_NOT_FOUND_ID, id.toString(), Constants.SCHEMA_FILE_AUDIT))); + } + + public List getByModelNodeId(Long nodeId) { + return 
schemaFileAuditRepository.findByModelNodeId(nodeId); + } + + public List getByModelNodeIds(List nodeIds) { + return schemaFileAuditRepository.findByModelNodeIds(nodeIds); + } + + public SchemaFileAudit getSchemaFileAudit(String path) { + return schemaFileAuditRepository.findOne(scmFileAuditpecifications.findByPath(path)).orElse(null); + } + + public SchemaFileAudit getSchemaFileAudit(String path, Long prId) { + var spec = scmFileAuditpecifications.findByPath(path).and(scmFileAuditpecifications.findByPullRequest(prId)); + return schemaFileAuditRepository.findOne(spec).orElse(null); + } + + public List getAll(Long clientRepoId) { + Specification specification = Specification.where(null); + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(clientRepoId))) { + specification = specification.and(scmFileAuditpecifications.findByClientRepoId(clientRepoId)); + } + return schemaFileAuditRepository.findAll(specification); + } + + /* + * Generate file content map where the given table is present. + */ + public Map generateFileContentOfSchema(Long tableId, RepoType repoType) { + if (repoType.equals(RepoType.PROTOBUF)) return protoSchema.generateSchema(tableId); + var schemaFileAudits = getByModelNodeId(tableId); + var schemaFileAuditMap = schemaFileAudits.stream().collect(Collectors.toMap(SchemaFileAudit::getRootNodeId, Function.identity())); + var tables = tableService.findAllById(new ArrayList<>(schemaFileAuditMap.keySet())); + return generateFileContentOfSchema(schemaFileAuditMap, tables); + } + + public Map generateFileContentOfSchema(List tableIds) { + var schemaFileAudits = getByModelNodeIds(tableIds); + var schemaFileAuditMap = schemaFileAudits.stream().collect(Collectors.toMap(SchemaFileAudit::getRootNodeId, Function.identity())); + var tables = tableService.findAllById(new ArrayList<>(schemaFileAuditMap.keySet())); + return generateFileContentOfSchema(schemaFileAuditMap, tables); + } + + private Map generateFileContentOfSchema(Map schemaFileAuditMap, List
tables) { + Map map = new HashMap<>(); + tables.forEach(table -> { + // need to filter unwanted fields. + var table2 = filterFields(table); + if (schemaFileAuditMap.get(table.getId()).getFileType().equalsIgnoreCase("avsc")) { + map.put(schemaFileAuditMap.get(table.getId()), avroSchema.getFileContent(table2)); + } + if (schemaFileAuditMap.get(table.getId()).getFileType().equalsIgnoreCase("json")) { + map.put(schemaFileAuditMap.get(table.getId()), jsonSchema.getFileContent(table2)); + } + }); + return map; + } + + // this method will filter out the fields in the table which are in other PRs + public Table filterFields(Table table) { + List fields = table.getFields(); + Map fieldMap = table.getFields().stream().collect(Collectors.toMap(Field::getId, Function.identity())); + fields.forEach(field -> { + if (field.getIsUserChanged().equals(Boolean.TRUE)) { + if (ObjectUtils.isEmpty(field.getPrId()) && field.getIsDeleted().equals(Boolean.TRUE)) { + fieldMap.remove(field.getId()); + return; + } + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(field.getPrId()))) { + fieldMap.remove(field.getId()); + return; + } + } + checkNestedTable(field); + }); + // if the table is modified now, then transfer values from the new table to the old table for raising a PR. + if ((table.getIsUserChanged().equals(Boolean.TRUE) && table.getIsDeleted().equals(Boolean.TRUE)) && Objects.isNull(table.getPrId())) { + tableService.transferTableProps(table, table.getModifiedTable()); + } + Field[] orderedFields = new Field[fieldMap.size()]; + // order the fields according to row number. 
+ for (Field field : fieldMap.values()) + orderedFields[field.getRowNumber()-1] = field; + table.setFields(Arrays.asList(orderedFields)); + return table; + } + + private void checkNestedTable(Field field) { + if (field.getIsPrimitiveType().equals(Boolean.FALSE)) { + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(field.getContain()))) { + var table = filterFields(field.getContain()); + field.setContain(table); + } + switch(field.getDataType()) { + case "array" -> checkNestedTableArrayField(field); + case "map" -> checkNestedTableMapField(field); + case "union" -> checkNestedTableUnionField(field); + } + } + } + + private void checkNestedTableArrayField(Field field) { + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(field.getContain()))) { + var table = filterFields(field.getContain()); + field.setContain(table); + } + switch(field.getItems()) { + case "array" -> checkNestedTableArrayField(field.getArrayField()); + case "map" -> checkNestedTableMapField(field.getMapField()); + case "union" -> checkNestedTableUnionField(field); + } + } + + private void checkNestedTableMapField(Field field) { + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(field.getContain()))) { + var table = filterFields(field.getContain()); + field.setContain(table); + } + switch(field.getValues()) { + case "array" -> checkNestedTableArrayField(field.getArrayField()); + case "map" -> checkNestedTableMapField(field.getMapField()); + case "union" -> checkNestedTableUnionField(field); + } + } + + private void checkNestedTableUnionField(Field field) { + field.getUnionTypes().stream().forEach(unionField -> checkNestedTable(unionField)); + } + + public void deleteAllByClientRepoId(Long clientRepoId) { + schemaFileAuditRepository.deleteAllByClientRepoId(clientRepoId); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/github/service/WorkflowService.java b/connect/src/main/java/com/opsbeach/connect/github/service/WorkflowService.java new file mode 100644 index 0000000..ed2c848 --- /dev/null +++ 
b/connect/src/main/java/com/opsbeach/connect/github/service/WorkflowService.java @@ -0,0 +1,467 @@ +package com.opsbeach.connect.github.service; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Function; +import java.util.stream.Collectors; + +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; +import org.springframework.util.ObjectUtils; + +import com.opsbeach.connect.core.utils.Constants; +import com.opsbeach.connect.github.dto.WorkflowDto; +import com.opsbeach.connect.github.dto.WorkflowDto.FieldDto; +import com.opsbeach.connect.github.dto.WorkflowDto.TableDto; +import com.opsbeach.connect.github.entity.Model; +import com.opsbeach.connect.github.entity.PullRequest; +import com.opsbeach.connect.github.entity.SchemaFileAudit; +import com.opsbeach.connect.github.entity.Workflow; +import com.opsbeach.connect.github.entity.ClientRepo.RepoType; +import com.opsbeach.connect.github.repository.WorkflowRepository; +import com.opsbeach.connect.schemata.entity.Field; +import com.opsbeach.connect.schemata.entity.Table; +import com.opsbeach.connect.schemata.service.TableService; +import com.opsbeach.connect.schemata.validate.FieldValidator; +import com.opsbeach.connect.schemata.validate.SchemaValidator; +import com.opsbeach.connect.schemata.validate.Status; +import com.opsbeach.sharedlib.exception.AlreadyExistException; +import com.opsbeach.sharedlib.exception.BadRequestException; +import com.opsbeach.sharedlib.exception.ErrorCode; +import com.opsbeach.sharedlib.exception.RecordNotFoundException; +import com.opsbeach.sharedlib.response.ResponseMessage; +import com.opsbeach.sharedlib.security.SecurityUtil; +import com.opsbeach.sharedlib.utils.StringUtil; + +import 
lombok.RequiredArgsConstructor; + +@Service +@RequiredArgsConstructor +public class WorkflowService { + + private final WorkflowRepository workflowRepository; + private final SchemaFileAuditService schemaFileAuditService; + private final TableService tableService; + private final GitHubService gitHubService; + private final ResponseMessage responseMessage; + private final ClientRepoService clientRepoService; + private final ModelService modelService; + private final SchemaValidator schemaValidator; + private final FieldValidator fieldValidator; + + @Value("${github.construct-file-path}") + private String githubFilePath; + + public WorkflowDto add(WorkflowDto workflowDto) { + var workflow = addModel(workflowDto); + return workflow.toDto(workflow); + } + + public Workflow addModel(WorkflowDto workflowDto) { + return workflowRepository.save(workflowDto.toDomain(workflowDto)); + } + + public Workflow addModel(Workflow workflow) { + return workflowRepository.save(workflow); + } + + public WorkflowDto get(Long id) { + var workflow = getModel(id); + return workflow.toDto(workflow); + } + + public Workflow getModel(Long id) { + return workflowRepository.findById(id).orElseThrow(() -> new RecordNotFoundException(ErrorCode.RECORD_NOT_FOUND_ID, responseMessage.getErrorMessage(ErrorCode.RECORD_NOT_FOUND_ID, id.toString(), Constants.WORKFLOW))); + } + + public List getAll() { + var workflows = workflowRepository.findAll(); + return workflows.isEmpty() ? 
List.of() : workflows.stream().map(workflows.get(0)::toDto).toList(); + } + + public Workflow updateStatus(Long id, Workflow.Status status) { + var workflow = getModel(id); + workflow.setStatus(status); + return addModel(workflow); + } + + private WorkflowDto toDto(Workflow workflow, List fields) { + return WorkflowDto.builder().id(workflow.getId()) + .domainId(workflow.getDomainId()) + .nodeId(workflow.getNodeId()) + .schemaName(workflow.getSchemaName()) + .stackHolders(workflow.getStackHolders()) + .purpose(workflow.getPurpose()) + .creator(workflow.getCreator()) + .additionalReference(workflow.getAdditionalReference()) + .status(workflow.getStatus()) + .rank(workflow.getRank()) + .title(workflow.getTitle()) + // .fields(Objects.isNull(fields) ? List.of() : fields.stream().map(fields.get(0)::toDto).toList()) + .build(); + } + + @Transactional + public Object saveAndRaisePr(WorkflowDto workflowDto, Long clientRepoId) { + var clientRepo = clientRepoService.getModel(clientRepoId); + var workflow = Workflow.builder().status(Workflow.Status.NEW).title("changes-in-table-"+workflowDto.getTable().name()) + .purpose("modifying schema "+workflowDto.getTable().name()).build(); + workflow = addModel(workflow); + // if (clientRepo.getRepoType().equals(RepoType.PROTOBUF)) { + // protobufSchemaPrRaise(workflowDto.table(), clientRepo, workflow); + // } else { + AtomicBoolean isTableChanged = new AtomicBoolean(false); + var table = saveTableDelta(workflowDto.getTable(), clientRepo.getRepoType(), isTableChanged); + if (isTableChanged.get() == false) return "FIELD MAPPING SUCCESS"; + SchemaFileAudit schemaFileAudit = null; + Model model = null; + if (Objects.isNull(workflowDto.getTable().id())) { + // if the ID is null means then it is new table added + // so we need to create new file and model audit for that table. + // need to save what type of repository in clientRepotable (ex avro, json and proto). + var rootFilePath = clientRepo.getRepoType().equals(RepoType.PROTOBUF) ? 
Constants.PROTOBUF_SCHEMA_ROOT_FILE_PATH : null; + schemaFileAudit = schemaFileAuditService.createSchemaFileAuditForNewFile(clientRepo, table, rootFilePath); + model = modelService.createModel(schemaFileAudit, clientRepo.getId(), table); + } + Map fileContentMap = schemaFileAuditService.generateFileContentOfSchema(table.getId(), clientRepo.getRepoType()); + var pullRequest = gitHubService.commitAndRaisePr(fileContentMap, workflow); + updatePrNumber(pullRequest, table); + if (Objects.nonNull(schemaFileAudit) && Objects.nonNull(model)) { + // update PR_ID in file and model audit record. + schemaFileAudit.setPullRequestId(pullRequest.getId()); + schemaFileAuditService.addModel(schemaFileAudit); + model.setPullRequestId(pullRequest.getId()); + modelService.addModel(model); + } + workflow.setStatus(Workflow.Status.PR_RAISED); + addModel(workflow); + return toDto(workflow, null); + } + + public Table updatePrNumber(PullRequest pullRequest, Table table) { + table.getFields().forEach(field -> { + if (Boolean.TRUE.equals(field.getIsUserChanged()) && ObjectUtils.isEmpty(field.getPrId())) + field.setPrId(pullRequest.getId()); + }); + if (Boolean.TRUE.equals(table.getIsUserChanged()) && ObjectUtils.isEmpty(table.getPrId())) { + table.setPrId(pullRequest.getId()); + } + return tableService.addTable(table); + } + + private Table saveTableDelta(TableDto modifiedTable, RepoType repoType, AtomicBoolean isTableChanged) { + var tableNew = modifiedTable.toDomain(modifiedTable); + if (Objects.nonNull(modifiedTable.id())) { + // ASSUMING THAT THE CHANGED TABLE WILL NOT CONTAIN ANY PREVIOUS CHANGES. (i.e. IT IS A NEWLY CHANGED TABLE). + var tableOld = tableService.getModel(modifiedTable.id()); + validateTable(tableNew, tableOld); + if (!tableService.compareTable(tableNew, tableOld)) tableNew = null; // compareTable returns true if the table has any change. 
+ return addFieldsInTable(modifiedTable.fields(), modifiedTable.id(), tableNew, tableOld, repoType, isTableChanged); + } + isTableChanged.set(true); + validateTable(tableNew, null); + tableNew.setIsUserChanged(Boolean.TRUE); + tableNew.setIsDeleted(Boolean.FALSE); + tableNew.setClientId(SecurityUtil.getClientId()); // Add clientId for new table. + var fullName = StringUtil.constructStringEmptySeparator(tableNew.getNameSpace(),".",tableNew.getName()); + List fields = new ArrayList<>(); + AtomicInteger i = new AtomicInteger(1); + modifiedTable.fields().forEach(fieldDto -> { + var field = fieldDto.toDomain(fieldDto); + field.setIsPrimitiveType(Field.isPrimitiveType(field.getDataType())); + field.setSchema(fullName); + field.setIsUserChanged(Boolean.TRUE); + field.setIsDeleted(Boolean.FALSE); + field.setRowNumber(i.getAndIncrement()); + addComplexTypeToField(field, Boolean.TRUE); + addReferenceField(field, fieldDto.referenceFieldId()); + fields.add(field); + }); + tableNew.setFields(fields); + var table = tableService.addTable(tableNew); + return table; + } + + // to save existing table delta. + public Table addFieldsInTable(List fieldDtos, Long tableId, Table modifiedTable, Table table, RepoType repoType, AtomicBoolean isTableChanged) { + if (Objects.nonNull(modifiedTable)) { + modifiedTable.setIsUserChanged(Boolean.TRUE); + modifiedTable.setIsDeleted(Boolean.FALSE); + modifiedTable.setId(null); + modifiedTable.setFields(null); + table.setModifiedTable(modifiedTable); + table.setIsUserChanged(Boolean.TRUE); + table.setIsDeleted(Boolean.TRUE); + isTableChanged.set(true); + } + var fields = table.getFields().stream().collect(Collectors.toMap(Field::getId, Function.identity())); + var fieldNames = fields.values().stream().map(Field::getName).toList(); + // List addedFields = fieldDtos.isEmpty() ? 
List.of() : fieldDtos.stream().map(fieldDtos.get(0)::toDomin).toList(); + List updatedFields = new ArrayList<>(); + AtomicInteger rowNumber = new AtomicInteger(fields.size()+1); + fieldDtos.forEach(fieldDto -> { + var oldField = fields.get(fieldDto.id()); + var addedField = fieldDto.toDomain(fieldDto); + validateField(addedField, oldField, fieldNames); + boolean isNewField = Boolean.TRUE; + if (Objects.nonNull(oldField)) { + var isFieldChanged = tableService.compareField(addedField, oldField, repoType); + if (isFieldChanged) { + isTableChanged.set(true); + oldField.setIsUserChanged(Boolean.TRUE); + oldField.setIsDeleted(Boolean.TRUE); + addedField.setId(null); + transferFieldProps(oldField, addedField); + updatedFields.add(oldField); + fields.remove(oldField.getId()); + isNewField = Boolean.FALSE; + } + if (Objects.nonNull(fieldDto.referenceFieldId())) { + addReferenceField(oldField, fieldDto.referenceFieldId()); + updatedFields.add(oldField); + fields.remove(oldField.getId()); + } + if (Boolean.FALSE.equals(isFieldChanged)) + return; + } + isTableChanged.set(true); + addedField.setIsUserChanged(Boolean.TRUE); + addedField.setIsDeleted(Boolean.FALSE); + addedField.setIsPrimitiveType(Field.isPrimitiveType(addedField.getDataType())); + if (addedField.getRowNumber() == 0) addedField.setRowNumber(rowNumber.getAndIncrement()); + addComplexTypeToField(addedField, isNewField); + addReferenceField(addedField, fieldDto.referenceFieldId()); + updatedFields.add(addedField); + }); + updatedFields.addAll(fields.values()); // add the modified fields and non changed fields in one list. + table.setFields(updatedFields); // set fieldList as table fields. 
+ return tableService.addTable(table); + } + + private void addReferenceField(Field fieldNew, Long referenceFieldId) { + if (Objects.nonNull(referenceFieldId)) { + var field = tableService.getFieldModel(referenceFieldId); + fieldNew.setReferenceField(field); + } + } + + private void addComplexTypeToField(Field fieldNew, Boolean isNewField) { + // The schema mapping with a field will be with another schema in the same repository, so no need to worry about mapping a schema with a different repo. + if (fieldNew.getIsPrimitiveType().equals(Boolean.FALSE) && isNewField) { + var fullName = fieldNew.getDataType(); + var nameSpace = fullName.substring(0, fullName.lastIndexOf(".")); + var name = fullName.substring(fullName.lastIndexOf(".")+1, fullName.length()); + var table = tableService.findByNameAndNameSpace(name, nameSpace); + fieldNew.setContain(table); + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(table.getJsonSchemaId()))) + fieldNew.setJsonSchemaRefId(table.getJsonSchemaId()); + } + } + + private void transferFieldProps(Field fromField, Field toField) { + // currently we do not transfer props like (description, isPii, isClassified) + // because those props can be changed by the user, so those values should be the incoming values. + // we are transferring non-editable props from the existing field to the same delta field. 
+ toField.setSchema(fromField.getSchema()); + toField.setRowNumber(fromField.getRowNumber()); + toField.setIsPrimaryKey(fromField.getIsPrimaryKey()); + toField.setIsPrimitiveType(fromField.getIsPrimitiveType()); + toField.setDefaultValue(fromField.getDefaultValue()); + // toField.setIsNullable(fromField.getIsNullable()); + toField.setSymbols(fromField.getSymbols()); + toField.setItems(fromField.getItems()); + toField.setValues(fromField.getValues()); + toField.setSize(fromField.getSize()); + toField.setArrayField(fromField.getArrayField()); + toField.setMapField(fromField.getMapField()); + toField.setUnionTypes(fromField.getUnionTypes()); + toField.setContain(fromField.getContain()); + toField.setReferenceField(fromField.getReferenceField()); + } + + private void validateTable(Table tableNew, Table tableOld) { + if (Objects.nonNull(tableOld)) { + if (Boolean.FALSE.equals(tableNew.getNameSpace().equals(tableOld.getNameSpace()))) { + throw new BadRequestException(ErrorCode.BAD_REQUEST, responseMessage.getErrorMessage(ErrorCode.BAD_REQUEST, "Table namespace change Not allowed")); + } + if (Boolean.FALSE.equals(tableNew.getName().equals(tableOld.getName()))) { + throw new BadRequestException(ErrorCode.BAD_REQUEST, responseMessage.getErrorMessage(ErrorCode.BAD_REQUEST, "Table name change Not allowd")); + } + if (Boolean.TRUE.equals(tableOld.getIsUserChanged())) { + throw new BadRequestException(ErrorCode.TABLE_ALREADY_MODIFIED, responseMessage.getErrorMessage(ErrorCode.TABLE_ALREADY_MODIFIED, tableOld.getName())); + } + tableOld.getFields().stream().forEach(field -> { + if (Boolean.TRUE.equals(field.getIsUserChanged())) { + throw new BadRequestException(ErrorCode.TABLE_ALREADY_MODIFIED, responseMessage.getErrorMessage(ErrorCode.TABLE_ALREADY_MODIFIED, tableOld.getName())); + } + }); + } else { + var models = modelService.findModelByNameAndNameSpace(tableNew.getNameSpace(), tableNew.getName()); + if (models.size() > 0) { + throw new 
AlreadyExistException(ErrorCode.ALREADY_EXISTS, responseMessage.getErrorMessage(ErrorCode.ALREADY_EXISTS, StringUtil.constructStringEmptySeparator("Table name ",tableNew.getNameSpace(),".", tableNew.getName(), "Already exists"))); + } + } + var schemaError = schemaValidator.apply(tableNew); /// mock + if (schemaError.status().equals(Status.ERROR)) { + throw new BadRequestException(ErrorCode.TABLE_VALIDATION_ERROR, responseMessage.getErrorMessage(ErrorCode.TABLE_VALIDATION_ERROR, schemaError.errorMessages())); + } + } + + private void validateField(Field fieldNew, Field fieldOld, List fieldNames) { + if (Objects.nonNull(fieldOld)) { + if (Boolean.FALSE.equals(fieldNew.getName().equals(fieldOld.getName()))) { + throw new BadRequestException(ErrorCode.BAD_REQUEST, responseMessage.getErrorMessage(ErrorCode.BAD_REQUEST, StringUtil.constructStringEmptySeparator("Field name change Not allowd from '", fieldOld.getName(), "' To '", fieldNew.getName(),"'"))); + } + if (Boolean.FALSE.equals(fieldNew.getDataType().equals(fieldOld.getDataType()))) { + throw new BadRequestException(ErrorCode.BAD_REQUEST, responseMessage.getErrorMessage(ErrorCode.BAD_REQUEST, StringUtil.constructStringEmptySeparator("Field Datatype change Not allowd in field '", fieldOld.getName(), "'"))); + } + } else { + if (fieldNames.contains(fieldNew.getName())) { + throw new AlreadyExistException(ErrorCode.ALREADY_EXISTS, responseMessage.getErrorMessage(ErrorCode.ALREADY_EXISTS, StringUtil.constructStringEmptySeparator("Field name '", fieldNew.getName(), "' Already exists"))); + } + } + var error = fieldValidator.apply(fieldNew); /// mock + if (error.status().equals(Status.ERROR)) { + throw new BadRequestException(ErrorCode.FIELD_VALIDATION_ERROR, responseMessage.getErrorMessage(ErrorCode.FIELD_VALIDATION_ERROR, error.errorMessages())); + } + } + + // public Long protobufSchemaPrRaise(TableDto modifiedTable, ClientRepo clientRepo, Workflow workflow) { + // var tableNew = 
modifiedTable.toDomain(modifiedTable); + // String message; + // if (Objects.nonNull(modifiedTable.id())) { + // var tableOld = tableService.getModel(modifiedTable.id()); + // message = compareAndGenerateMessageForExistingTable(tableNew, tableOld); + // } else { + // validateTable(tableNew, null); + // message = generateMessageForNewTable(tableNew); + // } + // var prCount = pullRequestService.getCountWithWorkflow(clientRepo.getId()); + // var fileName = StringUtil.constructStringEmptySeparator("V", Long.toString(prCount+1), ".0__schemalabs_pr.txt"); + // var githubPath = githubFilePath.replace("{repoFullName}", clientRepo.getFullName()) + // .replace("{branch}", clientRepo.getDefaultBranch()) + // .replace("{filePath}", fileName); + // var schemaFileAudit = SchemaFileAudit.builder().clientRepoId(clientRepo.getId()).path(githubPath).build(); + // Map fileContentMap = new HashMap<>(); + // fileContentMap.put(schemaFileAudit, message); + // System.out.println("path = "+githubPath+" fileName = "+fileName); + // System.out.println("Message = "+message); + // var pullRequest = gitHubService.commitAndRaisePr(fileContentMap, workflow); + // return pullRequest.getId(); + // } + + // public String generateMessageForNewTable(Table tableNew) { + // StringBuilder message = new StringBuilder("New Table Added: \n"); + // message.append("1. Table Name: ").append(tableNew.getName()).append("\n"); + // message.append("2. Table description: ").append(tableNew.getDescription()).append("\n"); + // message.append("3. Table owner: ").append(tableNew.getOwner()).append("\n"); + // int sNo = 4; + // if (Objects.nonNull(tableNew.getDomain())) + // message.append(sNo++).append(". Table domain: ").append(tableNew.getDomain()).append("\n"); + // if (Objects.nonNull(tableNew.getChannel())) + // message.append(sNo++).append(". Table channel: ").append(tableNew.getChannel()).append("\n"); + // if (Objects.nonNull(tableNew.getEmail())) + // message.append(sNo++).append(". 
Table E=mail: ").append(tableNew.getEmail()).append("\n"); + // if (Objects.nonNull(tableNew.getComplianceOwner())) + // message.append(sNo++).append(". Table Compliance owner: ").append(tableNew.getComplianceOwner()).append("\n"); + // if (Objects.nonNull(tableNew.getStatus())) + // message.append(sNo++).append(". Table status: ").append(tableNew.getStatus()).append("\n"); + // if (Objects.nonNull(tableNew.getQualityRuleBase())) + // message.append(sNo++).append(". Table Quality Rule Base: ").append(tableNew.getQualityRuleBase()).append("\n"); + // if (Objects.nonNull(tableNew.getQualityRuleSql())) + // message.append(sNo++).append(". Table Quality Rule Sql: ").append(tableNew.getQualityRuleSql()).append("\n"); + // if (Objects.nonNull(tableNew.getQualityRuleCel())) + // message.append(sNo++).append(". Table Quality Rule Cel: ").append(tableNew.getQualityRuleCel()).append("\n"); + // if (Objects.nonNull(tableNew.getSubscribers())) + // message.append(sNo++).append(". Table subscibers: ").append(Arrays.toString(tableNew.getSubscribers())).append("\n"); + // message.append("\nField Added: \n"); sNo = 1; + // for (Field field : tableNew.getFields()) { + // message.append("field ").append(sNo++).append(": \n"); + // message.append(" * name = ").append(field.getName()).append("\n"); + // message.append(" * description = ").append(field.getDescription()).append("\n"); + // message.append(" * is_pii = ").append(field.getIsPii()).append("\n"); + // message.append(" * is_classified = ").append(field.getIsClassified()).append("\n"); + // message.append(" * depricated = ").append(field.getDeprecated()).append("\n"); + // } + // return message.toString(); + // } + + // public String compareAndGenerateMessageForExistingTable(Table tableNew, Table tableOld) { + // validateTable(tableNew, tableOld); + // var schemaFileAudit = schemaFileAuditService.getByModelNodeId(tableOld.getId()); + // StringBuilder message = new StringBuilder("Changes in Table 
'").append(tableOld.getName()).append("' in file - ").append(schemaFileAudit.get(0).getPath()).append("\n"); + // int sNo = 1; + // if (tableService.compareString(tableNew.getDescription(), tableOld.getDescription())) + // message.append(sNo++).append(". Changes in Table description from - '").append(tableOld.getDescription()) + // .append("' To -'").append(tableNew.getDescription()).append("' \n"); + // if (tableService.compareString(tableNew.getOwner(), tableOld.getOwner())) + // message.append(sNo++).append(". Changes in Table metadata Owner from = '").append(tableOld.getOwner()) + // .append("' To -'").append(tableNew.getOwner()).append("' \n"); + // if (tableService.compareString(tableNew.getDomain(), tableOld.getDomain())) + // message.append(sNo++).append(". Changes in Table metadata Domain from = '").append(tableOld.getDomain()) + // .append("' To -'").append(tableNew.getDomain()).append("' \n"); + // if (tableService.compareString(tableNew.getComplianceOwner(), tableOld.getComplianceOwner())) + // message.append(sNo++).append(". Changes in Table metadata ComplianceOwner from = '").append(tableOld.getComplianceOwner()) + // .append("' To -'").append(tableNew.getComplianceOwner()).append("' \n"); + // if (tableService.compareString(tableNew.getChannel(), tableOld.getChannel())) + // message.append(sNo++).append(". Changes in Table metadata Channel from = '").append(tableOld.getChannel()) + // .append("' To -'").append(tableNew.getChannel()).append("' \n"); + // if (tableService.compareString(tableNew.getEmail(), tableOld.getEmail())) + // message.append(sNo++).append(". Changes in Table metadata E-mail from = '").append(tableOld.getEmail()) + // .append("' To -'").append(tableNew.getEmail()).append("' \n"); + // if (tableService.compareString(tableNew.getStatus(), tableOld.getStatus())) + // message.append(sNo++).append(". 
Changes in Table metadata status from = '").append(tableOld.getStatus()) + // .append("' To -'").append(tableNew.getStatus()).append("' \n"); + // if (tableService.compareString(tableNew.getQualityRuleBase(), tableOld.getQualityRuleBase())) + // message.append(sNo++).append(". Changes in Table metadata Quality Rule Base from = '").append(tableOld.getQualityRuleBase()) + // .append("' To -'").append(tableNew.getQualityRuleBase()).append("' \n"); + // if (tableService.compareString(tableNew.getQualityRuleSql(), tableOld.getQualityRuleSql())) + // message.append(sNo++).append(". Changes in Table metadata Quality Rule Sql from = '").append(tableOld.getQualityRuleSql()) + // .append("' To -'").append(tableNew.getQualityRuleSql()).append("' \n"); + // if (tableService.compareString(tableNew.getQualityRuleCel(), tableOld.getQualityRuleCel())) + // message.append(sNo++).append(". Changes in Table metadata Quality Rule Cel from = '").append(tableOld.getQualityRuleCel()) + // .append("' To -'").append(tableNew.getQualityRuleCel()).append("' \n"); + // if (!Arrays.deepEquals(tableNew.getSubscribers(), tableOld.getSubscribers())) + // message.append(sNo++).append(". Changes in Table metadata subscribers from = '").append(Arrays.toString(tableOld.getSubscribers())) + // .append("' To -'").append(Arrays.toString(tableNew.getSubscribers())).append("' \n"); + + // var oldFields = tableOld.getFields().stream().collect(Collectors.toMap(Field::getName, Function.identity())); + // for (Field fieldNew : tableNew.getFields()) { + // var fieldOld = oldFields.get(fieldNew.getName()); + // if (Objects.nonNull(fieldOld)) { + // if (tableService.compareField(fieldNew, fieldOld, RepoType.PROTOBUF)) { + // message.append(sNo++).append(". 
Changes in metadatas of field - '").append(fieldNew.getName()).append("': \n"); + // if (tableService.compareString(fieldNew.getDescription(), fieldOld.getDescription())) { + // message.append(" * metadata description from = '").append(Arrays.toString(tableOld.getSubscribers())) + // .append("'' To -'").append(Arrays.toString(tableNew.getSubscribers())).append("' \n"); + // } + // if (tableService.compareBoolean(fieldNew.getIsPii(), fieldOld.getIsPii())) { + // message.append(" * metadata is_pii from = '").append(fieldOld.getIsPii()) + // .append("'' To -'").append(fieldNew.getIsPii()).append("' \n"); + // } + // if (tableService.compareBoolean(fieldNew.getIsClassified(), fieldOld.getIsClassified())) { + // message.append(" * metadata is_classified from = '").append(fieldOld.getIsClassified()) + // .append("'' To -'").append(fieldNew.getIsClassified()).append("' \n"); + // } + // if (tableService.compareBoolean(fieldNew.getDeprecated(), fieldOld.getDeprecated())) { + // message.append(" * metadata Deprecated from = '").append(fieldOld.getDeprecated()) + // .append("'' To -'").append(fieldNew.getDeprecated()).append("' \n"); + // } + // } + // } else { + // message.append(sNo++).append(". 
New Field Added: \n"); + // message.append(" * name = ").append(fieldNew.getName()).append("\n"); + // message.append(" * description = ").append(fieldNew.getDescription()).append("\n"); + // message.append(" * is_pii = ").append(fieldNew.getIsPii()).append("\n"); + // message.append(" * is_classified = ").append(fieldNew.getIsClassified()).append("\n"); + // message.append(" * depricated = ").append(fieldNew.getDeprecated()).append("\n"); + // } + // } + // return message.toString(); + // } +} diff --git a/connect/src/main/java/com/opsbeach/connect/metrics/controller/SlaController.java b/connect/src/main/java/com/opsbeach/connect/metrics/controller/SlaController.java new file mode 100644 index 0000000..6d9ceb5 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/metrics/controller/SlaController.java @@ -0,0 +1,52 @@ +package com.opsbeach.connect.metrics.controller; + +import java.util.List; + +import javax.validation.Valid; + +import org.springframework.transaction.annotation.Transactional; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.PutMapping; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; + +import com.opsbeach.connect.metrics.dto.SlaDto; +import com.opsbeach.connect.metrics.service.SlaService; +import com.opsbeach.sharedlib.response.SuccessResponse; + +import lombok.RequiredArgsConstructor; + +@RestController +@RequestMapping("/v1/sla") +@RequiredArgsConstructor +public class SlaController { + + private final SlaService slaService; + + @Transactional + @PostMapping + public SuccessResponse add(@RequestBody @Valid SlaDto slaDto) { + return SuccessResponse.statusCreated(slaService.add(slaDto)); + } + + @Transactional + 
@GetMapping("/{id}") + public SuccessResponse get(@PathVariable Long id) { + return SuccessResponse.statusOk(slaService.get(id)); + } + + @Transactional + @GetMapping + public SuccessResponse> getAll() { + return SuccessResponse.statusOk(slaService.getAll()); + } + + @Transactional + @PutMapping + public SuccessResponse update(@RequestBody @Valid SlaDto slaDto) { + return SuccessResponse.statusOk(slaService.update(slaDto)); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/metrics/dto/SlaDto.java b/connect/src/main/java/com/opsbeach/connect/metrics/dto/SlaDto.java new file mode 100644 index 0000000..f3286cb --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/metrics/dto/SlaDto.java @@ -0,0 +1,30 @@ +package com.opsbeach.connect.metrics.dto; + +import com.fasterxml.jackson.annotation.JsonProperty; +import com.opsbeach.connect.core.BaseDto; +import com.opsbeach.connect.core.enums.ServiceType; +import com.opsbeach.connect.metrics.entity.Sla; + +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.experimental.SuperBuilder; +@Getter +@SuperBuilder +@AllArgsConstructor +@NoArgsConstructor +public class SlaDto extends BaseDto { + + private ServiceType type; + + @JsonProperty("sla_time") + private long slaTime; // should save in seconds + + public Sla toDomin(SlaDto slaDto) { + return Sla.builder().id(slaDto.getId()) + .clientId(slaDto.getClientId()) + .type(slaDto.getType()) + .slaTime(slaDto.getSlaTime()) + .build(); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/metrics/entity/Sla.java b/connect/src/main/java/com/opsbeach/connect/metrics/entity/Sla.java new file mode 100644 index 0000000..20545a6 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/metrics/entity/Sla.java @@ -0,0 +1,38 @@ +package com.opsbeach.connect.metrics.entity; + +import com.opsbeach.connect.core.BaseModel; +import com.opsbeach.connect.core.enums.ServiceType; +import 
com.opsbeach.connect.metrics.dto.SlaDto; + +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.EnumType; +import jakarta.persistence.Enumerated; +import jakarta.persistence.Table; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.experimental.SuperBuilder; + +@Table +@Entity +@Getter +@SuperBuilder +@AllArgsConstructor +@NoArgsConstructor +public class Sla extends BaseModel { + + @Enumerated(EnumType.STRING) + private ServiceType type; + + @Column(name = "sla_time") + private long slaTime; // time should be in seconds. + + public SlaDto toDto(Sla sla) { + return SlaDto.builder().id(sla.getId()) + .clientId(sla.getClientId()) + .type(sla.getType()) + .slaTime(sla.getSlaTime()) + .build(); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/metrics/repository/SlaRepository.java b/connect/src/main/java/com/opsbeach/connect/metrics/repository/SlaRepository.java new file mode 100644 index 0000000..7dd18f6 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/metrics/repository/SlaRepository.java @@ -0,0 +1,8 @@ +package com.opsbeach.connect.metrics.repository; + +import com.opsbeach.connect.core.BaseRepository; +import com.opsbeach.connect.metrics.entity.Sla; + +public interface SlaRepository extends BaseRepository { + +} diff --git a/connect/src/main/java/com/opsbeach/connect/metrics/service/SlaService.java b/connect/src/main/java/com/opsbeach/connect/metrics/service/SlaService.java new file mode 100644 index 0000000..7ff174b --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/metrics/service/SlaService.java @@ -0,0 +1,77 @@ +package com.opsbeach.connect.metrics.service; + +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; + +import org.springframework.stereotype.Service; +import org.springframework.util.ObjectUtils; + +import com.opsbeach.connect.core.enums.ServiceType; +import 
com.opsbeach.connect.core.specification.IdSpecifications; +import com.opsbeach.connect.core.utils.Constants; +import com.opsbeach.connect.metrics.dto.SlaDto; +import com.opsbeach.connect.metrics.entity.Sla; +import com.opsbeach.connect.metrics.repository.SlaRepository; +import com.opsbeach.sharedlib.exception.AlreadyExistException; +import com.opsbeach.sharedlib.exception.ErrorCode; +import com.opsbeach.sharedlib.exception.RecordNotFoundException; +import com.opsbeach.sharedlib.response.ResponseMessage; +import com.opsbeach.sharedlib.response.SuccessCode; + +import lombok.RequiredArgsConstructor; + +@Service +@RequiredArgsConstructor +public class SlaService { + + private final SlaRepository slaRepository; + + private final ResponseMessage responseMessage; + + private final IdSpecifications slaSpecifications; + + public SlaDto add(SlaDto slaDto) { + if (getModelByType(slaDto.getType()).isPresent()) { + throw new AlreadyExistException(ErrorCode.ALREADY_EXISTS, responseMessage.getErrorMessage(ErrorCode.ALREADY_EXISTS, Constants.SLA)); + } + var sla = slaRepository.save(slaDto.toDomin(slaDto)); + return sla.toDto(sla); + } + + public SlaDto get(Long id) { + var sla = slaRepository.findById(id).orElseThrow(() -> new RecordNotFoundException(ErrorCode.RECORD_NOT_FOUND_ID, responseMessage.getErrorMessage(ErrorCode.RECORD_NOT_FOUND_ID, id.toString(), Constants.SLA))); + return sla.toDto(sla); + } + + public SlaDto getByType(ServiceType serviceType) { + var sla = getModelByType(serviceType); + if (sla.isEmpty()) { + throw new RecordNotFoundException(ErrorCode.RECORD_NOT_FOUND, responseMessage.getErrorMessage(ErrorCode.RECORD_NOT_FOUND, Constants.SLA)); + } + return sla.get().toDto(sla.get()); + } + + public Optional getModelByType(ServiceType serviceType) { + return slaRepository.findOne(slaSpecifications.findBySlaType(serviceType)); + } + + public List getAll() { + var slas = slaRepository.findAll(); + return !ObjectUtils.isEmpty(slas) ? 
slas.stream().map(slas.get(0)::toDto).collect(Collectors.toList()) : List.of(); + } + + public SlaDto update(SlaDto slaDto) { + get(slaDto.getId()); + var sla = slaRepository.save(slaDto.toDomin(slaDto)); + return sla.toDto(sla); + } + + public String delete(Long id) { + var slaDto = get(id); + var sla = slaDto.toDomin(slaDto); + sla.setIsDeleted(Boolean.TRUE); + slaRepository.save(sla); + return responseMessage.getSuccessMessage(SuccessCode.DELETED, Constants.SLA); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/scheduler/SchedulerTaskService.java b/connect/src/main/java/com/opsbeach/connect/scheduler/SchedulerTaskService.java new file mode 100644 index 0000000..c00150c --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/scheduler/SchedulerTaskService.java @@ -0,0 +1,69 @@ +package com.opsbeach.connect.scheduler; + +import com.opsbeach.connect.task.entity.Task; +import com.opsbeach.sharedlib.exception.ErrorCode; +import com.opsbeach.sharedlib.exception.InvalidDataException; +import com.opsbeach.sharedlib.response.ResponseMessage; +import com.opsbeach.connect.core.enums.ServiceType; +import com.opsbeach.connect.core.enums.TaskType; +import com.opsbeach.connect.github.service.GitHubService; +import lombok.RequiredArgsConstructor; +import org.springframework.scheduling.TaskScheduler; +import org.springframework.scheduling.Trigger; +import org.springframework.stereotype.Service; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.ScheduledFuture; + +@Service +@RequiredArgsConstructor +public class SchedulerTaskService { + + // Task Scheduler + private final TaskScheduler scheduler; + + private final ResponseMessage responseMessage; + + private final GitHubService gitHubService; + + // A map for keeping scheduled tasks + private final Map> jobsMap = new HashMap<>(); + + // Schedule Task to be executed on time period + public void 
addTaskToScheduler(Task task, Trigger runningDate) { + ScheduledFuture scheduledTask = scheduler.schedule(() -> + sendRequest(task.getServiceType(), task.getTaskType(), task.getId()), runningDate); + jobsMap.put(task.getId(), scheduledTask); + } + + public void sendRequest(ServiceType serviceType, TaskType taskType, Long taskId) { + switch (serviceType) { + case GITHUB: sendRequestGithub(taskType, taskId); break; + default: throw new InvalidDataException(ErrorCode.INVALID_SERVICE_TYPE, responseMessage.getErrorMessage(ErrorCode.INVALID_SERVICE_TYPE, serviceType.name())); + } + } + + private void sendRequestGithub(TaskType taskType, Long taskId) { + switch (taskType) { + case RENEWAL_ACCESS_TOKEN : gitHubService.generateNewToken(taskId); break; + default: throw new InvalidDataException(ErrorCode.INVALID_TASK_TYPE, responseMessage.getErrorMessage(ErrorCode.INVALID_TASK_TYPE, taskType.name())); + } + } + + public List getTaskIds() { + return new ArrayList<>(jobsMap.keySet()); + } + + // Remove scheduled task + public void removeTaskFromScheduler(Long id) { + ScheduledFuture scheduledTask = jobsMap.get(id); + if (Objects.nonNull(scheduledTask)) { + scheduledTask.cancel(true); + jobsMap.remove(id); + } + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/scheduler/SyncScheduler.java b/connect/src/main/java/com/opsbeach/connect/scheduler/SyncScheduler.java new file mode 100644 index 0000000..549715b --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/scheduler/SyncScheduler.java @@ -0,0 +1,40 @@ +package com.opsbeach.connect.scheduler; + + +import com.opsbeach.connect.task.entity.Task; +import com.opsbeach.connect.task.service.TaskService; +import com.opsbeach.connect.core.utils.TaskUtils; + +import lombok.RequiredArgsConstructor; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.scheduling.annotation.SchedulingConfigurer; +import 
org.springframework.scheduling.config.ScheduledTaskRegistrar; + +import java.util.List; + +@RequiredArgsConstructor +public class SyncScheduler implements SchedulingConfigurer { + @Autowired + private SchedulerTaskService schedulerTaskService; + + @Autowired + private TaskService taskService; + + @Value("${scheduler.enabled:false}") + private boolean schedulerEnabled; + + @Override + public void configureTasks(ScheduledTaskRegistrar taskRegistrar) { + if (schedulerEnabled) { + List tasks = getTasks(); + tasks.forEach(task -> schedulerTaskService.addTaskToScheduler(task, + triggerContext -> TaskUtils.findNextExecutionTime(triggerContext, task).toInstant())); + } + } + + private List getTasks() { + return taskService.getAllForScheduler(); + } + +} diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/controller/DomainNodeController.java b/connect/src/main/java/com/opsbeach/connect/schemata/controller/DomainNodeController.java new file mode 100644 index 0000000..c08dac0 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/controller/DomainNodeController.java @@ -0,0 +1,39 @@ +package com.opsbeach.connect.schemata.controller; + +import java.util.List; + +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; + +import com.opsbeach.connect.schemata.entity.DomainNode; +import com.opsbeach.connect.schemata.service.DomainNodeService; +import com.opsbeach.sharedlib.response.SuccessResponse; + +import lombok.RequiredArgsConstructor; + +@RestController +@RequiredArgsConstructor +@RequestMapping("/v1/schema/domain") +public class DomainNodeController { + + private final DomainNodeService domainNodeService; + + @PostMapping("/org/{id}") + 
public SuccessResponse add(@RequestBody DomainNode domain, @PathVariable("id") Long orgId) { + return SuccessResponse.statusCreated(domainNodeService.add(domain, orgId)); + } + + @GetMapping("/{id}") + public SuccessResponse get(@PathVariable("id") Long id) { + return SuccessResponse.statusOk(domainNodeService.get(id)); + } + + @GetMapping("/org/{id}") + public SuccessResponse> getAll(@PathVariable("id") Long id) { + return SuccessResponse.statusOk(domainNodeService.getAll(id)); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/controller/OrganizationController.java b/connect/src/main/java/com/opsbeach/connect/schemata/controller/OrganizationController.java new file mode 100644 index 0000000..ec49e26 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/controller/OrganizationController.java @@ -0,0 +1,44 @@ +package com.opsbeach.connect.schemata.controller; + +import java.util.List; +import java.util.Objects; + +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +import com.opsbeach.connect.schemata.entity.Organization; +import com.opsbeach.connect.schemata.service.OrganizationService; +import com.opsbeach.sharedlib.response.SuccessResponse; +import com.opsbeach.sharedlib.utils.Constants; + +import jakarta.servlet.http.HttpServletRequest; +import lombok.RequiredArgsConstructor; + +@RestController +@RequestMapping("/v1/schema/organization") +@RequiredArgsConstructor +public class OrganizationController { + + public final OrganizationService organizationService; + + @PostMapping + public SuccessResponse add(@RequestParam("clientName") String clientName, HttpServletRequest httpServletRequest) { + var clientId = 
httpServletRequest.getHeader(Constants.CLIENT_ID_HEADER); + if (Objects.isNull(clientId)) return SuccessResponse.statusCreated("SUCCESS"); + return SuccessResponse.statusCreated(organizationService.add(Long.parseLong(clientId), clientName)); + } + + @GetMapping("/{id}") + public SuccessResponse get(@PathVariable("id") Long id) { + return SuccessResponse.statusOk(organizationService.get(id)); + } + + @GetMapping + public SuccessResponse> getAll() { + return SuccessResponse.statusOk(organizationService.getAll()); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/controller/TableController.java b/connect/src/main/java/com/opsbeach/connect/schemata/controller/TableController.java new file mode 100644 index 0000000..e456665 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/controller/TableController.java @@ -0,0 +1,83 @@ +package com.opsbeach.connect.schemata.controller; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +import org.springframework.data.domain.Pageable; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; +import org.springframework.web.multipart.MultipartFile; + +import com.fasterxml.jackson.databind.JsonNode; +import com.opsbeach.connect.github.entity.ClientRepo.RepoType; +import com.opsbeach.connect.schemata.dto.TableDto; +import com.opsbeach.connect.schemata.dto.TableFilterOptionsDto; +import com.opsbeach.connect.schemata.dto.SchemaVisualizerDto; +import com.opsbeach.connect.schemata.service.TableService; +import com.opsbeach.sharedlib.response.SuccessResponse; + +import lombok.RequiredArgsConstructor; + +@RestController +@RequiredArgsConstructor 
+@RequestMapping("/v1/table") +public class TableController { + + private final TableService tableService; + + @GetMapping("/graph/{id}") + public SuccessResponse getSchemaVisualizer(@PathVariable("id") Long id) { + return SuccessResponse.statusOk(tableService.getSchemaVisualizer(id)); + } + + @GetMapping("/graph") + public SuccessResponse getSchemaVisualizerForAll() { + return SuccessResponse.statusOk(tableService.getSchemaVisualizerForAll()); + } + + @GetMapping() + public SuccessResponse getAll(Pageable pageable, @RequestParam(name = "owner", required = false) List owners, + @RequestParam(name = "domain", required = false) List domains, + @RequestParam(name = "subscribers", required = false) List subscribers) { + return SuccessResponse.statusOk(tableService.getAll(owners, domains, subscribers, pageable)); + } + + @GetMapping("/{id}") + public SuccessResponse getTable(@PathVariable("id") Long id) { + return SuccessResponse.statusOk(tableService.get(id)); + } + + // @PatchMapping("/score/{rootNodeId}") + // public SuccessResponse getScore(@PathVariable("rootNodeId") Long id, @RequestBody SchemaVisualizerDto uiDto) { + // return SuccessResponse.statusOk(tableService.computeScores(uiDto, id)); + // } + + // @PatchMapping("/validate/{rootNodeId}") + // public SuccessResponse validateSchema(@PathVariable("rootNodeId") Long id, @RequestBody SchemaVisualizerDto uiDto) { + // return SuccessResponse.statusOk(tableService.validateSchema(uiDto, id)); + // } + + @GetMapping("/field/data-type") + public SuccessResponse fieldDataTypes(@RequestParam(name = "repoType", required = false) RepoType repoType, + @RequestParam(name = "tableId", required = false) Long tableId) { + if (Objects.nonNull(tableId)) { + return SuccessResponse.statusOk(tableService.getFieldDataTypes(tableId)); + } + return SuccessResponse.statusOk(tableService.fieldDataTypes(repoType)); + } + + @GetMapping("/filter-options") + public SuccessResponse getTableFilterOptions() { + return 
SuccessResponse.statusOk(tableService.getTableFilterOptions()); + } + + @PostMapping("/upload-csv") + public SuccessResponse uploadCsvFile(@RequestParam("files") List multipartFiles) throws IOException { + return SuccessResponse.statusOk(tableService.uploadCsvToGit(multipartFiles)); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/dto/FieldDto.java b/connect/src/main/java/com/opsbeach/connect/schemata/dto/FieldDto.java new file mode 100644 index 0000000..2538437 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/dto/FieldDto.java @@ -0,0 +1,100 @@ +package com.opsbeach.connect.schemata.dto; + +import java.util.List; + +import org.springframework.util.ObjectUtils; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.opsbeach.connect.schemata.entity.Field; + +import lombok.Builder; +import lombok.Getter; + +@Getter +@Builder +@JsonInclude(Include.NON_NULL) +public class FieldDto { + + private Long id; + + private int rowNumber; + + @Builder.Default + private Boolean isDeleted = Boolean.FALSE; + + @Builder.Default + private Boolean isUserChanged = Boolean.FALSE; + + private Long prId; + + private String name; + + private String schema; + + private String dataType; + + private Boolean isPrimitiveType; + + private String description; + + private String defaultValue; + + private Boolean isPii; + + private Boolean isClassified; + + private Boolean deprecated; + + private Boolean isPrimaryKey; + + private String[] symbols; // for enum values + + private String items; // for array field (ex: Array) + + private Integer size; // for fixed field + + private String values; // for Map field (ex: Map) The key for an Avro map must be a string + + // this field get added while items (or) values field contains ARRAY + private FieldDto arrayField; // for nested array field (ex: Array> or Map>) + + // this field get added while items (or) values field contains MAP + 
private FieldDto mapField; // for nested map field (ex: Map> or Array>) + + private List unionTypes; + // union field may have more than one filed (ex: [null, boolean, double, record]) these all in one type is union. + + private TableDto contain; + + // This Field is refered to field of another table. (i.e. may or may not be foreign key). + private FieldDto referenceField; + + public Field toDomin(FieldDto fieldDto) { + return Field.builder().id(fieldDto.getId()) + .rowNumber(fieldDto.getRowNumber()) + .isDeleted(fieldDto.getIsDeleted()) + .isUserChanged(fieldDto.getIsUserChanged()) + .prId(fieldDto.getPrId()) + .name(fieldDto.getName()) + .isPii(fieldDto.getIsPii()) + .schema(fieldDto.getSchema()) + .dataType(fieldDto.getDataType()) + .isPrimitiveType(fieldDto.isPrimitiveType) + .description(fieldDto.getDescription()) + .defaultValue(fieldDto.getDefaultValue()) + .isClassified(fieldDto.getIsClassified()) + .deprecated(fieldDto.getDeprecated()) + .isPrimaryKey(fieldDto.getIsPrimaryKey()) + .symbols(fieldDto.getSymbols()) + .items(fieldDto.getItems()) + .size(fieldDto.getSize()) + .values(fieldDto.getValues()) + .arrayField(ObjectUtils.isEmpty(fieldDto.getArrayField()) ? null : fieldDto.getArrayField().toDomin(fieldDto.getArrayField())) + .mapField(ObjectUtils.isEmpty(fieldDto.getMapField()) ? null : fieldDto.getMapField().toDomin(fieldDto.getMapField())) + .unionTypes(ObjectUtils.isEmpty(fieldDto.getUnionTypes()) ? List.of() : fieldDto.getUnionTypes().stream().map(fieldDto.getUnionTypes().get(0)::toDomin).toList()) + .contain(ObjectUtils.isEmpty(fieldDto.getContain()) ? null : fieldDto.getContain().toDomain(fieldDto.getContain())) + .referenceField(ObjectUtils.isEmpty(fieldDto.getReferenceField()) ? 
null : fieldDto.getReferenceField().toDomin(fieldDto.getReferenceField())) + .build(); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/dto/RedshiftDto.java b/connect/src/main/java/com/opsbeach/connect/schemata/dto/RedshiftDto.java new file mode 100644 index 0000000..479cf75 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/dto/RedshiftDto.java @@ -0,0 +1,109 @@ +package com.opsbeach.connect.schemata.dto; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude.Include; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Getter; +import lombok.NoArgsConstructor; + +@Getter +@Builder +@NoArgsConstructor +@AllArgsConstructor +@JsonInclude(Include.NON_NULL) +public class RedshiftDto { + private Object tableCatalog; + private Object tableSchema; + private Object tableName; + private Object columnName; + private Object ordinalPosition; + private Object columnDefault; + private Object isNullable; + private Object dataType; + private Object characterMaximumLength; + private Object characterOctetLength; + private Object numericPrecision; + private Object numericPrecisionRadix; + private Object numericScale; + private Object datetimePrecision; + private Object intervalLype; + private Object intervalPrecision; + private Object characterSetCatalog; + private Object characterSetSchema; + private Object characterSetName; + private Object collationCatalog; + private Object collationSchema; + private Object collationName; + private Object domainCatalog; + private Object domainSchema; + private Object domainName; + private Object udtCatalog; + private Object udtSchema; + private Object udtName; + private Object scopeCatalog; + private Object scopeSchema; + private Object scopeName; + private Object maximumCardinality; + private Object dtdIdentifier; + private Object isSelfReferencing; + private Object isIdentity; + private Object identityGeneration; + private 
Object identityStart; + private Object identityIncrement; + private Object identityMaximum; + private Object identityMinimum; + private Object identityCycle; + private Object isGenerated; + private Object generationExpression; + private Object isUpdatable; + + public RedshiftDto toDto(Object[] a) { + return RedshiftDto.builder().tableCatalog(a[0]) + .tableSchema(a[1]) + .tableName(a[2]) + .columnName(a[3]) + .ordinalPosition(a[4]) + .columnDefault(a[5]) + .isNullable(a[6]) + .dataType(a[7]) + .characterMaximumLength(a[8]) + .characterOctetLength(a[9]) + .numericPrecision(a[10]) + .numericPrecisionRadix(a[11]) + .numericScale(a[12]) + .datetimePrecision(a[13]) + .intervalLype(a[14]) + .intervalPrecision(a[15]) + .characterSetCatalog(a[16]) + .characterSetSchema(a[17]) + .characterSetName(a[18]) + .collationCatalog(a[19]) + .collationSchema(a[20]) + .collationName(a[21]) + .domainCatalog(a[22]) + .domainSchema(a[23]) + .domainName(a[24]) + .udtCatalog(a[25]) + .udtSchema(a[26]) + .udtName(a[27]) + .scopeCatalog(a[28]) + .scopeSchema(a[29]) + .scopeName(a[30]) + .maximumCardinality(a[31]) + .dtdIdentifier(a[32]) + .isSelfReferencing(a[33]) + .isIdentity(a[34]) + .identityGeneration(a[35]) + .identityStart(a[36]) + .identityIncrement(a[37]) + .identityMaximum(a[38]) + .identityMinimum(a[39]) + .identityCycle(a[40]) + .isGenerated(a[41]) + .generationExpression(a[42]) + .isUpdatable(a[43]) + .build(); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/dto/SchemaValidationDto.java b/connect/src/main/java/com/opsbeach/connect/schemata/dto/SchemaValidationDto.java new file mode 100644 index 0000000..caa2b22 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/dto/SchemaValidationDto.java @@ -0,0 +1,17 @@ +package com.opsbeach.connect.schemata.dto; + +import java.util.List; +import java.util.Map; + +import lombok.Builder; +import lombok.Getter; + +@Builder +@Getter +public class SchemaValidationDto { + + Boolean status; + List 
errorMessages; + String changes; + Map>> errorMap; +} diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/dto/SchemaVisualizerDto.java b/connect/src/main/java/com/opsbeach/connect/schemata/dto/SchemaVisualizerDto.java new file mode 100644 index 0000000..ae9ae23 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/dto/SchemaVisualizerDto.java @@ -0,0 +1,32 @@ +package com.opsbeach.connect.schemata.dto; + +import java.util.List; +import java.util.Map; + +import lombok.Builder; +import lombok.Getter; + +@Getter +@Builder +public class SchemaVisualizerDto { + + private List tables; + + private List fields; + + private List> links; + + // this enum is used for parsing purpose of uidto + public enum Purpose { + VALIDATE(0), SUBMIT(1); + + private final int key; + + Purpose(int key) { + this.key = key; + } + public int getKey() { + return this.key; + } + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/dto/TableCsvDto.java b/connect/src/main/java/com/opsbeach/connect/schemata/dto/TableCsvDto.java new file mode 100644 index 0000000..c6eb7aa --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/dto/TableCsvDto.java @@ -0,0 +1,28 @@ +package com.opsbeach.connect.schemata.dto; + +import com.fasterxml.jackson.databind.PropertyNamingStrategies; +import com.fasterxml.jackson.databind.annotation.JsonNaming; + +import lombok.Getter; +import lombok.NoArgsConstructor; + +@Getter +@NoArgsConstructor +@JsonNaming(PropertyNamingStrategies.SnakeCaseStrategy.class) +public class TableCsvDto { + private String tableName; + private String tableNamespace; + private String tableDescription; + private String owner; + private String domain; + private String columnName; + private String dataType; + private String columnDescription; + private Boolean isPii; + private Boolean isClassified; + + public static String[] getCsvHeaders() { + String[] arr = {"table_namespace", "table_name", "table_description", "owner", "domain", 
"column_name", "data_type", "column_description", "is_pii", "is_classified"}; + return arr; + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/dto/TableDto.java b/connect/src/main/java/com/opsbeach/connect/schemata/dto/TableDto.java new file mode 100644 index 0000000..85d8d41 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/dto/TableDto.java @@ -0,0 +1,123 @@ +package com.opsbeach.connect.schemata.dto; + +import java.util.List; +import java.util.Objects; + +import org.springframework.util.ObjectUtils; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.opsbeach.connect.schemata.entity.Table; +import com.opsbeach.connect.schemata.enums.EventType; +import com.opsbeach.connect.schemata.enums.ModelType; +import com.opsbeach.connect.schemata.enums.SchemaType; + +import lombok.Builder; +import lombok.Getter; +import lombok.Setter; + +@Getter +@Builder +@JsonInclude(Include.NON_NULL) +public class TableDto { + + @Setter + private Long id; + + private String name; + + private Long clientId; + + @Setter + private Long clientRepoId; + + @Setter + @Builder.Default + private Boolean isDeleted = Boolean.FALSE; + + @Setter + @Builder.Default + private Boolean isUserChanged = Boolean.FALSE; + + private Long prId; + + private String nameSpace; + + private String type; + + private String description; //editable + + private String owner; + + private String domain; //editable + + private String email; //editable + + private String complianceOwner; //editable + + private String channel; //editable + + private String[] subscribers; //editable + + private String qualityRuleBase; + + private String qualityRuleSql; + + private String qualityRuleCel; + + @Builder.Default + private String status = "Active"; //editable + + @Builder.Default + private SchemaType schemaType = SchemaType.UNKNOWN; + + @JsonIgnore + 
@Builder.Default + private EventType eventType = EventType.NONE; + + @JsonIgnore + @Builder.Default + private ModelType modelType = ModelType.NONE; + + private String[] requiredFields; // Json Schema Prop + + private List fields; + + private TableDto modifiedTable; + + @Setter + private List dataTypes; /// this field to send datatypes for this table. + + @Setter + @Builder.Default + private Boolean isFieldChanged = Boolean.FALSE; + + public Table toDomain(TableDto tableDto) { + return Table.builder().id(tableDto.getId()) + .name(tableDto.getName()) + .isDeleted(tableDto.getIsDeleted()) + .isUserChanged(tableDto.getIsUserChanged()) + .prId(tableDto.getPrId()) + .nameSpace(tableDto.getNameSpace()) + .type(tableDto.getType()) + .description(tableDto.getDescription()) + .channel(tableDto.getChannel()) + .subscribers(tableDto.getSubscribers()) + .owner(tableDto.getOwner()) + .domain(tableDto.getDomain()) + .status(tableDto.getStatus()) + .schemaType(tableDto.getSchemaType()) + .eventType(tableDto.getEventType()) + .modelType(tableDto.getModelType()) + .qualityRuleBase(tableDto.getQualityRuleBase()) + .qualityRuleSql(tableDto.getQualityRuleSql()) + .qualityRuleCel(tableDto.getQualityRuleCel()) + .email(tableDto.getEmail()) + .complianceOwner(tableDto.getComplianceOwner()) + .requiredFields(tableDto.getRequiredFields()) + .modifiedTable(Objects.nonNull(tableDto.getModifiedTable()) ? toDomain(tableDto.getModifiedTable()) : null) + .fields(ObjectUtils.isEmpty(tableDto.getFields()) ? 
List.of() : tableDto.getFields().stream().map(tableDto.getFields().get(0)::toDomin).toList()) + .build(); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/dto/TableFilterOptionsDto.java b/connect/src/main/java/com/opsbeach/connect/schemata/dto/TableFilterOptionsDto.java new file mode 100644 index 0000000..e0bcbfb --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/dto/TableFilterOptionsDto.java @@ -0,0 +1,8 @@ +package com.opsbeach.connect.schemata.dto; + +import java.util.List; +import java.util.Set; + +public record TableFilterOptionsDto(List owners, List domains, Set subscribers) { + +} diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/entity/DomainNode.java b/connect/src/main/java/com/opsbeach/connect/schemata/entity/DomainNode.java new file mode 100644 index 0000000..b3e7510 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/entity/DomainNode.java @@ -0,0 +1,38 @@ +package com.opsbeach.connect.schemata.entity; + +import java.util.List; + +import org.springframework.data.neo4j.core.schema.GeneratedValue; +import org.springframework.data.neo4j.core.schema.Id; +import org.springframework.data.neo4j.core.schema.Node; +import org.springframework.data.neo4j.core.schema.Relationship; + +import com.fasterxml.jackson.annotation.JsonIgnore; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; + +@Node +@Getter +@Builder +@AllArgsConstructor +@NoArgsConstructor +public class DomainNode { + + @Id @GeneratedValue + private Long id; + + private Long clientId; + + private String name; + + private Long clientRepoId; + + @Setter + @JsonIgnore + @Relationship(value = "CONTAIN") + private List
tables; +} diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/entity/Field.java b/connect/src/main/java/com/opsbeach/connect/schemata/entity/Field.java new file mode 100644 index 0000000..ffd9a12 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/entity/Field.java @@ -0,0 +1,179 @@ +package com.opsbeach.connect.schemata.entity; + +import java.util.List; + +import org.springframework.data.neo4j.core.schema.GeneratedValue; +import org.springframework.data.neo4j.core.schema.Id; +import org.springframework.data.neo4j.core.schema.Node; +import org.springframework.data.neo4j.core.schema.Relationship; +import org.springframework.util.ObjectUtils; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.opsbeach.connect.schemata.dto.FieldDto; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; + +@Node +@Getter +@Setter +@Builder +@NoArgsConstructor +@AllArgsConstructor +@JsonInclude(Include.NON_NULL) +public class Field { + + @Id @GeneratedValue + private Long id; + + private int rowNumber; + + private String jsonSchemaRefId; + + @Builder.Default + private Boolean isDeleted = Boolean.FALSE; + + @Builder.Default + private Boolean isUserChanged = Boolean.FALSE; + + private Long prId; // primary key of pull_request table postgres. 
+ + private String name; + + private String schema; + + private String dataType; + + private Boolean isPrimitiveType; + + private String description; //editable + + private String defaultValue; + + @Builder.Default + private Boolean isPii = Boolean.FALSE; //editable + + @Builder.Default + private Boolean isClassified = Boolean.FALSE; //editable + + @Builder.Default + private Boolean deprecated = Boolean.FALSE; + + private Boolean isPrimaryKey; + + private String enumFilePath; + + private String enumName; + + private String enumPackage; + + private String[] symbols; // for enum values + + private String items; // for array field (ex: Array) + + private Integer size; // for fixed field + + private String values; // for Map field (ex: Map) The key for an Avro map must be a string + + @Setter + @Relationship(value = "ARRAY_FIELD") // this field get added while items (or) values field contains ARRAY + private Field arrayField; // for nested array field (ex: Array> or Map>) + + @Setter + @Relationship(value = "MAP_FIELD") // this field get added while items (or) values field contains MAP + private Field mapField; // for nested map field (ex: Map> or Array>) + + @Setter + @Relationship(value = "UNION_TYPE") + private List unionTypes; + // union field may have more than one filed (ex: [null, boolean, double, record]) these all in one type is union. + + @Setter + @Relationship(value = "CONTAIN") + private Table contain; + + @Setter + @Relationship(value = "REFERENCE_FIELD") // This Field is refered to field of another table. (i.e. may or may not be foreign key). 
+ private Field referenceField; + + public FieldDto toDto(Field field) { + return FieldDto.builder().id(field.getId()) + .rowNumber(field.getRowNumber()) + .isDeleted(field.getIsDeleted()) + .isUserChanged(field.getIsUserChanged()) + .prId(field.getPrId()) + .name(field.getName()) + .isPii(field.getIsPii()) + .schema(field.getSchema()) + .dataType(field.getDataType()) + .isPrimitiveType(field.isPrimitiveType) + .description(field.getDescription()) + .defaultValue(field.getDefaultValue()) + .isClassified(field.getIsClassified()) + .deprecated(field.getDeprecated()) + .isPrimaryKey(field.getIsPrimaryKey()) + .symbols(field.getSymbols()) + .items(field.getItems()) + .size(field.getSize()) + .values(field.getValues()) + .arrayField(ObjectUtils.isEmpty(field.getArrayField()) ? null : field.getArrayField().toDto(field.getArrayField())) + .mapField(ObjectUtils.isEmpty(field.getMapField()) ? null : field.getMapField().toDto(field.getMapField())) + .unionTypes(ObjectUtils.isEmpty(field.getUnionTypes()) ? List.of() : field.getUnionTypes().stream().map(field.getUnionTypes().get(0)::toDto).toList()) + .contain(ObjectUtils.isEmpty(field.getContain()) ? null : field.getContain().toDto(field.getContain())) + .referenceField(ObjectUtils.isEmpty(field.getReferenceField()) ? 
null : field.getReferenceField().toDto(field.getReferenceField())) + .build(); + } + + public static boolean isPrimitiveType(String dataType) { + var primitives = List.of("string", "bytes", "int", "long", "float", "double", "boolean", "null", "number", "boolean", "integer"); + return primitives.contains(dataType); + } + public static class Prop { + + public static final String DATA_TYPE = "dataType"; + + // AVRO + public static final String DESC = "desc"; + public static final String DESCRIPTION = "description"; + public static final String COMMENT = "comment"; + public static final String SEE_ALSO = "see_also"; + public static final String REFERENCE = "reference"; + public static final String IS_PII = "is_pii"; + public static final String DEPRECATED = "deprecated"; + public static final String IS_CLASSIFIED = "is_classified"; + public static final String IS_PRIMARY_KEY = "is_primary_key"; + public static final String PRODUCT_TYPE = "product_type"; + public static final String LINK = "link"; + public static final String DEPENDS = "depends"; + public static final String MODEL = "model"; + public static final String COLUMN = "column"; + public static final String DEFAULT = "default"; + public static final String CLASSIFICATION_LEVEL = "classification_level"; + + // JSON + public static final String TYPE = "type"; + public static final String PROPERTIES = "properties"; + public static final String OBJECT = "object"; + public static final String ITEMS = "items"; + public static final String ARRAY = "array"; + public static final String ENUM = "enum"; + public static final String STRING = "string"; + public static final String INTEGER = "integer"; + public static final String MIN_ITEMS = "minItems"; + public static final String MIN_LENGTH = "minLength"; + public static final String MAX_LENGTH = "maxLength"; + public static final String PATTERN = "pattern"; + public static final String MINIMUM = "minimum"; + public static final String MAXIMUM = "maximum"; + public 
static final String ANY_OF = "anyOf"; + public static final String ONE_OF = "oneOf"; + public static final String $REF = "$ref"; + public static final String MULTIPLE_OF = "multipleOf"; + public static final String EXCLUSIVE_MINIMUM = "exclusiveMinimum"; + public static final String EXCLUSIVE_MAXIMUM = "exclusiveMaximum"; + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/entity/Organization.java b/connect/src/main/java/com/opsbeach/connect/schemata/entity/Organization.java new file mode 100644 index 0000000..a7e7b7d --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/entity/Organization.java @@ -0,0 +1,33 @@ +package com.opsbeach.connect.schemata.entity; + +import java.util.List; + +import org.springframework.data.neo4j.core.schema.GeneratedValue; +import org.springframework.data.neo4j.core.schema.Id; +import org.springframework.data.neo4j.core.schema.Node; +import org.springframework.data.neo4j.core.schema.Relationship; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; + +@Node +@Getter +@Builder +@AllArgsConstructor +@NoArgsConstructor +public class Organization { + + @Id @GeneratedValue + private Long id; + + private String name; + + private Long clinetId; + + @Setter + @Relationship(value = "HAS") + private List domains; +} diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/entity/Table.java b/connect/src/main/java/com/opsbeach/connect/schemata/entity/Table.java new file mode 100644 index 0000000..f0c2292 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/entity/Table.java @@ -0,0 +1,167 @@ +package com.opsbeach.connect.schemata.entity; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.opsbeach.connect.schemata.dto.TableDto; +import com.opsbeach.connect.schemata.enums.EventType; +import 
com.opsbeach.connect.schemata.enums.ModelType; +import com.opsbeach.connect.schemata.enums.SchemaType; +import jakarta.persistence.EnumType; +import jakarta.persistence.Enumerated; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; +import org.springframework.data.neo4j.core.schema.GeneratedValue; +import org.springframework.data.neo4j.core.schema.Id; +import org.springframework.data.neo4j.core.schema.Node; +import org.springframework.data.neo4j.core.schema.Relationship; +import org.springframework.util.ObjectUtils; + +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; + +@Node +@Getter +@Setter +@Builder +@NoArgsConstructor +@AllArgsConstructor +@JsonInclude(Include.NON_NULL) +public class Table { + + @Id + @GeneratedValue + private Long id; + + private Long clientId; + + private String jsonSchemaId; + + @Builder.Default + private Boolean isDeleted = Boolean.FALSE; + + @Builder.Default + private Boolean isUserChanged = Boolean.FALSE; + + private Long prId; // primary key of pull_request table postgres. 
+ + private String name; + + private String nameSpace; + + private String type; + + private String description; //editable + + private String owner; + + private String domain; //editable + + private String email; //editable + + private String complianceOwner; //editable + + private String channel; //editable + + private String[] subscribers; //editable + + private String qualityRuleBase; + + private String qualityRuleSql; + + private String qualityRuleCel; + + @Builder.Default + private String status = "Active"; //editable + + @Builder.Default + @Enumerated(EnumType.STRING) + private SchemaType schemaType = SchemaType.UNKNOWN; + + @Builder.Default + @Enumerated(EnumType.STRING) + private EventType eventType = EventType.NONE; + + @Builder.Default + @Enumerated(EnumType.STRING) + private ModelType modelType = ModelType.NONE; + + private String[] requiredFields; // Json Schema Prop + + @Builder.Default + @Setter + @Relationship(value = "PROPERTIES") + private List fields = new ArrayList<>(); + + @Relationship(value = "MODIFIED_TO") + private Table modifiedTable; + + public TableDto toDto(Table table) { + return TableDto.builder().id(table.getId()) + .name(table.getName()) + .isDeleted(table.getIsDeleted()) + .isUserChanged(table.getIsUserChanged()) + .prId(table.getPrId()) + .nameSpace(table.getNameSpace()) + .type(table.getType()) + .description(table.getDescription()) + .channel(table.getChannel()) + .subscribers(table.getSubscribers()) + .owner(table.getOwner()) + .qualityRuleBase(table.getQualityRuleBase()) + .qualityRuleSql(table.getQualityRuleSql()) + .qualityRuleCel(table.getQualityRuleCel()) + .domain(table.getDomain()) + .status(table.getStatus()) + .schemaType(table.getSchemaType()) + .eventType(table.getEventType()) + .modelType(table.getModelType()) + .email(table.getEmail()) + .complianceOwner(table.getComplianceOwner()) + .requiredFields(table.getRequiredFields()) + .modifiedTable(Objects.nonNull(table.getModifiedTable()) ? 
toDto(table.getModifiedTable()) : null) + .fields(ObjectUtils.isEmpty(table.getFields()) ? List.of() : table.getFields().stream().map(table.getFields().get(0)::toDto).toList()) + .build(); + } + + public static final class Prop { + + // AVRO + public static final String DESC = "desc"; + public static final String DESCRIPTION = "description"; + public static final String COMMENT = "comment"; + public static final String SEE_ALSO = "see_also"; + public static final String REFERENCE = "reference"; + public static final String OWNER = "owner"; + public static final String DOMAIN = "domain"; + public static final String STATUS = "status"; + public static final String SCHEMA_TYPE = "schema_type"; + public static final String EVENT_TYPE = "event_type"; + public static final String MODEL_TYPE = "model_type"; + public static final String EMAIL = "email"; + public static final String TEAM_CHANNEL = "team_channel"; + public static final String ALERT_CHANNEL = "alert_channel"; + public static final String COMPLIANCE_OWNER = "compliance_owner"; + public static final String COMPLIANCE_CHANNEL = "compliance_channel"; + public static final String SUBSCRIBERS = "subscribers"; + public static final String CHANNEL = "channel"; + public static final String QUALITY_RULE_BASE = "quality_rule_base"; + public static final String QUALITY_RULE_SQL = "quality_rule_sql"; + public static final String QUALITY_RULE_CEL = "quality_rule_cel"; + + // JSON + public static final String PROPERTIES = "properties"; + public static final String JSON_SCHEMA_ID = "$id"; + public static final String DEFENITIONS = "definitions"; + public static final String TYPE = "type"; + public static final String TITLE = "title"; + public static final String $SCHEMA = "$schema"; + public static final String REQUIRED = "required"; + public static final String ADDITIONAL_PROPERTIES = "additionalProperties"; + public static final String MIN_PROPERTIES = "minProperties"; + public static final String MAX_PROPERTIES = 
"maxProperties"; + } +} diff --git a/src/main/java/org/schemata/domain/EventType.java b/connect/src/main/java/com/opsbeach/connect/schemata/enums/EventType.java similarity index 86% rename from src/main/java/org/schemata/domain/EventType.java rename to connect/src/main/java/com/opsbeach/connect/schemata/enums/EventType.java index f7dd352..28a302b 100644 --- a/src/main/java/org/schemata/domain/EventType.java +++ b/connect/src/main/java/com/opsbeach/connect/schemata/enums/EventType.java @@ -1,4 +1,4 @@ -package org.schemata.domain; +package com.opsbeach.connect.schemata.enums; import java.util.Arrays; diff --git a/src/main/java/org/schemata/domain/ModelType.java b/connect/src/main/java/com/opsbeach/connect/schemata/enums/ModelType.java similarity index 85% rename from src/main/java/org/schemata/domain/ModelType.java rename to connect/src/main/java/com/opsbeach/connect/schemata/enums/ModelType.java index f370edc..d3db0ae 100644 --- a/src/main/java/org/schemata/domain/ModelType.java +++ b/connect/src/main/java/com/opsbeach/connect/schemata/enums/ModelType.java @@ -1,4 +1,4 @@ -package org.schemata.domain; +package com.opsbeach.connect.schemata.enums; import java.util.Arrays; diff --git a/src/main/java/org/schemata/domain/SchemaType.java b/connect/src/main/java/com/opsbeach/connect/schemata/enums/SchemaType.java similarity index 86% rename from src/main/java/org/schemata/domain/SchemaType.java rename to connect/src/main/java/com/opsbeach/connect/schemata/enums/SchemaType.java index 7d1b949..882307c 100644 --- a/src/main/java/org/schemata/domain/SchemaType.java +++ b/connect/src/main/java/com/opsbeach/connect/schemata/enums/SchemaType.java @@ -1,4 +1,4 @@ -package org.schemata.domain; +package com.opsbeach.connect.schemata.enums; import java.util.Arrays; diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/graph/SchemaGraph.java b/connect/src/main/java/com/opsbeach/connect/schemata/graph/SchemaGraph.java new file mode 100644 index 0000000..303901c --- 
/dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/graph/SchemaGraph.java @@ -0,0 +1,244 @@ +package com.opsbeach.connect.schemata.graph; + +import java.math.BigDecimal; +import java.math.MathContext; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; + +import org.apache.avro.Schema; +import org.apache.commons.collections4.SetUtils; +import org.apache.commons.lang3.StringUtils; +import org.jgrapht.alg.scoring.PageRank; +import org.jgrapht.graph.DirectedWeightedMultigraph; +import org.jgrapht.util.SupplierUtil; + +import com.opsbeach.connect.schemata.entity.Field; +import com.opsbeach.connect.schemata.entity.Table; +import com.opsbeach.sharedlib.exception.SchemaNotFoundException; +import com.opsbeach.sharedlib.utils.StringUtil; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public final class SchemaGraph { + + private final DirectedWeightedMultigraph graph = + new DirectedWeightedMultigraph<>(SupplierUtil.createSupplier(Table.class), + SupplierUtil.createSupplier(WeightedSchemaEdge.class)); + + private final List
schemaList; + Map schemaMap; + private final PageRank pageRank; + + static final Map PRIMITIVES = new HashMap<>(); + + static { + PRIMITIVES.put("string", Schema.Type.STRING); + PRIMITIVES.put("bytes", Schema.Type.BYTES); + PRIMITIVES.put("int", Schema.Type.INT); + PRIMITIVES.put("long", Schema.Type.LONG); + PRIMITIVES.put("float", Schema.Type.FLOAT); + PRIMITIVES.put("double", Schema.Type.DOUBLE); + PRIMITIVES.put("boolean", Schema.Type.BOOLEAN); + PRIMITIVES.put("null", Schema.Type.NULL); + } + + public SchemaGraph(List
schemaList) { + this.schemaList = schemaList; + this.schemaMap = buildGraph(); + this.buildEdge(); + pageRank = new PageRank<>(graph); + } + + private Map buildGraph() { + Map map = new HashMap<>(); + for (Table schema : schemaList) { + var fullName = Objects.isNull(schema.getNameSpace()) ? schema.getName() + : StringUtil.constructStringEmptySeparator(schema.getNameSpace(),".",schema.getName()); + map.put(fullName, schema); + this.addVertex(schema); + } + return map; + } + + private void buildEdge() + throws SchemaNotFoundException { + for (Table schema : this.schemaList) { + log.info("processing schema - "+schema.getName()); + buildEdge(schema.getFields(), schema); + } + } + + private void buildEdge(List fields, Table schema) { + for (Field field : fields) { + log.info("processing field - "+field.getName()+" of schema - "+schema.getName()); + if (Boolean.FALSE.equals(field.getIsPrimitiveType()) && Boolean.FALSE.equals(field.getIsDeleted())) { + var dataType = findDataType(field, schema); + if (Boolean.FALSE.equals(isPrimitiveType(dataType))) { + findVertex(dataType).ifPresentOrElse( + value -> this.addEdge(new WeightedSchemaEdge(schema, value, field)), () -> { + throw new SchemaNotFoundException("DataType " + dataType + " Not found in the graph"); + }); + } + } + } + } + + // check the DATA TYPE belongs to which COMPLEX TYPE + private String findDataType(Field field, Table schema) { + var dataType = field.getDataType(); + if (dataType.equalsIgnoreCase(Schema.Type.ARRAY.name())) { + dataType = findNestedDataType(field, schema, field.getItems()); + } + else if (dataType.equalsIgnoreCase(Schema.Type.ENUM.name())) { dataType = Schema.Type.STRING.name(); } + else if (dataType.equalsIgnoreCase(Schema.Type.MAP.name())) { + dataType = findNestedDataType(field, schema, field.getValues()); + } + else if (dataType.equalsIgnoreCase(Schema.Type.UNION.name())) { + // In UNION we may have MORE THAN ONE complex dataTypes so the need to build edge to these types. 
+ buildEdge(field.getUnionTypes(), schema); + dataType = Schema.Type.STRING.name(); + } + else if (dataType.equalsIgnoreCase(Schema.Type.FIXED.name())) { dataType = Schema.Type.STRING.name(); } + return dataType; + } + + private String findNestedDataType(Field field, Table schema, String dataType) { + // check nested field is ARRAY + if (dataType.equalsIgnoreCase(Schema.Type.ARRAY.name())) { + dataType = findDataType(field.getArrayField(), schema); + } + // check nested field is MAP + else if (dataType.equalsIgnoreCase(Schema.Type.MAP.name())) { + dataType = findDataType(field.getMapField(), schema); + } + // check nested field is UNION + else if (dataType.equalsIgnoreCase(Schema.Type.UNION.name())) { + buildEdge(field.getUnionTypes(), schema); + dataType = Schema.Type.STRING.name(); + } + else if (dataType.equalsIgnoreCase(Schema.Type.FIXED.name())) { dataType = Schema.Type.STRING.name(); } + else if (dataType.equalsIgnoreCase(Schema.Type.ENUM.name())) { dataType = Schema.Type.STRING.name(); } + return dataType; + } + + private boolean isPrimitiveType(String name) { + return PRIMITIVES.containsKey(name.toLowerCase()); + } + + private void addVertex(Table schema) { + graph.addVertex(schema); + } + + private void addEdge(WeightedSchemaEdge edge) { + if (edge == null) { + throw new IllegalArgumentException("Edge can't be null"); + } + try { + graph.addEdge(edge.getSource(), edge.getTarget(), edge); + } catch (Exception e) { + log.info("Source({}) and Destination({}) are same {}", edge.getSource().getName(), edge.getTarget().getName(), e.getMessage()); + } + } + + public Set incomingEdgesOf(String vertex) + throws SchemaNotFoundException { + return graph.incomingEdgesOf(getSchema(vertex)); + } + + public Set
incomingVertexOf(String vertex) { + Set
incomingSchemaSet = new HashSet<>(); + incomingEdgesOf(vertex).forEach(e -> incomingSchemaSet.add(e.getSource())); + return incomingSchemaSet; + } + + public Set outgoingEdgesOf(String vertex) + throws SchemaNotFoundException { + return graph.outgoingEdgesOf(getSchema(vertex)); + } + + public Set
outgoingVertexOf(String vertex) { + Set
outgoingSchemaSet = new HashSet<>(); + outgoingEdgesOf(vertex).forEach(e -> outgoingSchemaSet.add(e.getTarget())); + return outgoingSchemaSet; + } + + public Set
outgoingEntityVertexOf(String vertex) { + return outgoingVertexOf(vertex).stream().filter(f -> "ENTITY".equalsIgnoreCase(f.getSchemaType().name())) + .collect(Collectors.toSet()); + } + + public Set
getAllEntityVertex() { + return graph.vertexSet().stream().filter(f -> "ENTITY".equalsIgnoreCase(f.getSchemaType().name())).collect(Collectors.toSet()); + } + + public Double getVertexPageRankScore(String vertex) { + return pageRank.getVertexScore(getSchema(vertex)); + } + + public Double getSchemataScore(String vertex) { + var schema = getSchema(vertex); + double score = switch (schema.getSchemaType().name().toUpperCase()) { + case "ENTITY" -> computeEntityScore(vertex); + case "EVENT" -> computeEventScore(vertex, schema.getEventType().name()); + default -> 0.0; + }; + return roundUp(score); + } + + private double computeEntityScore(String vertex) { + double totalEdges = graph.edgeSet().size(); + double referenceEdges = referenceEdges(vertex).size(); + return totalEdges == 0 ? 0 : 1 - ((totalEdges - referenceEdges) / totalEdges); + } + + public Set referenceEdges(String vertex) { + return SetUtils.union(incomingEdgesOf(vertex), outgoingEdgesOf(vertex)); + } + + private double computeEventScore(String vertex, String eventType) { + double score = switch (eventType) { + case "LIFECYCLE" -> outgoingEntityVertexOf(vertex).size() > 0 ? 1.0 : 0.0; + case "ACTIVITY", "AGGREGATED" -> computeNonLifecycleScore(vertex); + default -> 0.0; + }; + return score; + } + + private double computeNonLifecycleScore(String vertex) { + Set
referenceVertex = + outgoingEntityVertexOf(vertex).stream().map(v -> outgoingEntityVertexOf(v.getNameSpace()+"."+v.getName())).flatMap(Collection::stream) + .collect(Collectors.toSet()); + Set
outgoingVertex = outgoingEntityVertexOf(vertex); + double vertexCount = SetUtils.union(referenceVertex, outgoingVertex).size(); + double totalVertex = getAllEntityVertex().size(); + return 1 - ((totalVertex - vertexCount) / totalVertex); + } + + public Table getSchema(String vertex) + throws SchemaNotFoundException { + return findVertex(vertex).orElseThrow( + () -> new SchemaNotFoundException("Vertex " + vertex + " Not found in the graph")); + } + + public Optional
findVertex(String vertex) { + if (StringUtils.isBlank(vertex)) { + return Optional.empty(); + } + if (this.schemaMap.containsKey(vertex)) { + return Optional.of(this.schemaMap.get(vertex)); + } + return Optional.empty(); + } + + private double roundUp(double value) { + return new BigDecimal(value, new MathContext(3)).doubleValue(); + } +} diff --git a/src/main/java/org/schemata/graph/WeightedSchemaEdge.java b/connect/src/main/java/com/opsbeach/connect/schemata/graph/WeightedSchemaEdge.java similarity index 71% rename from src/main/java/org/schemata/graph/WeightedSchemaEdge.java rename to connect/src/main/java/com/opsbeach/connect/schemata/graph/WeightedSchemaEdge.java index 157fe2c..1c622b1 100644 --- a/src/main/java/org/schemata/graph/WeightedSchemaEdge.java +++ b/connect/src/main/java/com/opsbeach/connect/schemata/graph/WeightedSchemaEdge.java @@ -1,25 +1,26 @@ -package org.schemata.graph; +package com.opsbeach.connect.schemata.graph; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; import org.jgrapht.graph.DefaultEdge; -import org.schemata.domain.Field; -import org.schemata.domain.Schema; + +import com.opsbeach.connect.schemata.entity.Field; +import com.opsbeach.connect.schemata.entity.Table; public class WeightedSchemaEdge extends DefaultEdge { private static final Double DEFAULT_WEIGHT = 1.0; - Schema source; - Schema target; + Table source; + Table target; Field edgeField; double weight; // Set default weight == 1 - public WeightedSchemaEdge(Schema source, Schema target, Field edgeField) { + public WeightedSchemaEdge(Table source, Table target, Field edgeField) { this(source, target, edgeField, DEFAULT_WEIGHT); } - public WeightedSchemaEdge(Schema source, Schema target, Field edgeField, double weight) { + public WeightedSchemaEdge(Table source, Table target, Field edgeField, double weight) { this.source = source; this.target = target; this.edgeField = edgeField; @@ -27,12 +28,12 @@ public 
WeightedSchemaEdge(Schema source, Schema target, Field edgeField, double } @Override - public Schema getSource() { + public Table getSource() { return source; } @Override - public Schema getTarget() { + public Table getTarget() { return target; } @@ -51,8 +52,8 @@ public String toString() { } public String summaryPrint() { - return "WeightedSchemaEdge{" + "source=" + source.name() + ", target=" + target.name() + ", edgeField=" - + edgeField.name() + ", weight=" + weight + '}'; + return "WeightedSchemaEdge{" + "source=" + source.getName() + ", target=" + target.getName() + ", edgeField=" + + edgeField.getName() + ", weight=" + weight + '}'; } @Override diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/processor/SchemaFileProcessor.java b/connect/src/main/java/com/opsbeach/connect/schemata/processor/SchemaFileProcessor.java new file mode 100644 index 0000000..3737f0b --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/processor/SchemaFileProcessor.java @@ -0,0 +1,17 @@ +package com.opsbeach.connect.schemata.processor; + +import java.util.List; + +import com.opsbeach.connect.github.entity.ClientRepo; +import com.opsbeach.connect.schemata.entity.Table; + +public interface SchemaFileProcessor { + + void parseFolder(String path, ClientRepo clientRepo); + + List
getTables(String path, Boolean toSave) throws Exception; + + List
getTables(byte[] path, Boolean toSave) throws Exception; + + String getFileContent(Table table); +} diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/processor/avro/AvroSchema.java b/connect/src/main/java/com/opsbeach/connect/schemata/processor/avro/AvroSchema.java new file mode 100644 index 0000000..2cccce8 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/processor/avro/AvroSchema.java @@ -0,0 +1,133 @@ +package com.opsbeach.connect.schemata.processor.avro; + +import java.io.ByteArrayInputStream; +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; + +import org.apache.avro.Schema; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Component; + +import com.opsbeach.connect.github.entity.ClientRepo; +import com.opsbeach.connect.github.entity.Model; +import com.opsbeach.connect.github.service.DomainService; +import com.opsbeach.connect.github.service.ModelService; +import com.opsbeach.connect.github.service.SchemaFileAuditService; +import com.opsbeach.connect.schemata.entity.Table; +import com.opsbeach.connect.schemata.processor.SchemaFileProcessor; +import com.opsbeach.connect.schemata.service.DomainNodeService; +import com.opsbeach.connect.schemata.service.TableService; +import com.opsbeach.sharedlib.exception.ErrorCode; +import com.opsbeach.sharedlib.exception.FileNotFoundException; +import com.opsbeach.sharedlib.exception.InvalidDataException; +import com.opsbeach.sharedlib.response.ResponseMessage; +import com.opsbeach.sharedlib.utils.FileUtil; +import com.opsbeach.sharedlib.utils.StringUtil; + +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +@Component +@RequiredArgsConstructor +public class AvroSchema implements SchemaFileProcessor { + + private final TableService tableService; + private final DomainService 
domainService; + private final DomainNodeService domainNodeService; + private final SchemaFileAuditService schemaFileAuditService; + private final ModelService modelService; + private final ResponseMessage responseMessage; + + @Value("${server.home-path}") + private String homePath; + + static final Map PRIMITIVES = new HashMap<>(); + + static { + PRIMITIVES.put("string", Schema.Type.STRING); + PRIMITIVES.put("bytes", Schema.Type.BYTES); + PRIMITIVES.put("int", Schema.Type.INT); + PRIMITIVES.put("long", Schema.Type.LONG); + PRIMITIVES.put("float", Schema.Type.FLOAT); + PRIMITIVES.put("double", Schema.Type.DOUBLE); + PRIMITIVES.put("boolean", Schema.Type.BOOLEAN); + PRIMITIVES.put("null", Schema.Type.NULL); + } + + @Override + public List
getTables(byte[] content, Boolean toSave) throws IOException { + var avroSchema = new Schema.Parser().parse(new ByteArrayInputStream(content)); + Map tableMap = new HashMap<>(); + var table = parseSchema(avroSchema, tableMap, toSave); + List
tables = new LinkedList<>(tableMap.values()); + tables.add(table); + return tables; + } + + @Override + public List
getTables(String path, Boolean toSave) throws IOException { + if (Boolean.FALSE.equals(path.endsWith("avsc"))) { + throw new InvalidDataException(ErrorCode.INVALID_FILE, responseMessage.getErrorMessage(ErrorCode.INVALID_FILE, path)); + } + var avroSchema = new Schema.Parser().parse(new File(path)); + Map tableMap = new HashMap<>(); // this map is to track schemas present in current file. + var table = parseSchema(avroSchema, tableMap, toSave); + if (toSave) { + table = tableService.addTable(table); + } + List
tables = new LinkedList<>(tableMap.values()); + tables.add(table); + return tables; + } + + private Table parseSchema(Schema schema, Map tableMap, Boolean bool) { + var avroSchemaParser = new AvroSchemaParser(tableService, PRIMITIVES); + return avroSchemaParser.parseSchema(schema, tableMap, bool); + } + + @Override + public String getFileContent(Table table) { + var avroSchemaGenerator = new AvroSchemaGenerator(PRIMITIVES); + return avroSchemaGenerator.generateTableSchema(table).toString(true); + } + + @Override + public void parseFolder(String folderPath, ClientRepo clientRepo) { + var filePaths = FileUtil.deepSearchFiles(folderPath, ".avsc"); + List models = new ArrayList<>(); + log.info("creating domain in neo4j"); + var domainNode = domainNodeService.addDomainNode(clientRepo.getFullName(), clientRepo.getClientId(), clientRepo.getId()); + var domain = domainService.addDomain(clientRepo, domainNode.getId()); + domainNode.setTables(new ArrayList<>()); + filePaths.forEach(filePath -> { + log.info("Working on fetching Models from file : "+filePath); + var tables = addModelsInGraphDB(filePath); + if (tables != null) { + var schemaFileAudit = schemaFileAuditService.createSchemaFileAuditWhileInitialLoading(filePath, clientRepo, tables.get(tables.size()-1).getId()); + models.addAll(modelService.createModels(tables, schemaFileAudit, domain)); + domainNode.getTables().addAll(tables); + } + }); + log.info("Fetching Models from Files is completed"); + domainNodeService.update(domainNode); + modelService.addAll(models); + } + + private List
addModelsInGraphDB(String filePath) { + var fileName = filePath.substring(filePath.lastIndexOf("/") + 1, filePath.lastIndexOf(".")); + var fileType = filePath.substring(filePath.lastIndexOf(".") + 1); + // create tables on neo4j + log.info(StringUtil.constructStringEmptySeparator("Started parsing file ",fileName,".",fileType)); + try { + return getTables(filePath, Boolean.TRUE); + } catch (Exception e) { + throw new FileNotFoundException(ErrorCode.FILE_NOT_FOUND, responseMessage.getErrorMessage(ErrorCode.FILE_NOT_FOUND, e.getMessage())); + } + } +} \ No newline at end of file diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/processor/avro/AvroSchemaGenerator.java b/connect/src/main/java/com/opsbeach/connect/schemata/processor/avro/AvroSchemaGenerator.java new file mode 100644 index 0000000..6cc521b --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/processor/avro/AvroSchemaGenerator.java @@ -0,0 +1,172 @@ +package com.opsbeach.connect.schemata.processor.avro; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; + +import org.apache.avro.Schema; +import org.springframework.util.ObjectUtils; + +import com.opsbeach.connect.schemata.entity.Field; +import com.opsbeach.connect.schemata.entity.Table; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class AvroSchemaGenerator { + + private final Map PRIMITIVES; + + public AvroSchemaGenerator(Map primitives) { + this.PRIMITIVES = primitives; + } + + public Schema generateTableSchema(Table table) { + List fieldSchemas = new ArrayList<>(); + table.getFields().forEach(field -> { + Object defval = ObjectUtils.isEmpty(field.getDefaultValue()) ? 
null : parseDefaultValue(field.getDefaultValue(), field.getDataType()); + var fieldSchema = new Schema.Field(field.getName(), generateFieldSchema(field, field.getDataType()), field.getDescription(), defval); + fieldSchemas.add(addProps(fieldSchema, field)); + }); + var schema = Schema.createRecord(table.getName(), table.getDescription(), table.getNameSpace(), false, fieldSchemas); + return addProps(schema, table); + } + + private Schema addProps(Schema schema, Table table) { + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(table.getOwner()))) schema.addProp(Table.Prop.OWNER, table.getOwner()); + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(table.getSchemaType()))) schema.addProp(Table.Prop.SCHEMA_TYPE, table.getSchemaType().name()); + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(table.getChannel()))) schema.addProp(Table.Prop.CHANNEL, table.getChannel()); + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(table.getDomain()))) schema.addProp(Table.Prop.DOMAIN, table.getDomain()); + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(table.getEmail()))) schema.addProp(Table.Prop.EMAIL, table.getEmail()); + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(table.getStatus()))) schema.addProp(Table.Prop.STATUS, table.getStatus()); + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(table.getComplianceOwner()))) schema.addProp(Table.Prop.COMPLIANCE_OWNER, table.getComplianceOwner()); + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(table.getSubscribers()))) schema.addProp(Table.Prop.SUBSCRIBERS, Arrays.asList(table.getSubscribers())); + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(table.getQualityRuleBase()))) schema.addProp(Table.Prop.QUALITY_RULE_BASE, table.getQualityRuleBase()); + // if (Boolean.FALSE.equals(ObjectUtils.isEmpty(table.getQualityRuleSql()))) schema.addProp(Table.Prop.QUALITY_RULE_SQL, table.getQualityRuleSql()); + // if (Boolean.FALSE.equals(ObjectUtils.isEmpty(table.getQualityRuleCel()))) schema.addProp(Table.Prop.QUALITY_RULE_CEL, table.getQualityRuleCel()); + return 
schema; + } + + private Schema.Field addProps(Schema.Field fieldSchema, Field field) { + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(field.getIsPii()))) fieldSchema.addProp(Field.Prop.IS_PII, field.getIsPii().toString()); + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(field.getIsClassified()))) fieldSchema.addProp(Field.Prop.IS_CLASSIFIED, field.getIsClassified().toString()); + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(field.getDeprecated()))) fieldSchema.addProp(Field.Prop.DEPRECATED, field.getDeprecated().toString()); + // if (Boolean.FALSE.equals(ObjectUtils.isEmpty(field.getReference()))) fieldSchema.addProp(Field.Prop.REFERENCE, field.getReference()); + // if (Boolean.FALSE.equals(ObjectUtils.isEmpty(field.getClassificationLevel()))) fieldSchema.addProp(Field.Prop.CLASSIFICATION_LEVEL, field.getClassificationLevel()); + return fieldSchema; + } + + private Schema generateFieldSchema(Field field, String dataType) { + log.info("Processing field --"+field.getName()+" of schema --"+field.getSchema()); + if (Boolean.TRUE.equals(field.getIsPrimitiveType())) { + return Schema.create(Schema.Type.valueOf(PRIMITIVES.get(dataType).name())); + } + else { + if (dataType.equalsIgnoreCase(Schema.Type.ARRAY.name())) { + return generateArraySchema(field); + } + else if (dataType.equalsIgnoreCase(Schema.Type.MAP.name())) { + return generateMapSchema(field); + } + else if (dataType.equalsIgnoreCase(Schema.Type.ENUM.name())) { + return generateEnumSchema(field); + } + else if (dataType.equalsIgnoreCase(Schema.Type.UNION.name())) { + return Schema.createUnion(field.getUnionTypes().stream().map(type -> generateFieldSchema(type, type.getDataType())).toList()); + } + else if (dataType.equalsIgnoreCase(Schema.Type.FIXED.name())) { + return Schema.createFixed(field.getName(), field.getDescription(), field.getSchema(), field.getSize()); + } + else { + return generateTableSchema(field.getContain()); + } + } + } + + public boolean isPrimitiveType(String name) { + return 
PRIMITIVES.containsKey(name); + } + + public Schema generateEnumSchema(Field field) { + var defaultVal = ObjectUtils.isEmpty(field.getDefaultValue()) ? null : field.getDefaultValue(); + return Schema.createEnum(field.getName(), field.getDescription(), field.getSchema(), Arrays.asList(field.getSymbols()), defaultVal); + } + + private Schema generateArraySchema(Field field) { + var items = field.getItems(); + Schema itemSchema = null; // (itemSchema is same as "elementType" of array schema) + if (Boolean.TRUE.equals(isPrimitiveType(items))) { + itemSchema = Schema.create(Schema.Type.valueOf(PRIMITIVES.get(items).name())); + } + else { + if (items.equalsIgnoreCase(Schema.Type.ARRAY.name())) { + itemSchema = generateArraySchema(field.getArrayField()); + } + else if (items.equalsIgnoreCase(Schema.Type.MAP.name())) { + itemSchema = generateMapSchema(field.getMapField()); + } + else if (items.equalsIgnoreCase(Schema.Type.ENUM.name())) { + itemSchema = generateEnumSchema(field); + } + else if (items.equalsIgnoreCase(Schema.Type.UNION.name())) { + itemSchema = Schema.createUnion(field.getUnionTypes().stream().map(type -> generateFieldSchema(type, type.getDataType())).toList()); + } + else if (items.equalsIgnoreCase(Schema.Type.FIXED.name())) { + itemSchema = Schema.createFixed(field.getName(), field.getDescription(), field.getSchema(), field.getSize()); + } + else { + itemSchema = generateTableSchema(field.getContain()); + } + } + return Schema.createArray(itemSchema); + } + + private Schema generateMapSchema(Field field) { + var values = field.getValues(); + Schema valueSchema = null; // (valueSchema is same as "valueType" of map schema) + if (Boolean.TRUE.equals(isPrimitiveType(values))) { + valueSchema = Schema.create(Schema.Type.valueOf(PRIMITIVES.get(values).name())); + } + else { + if (values.equalsIgnoreCase(Schema.Type.ARRAY.name())) { + valueSchema = generateArraySchema(field.getArrayField()); + } + else if (values.equalsIgnoreCase(Schema.Type.MAP.name())) { + 
valueSchema = generateMapSchema(field.getMapField()); + } + else if (values.equalsIgnoreCase(Schema.Type.ENUM.name())) { + valueSchema = generateEnumSchema(field); + } + else if (values.equalsIgnoreCase(Schema.Type.UNION.name())) { + valueSchema = Schema.createUnion(field.getUnionTypes().stream().map(type -> generateFieldSchema(type, type.getDataType())).toList()); + } + else if (values.equalsIgnoreCase(Schema.Type.FIXED.name())) { + valueSchema = Schema.createFixed(field.getName(), field.getDescription(), field.getSchema(), field.getSize()); + } + else { + valueSchema = generateTableSchema(field.getContain()); + } + } + return Schema.createMap(valueSchema); + } + + private Object parseDefaultValue(String defaultValue, String dataType) { + if (dataType.equalsIgnoreCase(Schema.Type.INT.name())) { + return Integer.parseInt(defaultValue); + } + if (dataType.equalsIgnoreCase(Schema.Type.FLOAT.name())) { + return Float.parseFloat(defaultValue); + } + if (dataType.equalsIgnoreCase(Schema.Type.LONG.name())) { + return Long.parseLong(defaultValue); + } + if (dataType.equalsIgnoreCase(Schema.Type.DOUBLE.name())) { + return Double.parseDouble(defaultValue); + } + // if (dataType.equalsIgnoreCase(Schema.Type.ENUM.name())) { + // return null; + // } + return defaultValue; + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/processor/avro/AvroSchemaParser.java b/connect/src/main/java/com/opsbeach/connect/schemata/processor/avro/AvroSchemaParser.java new file mode 100644 index 0000000..7b54d76 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/processor/avro/AvroSchemaParser.java @@ -0,0 +1,262 @@ +package com.opsbeach.connect.schemata.processor.avro; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import org.apache.avro.Schema; +import org.springframework.util.ObjectUtils; + +import com.opsbeach.connect.schemata.entity.Field; +import com.opsbeach.connect.schemata.entity.Table; +import 
com.opsbeach.connect.schemata.enums.EventType; +import com.opsbeach.connect.schemata.enums.SchemaType; +import com.opsbeach.connect.schemata.service.TableService; +import com.opsbeach.sharedlib.security.SecurityUtil; +import com.opsbeach.sharedlib.utils.JsonUtil; + +public class AvroSchemaParser { + + private final TableService tableService; + + private final Map PRIMITIVES; + + public AvroSchemaParser(TableService tableService, Map primitives) { + this.tableService = tableService; + this.PRIMITIVES = primitives; + } + + public Table parseSchema(Schema avroSchema, Map tableMap, Boolean initialPull) { + List fields = new ArrayList<>(); + var avroFields = avroSchema.getFields(); + int rowNumber = 1; + for (Schema.Field avroField : avroFields) { + fields.add(parseField(avroSchema.getFullName(), avroField, rowNumber, tableMap, initialPull)); + rowNumber++; + } + return parseTable(avroSchema, fields); + } + + private Table parseTable(Schema schema,List fields) { + return Table.builder().name(schema.getName()) + .nameSpace(schema.getNamespace()) + .clientId(SecurityUtil.getClientId()) + .type(schema.getType().name()) + .fields(fields) + .description(schema.getDoc()) + // .comment(schema.getProp(Table.Prop.COMMENT)) + // .seeAlso(schema.getProp(Table.Prop.SEE_ALSO)) + // .reference(schema.getProp(Table.Prop.REFERENCE)) + .owner(schema.getProp(Table.Prop.OWNER)) + .domain(schema.getProp(Table.Prop.DOMAIN)) + .status(ObjectUtils.isEmpty(schema.getProp(Table.Prop.STATUS)) ? 
"Active" : schema.getProp(Table.Prop.STATUS)) + .schemaType(handleEmptyTableType(schema)) + .eventType(handleEmptyEventType(schema)) + .channel(schema.getProp(Table.Prop.CHANNEL)) + .subscribers(handleSubscribers(schema)) + .email(schema.getProp(Table.Prop.EMAIL)) + // .teamChannel(schema.getProp(Table.Prop.TEAM_CHANNEL)) + // .alertChannel(schema.getProp(Table.Prop.ALERT_CHANNEL)) + .complianceOwner(schema.getProp(Table.Prop.COMPLIANCE_OWNER)) + // .complianceChannel(schema.getProp(Table.Prop.COMPLIANCE_CHANNEL)) + .qualityRuleBase(schema.getProp(Table.Prop.QUALITY_RULE_BASE)) + .qualityRuleSql(schema.getProp(Table.Prop.QUALITY_RULE_SQL)) + .qualityRuleCel(schema.getProp(Table.Prop.QUALITY_RULE_CEL)) + .build(); + } + + private Table parseNestedTable(Schema schema, Map tableMap, Boolean initialPull) { + if (Boolean.TRUE.equals(initialPull)) { + // check the table present in DB already, find it by name and nameSpace of schema. + var table = tableService.findByNameAndNameSpace(schema.getName(), schema.getNamespace()); + if (ObjectUtils.isEmpty(table)) { + // If table is not present in DB then parse it and save. + table = parseSchema(schema, tableMap, initialPull); + table = tableService.addTable(table); + } + else { + // Need to parse the table if it present in DB, because we need nested table Id of this table. + parseSchema(schema, tableMap, initialPull); + } + tableMap.putIfAbsent(table.getId().toString(), table); + return table; + } + var table = parseSchema(schema, tableMap, initialPull); + tableMap.put(table.getName(), table); + return table; + } + + private Field parseField(String schemaName, Schema.Field avroField, int rowNumber, Map tableMap, Boolean initialPull) { + var builder = Field.builder(); + String dataType = avroField.schema().getType().getName(); + var val = isPrimitiveType(dataType) ? avroField.defaultVal() : avroField.schema().getObjectProps().get(Field.Prop.DEFAULT); + String defaultVal = ObjectUtils.isEmpty(val) ? 
null : val.toString(); + + // if the field is RECORD then create new table and add dataType as table name + if (dataType.equalsIgnoreCase(Schema.Type.RECORD.name())) { + var table = parseNestedTable(avroField.schema(), tableMap, initialPull); + dataType = avroField.schema().getFullName(); + builder.contain(table); + } + // if the field is ENUM then add symbols(enum values). + else if (dataType.equalsIgnoreCase(Schema.Type.ENUM.name())) { + builder.symbols(avroField.schema().getEnumSymbols().toArray(new String[0])); + defaultVal = avroField.schema().getEnumDefault(); + } + // if the field is MAP then parse it. + else if (dataType.equalsIgnoreCase(Schema.Type.MAP.name())) { + builder = parseMapField(avroField.name(), avroField.schema(), builder, tableMap, initialPull); + } + // if the field is ARRAY then parse it. + else if (dataType.equalsIgnoreCase(Schema.Type.ARRAY.name())) { + builder = parseArrayField(avroField.name(), avroField.schema(), builder, tableMap, initialPull); + } + // if the field is UNION then parse it. 
+ else if (dataType.equalsIgnoreCase(Schema.Type.UNION.name())) { + builder = parseUnionFields(avroField.name(), avroField.schema().getTypes(), builder, tableMap, initialPull); + } + else if (dataType.equalsIgnoreCase(Schema.Type.FIXED.name())) { + builder.size(avroField.schema().getFixedSize()); + } + return builder.schema(schemaName) + .name(avroField.name()) + .rowNumber(rowNumber) + .dataType(dataType) + .isPrimitiveType(isPrimitiveType(dataType)) + .description(avroField.doc()) + .defaultValue(defaultVal) + // .comment(avroField.getProp(Field.Prop.COMMENT)) + // .seeAlso(avroField.getProp(Field.Prop.SEE_ALSO)) + // .reference(avroField.getProp(Field.Prop.REFERENCE)) + .isClassified(Boolean.parseBoolean(avroField.getProp(Field.Prop.IS_CLASSIFIED))) + .isPrimaryKey(Boolean.parseBoolean(avroField.getProp(Field.Prop.IS_PRIMARY_KEY))) + .isPii(Boolean.parseBoolean(avroField.getProp(Field.Prop.IS_PII))) + .deprecated(Boolean.parseBoolean(avroField.getProp(Field.Prop.DEPRECATED))) + // .classificationLevel(avroField.getProp(Field.Prop.CLASSIFICATION_LEVEL)) + // .productType(avroField.getProp(Field.Prop.PRODUCT_TYPE)) + .build(); + } + + private Field.FieldBuilder parseUnionFields(String name, List schemas, Field.FieldBuilder builder, Map tableMap, Boolean initialPull) { + List unionTypes = new ArrayList<>(); + + // A UNION field can have more than one DATA TYPE, so we need to create a new FIELD for each and every DATA TYPE and + // add these as a new list of relation fields to the UNION FIELD object. 
+ schemas.forEach(type -> unionTypes.add(parseUnionField(name, type, tableMap, initialPull))); + builder.unionTypes(unionTypes); + return builder; + } + + // This method creates a new FIELD object for each UNION member type. + private Field parseUnionField(String name, Schema schema, Map tableMap, Boolean initialPull) { + var builder = Field.builder(); + // Need to check which type the union member belongs to, then parse that respective type + String dataType = schema.getType().getName(); + if (dataType.equalsIgnoreCase(Schema.Type.RECORD.name())) { + var table = parseNestedTable(schema, tableMap, initialPull); + dataType = schema.getFullName(); + builder.contain(table); + } + else if (dataType.equalsIgnoreCase(Schema.Type.ARRAY.name())) { + // We don't know whether the array contains a nested field, so pass the current builder, add values and return + builder = parseArrayField(name, schema, builder, tableMap, initialPull); + } + else if (dataType.equalsIgnoreCase(Schema.Type.MAP.name())) { + // We don't know whether the map contains a nested field, so pass the current builder, add values and return + builder = parseMapField(name, schema, builder, tableMap, initialPull); + } + else if (dataType.equalsIgnoreCase(Schema.Type.UNION.name())) { + builder = parseUnionFields(name, schema.getTypes(), builder, tableMap, initialPull); + } + else if (dataType.equalsIgnoreCase(Schema.Type.ENUM.name())) { + builder.symbols(schema.getEnumSymbols().toArray(new String[0])); + } + else if (dataType.equalsIgnoreCase(Schema.Type.FIXED.name())) { + builder.size(schema.getFixedSize()); + } + return builder.name(name).dataType(dataType) + .isPrimitiveType(isPrimitiveType(dataType)) + .build(); + } + + // This method is for parsing ARRAY and NESTED ARRAY fields of any type. 
+ private Field.FieldBuilder parseArrayField(String fieldName, Schema schema, Field.FieldBuilder builder, Map tableMap, Boolean initialPull) { + var dataType = schema.getType().getName(); + var items = schema.getElementType().getType().getName(); + if (items.equalsIgnoreCase(Schema.Type.RECORD.name())) { + var table = parseNestedTable(schema.getElementType(), tableMap, initialPull); + items = schema.getElementType().getFullName(); + builder.contain(table); + } + else if (items.equalsIgnoreCase(Schema.Type.ARRAY.name())) { + var arrayBuilder = parseArrayField(fieldName, schema.getElementType(), Field.builder(), tableMap, initialPull); + builder.arrayField(arrayBuilder.build()); + } + else if (items.equalsIgnoreCase(Schema.Type.MAP.name())) { + var mapBuilder = parseMapField(fieldName, schema.getElementType(), Field.builder(), tableMap, initialPull); + builder.mapField(mapBuilder.build()); + } + else if (items.equalsIgnoreCase(Schema.Type.UNION.name())) { + builder = parseUnionFields(fieldName, schema.getElementType().getTypes(), builder, tableMap, initialPull); + } + else if (items.equalsIgnoreCase(Schema.Type.ENUM.name())) { + builder.symbols(schema.getElementType().getEnumSymbols().toArray(new String[0])); + } + else if (items.equalsIgnoreCase(Schema.Type.FIXED.name())) { + builder.size(schema.getElementType().getFixedSize()); + } + builder.name(fieldName).dataType(dataType).items(items); + return builder; + } + + // This method for parsing MAP and NESTED MAP fields of any type. + // (NOTE: the key of avro map should be always string.) 
+ private Field.FieldBuilder parseMapField(String fieldName, Schema schema, Field.FieldBuilder builder, Map tableMap, Boolean initialPull) { + var dataType = schema.getType().getName(); + var values = schema.getValueType().getType().getName(); + if (values.equalsIgnoreCase(Schema.Type.RECORD.name())) { + var table = parseNestedTable(schema.getValueType(), tableMap, initialPull); + values = schema.getValueType().getFullName(); + builder.contain(table); + } + else if (values.equalsIgnoreCase(Schema.Type.ARRAY.name())) { + var arrayBuilder = parseArrayField(fieldName, schema.getValueType(), Field.builder(), tableMap, initialPull); + builder.arrayField(arrayBuilder.build()); + } + else if (values.equalsIgnoreCase(Schema.Type.MAP.name())) { + var mapBuilder = parseMapField(fieldName, schema.getValueType(), Field.builder(), tableMap, initialPull); + builder.mapField(mapBuilder.build()); + } + else if (values.equalsIgnoreCase(Schema.Type.UNION.name())) { + builder = parseUnionFields(fieldName, schema.getValueType().getTypes(), builder, tableMap, initialPull); + } + else if (values.equalsIgnoreCase(Schema.Type.ENUM.name())) { + builder.symbols(schema.getValueType().getEnumSymbols().toArray(new String[0])); + } + else if (values.equalsIgnoreCase(Schema.Type.FIXED.name())) { + builder.size(schema.getValueType().getFixedSize()); + } + builder.name(fieldName).dataType(dataType).values(values); + return builder; + } + + private EventType handleEmptyEventType(Schema schema) { + return schema.getProp(Table.Prop.EVENT_TYPE) == null ? EventType.NONE + : EventType.get(schema.getProp(Table.Prop.EVENT_TYPE)); + } + + private SchemaType handleEmptyTableType(Schema schema) { + return schema.getProp(Table.Prop.SCHEMA_TYPE) == null ? 
SchemaType.UNKNOWN + : SchemaType.get(schema.getProp(Table.Prop.SCHEMA_TYPE)); + } + + private String[] handleSubscribers(Schema schema) { + if (schema.getObjectProp(Table.Prop.SUBSCRIBERS) == null) return null; + var subscribers = JsonUtil.jsonArrayToObjectList(JsonUtil.convertObjectIntoJson(schema.getObjectProp(Table.Prop.SUBSCRIBERS)), String.class); + return subscribers.toArray(new String[0]); + } + + public boolean isPrimitiveType(String name) { + return PRIMITIVES.containsKey(name); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/processor/json/JsonSchema.java b/connect/src/main/java/com/opsbeach/connect/schemata/processor/json/JsonSchema.java new file mode 100644 index 0000000..1374549 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/processor/json/JsonSchema.java @@ -0,0 +1,146 @@ +package com.opsbeach.connect.schemata.processor.json; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; + +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Component; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.opsbeach.connect.github.entity.ClientRepo; +import com.opsbeach.connect.github.entity.Model; +import com.opsbeach.connect.github.service.DomainService; +import com.opsbeach.connect.github.service.ModelService; +import com.opsbeach.connect.github.service.SchemaFileAuditService; +import com.opsbeach.connect.schemata.entity.Field; +import com.opsbeach.connect.schemata.entity.Table; +import com.opsbeach.connect.schemata.processor.SchemaFileProcessor; +import com.opsbeach.connect.schemata.service.DomainNodeService; +import com.opsbeach.connect.schemata.service.TableService; +import com.opsbeach.sharedlib.exception.ErrorCode; +import 
com.opsbeach.sharedlib.exception.FileNotFoundException; +import com.opsbeach.sharedlib.exception.SchemaParserException; +import com.opsbeach.sharedlib.response.ResponseMessage; +import com.opsbeach.sharedlib.utils.FileUtil; +import com.opsbeach.sharedlib.utils.StringUtil; + +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +@Component +@RequiredArgsConstructor +public class JsonSchema implements SchemaFileProcessor { + + static final List PRIMITIVES = new ArrayList<>(); + + static { + PRIMITIVES.add("string"); + PRIMITIVES.add("number"); + PRIMITIVES.add("integer"); + PRIMITIVES.add("boolean"); + PRIMITIVES.add("null"); + } + + private final TableService tableService; + private final DomainNodeService domainNodeService; + private final DomainService domainService; + private final ModelService modelService; + private final ResponseMessage responseMessage; + private final SchemaFileAuditService schemaFileAuditService; + + @Value("${server.home-path}") + private String homePath; + + private ObjectMapper mapper = new ObjectMapper(); + + @Override + public List
getTables(byte[] content, Boolean initialPull) throws IOException { + var jsonSchema = mapper.readTree(content); + return parseSchema(jsonSchema, new HashMap<>(), initialPull); + } + + @Override + public String getFileContent(Table table) { + return new JsonSchemaGenerator(PRIMITIVES).generateTableSchema(table).toPrettyString(); + } + + @Override + public List
getTables(String path, Boolean initialPull) throws IOException { + var inputStream = new FileInputStream(new File(path)); + var jsonSchema = mapper.readTree(inputStream); // get schema structure as JsonNode from inputStream of file. + return parseSchema(jsonSchema, new HashMap<>(), initialPull); + } + + private List
parseSchema(JsonNode jsonSchema, Map fieldTableMap, Boolean initialPull) { + Map tableMap = new HashMap<>(); // this map is to store nested tables. + var jsonSchemaParser = new JsonSchemaParser(tableService, PRIMITIVES, mapper); + var table = jsonSchemaParser.parseTable(jsonSchema, tableMap, fieldTableMap, initialPull); + // add to Neo4j DB + if (initialPull) { + table = tableService.addTable(table); + } + List
tables = new LinkedList<>(tableMap.values()); + tables.add(table); + return tables; + } + + @Override + public void parseFolder(String folderPath, ClientRepo clientRepo) { + var filePaths = FileUtil.deepSearchFiles(folderPath, ".json"); + List models = new ArrayList<>(); + log.info("creating domain in neo4j"); + var domainNode = domainNodeService.addDomainNode(clientRepo.getFullName(), clientRepo.getClientId(), clientRepo.getId()); + var domain = domainService.addDomain(clientRepo, domainNode.getId()); + domainNode.setTables(new ArrayList<>()); + Map fieldTableMap = new HashMap<>(); + filePaths.forEach(filePath -> { + log.info("Working on fetching Models from file : "+filePath); + var tables = readSchemaAndAddInGraphDB(filePath, fieldTableMap); + if (tables != null) { + var schemaFileAudit = schemaFileAuditService.createSchemaFileAuditWhileInitialLoading(filePath, clientRepo, tables.get(tables.size()-1).getId()); + models.addAll(modelService.createModels(tables, schemaFileAudit, domain)); + domainNode.getTables().addAll(tables); + } + }); + mergeFieldAndTable(fieldTableMap); + log.info("Fetching Models from Files is completed"); + domainNodeService.update(domainNode); + modelService.addAll(models); + } + + private void mergeFieldAndTable(Map fieldTableMap) { + for (Map.Entry entry : fieldTableMap.entrySet()) { + var fullname = entry.getValue(); + var nameSpace = fullname.substring(0, fullname.lastIndexOf(".")); + var name = fullname.substring(fullname.lastIndexOf(".")+1); + var table = tableService.findByNameAndNameSpace(name, nameSpace); + var field = entry.getKey(); + field.setContain(table); + tableService.addField(field); + } + } + + public List
readSchemaAndAddInGraphDB(String filePath, Map fieldTableMap) { + var fileName = filePath.substring(filePath.lastIndexOf("/") + 1, filePath.lastIndexOf(".")); + var fileType = filePath.substring(filePath.lastIndexOf(".") + 1); + // create tables on neo4j + log.info(StringUtil.constructStringEmptySeparator("Started parsing file ",fileName,".",fileType)); + try { + var inputStream = new FileInputStream(new File(filePath)); + var jsonSchema = mapper.readTree(inputStream); // get schema structure as JsonNode from inputStream of file. + return parseSchema(jsonSchema, fieldTableMap, Boolean.TRUE); + } catch (IOException e) { + throw new FileNotFoundException(ErrorCode.FILE_NOT_FOUND, responseMessage.getErrorMessage(ErrorCode.FILE_NOT_FOUND, e.getMessage())); + } catch (SchemaParserException e) { + throw new SchemaParserException("filePath", e.fillInStackTrace()); + } + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/processor/json/JsonSchemaGenerator.java b/connect/src/main/java/com/opsbeach/connect/schemata/processor/json/JsonSchemaGenerator.java new file mode 100644 index 0000000..fea293a --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/processor/json/JsonSchemaGenerator.java @@ -0,0 +1,154 @@ +package com.opsbeach.connect.schemata.processor.json; + +import java.util.List; +import java.util.Objects; + +import org.springframework.util.ObjectUtils; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.JsonNodeFactory; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.opsbeach.connect.schemata.entity.Field; +import com.opsbeach.connect.schemata.entity.Table; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class JsonSchemaGenerator { + + private final List PRIMITIVES; + + private ObjectNode defenitionNode; + + private final String DRAFT = "http://json-schema.org/draft-07/schema#"; + + public 
JsonSchemaGenerator(List primitives) { + this.PRIMITIVES = primitives; + } + + public JsonNode generateTableSchema(Table table) { + var schema = JsonNodeFactory.instance.objectNode(); + defenitionNode = JsonNodeFactory.instance.objectNode(); + generateTableObject(table, schema); + if (!defenitionNode.isEmpty()) + schema.putIfAbsent("definitions", defenitionNode); + log.info(schema.toPrettyString()); + return schema; + } + + private void generateTableObject(Table table, ObjectNode tableNode) { + log.info("Json Schema Table Content generation: "+table.getName()); + addTableProps(tableNode, table); + var properties = tableNode.putObject(Field.Prop.PROPERTIES); + getProterties(table.getFields(), properties); + if (Objects.nonNull(table.getRequiredFields())) { + constructArrayNode(table.getRequiredFields(), tableNode.putArray(Table.Prop.REQUIRED)); + } + } + + private void addTableProps(ObjectNode tableNode, Table table) { + tableNode.put(Table.Prop.$SCHEMA, DRAFT); + if (Objects.nonNull(table.getJsonSchemaId())) tableNode.put(Table.Prop.JSON_SCHEMA_ID, table.getJsonSchemaId()); + tableNode.put(Table.Prop.TITLE, table.getName()); + tableNode.put(Table.Prop.TYPE, "object"); + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(table.getDescription()))) tableNode.put(Table.Prop.DESCRIPTION, table.getDescription()); + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(table.getOwner()))) tableNode.put(Table.Prop.OWNER, table.getOwner()); + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(table.getSchemaType()))) tableNode.put(Table.Prop.SCHEMA_TYPE, table.getSchemaType().name()); + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(table.getChannel()))) tableNode.put(Table.Prop.CHANNEL, table.getChannel()); + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(table.getDomain()))) tableNode.put(Table.Prop.DOMAIN, table.getDomain()); + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(table.getEmail()))) tableNode.put(Table.Prop.EMAIL, table.getEmail()); + if 
(Boolean.FALSE.equals(ObjectUtils.isEmpty(table.getStatus()))) tableNode.put(Table.Prop.STATUS, table.getStatus()); + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(table.getComplianceOwner()))) tableNode.put(Table.Prop.COMPLIANCE_OWNER, table.getComplianceOwner()); + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(table.getSubscribers()))) constructArrayNode(table.getSubscribers(), tableNode.putArray(Table.Prop.SUBSCRIBERS)); + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(table.getQualityRuleBase()))) tableNode.put(Table.Prop.QUALITY_RULE_BASE, table.getQualityRuleBase()); + // if (Boolean.FALSE.equals(ObjectUtils.isEmpty(table.getQualityRuleSql()))) tableNode.put(Table.Prop.QUALITY_RULE_SQL, table.getQualityRuleSql()); + // if (Boolean.FALSE.equals(ObjectUtils.isEmpty(table.getQualityRuleCel()))) tableNode.put(Table.Prop.QUALITY_RULE_CEL, table.getQualityRuleCel()); + // if (Boolean.FALSE.equals(ObjectUtils.isEmpty(table.getRequiredFields()))) constructArrayNode(table.getRequiredFields(), tableNode.putArray(Table.Prop.REQUIRED)); + // if (Boolean.FALSE.equals(ObjectUtils.isEmpty(table.getAdditionalProperties()))) tableNode.put(Table.Prop.ADDITIONAL_PROPERTIES, table.getAdditionalProperties()); + } + + private void getProterties(List fields, ObjectNode properties) { + fields.forEach(field -> { + var fieldNode = properties.putObject(field.getName()); + generateFieldObject(field, fieldNode); + }); + } + + private void generateFieldObject(Field field, ObjectNode fieldNode) { + log.info("Json Schema Field Content generation: "+field.getName()); + var dataType = field.getDataType(); + fieldNode.put(Field.Prop.TYPE, dataType); + if (dataType.equals(Field.Prop.ARRAY)) { + generateArrayObject(field, fieldNode); + } + else if (dataType.equals("union")) { + // generate union type with "anyOf". 
+ fieldNode.remove(Field.Prop.TYPE); + var unionTypes = fieldNode.putArray(Field.Prop.ANY_OF); + field.getUnionTypes().forEach(fld -> unionTypes.add(JsonNodeFactory.instance.objectNode().put(Field.Prop.TYPE, fld.getDataType()))); + } + else if (dataType.equals(Field.Prop.ENUM)) { + fieldNode.put(Field.Prop.TYPE, "string"); + constructArrayNode(field.getSymbols(), fieldNode.putArray(Field.Prop.ENUM)); + } + else if (Objects.nonNull(field.getContain())) { + if (Objects.nonNull(field.getJsonSchemaRefId())) { + fieldNode.removeAll(); + fieldNode.put(Field.Prop.$REF, field.getJsonSchemaRefId()); + if (field.getJsonSchemaRefId().startsWith("#/definitions")) { + var defNode = defenitionNode.putObject(field.getJsonSchemaRefId().split("/")[2]); + generateTableObject(field.getContain(), defNode); + } + } else { + var typeNode = fieldNode.putObject(Field.Prop.TYPE); + generateTableObject(field.getContain(), typeNode); + } + } + fieldNode.put(Field.Prop.DESCRIPTION, field.getDescription()); + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(field.getIsPii()))) fieldNode.put(Field.Prop.IS_PII, field.getIsPii().toString()); + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(field.getIsClassified()))) fieldNode.put(Field.Prop.IS_CLASSIFIED, field.getIsClassified().toString()); + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(field.getDeprecated()))) fieldNode.put(Field.Prop.DEPRECATED, field.getDeprecated().toString()); + // if (Boolean.FALSE.equals(ObjectUtils.isEmpty(field.getReference()))) fieldNode.put(Field.Prop.REFERENCE, field.getReference()); + // if (Boolean.FALSE.equals(ObjectUtils.isEmpty(field.getClassificationLevel()))) fieldNode.put(Field.Prop.CLASSIFICATION_LEVEL, field.getClassificationLevel()); + } + + private void generateArrayObject(Field field, ObjectNode fieldNode) { + var items = field.getItems(); + if (PRIMITIVES.contains(items)) { + var itemNode = fieldNode.putObject(Field.Prop.ITEMS); + itemNode.put(Field.Prop.TYPE, items); + } + else if 
(items.equals(Field.Prop.ARRAY)) { + var itemNode = fieldNode.putObject(Field.Prop.ITEMS); + itemNode.put(Field.Prop.TYPE, Field.Prop.ARRAY); + generateArrayObject(field.getArrayField(), itemNode); + } + else if (items.equals("union")) { + var unionTypes = fieldNode.putArray(Field.Prop.ITEMS); + field.getUnionTypes().forEach(fld -> unionTypes.add(fld.getDataType())); + } + else if (items.equals(Field.Prop.ENUM)) { + var itemNode = fieldNode.putObject(Field.Prop.ITEMS); + itemNode.put(Field.Prop.TYPE, "string"); + constructArrayNode(field.getSymbols(), itemNode.putArray(Field.Prop.ENUM)); + } + else if (Objects.nonNull(field.getContain())) { + if (Objects.nonNull(field.getJsonSchemaRefId())) { + var itemNode = fieldNode.putObject(Field.Prop.ITEMS); + itemNode.put(Field.Prop.$REF, field.getJsonSchemaRefId()); + if (field.getJsonSchemaRefId().startsWith("#/definitions")) { + var defNode = defenitionNode.putObject(field.getJsonSchemaRefId().split("/")[2]); + generateTableObject(field.getContain(), defNode); + } + } else { + var typeNode = fieldNode.putObject(Field.Prop.ITEMS); + generateTableObject(field.getContain(), typeNode); + } + } + } + + private void constructArrayNode(String[] values, ArrayNode arrayNode) { + if (values.length > 0) for (String value : values) arrayNode.add(value); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/processor/json/JsonSchemaParser.java b/connect/src/main/java/com/opsbeach/connect/schemata/processor/json/JsonSchemaParser.java new file mode 100644 index 0000000..f6e8e4a --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/processor/json/JsonSchemaParser.java @@ -0,0 +1,322 @@ +package com.opsbeach.connect.schemata.processor.json; + +import java.net.URI; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.atomic.AtomicInteger; + +import 
org.hibernate.tool.schema.extract.spi.SchemaExtractionException; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.opsbeach.connect.schemata.entity.Field; +import com.opsbeach.connect.schemata.entity.Table; +import com.opsbeach.connect.schemata.service.TableService; +import com.opsbeach.sharedlib.exception.ErrorCode; +import com.opsbeach.sharedlib.exception.InvalidDataException; +import com.opsbeach.sharedlib.security.SecurityUtil; +import com.opsbeach.sharedlib.utils.StringUtil; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class JsonSchemaParser { + + private final TableService tableService; + + private final List PRIMITIVES; + + private final ObjectMapper mapper; + + private final String DRAFT = "http://json-schema.org/draft-07/schema#"; + + public JsonSchemaParser(TableService tableService, List primitives, ObjectMapper mapper) { + this.tableService = tableService; + this.PRIMITIVES = primitives; + this.mapper = mapper; + } + + // get table properties from JsonNode of schema. 
+ public Table parseTable(JsonNode schema, Map tableMap, Map fieldTableMap, Boolean initialPull) { + var draft = getString(schema, Table.Prop.$SCHEMA, null); + if (Objects.isNull(draft) || Boolean.FALSE.equals(draft.equals(DRAFT))) throw new SchemaExtractionException("Schema Draft is invalid - (Draft 07 only acceptable)"); + var id = getString(schema, Table.Prop.JSON_SCHEMA_ID, null); + if (Objects.isNull(id)) throw new SchemaExtractionException("Schema $id is not present"); + // var name = getString(schema, Table.Prop.TITLE, null); + // if (Objects.isNull(name)) throw new SchemaExtractionException("Schema Title is Not present"); + // if (name.contains(" ")) throw new SchemaExtractionException("Invalid title - "+name); + + var uri = getUri(id); + var name = uri.getPath().substring(uri.getPath().lastIndexOf("/")+1); + var nameSpace = uri.getPath().substring(1, uri.getPath().lastIndexOf("/")).replaceAll("/", "."); + return constructTable(id, schema, nameSpace, name, tableMap, fieldTableMap, initialPull); + } + + private Table constructTable(String schemaId, JsonNode schema, String nameSpace, String name, Map tableMap, Map fieldTableMap, Boolean initialPull) { + var fullname = StringUtil.constructStringEmptySeparator(nameSpace, ".", name); + return Table.builder().name(name) + .nameSpace(nameSpace) + .jsonSchemaId(schemaId) + .clientId(SecurityUtil.getClientId()) + .type(schema.get(Table.Prop.TYPE).asText()) + .description(getString(schema, Table.Prop.DESCRIPTION, null)) + .fields(parseFields(fullname, schema, tableMap, fieldTableMap, initialPull)) // get list of fields + // .requiredFields(schema.has(Table.Prop.REQUIRED) ? 
getArrayFromNode(schema.get(Table.Prop.REQUIRED)) : null) + // .additionalProperties(getBoolean(schema, Table.Prop.ADDITIONAL_PROPERTIES)) + .domain(getString(schema, Table.Prop.DOMAIN, null)) + .owner(getString(schema, Table.Prop.OWNER, null)) + .complianceOwner(getString(schema, Table.Prop.COMPLIANCE_OWNER, null)) + .channel(getString(schema, Table.Prop.CHANNEL, null)) + .email(getString(schema, Table.Prop.EMAIL, null)) + .status(getString(schema, Table.Prop.STATUS, "Active")) + .subscribers(schema.has(Table.Prop.SUBSCRIBERS) ? getArrayFromNode(schema.get(Table.Prop.SUBSCRIBERS)) : null) + .qualityRuleBase(getString(schema, Table.Prop.QUALITY_RULE_BASE, null)) + .qualityRuleSql(getString(schema, Table.Prop.QUALITY_RULE_SQL, null)) + .qualityRuleCel(getString(schema, Table.Prop.QUALITY_RULE_CEL, null)) + .build(); + } + + private URI getUri(String uri) { + try { + return new URI(uri); + } catch (Exception e) { + throw new InvalidDataException(ErrorCode.INVALID_ID, e.getMessage()); + } + } + + // get field properties from JsonNode + private List parseFields(String schemaName, JsonNode schema, Map tableMap, Map fieldTableMap, Boolean initialPull) { + if (Boolean.FALSE.equals(schema.has(Table.Prop.PROPERTIES))) { + return null; + } + JsonNode properties = schema.get(Table.Prop.PROPERTIES); + List fields = new ArrayList<>(); + Iterator> fieldsIterator = properties.fields(); // get fields from node + var rowNumber = new AtomicInteger(1); + while (fieldsIterator.hasNext()) { + Map.Entry fieldEntry = fieldsIterator.next(); + var fieldNode = fieldEntry.getValue(); + var builder = Field.builder(); // build field from node + builder.rowNumber(rowNumber.getAndIncrement()); + var field = parseField(schema, fieldNode, builder, schemaName, fieldEntry.getKey(), tableMap, fieldTableMap, initialPull); + fields.add(field); + } + return fields; + } + + private Field parseField(JsonNode schema, JsonNode fieldNode, Field.FieldBuilder builder, String schemaName, + String fieldName, Map
tableMap, Map fieldTableMap, Boolean initialPull) { + builder.description(getString(fieldNode, Field.Prop.DESCRIPTION, null)); + builder.name(fieldName); + builder.schema(schemaName); + builder.defaultValue(getString(fieldNode, Field.Prop.DEFAULT, null)); + builder.isPii(getBoolean(fieldNode, Field.Prop.IS_PII, Boolean.FALSE)); + builder.isClassified(getBoolean(fieldNode, Field.Prop.IS_CLASSIFIED, Boolean.FALSE)); + builder.deprecated(getBoolean(fieldNode, Field.Prop.DEPRECATED, Boolean.FALSE)); + /* + * Here need to check "$ref" for inner schema because it refers outside of this schema. + */ + if (fieldNode.has(Field.Prop.$REF)) { + // Need to check table is present already or not before saving it into the table. + var ref = fieldNode.get(Field.Prop.$REF).asText(); + if (ref.startsWith("#/definitions")) { + var path = ref.split("/"); + fieldNode = schema.get(path[1]).get(path[2]); + var nameSpace = schemaName; + var name = path[2]; + var table = constructTable(null, fieldNode, nameSpace, name, tableMap, fieldTableMap, initialPull); + if (Boolean.TRUE.equals(initialPull)) { + table = tableService.addTable(table); + tableMap.put(table.getId().toString(), table); + } else { + tableMap.put(table.getName(), table); + } + return builder.isPrimitiveType(false).contain(table).build(); + } else { + var uri = getUri(ref); + var path = uri.getPath(); + var name = path.substring(path.lastIndexOf("/")+1); // taking name from $id + var nameSpace = path.substring(1, path.lastIndexOf("/")).replaceAll("/", "."); // taking namespace from $id + var type = StringUtil.constructStringEmptySeparator(nameSpace, ".", name); + var field = builder.dataType(type).isPrimitiveType(false).build(); + if (initialPull) field = tableService.addField(field); + fieldTableMap.put(field, type); + return field; + } + } + + // check whether the field is enum + if (fieldNode.has(Field.Prop.ENUM)) { + log.info("Field name - {} of Type - {} of schema - {}", fieldName, Field.Prop.ENUM, schemaName); + var
field = builder.dataType(Field.Prop.ENUM) + .isPrimitiveType(false) + .symbols(getArrayFromNode(fieldNode.get(Field.Prop.ENUM))) + .build(); + return field; + } + // if the type has list then it is union type field + JsonNode unionTypes = null; + if (fieldNode.has(Field.Prop.ONE_OF)) unionTypes = fieldNode.get(Field.Prop.ONE_OF); + else if (fieldNode.has(Field.Prop.ANY_OF)) unionTypes = fieldNode.get(Field.Prop.ANY_OF); + + if (Objects.nonNull(unionTypes)) { + builder.dataType("union"); + builder.isPrimitiveType(false); + parseUnionTypes(schema, builder, unionTypes, schemaName, fieldName, tableMap, fieldTableMap, initialPull); + return builder.build(); + } + if (fieldNode.get(Field.Prop.TYPE).isArray()) { // if union type less than draft 4. + builder.dataType("union"); + builder.isPrimitiveType(false); + parseUnionType(builder, fieldNode.get(Field.Prop.TYPE), fieldName); + return builder.build(); + } + + var type = fieldNode.get(Field.Prop.TYPE).asText(); + builder.dataType(type); + builder.isPrimitiveType(isPrimitiveType(type)); + + log.info("Field name - {} of Type - {} of schema - {}", fieldName, type, schemaName); + + // check weather the field is array + if (type.contains(Field.Prop.ARRAY)) { + return parseArrayField(schema, builder, fieldNode, schemaName, fieldName, tableMap, fieldTableMap, initialPull); + } + // check weather the field is object + else if (type.contains(Field.Prop.OBJECT)) { + // nameSpace of new table is same as Parent table AND name is same as Field. 
+ var table = parseNestedTable(fieldNode, schemaName, fieldName, tableMap, fieldTableMap, initialPull); + builder.dataType(type).isPrimitiveType(false).contain(table); + } + // or else the field is primitive type + return builder.build(); + } + + private Table parseNestedTable(JsonNode fieldNode, String nameSpace, String name, Map tableMap, Map fieldTableMap, Boolean initialPull) { + var id = getString(fieldNode, Table.Prop.JSON_SCHEMA_ID, null); + var table = constructTable(id, fieldNode, nameSpace, name, tableMap, fieldTableMap, initialPull); + var type = StringUtil.constructStringEmptySeparator(nameSpace, ".", name); + if (Boolean.TRUE.equals(initialPull)) { + table = tableService.addTable(table); + tableMap.put(table.getId().toString(), table); + } else { + tableMap.put(type, table); + } + return table; + } + + private void parseUnionTypes(JsonNode schema, Field.FieldBuilder builder, JsonNode unionNode, String schemaName, String fieldName, + Map tableMap, Map fieldTableMap, Boolean initialPull) { + List unionTypes = new ArrayList<>(); + for (JsonNode jsonNode : unionNode) { + var field = parseField(schema, jsonNode, Field.builder(), schemaName, fieldName, tableMap, fieldTableMap, initialPull); + unionTypes.add(field); + } + builder.unionTypes(unionTypes); + } + + private void parseUnionType(Field.FieldBuilder builder, JsonNode fieldTypes, String fieldName) { + List unionTypes = new ArrayList<>(); + var types = getArrayFromNode(fieldTypes); + for (String type : types) { + unionTypes.add(Field.builder().name(fieldName).dataType(type).build()); + } + builder.unionTypes(unionTypes); + } + + private Field parseArrayField(JsonNode schema, Field.FieldBuilder builder, JsonNode fieldNode, String nameSpace, String name, Map tableMap, + Map fieldTableMap, Boolean initialPull) { + JsonNode itemNode = fieldNode.get(Field.Prop.ITEMS); + Table table = null; + if (itemNode.has(Field.Prop.$REF)) { + var ref = itemNode.get(Field.Prop.$REF).asText(); + 
builder.jsonSchemaRefId(ref); + if (ref.startsWith("#/definitions")) { + var path = ref.split("/"); + fieldNode = schema.get(path[1]).get(path[2]); + name = path[2]; + var type = StringUtil.constructStringEmptySeparator(nameSpace, ".", name); + table = constructTable(null, fieldNode, nameSpace, name, tableMap, fieldTableMap, initialPull); + if (Boolean.TRUE.equals(initialPull)) { + table = tableService.addTable(table); + tableMap.put(table.getId().toString(), table); + } else { + tableMap.put(table.getName(), table); + } + return builder.isPrimitiveType(false).contain(table).items(type).build(); + } else { + var uri = getUri(ref); + var path = uri.getPath(); + name = path.substring(path.lastIndexOf("/")+1); + nameSpace = path.substring(1, path.lastIndexOf("/")).replaceAll("/", "."); + var type = StringUtil.constructStringEmptySeparator(nameSpace, ".", name); + var field = builder.items(type).isPrimitiveType(false).build(); + field = tableService.addField(field); + fieldTableMap.put(field, type); + return field; + } + } + + // check weather the field is enum + if (itemNode.has(Field.Prop.ENUM)) { + var field = builder.items(Field.Prop.ENUM) + .isPrimitiveType(false) + .symbols(getArrayFromNode(itemNode.get(Field.Prop.ENUM))) + .build(); + return field; + } + + JsonNode unionTypes = null; + if (itemNode.has(Field.Prop.ONE_OF)) unionTypes = itemNode.get(Field.Prop.ONE_OF); + else if (itemNode.has(Field.Prop.ANY_OF)) unionTypes = itemNode.get(Field.Prop.ANY_OF); + + if (Objects.nonNull(unionTypes)) { + parseUnionTypes(schema, builder, unionTypes, nameSpace, name, tableMap, fieldTableMap, initialPull); + return builder.items("union").build(); + } + if (itemNode.isArray()) { // if union type less than draft 4. 
+ parseUnionType(builder, itemNode, name); + return builder.items("union").build(); + } + var items =itemNode.get(Field.Prop.TYPE).asText(); + // check whether the array field contains object (i.e., check whether it is + // array of object) + if (items.equals(Field.Prop.OBJECT)) { + // if the field is object, then construct another Table. + table = parseNestedTable(fieldNode, nameSpace, name, tableMap, fieldTableMap, initialPull); + items = getString(itemNode, Table.Prop.TYPE, null); + builder.isPrimitiveType(false); + } + return builder.items(items).contain(table).build(); + } + + private String[] getArrayFromNode(JsonNode arrNode) { + // List strings = new ArrayList<>(); + // if (arrNode.isArray()) { + // for (JsonNode objNode : arrNode) { + // strings.add(objNode.asText()); + // } + // } + // return strings.toArray(new String[0]); + return mapper.convertValue(arrNode, String[].class); + } + + private String getString(JsonNode node, String type, String defaultValue) { + return node.has(type) ? node.get(type).asText() : defaultValue; + } + + // private Integer getInteger(JsonNode node, String type) { + // return node.has(type) ? node.get(type).asInt() : null; + // } + + private Boolean getBoolean(JsonNode node, String type, Boolean defaultvalue) { + return node.has(type) ?
node.get(type).asBoolean() : defaultvalue; + } + + private boolean isPrimitiveType(String name) { + return PRIMITIVES.contains(name); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/processor/protobuf/Loader.java b/connect/src/main/java/com/opsbeach/connect/schemata/processor/protobuf/Loader.java new file mode 100644 index 0000000..cfb9486 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/processor/protobuf/Loader.java @@ -0,0 +1,16 @@ +package com.opsbeach.connect.schemata.processor.protobuf; + +import com.google.protobuf.Descriptors; +import com.google.protobuf.Descriptors.Descriptor; +import com.google.protobuf.Descriptors.FileDescriptor; + +import java.util.List; +import java.util.Map; + +/** + * Abstracts loading of Descriptor objects from various sources + */ +public interface Loader { + public List loadDescriptors() throws Descriptors.DescriptorValidationException; + public Map loadFileDescriptors() throws Descriptors.DescriptorValidationException; +} diff --git a/src/main/java/org/schemata/provider/protobuf/ProtoFileDescriptorSetLoader.java b/connect/src/main/java/com/opsbeach/connect/schemata/processor/protobuf/ProtoFileDescriptorSetLoader.java similarity index 67% rename from src/main/java/org/schemata/provider/protobuf/ProtoFileDescriptorSetLoader.java rename to connect/src/main/java/com/opsbeach/connect/schemata/processor/protobuf/ProtoFileDescriptorSetLoader.java index 734ceca..05f1683 100644 --- a/src/main/java/org/schemata/provider/protobuf/ProtoFileDescriptorSetLoader.java +++ b/connect/src/main/java/com/opsbeach/connect/schemata/processor/protobuf/ProtoFileDescriptorSetLoader.java @@ -1,16 +1,17 @@ -package org.schemata.provider.protobuf; +package com.opsbeach.connect.schemata.processor.protobuf; import com.google.protobuf.DescriptorProtos; import com.google.protobuf.Descriptors; import com.google.protobuf.ExtensionRegistry; +import com.google.protobuf.Descriptors.DescriptorValidationException; + import 
org.jgrapht.graph.DirectedAcyclicGraph; import org.jgrapht.util.SupplierUtil; import org.schemata.schema.SchemataBuilder; -import org.schemata.schema.SchemataConstraintsBuilder; -import org.schemata.schema.SchemataSubscribersBuilder; import java.io.IOException; import java.io.InputStream; +import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; @@ -24,23 +25,33 @@ public class ProtoFileDescriptorSetLoader implements Loader { private final DescriptorProtos.FileDescriptorSet descriptorSet; + public ProtoFileDescriptorSetLoader(DescriptorProtos.FileDescriptorSet descriptorSet) { + this.descriptorSet = descriptorSet; + } + public ProtoFileDescriptorSetLoader(InputStream stream) throws IOException { var registry = ExtensionRegistry.newInstance(); SchemataBuilder.registerAllExtensions(registry); - SchemataSubscribersBuilder.registerAllExtensions(registry); - SchemataConstraintsBuilder.registerAllExtensions(registry); this.descriptorSet = DescriptorProtos.FileDescriptorSet.parseFrom(stream, registry); } @Override - public List loadDescriptors() throws Descriptors.DescriptorValidationException {// we need to build a DAG of filenames so that we can build the Descriptors.Descriptor objects + public List loadDescriptors() throws Descriptors.DescriptorValidationException { + var descriptors = loadFileDescriptors(); + // lastly, we collect out each of the message Descriptor objects from the FileDescriptors into a flat list + return collectAllMessageDescriptors(descriptors.values()); + } + + @Override + public Map loadFileDescriptors() throws DescriptorValidationException { + // we need to build a DAG of filenames so that we can build the Descriptors.Descriptor objects // we need to build a DAG of filenames that import each other, so that we can build the Descriptors.Descriptor - // objects in the correct order, providing each one with a Descriptor for each file it imports + // objects in the the correct order, providing each one 
with a Descriptor for each file it imports var dependencyFilenames = buildFileDependencyGraph(descriptorSet); // we key the basic proto representations of the FileDescriptor by filename for simpler retrieval - var fileDescriptorProtosByName = indexFileDescriptorProtoByFilename(descriptorSet); + var fileDescriptorProtosByName = indexFileDescriptorProtosByFilename(descriptorSet); // these be the parsed FileDescriptor objects (again keyed by filename) so that they can be passed back into // the instantiation of any other files that import them @@ -59,13 +70,11 @@ public List loadDescriptors() throws Descriptors.Descrip var descriptor = Descriptors.FileDescriptor.buildFrom(file, dependenciesForFile); descriptors.put(filename, descriptor); } - - // lastly, we collect out each of the message Descriptor objects from the FileDescriptors into a flat list - return collectAllMessageDescriptors(descriptors.values()); + return descriptors; } private DirectedAcyclicGraph buildFileDependencyGraph(DescriptorProtos.FileDescriptorSet descriptorSet) { - var dependencyFilenames = new DirectedAcyclicGraph<>( + var dependencyFilenames = new DirectedAcyclicGraph( SupplierUtil.createSupplier(String.class), SupplierUtil.createSupplier(String.class), false); @@ -82,7 +91,7 @@ private DirectedAcyclicGraph buildFileDependencyGraph(Descriptor return dependencyFilenames; } - public Map indexFileDescriptorProtoByFilename + private Map indexFileDescriptorProtosByFilename (DescriptorProtos.FileDescriptorSet descriptorSet) { return descriptorSet .getFileList() @@ -90,16 +99,26 @@ private DirectedAcyclicGraph buildFileDependencyGraph(Descriptor .collect(Collectors.toMap(DescriptorProtos.FileDescriptorProto::getName, file -> file)); } - public DescriptorProtos.FileDescriptorSet getDescriptorSet() { - return descriptorSet; - } - private List collectAllMessageDescriptors - (Collection descriptors) { - return descriptors - .stream() - .flatMap(fileDescriptor -> fileDescriptor.getMessageTypes().stream()) 
- .toList(); + (Collection fileDescriptors) { + List descriptors = new ArrayList<>(); + for (var fileDescriptor : fileDescriptors) { + descriptors.addAll(collectAllNestedMessageDescriptors(fileDescriptor.getMessageTypes().stream().toList())); + } + return descriptors; } + public List collectAllNestedMessageDescriptors + (List descriptors) { + List descriptorList = new ArrayList<>(); + for (var descriptor : descriptors) { + if (descriptor.getNestedTypes().isEmpty()) { + descriptorList.add(descriptor); + } else { + descriptorList.addAll(collectAllNestedMessageDescriptors(descriptor.getNestedTypes())); + descriptorList.add(descriptor); + } + } + return descriptorList; + } } \ No newline at end of file diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/processor/protobuf/ProtoProcessor.java b/connect/src/main/java/com/opsbeach/connect/schemata/processor/protobuf/ProtoProcessor.java new file mode 100644 index 0000000..acee57c --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/processor/protobuf/ProtoProcessor.java @@ -0,0 +1,122 @@ +package com.opsbeach.connect.schemata.processor.protobuf; + +import com.google.protobuf.Descriptors; +import com.opsbeach.connect.schemata.entity.Field; +import com.opsbeach.connect.schemata.entity.Table; +import com.opsbeach.connect.schemata.enums.EventType; +import com.opsbeach.connect.schemata.enums.SchemaType; +import com.opsbeach.sharedlib.security.SecurityUtil; + +import java.util.*; +import java.util.concurrent.atomic.AtomicInteger; + +import org.schemata.schema.SchemataBuilder; + +public class ProtoProcessor { + + private static final Set INCLUDED_PRIMITIVE_TYPES = Set.of("google.protobuf.Timestamp"); + + public List
parse(List descriptors, Map fieldTableMap) { + return descriptors + .stream() + .filter(this::isGoogleDefaultProto) + .filter(this::isSchemataDefaultProto) + .map(descriptor -> parseSingleSchema(descriptor, fieldTableMap)) + .toList(); + } + + public Table parseSingleSchema(Descriptors.Descriptor descriptor, Map fieldTableMap) { + String schemaName = descriptor.getFullName(); + // Extract all the metadata for the fieldList + var fieldList = extractFields(descriptor.getFields(), schemaName, fieldTableMap); + return extractSchema(descriptor, schemaName, fieldList); + } + + public Table extractSchema(Descriptors.Descriptor descriptorType, String schema, List fieldList) { + var builder = Table.builder().name(descriptorType.getName()).nameSpace(schema.substring(0, schema.lastIndexOf("."))) + .clientId(SecurityUtil.getClientId()).fields(fieldList); + for (Map.Entry entry : descriptorType.getOptions().getAllFields().entrySet()) { + + switch (entry.getKey().getName()) { + case "message_core" -> { + SchemataBuilder.CoreMetadata coreMetadata = (SchemataBuilder.CoreMetadata) entry.getValue(); + builder.description(coreMetadata.getDescription()); + // builder.comment(coreMetadata.getComment()); + // builder.seeAlso(coreMetadata.getSeeAlso()); + // builder.reference(coreMetadata.getReference()); + } + case "owner" -> builder.owner(Objects.toString(entry.getValue(), null)); + case "domain" -> builder.domain(Objects.toString(entry.getValue(), null)); + case "schema_type" -> builder.schemaType(SchemaType.get(entry.getValue().toString())); + case "event_type" -> builder.eventType(EventType.get(entry.getValue().toString())); + case "status" -> builder.status(Objects.toString(entry.getValue(), null)); + // case "team_channel" -> builder.teamChannel(Objects.toString(entry.getValue(), "")); + // case "alert_channel" -> builder.alertChannel(Objects.toString(entry.getValue(), "")); + case "compliance_owner" -> builder.complianceOwner(Objects.toString(entry.getValue(), null)); + // case 
"compliance_channel" -> builder.complianceChannel(Objects.toString(entry.getValue(), "")); + case "channel" -> builder.channel(Objects.toString(entry.getValue(), null)); + case "email" -> builder.email(Objects.toString(entry.getValue(), null)); + case "quality_rule_base" -> builder.email(Objects.toString(entry.getValue(), null)); + // case "quality_rule_sql" -> builder.email(Objects.toString(entry.getValue(), null)); + // case "quality_rule_cel" -> builder.email(Objects.toString(entry.getValue(), null)); + case "subscribers" -> builder.subscribers(extractSubscribers(entry.getValue())); + } + } + return builder.build(); + } + + private String[] extractSubscribers(Object value) { + SchemataBuilder.Subscribers subscribers = (SchemataBuilder.Subscribers) value; + return subscribers.getNameList().stream().toList().toArray(new String[0]); + } + + public List extractFields(List fieldDescriptorList, String schema, Map fieldTableMap) { + List fields = new ArrayList<>(); + AtomicInteger rowNumber = new AtomicInteger(1); + for (Descriptors.FieldDescriptor entry : fieldDescriptorList) { + String type = entry.getType() == Descriptors.FieldDescriptor.Type.MESSAGE ? 
entry.getMessageType().getFullName() + : entry.getType().name(); + var builder = Field.builder().name(entry.getName()).schema(schema).dataType(type).isPrimitiveType(isPrimitiveType(entry.getType(), type)); + for (Map.Entry fieldEntry : entry.getOptions().getAllFields().entrySet()) { + switch (fieldEntry.getKey().getName()) { + case "field_core" -> { + SchemataBuilder.CoreMetadata coreMetadata = (SchemataBuilder.CoreMetadata) fieldEntry.getValue(); + builder.description(coreMetadata.getDescription()); + // builder.comment(coreMetadata.getComment()); + // builder.seeAlso(coreMetadata.getSeeAlso()); + // builder.reference(coreMetadata.getReference()); + } + case "is_classified" -> builder.isClassified(Boolean.parseBoolean(fieldEntry.getValue().toString())); + case "is_pii" -> builder.isPii(Boolean.parseBoolean(fieldEntry.getValue().toString())); + case "depricated" -> builder.deprecated(Boolean.parseBoolean(fieldEntry.getValue().toString())); + // case "classification_level" -> builder.classificationLevel(Objects.toString(fieldEntry.getValue(), "")); + // case "product_type" -> builder.productType(Objects.toString(fieldEntry.getValue(), "")); + case "is_primary_key" -> builder.isPrimaryKey(Boolean.parseBoolean(fieldEntry.getValue().toString())); + } + } + var field = builder.rowNumber(rowNumber.getAndIncrement()).build(); + if (type == Descriptors.FieldDescriptor.Type.ENUM.name()) { + field.setEnumFilePath("main/schema/"+entry.getEnumType().getFile().getFullName()); + field.setEnumName(entry.getEnumType().getName()); + field.setEnumPackage(entry.getEnumType().getFile().getPackage()); + field.setSymbols(entry.getEnumType().getValues().stream().map(v -> v.getName()).toList().toArray(new String[0])); + } + if (field.getIsPrimitiveType().equals(Boolean.FALSE) && entry.getType() == Descriptors.FieldDescriptor.Type.MESSAGE) { + fieldTableMap.put(field, type); + } + fields.add(field); + } + return fields; + } + + private boolean isGoogleDefaultProto(Descriptors.Descriptor 
descriptor) { + return !descriptor.getFullName().startsWith("google.protobuf"); + } + private boolean isSchemataDefaultProto(Descriptors.Descriptor descriptor) { + return !descriptor.getFullName().startsWith("org.schemata"); + } + + private boolean isPrimitiveType(Descriptors.FieldDescriptor.Type type, String typeName) { + return type != Descriptors.FieldDescriptor.Type.MESSAGE || INCLUDED_PRIMITIVE_TYPES.contains(typeName); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/processor/protobuf/ProtoSchema.java b/connect/src/main/java/com/opsbeach/connect/schemata/processor/protobuf/ProtoSchema.java new file mode 100644 index 0000000..f1711bc --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/processor/protobuf/ProtoSchema.java @@ -0,0 +1,215 @@ +package com.opsbeach.connect.schemata.processor.protobuf; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStreamReader; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicInteger; + +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Component; + +import com.opsbeach.connect.github.entity.ClientRepo; +import com.opsbeach.connect.github.entity.SchemaFileAudit; +import com.opsbeach.connect.github.service.ClientRepoService; +import com.opsbeach.connect.github.service.DomainService; +import com.opsbeach.connect.github.service.ModelService; +import com.opsbeach.connect.github.service.SchemaFileAuditService; +import com.opsbeach.connect.schemata.entity.Field; +import com.opsbeach.connect.schemata.entity.Table; +import com.opsbeach.connect.schemata.service.DomainNodeService; +import com.opsbeach.connect.schemata.service.TableService; +import com.opsbeach.sharedlib.exception.ErrorCode; +import com.opsbeach.sharedlib.exception.FileNotFoundException; +import 
com.opsbeach.sharedlib.exception.SchemaParserException; +import com.opsbeach.sharedlib.response.ResponseMessage; +import com.opsbeach.sharedlib.security.ApplicationConfig; +import com.opsbeach.sharedlib.service.GoogleCloudService; +import com.opsbeach.sharedlib.utils.FileUtil; +import com.opsbeach.sharedlib.utils.StringUtil; + +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +@Component +@RequiredArgsConstructor +public class ProtoSchema { + + private final DomainNodeService domainNodeService; + private final DomainService domainService; + private final TableService tableService; + private final ModelService modelService; + private final SchemaFileAuditService schemaFileAuditService; + private final ResponseMessage responseMessage; + private final GoogleCloudService googleCloudService; + private final ClientRepoService clientRepoService; + private final ApplicationConfig applicationConfig; + + @Value("${server.home-path}") + private String homePath; + + // public List
getTables(String path, Boolean toSave) throws IOException, DescriptorValidationException { + // var loader = new ProtoFileDescriptorSetLoader(new FileInputStream(new File(path))); + // var descriptors = loader.loadDescriptors(); + // return new ProtoProcessor().parse(descriptors, new HashMap<>()); + // } + + public Map generateSchema(Long tableId) { + var fileGenerator = new ProtobufFileGenerator(schemaFileAuditService, modelService, tableService); + return fileGenerator.getSchema(tableId); + } + + public void parseFolder(String folderPath, ClientRepo clientRepo) { + var filePaths = FileUtil.deepSearchFiles(folderPath, ".proto"); + log.info("creating domain in neo4j"); + var domainNode = domainNodeService.addDomainNode(clientRepo.getFullName(), clientRepo.getClientId(), clientRepo.getId()); + var domain = domainService.addDomain(clientRepo, domainNode.getId()); + domainNode.setTables(new ArrayList<>()); + Map fieldTableMap = new HashMap<>(); + filePaths.forEach(filePath -> { + if (filePath.contains("/src/org/schemata/protobuf/")) return; + log.info("Working on fetching Models from file : "+filePath); + var tables = getTableFromProtoFileByPath(filePath, folderPath, fieldTableMap); + if (tables != null) { + tables = tableService.addTables(tables); + var schemaFileAudit = schemaFileAuditService.createSchemaFileAuditWhileInitialLoading(filePath, clientRepo, tables.get(tables.size()-1).getId()); + modelService.createModels(tables, schemaFileAudit, domain); + domainNode.getTables().addAll(tables); + } + }); + mergeFieldAndTable(fieldTableMap); + log.info("Fetching Models from Files is completed"); + domainNodeService.update(domainNode); + } + + private List
getTableFromProtoFileByPath(String filePath, String repoFolderPath, Map fieldTableMap) { + try { + var schemataProtoFilesPath = StringUtil.constructStringEmptySeparator(repoFolderPath, "/", new File(repoFolderPath).list()[0], "/src/org"); + var rootFolderPath = StringUtil.constructStringEmptySeparator(repoFolderPath, "/", new File(repoFolderPath).list()[0], "/src/main/schema"); + var descriptorFilePath = generateDescriptorFileForProtoFile(filePath, repoFolderPath, schemataProtoFilesPath, rootFolderPath); + var loader = new ProtoFileDescriptorSetLoader(new FileInputStream(new File(descriptorFilePath))); + var fileDescriptors = loader.loadFileDescriptors(); + var fileName = filePath.replace(rootFolderPath+"/", ""); + // FileUtil.deleteFile(descriptorFilePath); + var descriptors = fileDescriptors.get(fileName).getMessageTypes().stream().toList(); + descriptors = loader.collectAllNestedMessageDescriptors(descriptors); + return new ProtoProcessor().parse(descriptors, fieldTableMap); + } catch (IOException e) { + throw new FileNotFoundException(ErrorCode.FILE_NOT_FOUND, responseMessage.getErrorMessage(ErrorCode.FILE_NOT_FOUND, e.getMessage())); + } catch (Exception e) { + log.info("Error occured while parsing file: "+filePath); + throw new SchemaParserException(e.getMessage()); + } + } + + private void mergeFieldAndTable(Map fieldTableMap) { + for (Map.Entry entry : fieldTableMap.entrySet()) { + var fullname = entry.getValue(); + var nameSpace = fullname.substring(0, fullname.lastIndexOf(".")); + var name = fullname.substring(fullname.lastIndexOf(".")+1); + var table = tableService.findByNameAndNameSpace(name, nameSpace); + var field = entry.getKey(); + field.setContain(table); + tableService.addField(field); + } + } + + // private String generateDescriptorFileForFolder(String folderPath) { + // var filePaths = FileUtil.deepSearchFiles(folderPath, ".proto"); + // Set protoPaths = new HashSet<>(); + // filePaths.forEach(path -> { + // protoPaths.add(path.substring(0, 
path.lastIndexOf("/"))); + // }); + // StringBuilder protoComand = new StringBuilder("protoc"); + // protoPaths.forEach(pp -> protoComand.append(" -I=".concat(pp))); + // protoComand.append(" --descriptor_set_out=model.desc --include_imports --include_source_info "); + // // protoComand.append(folderPath.concat("/**/*.proto ")); + // protoPaths.forEach(fp -> protoComand.append(fp+"/*.proto ")); + // protoComand.append("--experimental_allow_proto3_optional"); + // System.out.println(protoComand.toString()); + // executeProtocCmd(protoComand.toString(), folderPath); + // return folderPath+"/model.desc"; + // } + + private String generateDescriptorFileForProtoFile(String filePath, String repoFolderPath, + String schemataProtoFilesPath, String rootFolderPath) throws IOException, InterruptedException { + var folderPath = filePath.substring(0, filePath.lastIndexOf("/")); + var descriptorFileName = filePath.substring(filePath.lastIndexOf("/")+1, filePath.lastIndexOf(".")); + StringBuilder protoComand = new StringBuilder("protoc -I="); + protoComand.append(schemataProtoFilesPath); + protoComand.append(" -I=".concat(rootFolderPath)); + protoComand.append(" --descriptor_set_out=").append(descriptorFileName).append(".desc --include_imports --include_source_info "); + protoComand.append(filePath).append(" --experimental_allow_proto3_optional"); + System.out.println(protoComand.toString()); + executeProtocCmd(protoComand.toString(), folderPath); + return StringUtil.constructStringEmptySeparator(folderPath, "/", descriptorFileName, ".desc"); + } + + private void executeProtocCmd(String cmd, String cmdExecutionPath) throws IOException, InterruptedException { + System.out.println(System.getProperty("os.name")); + ProcessBuilder builder = new ProcessBuilder(); + builder.command("sh", "-c", cmd); + builder.directory(new File(cmdExecutionPath)); + Process process = builder.start(); + + // Read the output of the command if needed + BufferedReader reader = new BufferedReader(new 
InputStreamReader(process.getInputStream())); + String line; + while ((line = reader.readLine()) != null) { + System.out.println(line); + } + // Wait for the process to finish + int exitCode = process.waitFor(); + System.out.println("Command completed with exit code "+exitCode); + } + + public Map> getTablesOfFilePaths(String[] filePaths, ClientRepo clientRepo, String branchName) { + var clientDto = clientRepoService.getClient(); + String objectName = StringUtil.constructStringEmptySeparator(clientDto.getName(), "/", clientRepo.getFullName(), "/", branchName); + var repoFolderPath = StringUtil.constructStringEmptySeparator(homePath, clientRepo.getFullName(), "-delta"); + new File(repoFolderPath).mkdirs(); + pullFilesFromBucket(objectName, repoFolderPath); + Map filteredFilePaths = filterFilePaths(FileUtil.deepSearchFiles(repoFolderPath, ".proto"), filePaths); + Map> fileTables = new HashMap<>(); + for (Map.Entry entry : filteredFilePaths.entrySet()) { + var filePath = entry.getValue(); + if (filePath.contains("/src/org/schemata/protobuf/")) continue; + log.info("Working on fetching Models from file : "+filePath); + var tables = getTableFromProtoFileByPath(filePath, repoFolderPath, new HashMap<>()); + fileTables.put(entry.getKey(), tables); + } + FileUtil.deleteDirectory(repoFolderPath); + return fileTables; + } + + private Map filterFilePaths(List allFilePaths, String[] changesFilePaths) { + Map filePaths = new HashMap<>(); + AtomicInteger flag = new AtomicInteger(0); + for (String filePath : allFilePaths) { + if (filePath.endsWith(changesFilePaths[flag.get()])) { + /* + * Here Key value contains absolute path of file where as value contains exactPath of folder from root folder. 
+ */ + filePaths.put(changesFilePaths[flag.getAndIncrement()], filePath); + } + if (flag.get() > (changesFilePaths.length - 1)) break; + } + return filePaths; + } + + private boolean pullFilesFromBucket(String objectName, String repoFolderPath) { + String destFilePath = repoFolderPath + "/repo.tar.gz"; + log.info("pulling file from bucket"); + googleCloudService.downloadFile(applicationConfig.getGcloud().get("repo-bucket"), objectName, destFilePath); + log.info("UnZip the downloaded tar.gz file"); + FileUtil.uncompressTarGZ(repoFolderPath, destFilePath); + FileUtil.deleteFile(destFilePath); + return true; + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/processor/protobuf/ProtobufFileGenerator.java b/connect/src/main/java/com/opsbeach/connect/schemata/processor/protobuf/ProtobufFileGenerator.java new file mode 100644 index 0000000..bb7461a --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/processor/protobuf/ProtobufFileGenerator.java @@ -0,0 +1,173 @@ +package com.opsbeach.connect.schemata.processor.protobuf; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.springframework.util.ObjectUtils; + +import com.opsbeach.connect.github.entity.Model; +import com.opsbeach.connect.github.entity.SchemaFileAudit; +import com.opsbeach.connect.github.service.ModelService; +import com.opsbeach.connect.github.service.SchemaFileAuditService; +import com.opsbeach.connect.schemata.entity.Field; +import com.opsbeach.connect.schemata.entity.Table; +import com.opsbeach.connect.schemata.service.TableService; +import com.opsbeach.sharedlib.utils.StringUtil; + +import lombok.RequiredArgsConstructor; + +@RequiredArgsConstructor +public class ProtobufFileGenerator { + + private final SchemaFileAuditService schemaFileAuditService; + private final ModelService modelService; + private final TableService tableService; + + public Map 
getSchema(Long tableId) { + var schemaFileAudit = schemaFileAuditService.getByModelNodeId(tableId).get(0); // protobuf schema contain only one file for one table. + var models = modelService.findBySchemaFileAudit(schemaFileAudit.getId()); + var tableIds = models.stream().map(Model::getNodeId).toList(); + var tables = tableService.findAllById(tableIds); + Set imports = new HashSet<>(); + imports.add("\"schemata/protobuf/schemata.proto\""); + List messages = new ArrayList<>(tableIds.size()); + Map enums = new HashMap<>(); + tables.forEach(table -> { + System.out.println("processing table "+table.getName()); + table = schemaFileAuditService.filterFields(table); + createMessage(table, imports, schemaFileAudit.getPath(), messages, enums); + }); + StringBuilder content = new StringBuilder("syntax = \"proto3\";\n"); + content.append("package ").append(tables.get(0).getNameSpace()).append(";\n\n"); + imports.forEach(i -> content.append("import ").append(i).append(";\n")); + content.append("\n"); + enums.values().forEach(message -> content.append(message).append("\n")); + messages.forEach(message -> content.append(message).append("\n")); + System.out.println(content.toString()); + return Map.of(schemaFileAudit, content.toString()); + } + + private void createMessage(Table table, Set imports, String filePath, List messages, Map enums) { + StringBuilder message = new StringBuilder("message "); + message.append(table.getName()).append(" {\n"); + message.append(" option(org.schemata.schema.message_core).description = \"").append(table.getDescription()).append("\";\n"); + message.append(" option(org.schemata.schema.owner) = \"").append(table.getOwner()).append("\";\n"); + message.append(" option(org.schemata.schema.domain) = \"").append(table.getDomain()).append("\";\n"); + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(table.getSchemaType()))) + message.append(" option(org.schemata.schema.schema_type) = ").append(table.getSchemaType()).append(";\n"); + if 
(Boolean.FALSE.equals(ObjectUtils.isEmpty(table.getChannel()))) + message.append(" option(org.schemata.schema.channel) = \"").append(table.getChannel()).append("\";\n"); + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(table.getEmail()))) + message.append(" option(org.schemata.schema.email) = \"").append(table.getEmail()).append("\";\n"); + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(table.getStatus()))) + message.append(" option(org.schemata.schema.status) = \"").append(table.getStatus()).append("\";\n"); + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(table.getComplianceOwner()))) + message.append(" option(org.schemata.schema.compliance_owner) = \"").append(table.getComplianceOwner()).append("\";\n"); + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(table.getQualityRuleBase()))) + message.append(" option(org.schemata.schema.quality_rule_base) = \"").append(table.getQualityRuleBase()).append("\";\n"); + // if (Boolean.FALSE.equals(ObjectUtils.isEmpty(table.getQualityRuleSql()))) + // message.append(" option(org.schemata.schema.quality_rule_sql) = \"").append(table.getQualityRuleSql()).append("\";\n"); + // if (Boolean.FALSE.equals(ObjectUtils.isEmpty(table.getQualityRuleCel()))) + // message.append(" option(org.schemata.schema.quality_rule_cel) = \"").append(table.getQualityRuleCel()).append("\";\n"); + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(table.getSubscribers()))) { + message.append(" option(org.schemata.schema.subscribers) = {\n"); + int n = table.getSubscribers().length; + for (int i=0; i dataType.toLowerCase(); + default -> dataType; + }; + } + + private boolean isPrimitiveType(String dataType) { + return switch (dataType) { + case "double", "float", "int32", "int64", "uint32", + "uint64", "sint32", "sint64", "fixed32", "fixed64", "sfixed32", "sfixed64", "bool", "string", "bytes" -> true; + default -> false; + }; + } + + private String addImportPath(Set imports, Field field, String filePath, Map enums) { + var dataType = getDataType(field.getDataType()); + 
var isSchemataType = field.getDataType().startsWith("google.protobuf") || field.getDataType().startsWith("org.schemata"); + System.out.println(dataType+" "+isSchemataType); + if (isPrimitiveType(dataType) == false && !field.getDataType().equals("ENUM") && !isSchemataType) { + var fullName = field.getDataType(); + var nameSpace = fullName.substring(0, fullName.lastIndexOf(".")); + var name = fullName.substring(fullName.lastIndexOf(".")+1); + var schemaFileAudits = schemaFileAuditService.getByModelNodeId(tableService.findByNameAndNameSpace(name, nameSpace).getId()); + if (filePath.equals(schemaFileAudits.get(0).getPath())) return name; + imports.add(getImportPath(filePath, schemaFileAudits.get(0).getPath())); + return name; + } + if (dataType.equals("ENUM")) { + dataType = field.getEnumPackage()+"."+field.getEnumName(); + if (field.getEnumFilePath().contains("schemata/protobuf/schemata.proto") || + field.getEnumFilePath().contains("google/protobuf/")) return dataType; + var index = filePath.indexOf("/main/schema/"); + System.out.println(filePath+" index "+index); + var path = filePath.substring(index+1); + if (path.equals(field.getEnumFilePath())) { + if (Boolean.FALSE.equals(enums.containsKey(field.getEnumName()))) { + StringBuilder message = new StringBuilder("enum "); + message.append(field.getEnumName()).append(" {\n"); + int n = field.getSymbols().length; + for (int i=0; i referenceFilePath.length()) + n = workingFilePath.length(); + else + n = referenceFilePath.length(); + int i = 0; + int j = 0; + while (i < n) { + if (workingFilePath.charAt(i) == referenceFilePath.charAt(i)) { + if (workingFilePath.charAt(i) == '/') j = ++i; + else i++; + } + else + break; + } + return '"'+referenceFilePath.substring(j)+'"'; + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/repository/DomainNodeRepository.java b/connect/src/main/java/com/opsbeach/connect/schemata/repository/DomainNodeRepository.java new file mode 100644 index 0000000..2cb3536 --- 
/dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/repository/DomainNodeRepository.java @@ -0,0 +1,18 @@ +package com.opsbeach.connect.schemata.repository; + +import org.springframework.data.neo4j.repository.Neo4jRepository; +import org.springframework.data.neo4j.repository.query.Query; +import org.springframework.data.repository.query.Param; + +import com.opsbeach.connect.schemata.entity.DomainNode; + +public interface DomainNodeRepository extends Neo4jRepository { + + @Query(""" + MATCH (n:Organization) MATCH (m:DomainNode) WHERE ID(n) = $organizationId AND ID(m) = $domainId + CREATE (n)-[r:HAS]->(m) RETURN ID(r) + """) + Long createOrganizationDomainRelationship(@Param("organizationId") Long organizationId, @Param("domainId") Long domainId); + + void deleteByClientRepoId(Long clientRepoId); +} diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/repository/FieldRepostory.java b/connect/src/main/java/com/opsbeach/connect/schemata/repository/FieldRepostory.java new file mode 100644 index 0000000..d392594 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/repository/FieldRepostory.java @@ -0,0 +1,17 @@ +package com.opsbeach.connect.schemata.repository; + +import java.util.List; + +import org.springframework.data.neo4j.repository.Neo4jRepository; +import org.springframework.data.neo4j.repository.query.Query; +import org.springframework.data.repository.query.Param; + +import com.opsbeach.connect.schemata.entity.Field; + +public interface FieldRepostory extends Neo4jRepository { + + List findByPrId(Long prId); + + @Query(value = "MATCH (n:Field)<-[r]-(m:Table) where ID(n)=$id RETURN ID(m)") + Long getTableIdOfField(@Param("id") Long id); +} diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/repository/OrganizationRepository.java b/connect/src/main/java/com/opsbeach/connect/schemata/repository/OrganizationRepository.java new file mode 100644 index 0000000..d354575 --- /dev/null +++ 
b/connect/src/main/java/com/opsbeach/connect/schemata/repository/OrganizationRepository.java @@ -0,0 +1,10 @@ +package com.opsbeach.connect.schemata.repository; + +import org.springframework.data.neo4j.repository.Neo4jRepository; + +import com.opsbeach.connect.schemata.entity.Organization; + +public interface OrganizationRepository extends Neo4jRepository { + + Organization findByClinetId(Long clinetId); +} diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/repository/SchemaRepository.java b/connect/src/main/java/com/opsbeach/connect/schemata/repository/SchemaRepository.java new file mode 100644 index 0000000..a99d3c1 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/repository/SchemaRepository.java @@ -0,0 +1,10 @@ +package com.opsbeach.connect.schemata.repository; + +import java.util.List; + +import com.opsbeach.connect.schemata.dto.RedshiftDto; + +// this repo is to fetch schema structure of postgres +public interface SchemaRepository { + List getSchemaByName(String schemaName); +} diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/repository/SchemaRepositoryImpl.java b/connect/src/main/java/com/opsbeach/connect/schemata/repository/SchemaRepositoryImpl.java new file mode 100644 index 0000000..864c209 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/repository/SchemaRepositoryImpl.java @@ -0,0 +1,33 @@ +package com.opsbeach.connect.schemata.repository; + +import java.util.ArrayList; +import java.util.List; + +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; +import org.springframework.stereotype.Repository; + +import com.opsbeach.connect.schemata.dto.RedshiftDto; + +// temporary repository to fetch schema from postgres +@Repository +public class SchemaRepositoryImpl implements SchemaRepository { + + @PersistenceContext + private EntityManager entityManager; + + // get schema by schema name + @SuppressWarnings("unchecked") + @Override + public List 
getSchemaByName(String schemaName) { + List schemaDtos = new ArrayList<>(); + var schemaDto = new RedshiftDto(); + var query = entityManager.createNativeQuery("select * from information_schema.columns where table_schema = ?"); + query.setParameter(1, schemaName); + List list = query.getResultList(); + for (Object[] obj : list) { + schemaDtos.add(schemaDto.toDto(obj)); + } + return schemaDtos; + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/repository/TableRepository.java b/connect/src/main/java/com/opsbeach/connect/schemata/repository/TableRepository.java new file mode 100644 index 0000000..1535862 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/repository/TableRepository.java @@ -0,0 +1,55 @@ +package com.opsbeach.connect.schemata.repository; + +import java.util.List; +import java.util.Set; + +import org.springframework.data.neo4j.repository.Neo4jRepository; +import org.springframework.data.neo4j.repository.query.Query; +import org.springframework.data.repository.query.Param; + +import com.opsbeach.connect.schemata.entity.Table; +import com.opsbeach.connect.schemata.enums.SchemaType; + +public interface TableRepository extends Neo4jRepository { + + List
findBySchemaTypeAndClientId(SchemaType schemaType, Long clientId); + + Table findByNameAndNameSpaceAndClientId(String name, String nameSpace, Long clientId); + + List
findByPrId(Long prId); + + @Query(""" + MATCH path = (startNode)<-[*]-(endNode:Table) + WHERE ID(startNode) = $tableId + WITH NODES(path) AS nodes + UNWIND nodes AS n + WITH n + WHERE 'Table' IN LABELS(n) + RETURN distinct(ID(n)) as ids + """) + Set getTableIdsConnectedToTable(@Param("tableId") Long tableId); + + @Query(""" + MATCH (a:DomainNode) MATCH (b) WHERE a.clientRepoId = $clientRepoId AND ID(b) in $tableIds + MERGE (a)-[r:CONTAIN]->(b) RETURN type(r) + """) + List createTableDomainRelationShip(@Param("clientRepoId") Long clientRepoId, @Param("tableIds") List tableIds); + + @Query(""" + MATCH (a:Table) OPTIONAL MATCH (a)-[r]->(b:Table) WITH a,b, CASE WHEN b IS NULL THEN a ELSE b END AS n + WHERE a.clientId = $clientId and n.domain IS NOT NULL RETURN distinct(n.domain) + """) + List getAllDomain(@Param("clientId") Long clientId); + + @Query(""" + MATCH (a:Table) OPTIONAL MATCH (a)-[r]->(b:Table) WITH a,b, CASE WHEN b IS NULL THEN a ELSE b END AS n + WHERE a.clientId = $clientId and n.subscribers IS NOT NULL RETURN distinct(n.subscribers) + """) + List getAllSubscribers(@Param("clientId") Long clientId); + + @Query(""" + MATCH (a:Table) OPTIONAL MATCH (a)-[r]->(b:Table) WITH a,b, CASE WHEN b IS NULL THEN a ELSE b END AS n + WHERE a.clientId = $clientId and n.owner IS NOT NULL RETURN distinct(n.owner) + """) + List getAllOwner(@Param("clientId") Long clientId); +} diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/service/DomainNodeService.java b/connect/src/main/java/com/opsbeach/connect/schemata/service/DomainNodeService.java new file mode 100644 index 0000000..8f92ead --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/service/DomainNodeService.java @@ -0,0 +1,78 @@ +package com.opsbeach.connect.schemata.service; + +import java.util.List; + +import org.springframework.stereotype.Service; + +import com.opsbeach.connect.core.utils.Constants; +import com.opsbeach.connect.schemata.dto.TableDto; +import 
com.opsbeach.connect.schemata.dto.SchemaVisualizerDto; +import com.opsbeach.connect.schemata.entity.DomainNode; +import com.opsbeach.connect.schemata.graph.SchemaGraph; +import com.opsbeach.connect.schemata.repository.DomainNodeRepository; +import com.opsbeach.sharedlib.exception.ErrorCode; +import com.opsbeach.sharedlib.exception.RecordNotFoundException; +import com.opsbeach.sharedlib.response.ResponseMessage; + +import lombok.RequiredArgsConstructor; + +@Service +@RequiredArgsConstructor +public class DomainNodeService { + + private final DomainNodeRepository domainNodeRepository; + + private final ResponseMessage responseMessage; + + private final OrganizationService organizationService; + + private final TableService tableService; + + public DomainNode add(DomainNode domain, Long orgId) { + domainNodeRepository.save(domain); + domainNodeRepository.createOrganizationDomainRelationship(orgId, domain.getId()); + return domain; + } + + public DomainNode get(Long id) { + return domainNodeRepository.findById(id).orElseThrow(() -> new RecordNotFoundException(ErrorCode.RECORD_NOT_FOUND_ID, responseMessage.getErrorMessage(ErrorCode.RECORD_NOT_FOUND_ID, id.toString(), Constants.DOMIN))); + } + + public DomainNode update(DomainNode domain) { + get(domain.getId()); + return domainNodeRepository.save(domain); + } + + public List getAll(Long orgId) { + var organization = organizationService.get(orgId); + return organization.getDomains(); + } + + public List getTablesByDominId(Long dominId) { + var tables = get(dominId).getTables(); + return tables.isEmpty() ? 
List.of() : tables.stream().map(tables.get(0)::toDto).toList(); + } + + public double getSchemaScore(Long domainId, String name) { + var tables = get(domainId).getTables(); + var graph = new SchemaGraph(tables); + return graph.getSchemataScore(name); + } + + public SchemaVisualizerDto getSchemaVisualizerByDomain(Long domainId) { + var tables = get(domainId).getTables(); + return tableService.buildSchemaVisualizerDto(tables); + } + + public DomainNode addDomainNode(String name, Long clientId, Long clientRepoId) { + var organization = organizationService.getByClientId(clientId); + var domainNode = DomainNode.builder().name(name).clientId(clientId).clientRepoId(clientRepoId).build(); + domainNodeRepository.save(domainNode); + domainNodeRepository.createOrganizationDomainRelationship(organization.getId(), domainNode.getId()); + return domainNode; + } + + public void deleteByClientRepoId(Long clientRepoId) { + domainNodeRepository.deleteByClientRepoId(clientRepoId); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/service/OrganizationService.java b/connect/src/main/java/com/opsbeach/connect/schemata/service/OrganizationService.java new file mode 100644 index 0000000..ff09f14 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/service/OrganizationService.java @@ -0,0 +1,56 @@ +package com.opsbeach.connect.schemata.service; + +import java.util.List; + +import org.springframework.stereotype.Service; +import org.springframework.util.ObjectUtils; + +import com.opsbeach.connect.core.utils.Constants; +import com.opsbeach.connect.schemata.entity.Organization; +import com.opsbeach.connect.schemata.repository.OrganizationRepository; +import com.opsbeach.sharedlib.exception.ErrorCode; +import com.opsbeach.sharedlib.exception.RecordNotFoundException; +import com.opsbeach.sharedlib.response.ResponseMessage; + +import lombok.RequiredArgsConstructor; + +@Service +@RequiredArgsConstructor +public class OrganizationService { + + private final 
OrganizationRepository organizationRepository; + + private final ResponseMessage responseMessage; + + public Organization add(Organization organization) { + return organizationRepository.save(organization); + } + + public Organization add(Long clientId, String clientName) { + var organization = getByClientId(clientId); + if (ObjectUtils.isEmpty(organization)) { + organization = Organization.builder().name(clientName) + .clinetId(clientId) + .build(); + return add(organization); + } + return organization; + } + + public Organization get(Long id) { + return organizationRepository.findById(id).orElseThrow(() -> new RecordNotFoundException(ErrorCode.RECORD_NOT_FOUND_ID, responseMessage.getErrorMessage(ErrorCode.RECORD_NOT_FOUND_ID, id.toString(), Constants.ORGANIZATION))); + } + + public Organization getByClientId(Long clientId) { + return organizationRepository.findByClinetId(clientId); + } + + public Organization update(Organization organization) { + get(organization.getId()); + return add(organization); + } + + public List getAll() { + return organizationRepository.findAll(); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/service/RedshiftService.java b/connect/src/main/java/com/opsbeach/connect/schemata/service/RedshiftService.java new file mode 100644 index 0000000..72b057e --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/service/RedshiftService.java @@ -0,0 +1,81 @@ +package com.opsbeach.connect.schemata.service; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.springframework.stereotype.Service; +import org.springframework.util.ObjectUtils; + +import com.opsbeach.connect.schemata.dto.RedshiftDto; +import com.opsbeach.connect.schemata.entity.DomainNode; +import com.opsbeach.connect.schemata.entity.Field; +import com.opsbeach.connect.schemata.entity.Table; +import com.opsbeach.connect.schemata.enums.SchemaType; +import 
com.opsbeach.connect.schemata.repository.SchemaRepository; + +import lombok.RequiredArgsConstructor; + +@Service +@RequiredArgsConstructor +public class RedshiftService { + + // to fetch shema from our postgres db. + private final SchemaRepository schemaRepository; + + private final TableService tableService; + + private final DomainNodeService domainNodeService; + + static final List PRIMITIVE = List.of("integer", "bigint", "text", + "float4", "float8", "varchar", + "character varying", "bool", "boolean", + "bytea", "char", "name", "numeric", + "int8", "int2", "int2vector", "int4"); + + // get schema structure from redshift and save in neo4j + public DomainNode getSchema(Long domainId) { + + var domain = domainNodeService.get(domainId); + + // this line should replace by redshift api, to fetch schema. + var redshiftDtos = schemaRepository.getSchemaByName(domain.getName()); + + Map tableMap = new HashMap<>(); + redshiftDtos.forEach(dto -> { + var table = tableMap.get(dto.getTableName()); + if (table == null) { + table = Table.builder().fields(List.of(createField(dto))).schemaType(SchemaType.ENTITY).name(dto.getTableName().toString()).build(); + } else { + List fields = new ArrayList<>(); + fields.addAll(table.getFields()); + fields.add(createField(dto)); + table.setFields(fields); + } + tableMap.put(dto.getTableName().toString(), table); + }); + + var tables = tableService.addTables(List.copyOf(tableMap.values())); + if (!ObjectUtils.isEmpty(domain.getTables())) { + tables.addAll(domain.getTables()); + } + domain.setTables(tables); + domain = domainNodeService.update(domain); + return domain; + } + + private Field createField(RedshiftDto redshiftDto) { + return Field.builder().schema(redshiftDto.getTableSchema().toString()) + .name(redshiftDto.getColumnName().toString()) + .dataType(redshiftDto.getDataType().toString()) + .defaultValue(cheackNull(redshiftDto.getColumnDefault())) + // .isNullable(redshiftDto.getIsNullable().toString()) + 
.isPrimitiveType(PRIMITIVE.contains(redshiftDto.getDataType().toString())) + .build(); + } + + private String cheackNull(Object object) { + return object == null ? null : object.toString(); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/service/TableService.java b/connect/src/main/java/com/opsbeach/connect/schemata/service/TableService.java new file mode 100644 index 0000000..aaa877e --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/service/TableService.java @@ -0,0 +1,848 @@ +package com.opsbeach.connect.schemata.service; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicLong; +import java.util.function.Function; +import java.util.stream.Collectors; + +import org.neo4j.driver.internal.InternalNode; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Lazy; +import org.springframework.data.domain.Pageable; +import org.springframework.data.neo4j.core.Neo4jClient; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; +import org.springframework.util.ObjectUtils; +import org.springframework.web.multipart.MultipartFile; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.JsonNodeFactory; +import com.opsbeach.connect.core.utils.Constants; +import com.opsbeach.connect.github.entity.ClientRepo; +import com.opsbeach.connect.github.entity.ClientRepo.RepoType; +import 
com.opsbeach.connect.github.service.ClientRepoService; +import com.opsbeach.connect.github.service.GitHubService; +import com.opsbeach.connect.github.service.ModelService; +import com.opsbeach.connect.github.service.SchemaFileAuditService; +import com.opsbeach.connect.schemata.dto.FieldDto; +import com.opsbeach.connect.schemata.dto.SchemaValidationDto; +import com.opsbeach.connect.schemata.dto.TableDto; +import com.opsbeach.connect.schemata.dto.TableFilterOptionsDto; +import com.opsbeach.connect.schemata.dto.SchemaVisualizerDto; +import com.opsbeach.connect.schemata.dto.TableCsvDto; +import com.opsbeach.connect.schemata.entity.Field; +import com.opsbeach.connect.schemata.entity.Table; +import com.opsbeach.connect.schemata.graph.SchemaGraph; +import com.opsbeach.connect.schemata.repository.FieldRepostory; +import com.opsbeach.connect.schemata.repository.TableRepository; +import com.opsbeach.connect.schemata.validate.SchemaValidator; +import com.opsbeach.connect.schemata.validate.Status; +import com.opsbeach.sharedlib.exception.ErrorCode; +import com.opsbeach.sharedlib.exception.InvalidDataException; +import com.opsbeach.sharedlib.exception.RecordNotFoundException; +import com.opsbeach.sharedlib.response.ResponseMessage; +import com.opsbeach.sharedlib.security.SecurityUtil; +import com.opsbeach.sharedlib.utils.FileUtil; +import com.opsbeach.sharedlib.utils.JsonUtil; +import com.opsbeach.sharedlib.utils.StringUtil; + +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +@Service +@RequiredArgsConstructor +public class TableService { + + private final TableRepository tableRepository; + private final FieldRepostory fieldRepostory; + private final ResponseMessage responseMessage; + private final ModelService modelService; + private final SchemaValidator schemaValidator; + private final Neo4jClient neo4jClient; + private final ClientRepoService clientRepoService; + + @Lazy + @Autowired + private GitHubService gitHubService; + + @Lazy 
    @Autowired
    private SchemaFileAuditService schemaFileAuditService;

    // Path template used when constructing schema file locations in GitHub.
    @Value("${github.construct-file-path}")
    private String githubFilePath;

    // Shared mapper for Neo4j-row -> entity and DTO -> JsonNode conversions.
    private final ObjectMapper objectMapper = new ObjectMapper();

    /** Persists a single table node. */
    @Transactional
    public Table addTable(Table table) {
        return tableRepository.save(table);
    }

    /** Persists a batch of table nodes in one transaction. */
    @Transactional
    public List<Table> addTables(List<Table> tables) {
        return tableRepository.saveAll(tables);
    }

    /**
     * Returns every table visible to the current client; the candidate node
     * ids come from the model registry, not from a full table scan.
     */
    public List<Table> getAll() {
        var ids = modelService.getNodeIds();
        return findAllById(ids);
    }
findAllById(List ids) { + return tableRepository.findAllById(ids); + } + + public void deleteByIds(List ids) { + var tables = tableRepository.findAllById(ids); + List fieldIds = new ArrayList<>(); + tables.forEach(table -> { + table.getFields().forEach(field -> { + fieldIds.add(field.getId()); + if (Objects.nonNull(field.getArrayField())) fieldIds.add(field.getArrayField().getId()); + else if (Objects.nonNull(field.getMapField())) fieldIds.add(field.getMapField().getId()); + else if (Objects.nonNull(field.getUnionTypes())) fieldIds.addAll(field.getUnionTypes().stream().map(Field::getId).toList()); + }); + }); + fieldRepostory.deleteAllById(fieldIds); + tableRepository.deleteAllById(ids); + } + + public Table findByNameAndNameSpace(String name, String nameSpace) { + return tableRepository.findByNameAndNameSpaceAndClientId(name, nameSpace, SecurityUtil.getClientId()); + } + + public TableFilterOptionsDto getTableFilterOptions() { + var clientId = SecurityUtil.getClientId(); + var owners = tableRepository.getAllOwner(clientId); + var domains = tableRepository.getAllDomain(clientId); + var subscriberArrs = tableRepository.getAllSubscribers(clientId); + Set subscribers = new HashSet<>(); + subscriberArrs.stream().forEach(subscriber -> { + var subs = JsonUtil.jsonArrayToObjectList(String.valueOf(subscriber), String.class); + subs.forEach(sub -> subscribers.add(sub)); + }); + return new TableFilterOptionsDto(owners, domains, subscribers); + } + + private StringBuilder generateQueryForGetTables(List owners, List domains, List subscribers, Map params) { + boolean isOwnerEmpty = ObjectUtils.isEmpty(owners); + boolean isDomainEmpty = ObjectUtils.isEmpty(domains); + boolean isSubscribersEmpty = ObjectUtils.isEmpty(subscribers); + var query = new StringBuilder(""" + MATCH (a:Table) + OPTIONAL MATCH (a)-[]->(b:Table) + WITH a, b, CASE WHEN b IS NULL THEN a ELSE b END AS selectedNode + WHERE a.clientId = $clientId + """); + params.put("clientId", SecurityUtil.getClientId()); + if 
(!isOwnerEmpty) { + query.append(" AND selectedNode.owner IN $owners"); + params.put("owners", owners); + if (owners.contains("default")) + query.append(" OR selectedNode.owner IS NULL"); + } + if (!isDomainEmpty) { + query.append(" AND selectedNode.domain IN $domains"); + params.put("domains", domains); + if (domains.contains("default")) + query.append(" OR selectedNode.domain IS NULL"); + } + if (!isSubscribersEmpty) { + query.append(" AND any(subscribers IN selectedNode.subscribers WHERE subscribers IN $subscribers)"); + params.put("subscribers", subscribers); + if (subscribers.contains("default")) + query.append(" OR selectedNode.subscribers IS NULL"); + } + return query; + } + public JsonNode getAll(List owners, List domains, List subscribers, Pageable pageable) { + Map tables = new HashMap<>(); + Map params = new HashMap<>(); + var query = generateQueryForGetTables(owners, domains, subscribers, params); + var getTablesQuery = query.toString(); + var offset = pageable.getPageNumber()*pageable.getPageSize(); + var limit = pageable.getPageSize(); + getTablesQuery = StringUtil.constructStringEmptySeparator(getTablesQuery, " RETURN a,b SKIP "+offset+" LIMIT "+limit); + var countQuery = query.append(" RETURN count(a) as count").toString(); + neo4jClient.query(getTablesQuery).bindAll(params).fetch().all().stream().forEach(map -> { + var value = (InternalNode) map.get("a"); + var table = objectMapper.convertValue(value.asMap(), Table.class); + // table.setId(Long.parseLong(value.elementId())); + // Newly installed neo4j stores id in different format (Need to test in prod) + table.setId(Long.parseLong(value.elementId().split(":")[2])); + value = (InternalNode) map.get("b"); + if (value != null) { + var modifiedTable = objectMapper.convertValue(value.asMap(), Table.class); + modifiedTable.setId(Long.parseLong(value.elementId().split(":")[2])); + table.setModifiedTable(modifiedTable); + } + tables.put(table.getId(), table); + }); + AtomicLong total = new AtomicLong(); + 
neo4jClient.query(countQuery).bindAll(params).fetch().all().forEach(map -> total.set((long) map.get("count"))); + + Map isFieldChangedMap = checkIsFieldChanged(tables.keySet().toArray(new Long[0])); + List tableDtos = new ArrayList<>(pageable.getPageSize()); + tables.values().forEach(table -> { + table.setFields(null); + if (Objects.nonNull(table.getModifiedTable())) { + transferTableProps(table, table.getModifiedTable()); + table.setModifiedTable(null); + } + var tableDto = table.toDto(table); + if (isFieldChangedMap.containsKey(table.getId())) tableDto.setIsFieldChanged(isFieldChangedMap.get(table.getId())); + tableDtos.add(tableDto); + }); + var payload = JsonNodeFactory.instance.objectNode(); + payload.set("tables", objectMapper.convertValue(tableDtos, JsonNode.class)); + payload.put("total", total.get()); + return payload; + } + + public Map checkIsFieldChanged(Long[] tableIds) { + Map isFieldChangedMap = new HashMap<>(); + Map params = new HashMap<>(); + params.put("tableIds", tableIds); + var isFieldChangedQuery = "MATCH (a:Table)-[r:PROPERTIES]->(b:Field) WHERE ID(a) IN $tableIds AND b.prId IS NOT NULL RETURN distinct(ID(a)) AS tableId, true AS isFieldChanged"; + neo4jClient.query(isFieldChangedQuery).bindAll(params).fetch().all().stream().forEach(map -> { + isFieldChangedMap.put((long) map.get("tableId"), (boolean) map.get("isFieldChanged")); + }); + return isFieldChangedMap; + } + + public TableDto get(Long id) { + var table = getModel(id); + if (!ObjectUtils.isEmpty(table.getModifiedTable())) { + // if the table content is changed, then transfer the latest content. 
+ transferTableProps(table, table.getModifiedTable()); + table.setModifiedTable(null); + } + var fields = new ArrayList(); + var updatedFieldNames = table.getFields().stream().filter(f -> (f.getIsDeleted().equals(Boolean.TRUE))).map(Field::getName).toList(); + table.getFields().stream().filter(f -> (f.getIsDeleted().equals(Boolean.FALSE))) + .forEach(field -> { + if (field.getIsUserChanged().equals(Boolean.TRUE) && updatedFieldNames.contains(field.getName())) { + // if the field is updated field then the above condition will true. + field.setIsDeleted(Boolean.TRUE); // to show that the difference between updated and new field in UI. + } + fields.add(field); + }); + table.setFields(fields); + var tableDto = table.toDto(table); + var clientRepo = modelService.getByNodeId(tableDto.getId()); + tableDto.setDataTypes(fieldDataTypes(clientRepo.getRepoType())); + tableDto.setClientRepoId(clientRepo.getId()); + return tableDto; + } + + public Table getModel(Long id) { + return tableRepository.findById(id).orElseThrow(() -> new RecordNotFoundException(ErrorCode.RECORD_NOT_FOUND_ID, responseMessage.getErrorMessage(ErrorCode.RECORD_NOT_FOUND_ID, id.toString(), Constants.TABLE))); + } + + public Field getFieldModel(Long id) { + return fieldRepostory.findById(id).orElseThrow(() -> new RecordNotFoundException(ErrorCode.RECORD_NOT_FOUND_ID, responseMessage.getErrorMessage(ErrorCode.RECORD_NOT_FOUND_ID, id.toString(), Constants.FIELD))); + } + + public SchemaVisualizerDto getSchemaVisualizerForAll() { + return buildSchemaVisualizerDto(getAll()); + } + + public SchemaVisualizerDto getSchemaVisualizer(Long tableId) { + Set tableIds = tableRepository.getTableIdsConnectedToTable(tableId); + tableIds.add(tableId); + var tables = tableRepository.findAllById(tableIds); + return buildSchemaVisualizerDto(tables); + } + + public SchemaVisualizerDto buildSchemaVisualizerDto(List
tables) { + List tableDtos = new ArrayList<>(); + List fieldDtos = new ArrayList<>(); + List> links = new ArrayList<>(); + List addedTables = new ArrayList<>(); + tables.stream().forEach(table -> { + if (Boolean.FALSE.equals(addedTables.contains(table.getId()))) { + var schemaVisualizerDto = buildSchemaVisualizerDto(table, addedTables); + tableDtos.addAll(schemaVisualizerDto.getTables()); + fieldDtos.addAll(schemaVisualizerDto.getFields()); + links.addAll(schemaVisualizerDto.getLinks()); + addedTables.add(table.getId()); + } + }); + return SchemaVisualizerDto.builder().tables(tableDtos).fields(fieldDtos).links(links).build(); + } + + private SchemaVisualizerDto buildSchemaVisualizerDto(Table table, List addedTables) { + var fields = table.getFields(); + table.setFields(null); + List tableDtos = new LinkedList<>(); + List fieldDtos = new LinkedList<>(); + if (table.getIsUserChanged().equals(Boolean.TRUE) && table.getIsDeleted().equals(Boolean.TRUE)){ + transferTableProps(table, table.getModifiedTable()); + table.setModifiedTable(null); + } + tableDtos.add(table.toDto(table)); + List> links = new LinkedList<>(); + var updatedFieldNames = fields.stream().filter(f -> (f.getIsDeleted().equals(Boolean.TRUE))).map(Field::getName).toList(); + fields.stream().filter(f -> (f.getIsDeleted().equals(Boolean.FALSE))) + .forEach(field -> { + if (field.getIsUserChanged().equals(Boolean.TRUE) && updatedFieldNames.contains(field.getName())) { + // if the field is updated field then the above condition will true. + field.setIsDeleted(Boolean.TRUE); // to show that the difference between updated and new field in UI. 
+ } + createLink(links, table.getId(), field.getId()); + buildSchemaVisualizerDto(field, fieldDtos, tableDtos, links, addedTables); + }); + return SchemaVisualizerDto.builder().tables(tableDtos).fields(fieldDtos).links(links).build(); + } + + private void createLink(List> links, Long sourceId, long targetId) { + links.add(Map.of("source", sourceId, "target", targetId)); + } + + private void buildSchemaVisualizerDto(Field field, List fieldDtos, List tableDtos, + List> links, List addedTables) { + + if (!ObjectUtils.isEmpty(field.getContain())) { + var table = field.getContain(); field.setContain(null); + createLink(links, field.getId(), table.getId()); + if (Boolean.FALSE.equals(addedTables.contains(table.getId()))) { + var schemaVisualizerDto = buildSchemaVisualizerDto(table, addedTables); + tableDtos.addAll(schemaVisualizerDto.getTables()); + fieldDtos.addAll(schemaVisualizerDto.getFields()); + links.addAll(schemaVisualizerDto.getLinks()); + addedTables.add(table.getId()); + } + } + else if (!ObjectUtils.isEmpty(field.getArrayField())) { + var arrayField = field.getArrayField(); field.setArrayField(null); + createLink(links, field.getId(), arrayField.getId()); + buildSchemaVisualizerDto(arrayField, fieldDtos, tableDtos, links, addedTables); + } + else if (!ObjectUtils.isEmpty(field.getMapField())) { + var mapField = field.getMapField(); field.setMapField(null); + createLink(links, field.getId(), mapField.getId()); + buildSchemaVisualizerDto(mapField, fieldDtos, tableDtos, links, addedTables); + } + else if (!ObjectUtils.isEmpty(field.getUnionTypes())) { + var unionFields = field.getUnionTypes(); field.setUnionTypes(null); + unionFields.stream().forEach(unionField -> { + createLink(links, field.getId(), unionField.getId()); + buildSchemaVisualizerDto(unionField, fieldDtos, tableDtos, links, addedTables); + }); + } + else if (!ObjectUtils.isEmpty(field.getReferenceField())) { + // createLink(links, field.getId(), field.getReferenceField().getId()); + var tableId = 
fieldRepostory.getTableIdOfField(field.getReferenceField().getId()); + createLink(links, field.getId(), tableId); + var table = getModel(tableId); + if (Boolean.FALSE.equals(addedTables.contains(table.getId()))) { + var schemaVisualizerDto = buildSchemaVisualizerDto(table, addedTables); + tableDtos.addAll(schemaVisualizerDto.getTables()); + fieldDtos.addAll(schemaVisualizerDto.getFields()); + links.addAll(schemaVisualizerDto.getLinks()); + addedTables.add(table.getId()); + } + } + fieldDtos.add(field.toDto(field)); + } + + // public List
parseSchemaVisualizerDto(Long rootNodeId, SchemaVisualizerDto schemaVisualizerDto, SchemaVisualizerDto.Purpose purpose) { + // var tableDtoMap = schemaVisualizerDto.getTables().stream().collect(Collectors.toMap(TableDto::getId, Function.identity())); + // var FieldDtoMap = schemaVisualizerDto.getFields().stream().collect(Collectors.toMap(FieldDto::getId, Function.identity())); + // var linkMap = createLinksMap(schemaVisualizerDto.getLinks()); + // List
tables = new ArrayList<>(); + // tables.add(getTableFromSchemaVisualizerDto(rootNodeId, tableDtoMap, FieldDtoMap, linkMap, tables, purpose)); + // return tables; + // } + + // private Multimap createLinksMap(List> links) { + // Multimap linkMap = LinkedHashMultimap.create(); + // links.stream().forEach(link -> { + // linkMap.put(link.get("source"), link.get("target")); + // }); + // return linkMap; + // } + + // private Table getTableFromSchemaVisualizerDto(Long modelId, Map tableDtoMap, Map fieldDtoMap, + // Multimap linkMap, List
tables, SchemaVisualizerDto.Purpose purpose) { + // var tableDto = tableDtoMap.get(modelId); + // var fieldIds = linkMap.get(tableDto.getId()); + // List fields = new ArrayList<>(); + // fieldIds.stream().forEach(fieldId -> { + // fields.add(getFieldFromSchemaVisualizerDto(fieldId, tableDtoMap, fieldDtoMap, linkMap, tables, purpose)); + // }); + // var table = tableDto.toDomain(tableDto); + // table.setFields(fields); + // return table; + // } + + // private Field getFieldFromSchemaVisualizerDto(Long fieldId, Map tableDtoMap, Map fieldDtoMap, + // Multimap linkMap, List
tables, SchemaVisualizerDto.Purpose purpose) { + // var fieldDto = fieldDtoMap.get(fieldId); + // var fieldParent = fieldDto.toDomin(fieldDto); + // var dataType = fieldParent.getDataType(); + // var fieldIds = linkMap.get(fieldDto.getId()).toArray(new Long[0]); + // List unionTypes = new ArrayList<>(); + // // If the field is complex type then the condition will true + // if (fieldIds.length > 0) { + // if (dataType.equalsIgnoreCase("array")) { + // parseNestedField(fieldParent, fieldParent.getItems(), fieldIds, tableDtoMap, fieldDtoMap, linkMap, tables, purpose); + // } + // else if (dataType.equalsIgnoreCase("map")) { + // parseNestedField(fieldParent, fieldParent.getValues(), fieldIds, tableDtoMap, fieldDtoMap, linkMap, tables, purpose); + // } + // else if (dataType.equalsIgnoreCase("union")) { + // for (Long id : fieldIds) { + // unionTypes.add(getFieldFromSchemaVisualizerDto(id, tableDtoMap, fieldDtoMap, linkMap, tables, purpose)); + // fieldParent.setUnionTypes(unionTypes); + // } + // } + // else { + // var table = getTableFromSchemaVisualizerDto(fieldIds[0], tableDtoMap, fieldDtoMap, linkMap, tables, purpose); + // if (purpose.equals(SchemaVisualizerDto.Purpose.VALIDATE)) { + // tables.add(table); + // } else { + // fieldParent.setContain(table); + // } + // } + // } + // return fieldParent; + // } + + // private void parseNestedField(Field fieldParent, String nestedType, Long[] fieldIds, Map tableDtoMap, + // Map fieldDtoMap, Multimap linkMap, List
tables, SchemaVisualizerDto.Purpose purpose) { + // List unionTypes = new ArrayList<>(); + // if (nestedType.equalsIgnoreCase("array")) { + // fieldParent.setArrayField(getFieldFromSchemaVisualizerDto(fieldIds[0], tableDtoMap, fieldDtoMap, linkMap, tables, purpose)); + // } + // else if (nestedType.equalsIgnoreCase("map")) { + // fieldParent.setMapField(getFieldFromSchemaVisualizerDto(fieldIds[0], tableDtoMap, fieldDtoMap, linkMap, tables, purpose)); + // } + // else if (nestedType.equalsIgnoreCase("union")) { + // for (Long id : fieldIds) { + // unionTypes.add(getFieldFromSchemaVisualizerDto(id, tableDtoMap, fieldDtoMap, linkMap, tables, purpose)); + // fieldParent.setUnionTypes(unionTypes); + // } + // } + // else { + // var table = getTableFromSchemaVisualizerDto(fieldIds[0], tableDtoMap, fieldDtoMap, linkMap, tables, purpose); + // if (purpose.equals(SchemaVisualizerDto.Purpose.VALIDATE)) { + // tables.add(table); + // } else { + // fieldParent.setContain(table); + // } + // } + // } + + // public Map computeScores(SchemaVisualizerDto schemaVisualizerDto, Long rootNodeId) { + // var tables = parseSchemaVisualizerDto(rootNodeId, schemaVisualizerDto, SchemaVisualizerDto.Purpose.VALIDATE); + // return computeScores(tables); + // } + + public Map computeScores(List
tables) { + var graph = new SchemaGraph(getAll()); + Map tableScore = new HashMap<>(); + for (Table table : tables) { + tableScore.put(table.getName(), graph.getSchemataScore(StringUtil.constructStringEmptySeparator(table.getNameSpace(),".",table.getName()))); + } + return tableScore; + } + + // public List validateSchema(SchemaVisualizerDto schemaVisualizerDto, Long rootNodeId) { + // var tables = parseSchemaVisualizerDto(rootNodeId, schemaVisualizerDto, SchemaVisualizerDto.Purpose.VALIDATE); + // List results = new ArrayList<>(); + // for (Table table : tables) { + // results.add(schemaValidator.apply(table)); + // for (Field field: table.getFields()) { + // results.add(fieldValidator.apply(field)); + // } + // } + // return results; + // } + + public List getFieldDataTypes(Long tableId) { + var clientRepo = modelService.getByNodeId(tableId); + return fieldDataTypes(clientRepo.getRepoType()); + } + + public List fieldDataTypes(RepoType type) { + return switch (type) { + case AVRO -> List.of("string", "int", "float", "double", "long", "bytes", "boolean", "null"); + case PROTOBUF -> List.of("double", "float", "int32", "int64", "uint32", "uint64", "sint32", "sint64", "fixed32", "fixed64", "sfixed32", "sfixed64", "bool", "string", "bytes"); + case JSON -> List.of("string", "number" ,"integer" ,"boolean", "null"); + case YAML -> List.of(); + }; + } + + // this method is to save the modified table with the old table. + public Table findDeltaForTable(Table tableNew, Long tableId, Long prId) { + var table = getModel(tableId); + if (Boolean.TRUE.equals(compareTable(tableNew, table))) { + // ther is change with new Table. + tableNew.setFields(null); + tableNew.setIsUserChanged(Boolean.TRUE); + tableNew.setClientId(null); // because when we fetch the table we should get the main table only. 
+ table.setIsUserChanged(Boolean.TRUE); + table.setIsDeleted(Boolean.TRUE); + table.setPrId(prId); + table.setModifiedTable(tableNew); + tableRepository.save(table); + } + return table; + } + + // this method is to save newly added table in the file. + // if the new schema is already present then return that or else create new with delta flag and pr_id. + public Table saveNewTableWithDelta(Table tableNew, Long prId) { + var table = findByNameAndNameSpace(tableNew.getName(), tableNew.getNameSpace()); + if (Boolean.TRUE.equals(ObjectUtils.isEmpty(table))) { + tableNew.setIsUserChanged(Boolean.TRUE); + tableNew.setPrId(prId); + tableNew.setFields(null); + tableNew.setClientId(SecurityUtil.getClientId()); // Add clientId for new table. + return tableRepository.save(tableNew); + } + return table; + } + + public List
    /**
     * Computes the field-level delta between the incoming schema fields of a PR
     * and the stored schema. Added, changed and removed fields are flagged with
     * the PR id so the change can later be accepted or reverted.
     *
     * @param tableWithNewFields incoming fields keyed by "namespace.name"
     * @param prId               the pull request the delta belongs to
     * @param repoType           schema format; influences what counts as a change
     * @return the saved tables with their merged field sets
     */
    public List<Table> findDeltaForFields(Map<String, List<Field>> tableWithNewFields, Long prId, RepoType repoType) {
        List<Table> newTables = new ArrayList<>();
        tableWithNewFields.entrySet().forEach(entry -> {
            var fullName = entry.getKey();
            var nameSpace = fullName.substring(0, fullName.lastIndexOf("."));
            var name = fullName.substring(fullName.lastIndexOf(".") + 1, fullName.length());
            // Assuming the changed table does not contain any previous (unmerged)
            // changes, i.e. it is a freshly changed table.
            var table = findByNameAndNameSpace(name, nameSpace);
            List<Field> fields = new ArrayList<>(); // accumulates the merged field set
            // index the stored fields by name so each incoming field can be matched
            var fieldMap = table.getFields().stream().collect(Collectors.toMap(Field::getName, Function.identity()));
            AtomicInteger rowNumber = new AtomicInteger(fieldMap.size() + 1);
            entry.getValue().forEach(fieldNew -> {
                // does this incoming field already exist in the stored schema?
                var field = fieldMap.get(fieldNew.getName());
                if (ObjectUtils.isEmpty(field)) {
                    // not present: the incoming field is brand new
                    fieldNew.setIsUserChanged(Boolean.TRUE);
                    fieldNew.setPrId(prId);
                    fieldNew.setRowNumber(rowNumber.getAndIncrement());
                    checkNestedTable(fieldNew); // re-attach any nested table by looking it up in neo4j
                    fields.add(fieldNew);
                    return;
                }
                if (Boolean.TRUE.equals(compareField(fieldNew, field, repoType))) {
                    // present but modified: keep both versions, old one flagged deleted
                    fieldNew.setIsUserChanged(Boolean.TRUE);
                    fieldNew.setPrId(prId);
                    fieldNew.setRowNumber(field.getRowNumber());
                    field.setPrId(prId);
                    field.setIsDeleted(Boolean.TRUE);
                    field.setIsUserChanged(Boolean.TRUE);
                    fieldNew.setReferenceField(field.getReferenceField());
                    checkNestedTable(fieldNew);
                    fields.add(fieldNew);
                }
                fields.add(field);
                fieldMap.remove(field.getName()); // mark as processed
            });

            // Whatever remains in fieldMap was either deleted by this PR or
            // belongs to another open PR.
            fieldMap.entrySet().forEach(fld -> {
                var field = fld.getValue();
                // Only untouched fields are marked deleted here; fields already
                // flagged belong to another PR (incoming fields never include
                // other PRs' fields) and must not be re-flagged.
                if (field.getIsDeleted().equals(Boolean.FALSE) && field.getIsUserChanged().equals(Boolean.FALSE)) {
                    field.setPrId(prId);
                    field.setIsDeleted(Boolean.TRUE);
                    field.setIsUserChanged(Boolean.TRUE);
                }
                fields.add(field);
            });
            table.setFields(fields);
            newTables.add(tableRepository.save(table));
        });
        return newTables;
    }

    /**
     * Re-attaches nested tables on a complex field. Parsed fields carry a
     * contained table without an id, so the persisted node is looked up by
     * name/namespace and substituted before saving.
     */
    private void checkNestedTable(Field field) {
        if (field.getIsPrimitiveType().equals(Boolean.FALSE)) {
            if (Boolean.FALSE.equals(ObjectUtils.isEmpty(field.getContain()))) {
                // field.getContain() gives a table without an ID; fetch the stored one
                var table = findByNameAndNameSpace(field.getContain().getName(), field.getContain().getNameSpace());
                field.setContain(table);
            }
            switch (field.getDataType()) {
                case "array" -> checkNestedTableArrayField(field);
                case "map" -> checkNestedTableMapField(field);
                case "union" -> checkNestedTableUnionField(field);
            }
        }
    }

    // Recurses through an array field's item type.
    // NOTE(review): the "union" case passes the outer field (not the item) —
    // confirm this mirrors how union items are modelled.
    private void checkNestedTableArrayField(Field field) {
        if (Boolean.FALSE.equals(ObjectUtils.isEmpty(field.getContain()))) {
            var table = findByNameAndNameSpace(field.getContain().getName(), field.getContain().getNameSpace());
            field.setContain(table);
        }
        switch (field.getItems()) {
            case "array" -> checkNestedTableArrayField(field.getArrayField());
            case "map" -> checkNestedTableMapField(field.getMapField());
            case "union" -> checkNestedTableUnionField(field);
        }
    }

    // Recurses through a map field's value type (same caveat as the array variant).
    private void checkNestedTableMapField(Field field) {
        if (Boolean.FALSE.equals(ObjectUtils.isEmpty(field.getContain()))) {
            var table = findByNameAndNameSpace(field.getContain().getName(), field.getContain().getNameSpace());
            field.setContain(table);
        }
        switch (field.getValues()) {
            case "array" -> checkNestedTableArrayField(field.getArrayField());
            case "map" -> checkNestedTableMapField(field.getMapField());
            case "union" -> checkNestedTableUnionField(field);
        }
    }
checkNestedTableArrayField(field.getArrayField()); + case "map" -> checkNestedTableMapField(field.getMapField()); + case "union" -> checkNestedTableUnionField(field); + } + } + + private void checkNestedTableUnionField(Field field) { + field.getUnionTypes().stream().forEach(unionField -> checkNestedTable(unionField)); + } + + public boolean compareField(Field fieldNew, Field field, RepoType repoType) { + if (compareString(fieldNew.getDataType(), field.getDataType()) && repoType.equals(RepoType.PROTOBUF)) return true; + if (compareString(fieldNew.getDescription(), field.getDescription())) return true; + if (compareBoolean(fieldNew.getIsPii(), field.getIsPii())) return true; + if (compareBoolean(fieldNew.getIsClassified(), field.getIsClassified())) return true; + if (compareBoolean(fieldNew.getDeprecated(), field.getDeprecated())) return true; + return false; + } + + public boolean compareBoolean(Boolean newVal, Boolean oldVal) { + if (ObjectUtils.isEmpty(oldVal)) { + if (ObjectUtils.isEmpty(newVal)) { + return false; + } + return true; + } + if (Boolean.FALSE.equals(oldVal.equals(newVal))) return true; + return false; + } + + public boolean compareString(String newDes, String oldDes) { + if (ObjectUtils.isEmpty(oldDes)) { + if (ObjectUtils.isEmpty(newDes)) { + return false; + } + return true; + } + if (Boolean.FALSE.equals(oldDes.equals(newDes))) return true; + return false; + } + + public boolean compareTable(Table tableNew, Table tableOld) { + if (compareString(tableNew.getDescription(), tableOld.getDescription())) return true; + if (compareString(tableNew.getOwner(), tableOld.getOwner())) return true; + if (compareString(tableNew.getDomain(), tableOld.getDomain())) return true; + if (compareString(tableNew.getComplianceOwner(), tableOld.getComplianceOwner())) return true; + if (compareString(tableNew.getChannel(), tableOld.getChannel())) return true; + if (compareString(tableNew.getEmail(), tableOld.getEmail())) return true; + if 
(compareString(tableNew.getStatus(), tableOld.getStatus())) return true; + // else if (compareString(tableNew.getSchemaType().name(), tableOld.getSchemaType().name())) return true; + if (!Arrays.deepEquals(tableNew.getSubscribers(), tableOld.getSubscribers())) return true; + if (compareString(tableNew.getQualityRuleBase(), tableOld.getQualityRuleBase())) return true; + if (compareString(tableNew.getQualityRuleSql(), tableOld.getQualityRuleSql())) return true; + if (compareString(tableNew.getQualityRuleCel(), tableOld.getQualityRuleCel())) return true; + return false; + } + + public boolean revertChanges(Long prId, Long clientRepoId) { + doTableChanges(prId, false, clientRepoId); + doFieldChanges(prId, false); + modelService.deleteModelByPrId(prId); + schemaFileAuditService.deleteSchemaFileAuditByPrId(prId); + return true; + } + + private void doFieldChanges(Long prId, boolean acceptChanges) { + var fields = fieldRepostory.findByPrId(prId); + if (fields.isEmpty()) return; + List fieldsToChange = new ArrayList<>(); + List fieldsToDelete = new ArrayList<>(); + fields.forEach(field -> { + if (field.getIsDeleted().equals(Boolean.TRUE) && field.getIsUserChanged().equals(Boolean.TRUE)) { + if (acceptChanges) { + fieldsToDelete.add(field); + } else { + field.setIsDeleted(Boolean.FALSE); + field.setIsUserChanged(Boolean.FALSE); + field.setPrId(null); + fieldsToChange.add(field); + } + } + if (field.getIsDeleted().equals(Boolean.FALSE) && field.getIsUserChanged().equals(Boolean.TRUE)) { + if (acceptChanges) { + field.setIsDeleted(Boolean.FALSE); + field.setIsUserChanged(Boolean.FALSE); + field.setPrId(null); + fieldsToChange.add(field); + } else { + fieldsToDelete.add(field); + } + } + }); + fieldRepostory.saveAll(fieldsToChange); + fieldRepostory.deleteAll(fieldsToDelete); + } + + public void transferTableProps(Table table, Table newTable) { + table.setDescription(newTable.getDescription()); + table.setOwner(newTable.getOwner()); + table.setDomain(newTable.getDomain()); + 
table.setChannel(newTable.getChannel()); + table.setSchemaType(newTable.getSchemaType()); + table.setEmail(newTable.getEmail()); + table.setStatus(newTable.getStatus()); + table.setComplianceOwner(newTable.getComplianceOwner()); + table.setSubscribers(newTable.getSubscribers()); + table.setQualityRuleBase(newTable.getQualityRuleBase()); + table.setQualityRuleSql(newTable.getQualityRuleSql()); + table.setQualityRuleCel(newTable.getQualityRuleCel()); + } + + private void doTableChanges(Long prId, boolean acceptChanges, Long clientRepoId) { + var tables = tableRepository.findByPrId(prId); + if (tables.isEmpty()) return; + List
tablesToChange = new ArrayList<>(); + List
tablesToDelete = new ArrayList<>(); + tables.forEach(table -> { + if (table.getIsDeleted().equals(Boolean.TRUE) && table.getIsUserChanged().equals(Boolean.TRUE)) { + if (acceptChanges) { + transferTableProps(table, table.getModifiedTable()); // save values from new table to old table + } + tablesToDelete.add(table.getModifiedTable()); // delete the delta node of table. + table.setIsDeleted(Boolean.FALSE); + table.setIsUserChanged(Boolean.FALSE); + table.setModifiedTable(null); + table.setPrId(null); + tablesToChange.add(table); + } + if (table.getIsDeleted().equals(Boolean.FALSE) && table.getIsUserChanged().equals(Boolean.TRUE)) { + if (acceptChanges) { + table.setIsUserChanged(Boolean.FALSE); + table.setPrId(null); + tableRepository.createTableDomainRelationShip(clientRepoId, List.of(table.getId())); + tablesToChange.add(table); + } else { + tablesToDelete.add(table); + } + } + }); + tableRepository.saveAll(tablesToChange); + tableRepository.deleteAll(tablesToDelete); + } + + public boolean acceptChanges(Long prId, Long clientRepoId) { + doTableChanges(prId, true, clientRepoId); + doFieldChanges(prId, true); + modelService.updateModelSetPrIdToNull(prId); + schemaFileAuditService.updateSchemaFileAuditSetPrIdToNull(prId); + return true; + } + + public List addFields(List fields) { + return fieldRepostory.saveAll(fields); + } + + public Field addField(Field field) { + return fieldRepostory.save(field); + } + + public SchemaValidationDto schemaCompare(Map pathTableMap, ClientRepo clientRepo, Long prId) { + return schemaValidator.schemaCompare(pathTableMap, clientRepo, prId); + } + + @Transactional + public Object uploadCsvToGit(List multipartFiles) throws IOException { + var tableIds = getTablesFromCsvFile(multipartFiles).stream().map(Table::getId).toList(); + if (Boolean.FALSE.equals(tableIds.isEmpty())) { + log.info("Generating File Contents"); + var fileContentMap = schemaFileAuditService.generateFileContentOfSchema(tableIds); + 
gitHubService.commitAndPushInMainBranch(fileContentMap, "Initial commit"); + } + return Status.SUCCESS; + } + + public List
getTablesFromCsvFile(List multipartFiles) throws IOException { + Map tables = new HashMap<>(); + var clientRepo = clientRepoService.getSchemataRepo().get(); + multipartFiles.stream().forEach(file -> { + validateCsvFile(file); + log.info("Reading csv file {}", file.getOriginalFilename()); + var arrayNode = FileUtil.readCsvFile(file); + tables.putAll(getTablesFromArryaNode(arrayNode)); + }); + var fullnames = new ArrayList<>(tables.keySet()); + var models = modelService.findByFullNames(fullnames).stream().map(model -> StringUtil.constructStringEmptySeparator(model.getNameSpace(),".",model.getName())).toList(); + List
newTables = new ArrayList<>(); + log.info("Filtering Already exists tables"); + for (Table table : tables.values()) { + var fullName = StringUtil.constructStringEmptySeparator(table.getNameSpace(),".",table.getName()); + if (Boolean.FALSE.equals(models.contains(fullName))) { + newTables.add(table); + } + } + log.info("Saving new tables from csv files"); + newTables = tableRepository.saveAll(newTables); + newTables.forEach(table -> { + var schemaFileAudit = schemaFileAuditService.createSchemaFileAuditForNewFile(clientRepo, table, Constants.CSV_UPLOAD_ROOT_FILE_PATH); + modelService.createModel(schemaFileAudit, clientRepo.getId(), table); + }); + log.info("Creating Domain Tables RelationShip"); + tableRepository.createTableDomainRelationShip(clientRepo.getId(), newTables.stream().map(Table::getId).toList()); + return newTables; + } + + private void validateCsvFile(MultipartFile multipartFile) { + log.info("Validating file {}", multipartFile.getOriginalFilename()); + if (Boolean.FALSE.equals(FileUtil.isCSVFormat(multipartFile))) + throw new InvalidDataException(ErrorCode.INVALID_FILE, responseMessage.getErrorMessage(ErrorCode.INVALID_FILE, multipartFile.getName())); + var headers = FileUtil.getCsvFileHeaders(multipartFile); + for (String column : TableCsvDto.getCsvHeaders()) { + if (Boolean.FALSE.equals(headers.contains(column))) + throw new InvalidDataException(ErrorCode.UNRECOGNIZED_COLUMN_IN_CSV_FILE_NAME, responseMessage.getErrorMessage(ErrorCode.UNRECOGNIZED_COLUMN_IN_CSV_FILE_NAME, column, multipartFile.getOriginalFilename())); + } + } + + public Map getTablesFromArryaNode(ArrayNode arrayNode) { + var iterator = arrayNode.iterator(); + Map tables = new HashMap<>(); + while (iterator.hasNext()) { + var csvDto = objectMapper.convertValue(iterator.next(), TableCsvDto.class); + var fullName = StringUtil.constructStringEmptySeparator(csvDto.getTableNamespace(), ".", csvDto.getTableName()); + var table = tables.get(fullName); + if (Objects.isNull(table)){ + table = 
Table.builder().name(csvDto.getTableName()).nameSpace(csvDto.getTableNamespace()).type("object") + .clientId(SecurityUtil.getClientId()).description(csvDto.getTableDescription()).build(); + } + var fields = new ArrayList<>(table.getFields()); + var field = Field.builder().name(csvDto.getColumnName()).schema(fullName).dataType(csvDto.getDataType()) + .rowNumber(fields.size() + 1).isPrimitiveType(Field.isPrimitiveType(csvDto.getDataType())).isPii(csvDto.getIsPii()) + .description(csvDto.getColumnDescription()).isClassified(csvDto.getIsClassified()).build(); + fields.add(field); + table.setFields(fields); + tables.put(fullName, table); + } + return tables; + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/validate/FieldTrigger.java b/connect/src/main/java/com/opsbeach/connect/schemata/validate/FieldTrigger.java new file mode 100644 index 0000000..64602a0 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/validate/FieldTrigger.java @@ -0,0 +1,14 @@ +package com.opsbeach.connect.schemata.validate; + +import java.util.function.Predicate; +import org.apache.commons.lang3.StringUtils; +import org.springframework.util.ObjectUtils; + +import com.opsbeach.connect.schemata.entity.Field; + + +public interface FieldTrigger extends Predicate { + + FieldTrigger isDescriptionEmpty = field -> StringUtils.isBlank(field.getDescription()); + // FieldTrigger isClassificationLevelEmpty = field -> ObjectUtils.isEmpty(field.getIsClassified()); +} diff --git a/connect/src/main/java/com/opsbeach/connect/schemata/validate/FieldValidator.java b/connect/src/main/java/com/opsbeach/connect/schemata/validate/FieldValidator.java new file mode 100644 index 0000000..5554f07 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/schemata/validate/FieldValidator.java @@ -0,0 +1,32 @@ +package com.opsbeach.connect.schemata.validate; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.function.Function; + 
// ===== file: connect/src/main/java/com/opsbeach/connect/schemata/validate/FieldValidator.java =====
// NOTE(review): reconstructed from a mangled patch; generic type parameters were
// stripped by extraction and re-inferred — confirm against the repo.
package com.opsbeach.connect.schemata.validate;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.function.Function;

import org.springframework.stereotype.Component;

import com.opsbeach.connect.schemata.entity.Field;

// import static com.opsbeach.connect.schemata.validate.FieldTrigger.isClassificationLevelEmpty;
import static com.opsbeach.connect.schemata.validate.FieldTrigger.isDescriptionEmpty;

/**
 * Validates a single Field against the field-level metadata rules and reports
 * the outcome as a Result: SUCCESS with no messages when no rule fires, ERROR
 * with one message per fired rule otherwise.
 */
@Component
public class FieldValidator implements Function<Field, Result>, Validator {

    @Override
    public Result apply(Field field) {
        List<String> errors = new ArrayList<>();
        for (Map.Entry<Rules, FieldTrigger> ruleTrigger : fieldValidatorMap().entrySet()) {
            // test(...) comes from Validator: yields the rule's message iff the trigger fires.
            var result = test(ruleTrigger.getKey(), ruleTrigger.getValue(), field);
            result.ifPresent(errors::add);
        }
        // FIX(idiom): collapse the duplicated Result construction that differed only
        // in Status, and prefer isEmpty() over size() == 0.
        var status = errors.isEmpty() ? Status.SUCCESS : Status.ERROR;
        return new Result(field.getId(), field.getName(), status, errors);
    }

    // Rule -> trigger wiring; add FIELD_CLASSIFICATION_EMPTY here when re-enabled.
    private Map<Rules, FieldTrigger> fieldValidatorMap() {
        return Map.of(Rules.FIELD_DESCRIPTION_EMPTY, isDescriptionEmpty);
    }
}

// ===== file: connect/src/main/java/com/opsbeach/connect/schemata/validate/Result.java =====
package com.opsbeach.connect.schemata.validate;

import java.util.List;

/** Outcome of validating one schema or field: its id/name, status, and the fired-rule messages. */
public record Result(Long id, String name, Status status, List<String> errorMessages) {
}

// ===== file: connect/src/main/java/com/opsbeach/connect/schemata/validate/Rules.java =====
// (renamed in the patch from org.schemata.validate.Rules; one value added)
package com.opsbeach.connect.schemata.validate;

/**
 * Catalogue of schema- and field-level validation rules with their
 * human-readable messages.
 */
public enum Rules {
    SCHEMA_DESCRIPTION_EMPTY("Schema description metadata is null or empty"),
    SCHEMA_DOMAIN_EMPTY("Schema domain metadata is null or empty"),
    SCHEMA_OWNER_EMPTY("Schema owner metadata is null or empty"),
    SCHEMA_UNKNOWN_TYPE("UNKNOWN is not a valid type. It should be either ENTITY or EVENT"),
    // Added by this patch.
    SCHEMA_VALID_ENTITY_WITH_PRIMARY_KEY("Schema defined as Entity but missing the primary key"),
    FIELD_DESCRIPTION_EMPTY("Field description metadata is null or empty"),
    FIELD_CLASSIFICATION_EMPTY("The field marked as classified, but the classification level is missing");

    // NOTE(review): the enum's field/constructor/accessor fall outside this chunk of
    // the patch; reconstructed minimally here — confirm names against the repo.
    private final String message;

    Rules(String message) {
        this.message = message;
    }

    public String getMessage() {
        return message;
    }
}

// ===== file: connect/src/main/java/com/opsbeach/connect/schemata/validate/SchemaTrigger.java =====
package com.opsbeach.connect.schemata.validate;

import java.util.function.Predicate;
import org.apache.commons.lang3.StringUtils;
import com.opsbeach.connect.schemata.entity.Field;
import com.opsbeach.connect.schemata.entity.Table;
import com.opsbeach.connect.schemata.enums.SchemaType;

/**
 * Named predicates over a Table (schema), wired to Rules entries by
 * SchemaValidator. Each constant answers "does this schema violate the
 * corresponding rule?".
 */
public interface SchemaTrigger extends Predicate<Table> {

    // Fires when schema description metadata is blank (Rules.SCHEMA_DESCRIPTION_EMPTY).
    SchemaTrigger isDescriptionEmpty = schema -> StringUtils.isBlank(schema.getDescription());

    // Fires when schema owner metadata is blank (Rules.SCHEMA_OWNER_EMPTY).
    SchemaTrigger isOwnerEmpty = schema -> StringUtils.isBlank(schema.getOwner());

    // Fires when schema domain metadata is blank (Rules.SCHEMA_DOMAIN_EMPTY).
    SchemaTrigger isDomainEmpty = schema -> StringUtils.isBlank(schema.getDomain());

    // Disabled rules kept verbatim for reference; re-enable together with their
    // Rules entries (SCHEMA_UNKNOWN_TYPE, SCHEMA_VALID_ENTITY_WITH_PRIMARY_KEY).
    // SchemaTrigger isInValidType = schema -> SchemaType.UNKNOWN.name().equalsIgnoreCase(schema.getType());

    // SchemaTrigger isPrimaryKeyNotExistsForEntity = schema -> {
    //     if (!schema.getType().equalsIgnoreCase(SchemaType.ENTITY.name())) {
    //         return false;
    //     }
    //     return schema.getFields().stream().filter(Field::getIsPrimaryKey).count() != 1;
    // };
}
com.opsbeach.connect.github.service.SchemaFileAuditService; +import com.opsbeach.connect.schemata.dto.SchemaValidationDto; +import com.opsbeach.connect.schemata.entity.Field; +import com.opsbeach.connect.schemata.entity.Table; +import com.opsbeach.connect.schemata.service.TableService; +import com.opsbeach.sharedlib.exception.ErrorCode; +import com.opsbeach.sharedlib.response.ResponseMessage; +import com.opsbeach.sharedlib.utils.StringUtil; + +import static com.opsbeach.connect.schemata.validate.SchemaTrigger.*; + +@Component +public class SchemaValidator implements Function, Validator
{ + + @Override + public Result apply(Table schema) { + + List errors = new ArrayList<>(); + for (Map.Entry ruleTrigger : schemaValidatorMap().entrySet()) { + var result = test(ruleTrigger.getKey(), ruleTrigger.getValue(), schema); + result.ifPresent(errors::add); + } + + return errors.size() == 0 ? new Result(schema.getId(), schema.getName(), Status.SUCCESS, errors) + : new Result(schema.getId(), schema.getName(), Status.ERROR, errors); + } + + private Map schemaValidatorMap() { + return Map.of(Rules.SCHEMA_DESCRIPTION_EMPTY, isDescriptionEmpty, Rules.SCHEMA_OWNER_EMPTY, isOwnerEmpty, + Rules.SCHEMA_DOMAIN_EMPTY, isDomainEmpty); + } + + @Value("${github.construct-file-path}") + private String githubFilePath; + + private final TableService tableService; + private final SchemaFileAuditService schemaFileAuditService; + private final ResponseMessage responseMessage; + private final FieldValidator fieldValidator; + private final PullRequestService pullRequestService; + private final ModelService modelService; + + public SchemaValidator(@Lazy TableService tableService, @Lazy SchemaFileAuditService schemaFileAuditService, ResponseMessage responseMessage, + FieldValidator fieldValidator, PullRequestService pullRequestService, ModelService modelService) { + this.tableService = tableService; + this.schemaFileAuditService = schemaFileAuditService; + this.responseMessage = responseMessage; + this.fieldValidator = fieldValidator; + this.pullRequestService = pullRequestService; + this.modelService = modelService; + } + + public SchemaValidationDto schemaCompare(Map pathTableMap, ClientRepo clientRepo, Long prId) { + List errors = new ArrayList<>(); + Map>> errorMap = new HashMap<>(); + ObjectNode changes = JsonNodeFactory.instance.objectNode(); + Set tableFullNames = new HashSet<>(); + for (Map.Entry pathTable : pathTableMap.entrySet()) { + Map> tableErrorMap = new HashMap<>(); + var filePath = pathTable.getKey(); + var githubPath = githubFilePath.replace("{repoFullName}", 
clientRepo.getFullName()) + .replace("{branch}", clientRepo.getDefaultBranch()) + .replace("{filePath}", filePath); + ObjectNode filechange = changes.putObject(githubPath); + var schemaFileAudit = schemaFileAuditService.getSchemaFileAudit(githubPath, null); //send prId is null then only it will fetch file details of main branch + if (ObjectUtils.isEmpty(schemaFileAudit)) { + validateNewSchema(pathTable.getValue(), filechange, tableFullNames, filePath, prId, tableErrorMap, errors); + continue; // new schema, no need of any validations. + } + var tableOld = tableService.getModel(schemaFileAudit.getRootNodeId()); + var tableNew = pathTable.getValue(); + validateSchema(tableOld, tableNew, filechange, tableFullNames, filePath, prId, tableErrorMap); + if (tableErrorMap.isEmpty()) continue; + errorMap.put(filePath, tableErrorMap); + } + checkSchemaChangesInOtherFiles(changes, errors, tableFullNames, clientRepo); + return constructMessage(errors, changes, errorMap); + } + + private SchemaValidationDto constructMessage(List errors, ObjectNode changes, Map>> errorMap) { + if (errors.isEmpty() && errorMap.isEmpty()) { + return SchemaValidationDto.builder().status(true).build(); + } + return SchemaValidationDto.builder().status(false).errorMessages(errors) + .changes(changes.toPrettyString()).errorMap(errorMap).build(); + } + + public void validateSchema(Table tableOld, Table tableNew, ObjectNode fileChange, Set tableFullNames, + String filePath, Long prId, Map> tableErrorMap) { + // we dont allow schema name change + var tableFullName = tableOld.getNameSpace()+'.'+tableOld.getName(); + List errors = new ArrayList<>(); + if (!tableNew.getName().equals(tableOld.getName())) { + errors.add(responseMessage.getErrorMessage(ErrorCode.TABLE_NAME_CHANGE_NOT_ALLOWED_FROM_TO, tableOld.getName(), tableNew.getName())); + } + if (!ObjectUtils.isEmpty(tableNew.getJsonSchemaId()) && !tableNew.getJsonSchemaId().equals(tableOld.getJsonSchemaId())) { + 
errors.add(responseMessage.getErrorMessage(ErrorCode.JSON_SCHEMA_ID_CHANGE_NOT_ALLOWED_FROM_TO, tableOld.getJsonSchemaId(), tableNew.getJsonSchemaId())); + } + if (!tableNew.getNameSpace().equals(tableOld.getNameSpace())) { + errors.add(responseMessage.getErrorMessage(ErrorCode.TABLE_NAMESPACE_CHANGE_NOT_ALLOWED_FROM_TO, tableOld.getNameSpace(), tableNew.getNameSpace())); + } + if (Objects.nonNull(tableOld.getOwner()) && Boolean.FALSE.equals(tableOld.getOwner().equals(tableNew.getOwner()))) { + errors.add(responseMessage.getErrorMessage(ErrorCode.TABLE_OWNER_METADATA_CHANGED_FROM_TO_NOT_ALLOWD_TABLE_NAME, tableOld.getOwner(), tableNew.getOwner(), tableOld.getName())); + } + if (Objects.nonNull(tableOld.getDomain()) && Boolean.FALSE.equals(tableOld.getDomain().equals(tableNew.getDomain()))) { + errors.add(responseMessage.getErrorMessage(ErrorCode.TABLE_DOMAIN_METADATA_CHANGED_FROM_TO_NOT_ALLOWD_TABLE_NAME, tableOld.getDomain(), tableNew.getDomain(), tableOld.getName())); + } + Map newTableFields = tableNew.getFields().stream().collect(Collectors.toMap(Field::getName, Function.identity())); + Map oldTableFields = tableOld.getFields().stream().filter(field -> (field.getIsDeleted() == true || field.getPrId() == null)) + .collect(Collectors.toMap(Field::getName, Function.identity())); // filter is to remove fields in PR. 
+ int oldTableSize = oldTableFields.size(); + int newTableSize = newTableFields.size(); + if (oldTableSize > newTableSize) { //field deleted + errors.add(responseMessage.getErrorMessage(ErrorCode.FIELD_DELETED_NOT_ALLOWED)); + } + + ObjectNode tableChange = fileChange.putObject(tableFullName); + ObjectNode fieldChanges = tableChange.putObject("fields"); + + for (Map.Entry oldFieldEntry : oldTableFields.entrySet()) { + // field name dont change + if (newTableFields.get(oldFieldEntry.getKey()) == null) { + var message = responseMessage.getErrorMessage(ErrorCode.FIELD_NAME_UPDATION_NOT_ALLOWED, oldFieldEntry.getKey()); + errors.add(message); + } else { + validateField(newTableFields.get(oldFieldEntry.getKey()), oldFieldEntry.getValue(), tableNew, errors, fieldChanges, fileChange, tableFullNames, filePath, prId, tableErrorMap); + newTableFields.remove(oldFieldEntry.getKey()); + } + } + // this loop is for validate nested table of added new field. + newTableFields.entrySet().stream().forEach(newFieldEntry -> checkNestedTableForNewField(newFieldEntry.getValue(), fileChange, fieldChanges, tableFullNames, filePath, tableNew.getName(), prId, tableErrorMap, errors)); + if (fieldChanges.isEmpty()) tableChange.remove("fields"); + if (tableService.compareTable(tableNew, tableOld)) { + StringUtil.addToJsonNode(tableChange, Table.Prop.DESCRIPTION, tableNew.getDescription()); // need to add more table props which can change. 
+ StringUtil.addToJsonNode(tableChange, Table.Prop.OWNER, tableNew.getOwner()); + StringUtil.addToJsonNode(tableChange, Table.Prop.DOMAIN, tableNew.getDomain()); + StringUtil.addToJsonNode(tableChange, Table.Prop.EMAIL, tableNew.getEmail()); + StringUtil.addToJsonNode(tableChange, Table.Prop.COMPLIANCE_OWNER, tableNew.getComplianceOwner()); + StringUtil.addToJsonNode(tableChange, Table.Prop.CHANNEL, tableNew.getChannel()); + StringUtil.addToJsonNode(tableChange, Table.Prop.STATUS, tableNew.getStatus()); + StringUtil.addToJsonNode(tableChange, Table.Prop.QUALITY_RULE_BASE, tableNew.getQualityRuleBase()); + StringUtil.addToJsonNode(tableChange, Table.Prop.QUALITY_RULE_SQL, tableNew.getQualityRuleSql()); + StringUtil.addToJsonNode(tableChange, Table.Prop.QUALITY_RULE_CEL, tableNew.getQualityRuleCel()); + StringUtil.addArrayToJsonNode(tableChange, Table.Prop.SUBSCRIBERS, tableNew.getSubscribers()); + StringUtil.addArrayToJsonNode(tableChange, Table.Prop.REQUIRED, tableNew.getRequiredFields()); + // addStringToNode(tableChange, Table.Prop.ADDITIONAL_PROPERTIES,); + } + if (tableChange.isEmpty()) { + fileChange.remove(tableFullName); + } else { + tableFullNames.add(tableNew.getNameSpace()+'.'+tableNew.getName()); + // if the changes is done in this table means need to check it and validate. 
+ checkTableChangeInAnotherPr(tableOld, tableNew, prId, errors, filePath); + } + if (errors.isEmpty()) return; + tableErrorMap.put(tableFullName, errors); + } + + private void checkTableChangeInAnotherPr(Table tableOld, Table tableNew, Long prId, List errors, String filePath) { + if (!ObjectUtils.isEmpty(tableOld.getPrId()) && !tableOld.getPrId().equals(prId)) { + var message = responseMessage.getErrorMessage(ErrorCode.TABLE_ALREADY_MODIFIED_IN_PR, tableOld.getName(), pullRequestService.getModel(tableOld.getPrId()).getNumber()); + errors.add(message); + } + var oldPr = tableOld.getFields().stream().filter(field -> !ObjectUtils.isEmpty(field.getPrId())).findFirst(); + if (oldPr.isPresent() && Boolean.FALSE.equals(prId.equals(oldPr.get().getPrId()))) { + var message = responseMessage.getErrorMessage(ErrorCode.TABLE_ALREADY_MODIFIED_IN_PR, tableOld.getName(), pullRequestService.getModel(oldPr.get().getPrId()).getNumber()); + errors.add(message); + } + // NEED TO CHECK NECCESSORY FIELDS IS PRESENT IN TABLE AND FIELD. 
+ checkMetadataIsEmpty(tableNew, errors); + } + + private void checkMetadataIsEmpty(Table table, List errors) { + var schemaError = apply(table); + if (schemaError.status().equals(Status.ERROR)) { + var message = schemaError.errorMessages().stream().map(msg -> responseMessage.getErrorMessage(ErrorCode.TABLE_METADATA_MISSING_NAME, msg, table.getName())).toList(); + errors.add(String.join(" ", message)); + } + table.getFields().stream().forEach(field -> { + var error = fieldValidator.apply(field); + if (error.status().equals(Status.ERROR)) { + var message = error.errorMessages().stream().map(msg -> responseMessage.getErrorMessage(ErrorCode.FIELD_METADATA_MISSING_NAME_IN_TABLE, msg, field.getName(), table.getName())).toList(); + errors.add(String.join(" ", message)); + } + }); + } + + private void addBooleanToNode(ObjectNode node, String key, Boolean value) { + if (value == null) { + node.putNull(key); + } else { + node.put(key, value); + } + } + + private void validateNewSchema(Table tableNew, ObjectNode fileChange, Set tableFullNames, + String filePath, Long prId, Map> tableErrorMap, List errors) { + var tableOld = tableService.findByNameAndNameSpace(tableNew.getName(), tableNew.getNameSpace()); + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(tableOld)) && isMainBranchTable(tableOld)) { + validateSchema(tableOld, tableNew, fileChange, tableFullNames, filePath, prId, tableErrorMap); + } else { + var tableFullName = tableNew.getNameSpace()+'.'+tableNew.getName(); + ObjectNode tableChange = fileChange.putObject(tableFullName); + ObjectNode fieldChanges = tableChange.putObject("fields"); + + tableNew.getFields().stream().forEach(field -> checkNestedTableForNewField(field, fileChange, fieldChanges, tableFullNames, filePath, tableNew.getName(), prId, tableErrorMap, errors)); + checkMetadataIsEmpty(tableNew, errors); + if (fieldChanges.isEmpty()) tableChange.remove("fields"); + StringUtil.addToJsonNode(tableChange, Table.Prop.DESCRIPTION, tableNew.getDescription()); // need 
to add more table props which can change. + StringUtil.addToJsonNode(tableChange, Table.Prop.OWNER, tableNew.getOwner()); + StringUtil.addToJsonNode(tableChange, Table.Prop.DOMAIN, tableNew.getDomain()); + StringUtil.addToJsonNode(tableChange, Table.Prop.EMAIL, tableNew.getEmail()); + StringUtil.addToJsonNode(tableChange, Table.Prop.COMPLIANCE_OWNER, tableNew.getComplianceOwner()); + StringUtil.addToJsonNode(tableChange, Table.Prop.CHANNEL, tableNew.getChannel()); + StringUtil.addToJsonNode(tableChange, Table.Prop.STATUS, tableNew.getStatus()); + StringUtil.addToJsonNode(tableChange, Table.Prop.QUALITY_RULE_BASE, tableNew.getQualityRuleBase()); + StringUtil.addToJsonNode(tableChange, Table.Prop.QUALITY_RULE_SQL, tableNew.getQualityRuleSql()); + StringUtil.addToJsonNode(tableChange, Table.Prop.QUALITY_RULE_CEL, tableNew.getQualityRuleCel()); + StringUtil.addArrayToJsonNode(tableChange, Table.Prop.SUBSCRIBERS, tableNew.getSubscribers()); + StringUtil.addArrayToJsonNode(tableChange, Table.Prop.REQUIRED, tableNew.getRequiredFields()); + tableFullNames.add(tableNew.getNameSpace()+'.'+tableNew.getName()); + } + } + + private Boolean isMainBranchTable(Table table) { + return ((table.getIsDeleted().equals(Boolean.TRUE) && table.getIsUserChanged().equals(Boolean.TRUE)) || + (table.getIsDeleted().equals(Boolean.FALSE) && table.getIsUserChanged().equals(Boolean.FALSE))); + } + + private void checkNestedTableForNewField(Field field, ObjectNode fileChange, ObjectNode fieldChanges, Set tableFullNames, + String filePath, String tableName, Long prId, Map> tableErrorMap, List errors) { + if (field.getIsPrimitiveType().equals(Boolean.FALSE)) { + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(field.getContain()))) { + validateNewSchema(field.getContain(), fileChange, tableFullNames, filePath, prId, tableErrorMap, errors); + } + switch(field.getDataType()) { + case "array" -> checkNestedTableForNewArrayField(field, fileChange, fieldChanges, tableFullNames, filePath, tableName, prId, 
tableErrorMap, errors); + case "map" -> checkNestedTableForNewMapField(field, fileChange, fieldChanges, tableFullNames, filePath, tableName, prId, tableErrorMap, errors); + case "union" -> checkNestedTableForNewUnionField(field, fileChange, fieldChanges, tableFullNames, filePath, tableName, prId, tableErrorMap, errors); + } + } + if (fieldChanges != null) { + ObjectNode currentFieldChange = fieldChanges.putObject(field.getName()); + StringUtil.addToJsonNode(currentFieldChange, Field.Prop.DESCRIPTION, field.getDescription()); + StringUtil.addToJsonNode(currentFieldChange, Field.Prop.DATA_TYPE, field.getDataType()); + StringUtil.addArrayToJsonNode(currentFieldChange, Field.Prop.ENUM, field.getSymbols()); + addBooleanToNode(currentFieldChange, Field.Prop.IS_PII, field.getIsPii()); + addBooleanToNode(currentFieldChange, Field.Prop.IS_CLASSIFIED, field.getIsClassified()); + addBooleanToNode(currentFieldChange, Field.Prop.DEPRECATED, field.getDeprecated()); + } + } + + private void checkNestedTableForNewArrayField(Field field, ObjectNode fileChange, ObjectNode fieldChanges, Set tableFullNames, + String filePath, String tableName, Long prId, Map> tableErrorMap, List errors) { + var items = field.getItems(); + if (items.equalsIgnoreCase("array")) checkNestedTableForNewArrayField(field.getArrayField(), fileChange, fieldChanges, tableFullNames, filePath, tableName, prId, tableErrorMap, errors); + else if (items.equalsIgnoreCase("map")) checkNestedTableForNewMapField(field.getMapField(), fileChange, fieldChanges, tableFullNames, filePath, tableName, prId, tableErrorMap, errors); + else if (items.equalsIgnoreCase("union")) checkNestedTableForNewUnionField(field, fileChange, fieldChanges, tableFullNames, filePath, tableName, prId, tableErrorMap, errors); + else if (Boolean.FALSE.equals(ObjectUtils.isEmpty(field.getContain()))) validateNewSchema(field.getContain(), fileChange, tableFullNames, filePath, prId, tableErrorMap, errors); + } + + private void 
checkNestedTableForNewMapField(Field field, ObjectNode fileChange, ObjectNode fieldChanges, Set tableFullNames, + String filePath, String tableName, Long prId, Map> tableErrorMap, List errors) { + var values = field.getValues(); + if (values.equalsIgnoreCase("array")) checkNestedTableForNewArrayField(field.getArrayField(), fileChange, fieldChanges, tableFullNames, filePath, tableName, prId, tableErrorMap, errors); + else if (values.equalsIgnoreCase("map")) checkNestedTableForNewMapField(field.getMapField(), fileChange, fieldChanges, tableFullNames, filePath, tableName, prId, tableErrorMap, errors); + else if (values.equalsIgnoreCase("union")) checkNestedTableForNewUnionField(field, fileChange, fieldChanges, tableFullNames, filePath, tableName, prId, tableErrorMap, errors); + else if (Boolean.FALSE.equals(ObjectUtils.isEmpty(field.getContain()))) validateNewSchema(field.getContain(), fileChange, tableFullNames, filePath, prId, tableErrorMap, errors); + } + + private void checkNestedTableForNewUnionField(Field field, ObjectNode fileChange, ObjectNode fieldChanges, Set tableFullNames, + String filePath, String tableName, Long prId, Map> tableErrorMap, List errors) { + field.getUnionTypes().stream().forEach(unionField -> checkNestedTableForNewField(unionField, fileChange, null, tableFullNames, filePath, tableName, prId, tableErrorMap, errors)); + } + + public void validateField(Field fieldNew, Field fieldOld, Table tableNew, List errors, ObjectNode fieldChanges, ObjectNode fileChange, + Set tableFullNames, String filePath, Long prId, Map> tableErrorMap) { + + // field type dont change + if (!fieldOld.getDataType().equals(fieldNew.getDataType())) { + var message = responseMessage.getErrorMessage(ErrorCode.FIELD_DATATYPE_UPDATION_NOT_ALLOWED_FOR_FIELD, fieldOld.getName()); + errors.add(message); + } + else if (fieldOld.getDataType().equals("union")) { + // check union filed + validateUnionField(fieldNew, fieldOld, tableNew, errors, fileChange, tableFullNames, filePath, 
prId, tableErrorMap); + } + else if (fieldOld.getDataType().equals("array")) { + // check array field + validateArrayField(fieldNew, fieldOld, tableNew, errors, fileChange, tableFullNames, filePath, prId, tableErrorMap); + } + else if (fieldOld.getDataType().equals("map")) { + // check map field + validateMapField(fieldNew, fieldOld, tableNew, errors, fileChange, tableFullNames, filePath, prId, tableErrorMap); + } + else if (Boolean.FALSE.equals(ObjectUtils.isEmpty(fieldNew.getContain()))) { + validateNestedSchema(fieldNew, fieldOld, tableNew, errors, fileChange, tableFullNames, filePath, prId, tableErrorMap); + } + if (fieldChanges != null && tableService.compareField(fieldNew, fieldOld, RepoType.AVRO)) { // Here is repotype hardcoded because here protobuf will no get validated. + ObjectNode fieldChange = fieldChanges.putObject(fieldNew.getName()); + StringUtil.addToJsonNode(fieldChange, Field.Prop.DESCRIPTION, fieldNew.getDescription()); + StringUtil.addToJsonNode(fieldChange, Field.Prop.DATA_TYPE, fieldNew.getDataType()); + StringUtil.addArrayToJsonNode(fieldChange, Field.Prop.ENUM, fieldNew.getSymbols()); + addBooleanToNode(fieldChange, Field.Prop.IS_PII, fieldNew.getIsPii()); + addBooleanToNode(fieldChange, Field.Prop.IS_CLASSIFIED, fieldNew.getIsClassified()); + addBooleanToNode(fieldChange, Field.Prop.DEPRECATED, fieldNew.getDeprecated()); + } + } + + private void validateNestedSchema(Field fieldNew, Field fieldOld, Table tableNew, List errors, ObjectNode fileChange, + Set tableFullNames, String filePath, Long prId, Map> tableErrorMap) { + Map> errorMap = new HashMap<>(); + validateSchema(fieldOld.getContain(), fieldNew.getContain(), fileChange, tableFullNames, filePath, prId, errorMap); + if (!errorMap.isEmpty()) { + var message = responseMessage.getErrorMessage(ErrorCode.FIELD_NAME_OF_TABLE_HAS_ERROR, fieldOld.getName(), tableNew.getName()); + errors.add(message); + tableErrorMap.putAll(errorMap); + } + } + + private void validateUnionField(Field fieldNew, 
Field fieldOld, Table tableNew, List errors, ObjectNode fileChange, + Set tableFullNames, String filePath, Long prId, Map> tableErrorMap) { + Map oldUnionFields = fieldOld.getUnionTypes().stream().collect(Collectors.toMap(Field::getDataType, Function.identity())); + Map newUnionFields = fieldNew.getUnionTypes().stream().collect(Collectors.toMap(Field::getDataType, Function.identity())); + if (oldUnionFields.size() == newUnionFields.size()) { + for (Map.Entry oldFieldEntry : oldUnionFields.entrySet()) { + if (newUnionFields.get(oldFieldEntry.getKey()) == null) { + var message = responseMessage.getErrorMessage(ErrorCode.UNION_FIELD_DATATYPE_UPDATION_NOT_ALLOWED_FOR_FIELD, fieldOld.getName()); + errors.add(message); + } else { + validateField(newUnionFields.get(oldFieldEntry.getKey()), oldFieldEntry.getValue(), tableNew, errors, null, fileChange, tableFullNames, filePath, prId, tableErrorMap); + } + } + } else { + var message = responseMessage.getErrorMessage(ErrorCode.UNION_FIELD_DATATYPE_UPDATION_NOT_ALLOWED_FOR_FIELD, fieldOld.getName()); + errors.add(message); + } + } + + public void validateArrayField(Field fieldNew, Field fieldOld, Table tableNew, List errors, ObjectNode fileChange, + Set tableFullNames, String filePath, Long prId, Map> tableErrorMap) { + // compare the array type + if (!fieldOld.getItems().equals(fieldNew.getItems())) { + var message = responseMessage.getErrorMessage(ErrorCode.ARRAY_FIELD_TYPE_UPDATION_NOT_ALLOWED_FOR_FIELD, fieldOld.getName()); + errors.add(message); + } + else if(fieldOld.getItems().equals("union")) { + // check union filed + validateUnionField(fieldNew, fieldOld, tableNew, errors, fileChange, tableFullNames, filePath, prId, tableErrorMap); + } + else if (fieldOld.getItems().equals("array")) { + // check array field + validateArrayField(fieldNew.getArrayField(), fieldOld.getArrayField(), tableNew, errors, fileChange, tableFullNames, filePath, prId, tableErrorMap); + } + else if (fieldOld.getItems().equals("map")) { + // check 
map field + validateMapField(fieldNew.getMapField(), fieldOld.getMapField(), tableNew, errors, fileChange, tableFullNames, filePath, prId, tableErrorMap); + } + else if (Boolean.FALSE.equals(ObjectUtils.isEmpty(fieldNew.getContain()))) { + validateNestedSchema(fieldNew, fieldOld, tableNew, errors, fileChange, tableFullNames, filePath, prId, tableErrorMap); + } + } + + public void validateMapField(Field fieldNew, Field fieldOld, Table tableNew, List errors, ObjectNode fileChange, + Set tableFullNames, String filePath, Long prId, Map> tableErrorMap) { + // compare the array type + if (!fieldOld.getValues().equals(fieldNew.getValues())) { + var message = responseMessage.getErrorMessage(ErrorCode.MAP_FIELD_TYPE_UPDATION_NOT_ALLOWED_FOR_FIELD, fieldOld.getName()); + errors.add(message); + } + else if(fieldOld.getValues().equals("union")) { + // check union filed + validateUnionField(fieldNew, fieldOld, tableNew, errors, fileChange, tableFullNames, filePath, prId, tableErrorMap); + } + else if (fieldOld.getValues().equals("array")) { + // check array field + validateArrayField(fieldNew.getArrayField(), fieldOld.getArrayField(), tableNew, errors, fileChange, tableFullNames, filePath, prId, tableErrorMap); + } + else if (fieldOld.getValues().equals("map")) { + // check map field + validateMapField(fieldNew.getMapField(), fieldOld.getMapField(), tableNew, errors, fileChange, tableFullNames, filePath, prId, tableErrorMap); + } + else if (Boolean.FALSE.equals(ObjectUtils.isEmpty(fieldNew.getContain()))) { + validateNestedSchema(fieldNew, fieldOld, tableNew, errors, fileChange, tableFullNames, filePath, prId, tableErrorMap); + } + } + + private void checkSchemaChangesInOtherFiles(ObjectNode changes, List errors, Set tableFullNames, ClientRepo clientRepo) { + System.out.println(changes.toPrettyString()); + var models = modelService.findByFullNames(tableFullNames, clientRepo.getId()); + Multimap namePathMap = LinkedHashMultimap.create(); + models.stream().forEach(model -> { + 
namePathMap.put(model.getNameSpace()+"."+model.getName(), model.getPath()); + }); + var basePath = StringUtil.constructStringEmptySeparator("https://github.com/",clientRepo.getFullName(),"/tree/main/"); + tableFullNames.stream().forEach(tableFullName -> { + // this contain the paths of other files which contain this table. + var paths = namePathMap.get(tableFullName).toArray(new String[0]); + if (paths.length == 0) { + // length will be 0 when there is new table. + var newPaths = new ArrayList(); + var pathIterator = changes.fieldNames(); + // Add new table path to 'paths' variable + // So that we can check the new table changes is reflected in other file which contain same table. + while(pathIterator.hasNext()) { + var path = pathIterator.next(); + if (changes.get(path).has(tableFullName)) newPaths.add(path); + } + paths = newPaths.toArray(new String[0]); + } + for (int i=0; i add(@RequestBody @Valid ConnectDto connectDto) { + return SuccessResponse.statusCreated(connectService.connect(connectDto)); + } + + @Transactional + @GetMapping("/{id}") + public SuccessResponse get(@PathVariable("id") Long id) { + return SuccessResponse.statusOk(connectService.get(id)); + } + + @Transactional + @PutMapping + public SuccessResponse update(@RequestBody @Valid ConnectDto connectDto) { + return SuccessResponse.statusOk(connectService.update(connectDto)); + } + + @Transactional + @GetMapping + public SuccessResponse> getAll() { + return SuccessResponse.statusOk(connectService.getAll()); + } + + @Transactional + @GetMapping("/services") + public SuccessResponse getAllServiceType() { + return SuccessResponse.statusOk(connectService.getAllServiceType()); + } + + @GetMapping("/check") + public SuccessResponse> checkConnectByServiceType() { + return SuccessResponse.statusOk(connectService.checkConnect()); + } + + @GetMapping("/check/{type}") + public SuccessResponse checkGithubConnect(@PathVariable("type") ServiceType serviceType) { + return 
SuccessResponse.statusOk(connectService.get(serviceType)); + } + + @PatchMapping("/{id}/repo-org") + public SuccessResponse addRepoOrganization(@PathVariable("id") Long id, @RequestParam("repoOrg") String repoOrganization) { + return SuccessResponse.statusOk(connectService.addRepoOrganization(id, repoOrganization)); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/task/controller/TaskController.java b/connect/src/main/java/com/opsbeach/connect/task/controller/TaskController.java new file mode 100644 index 0000000..e93d79f --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/task/controller/TaskController.java @@ -0,0 +1,43 @@ +package com.opsbeach.connect.task.controller; + +import java.util.List; + +import com.opsbeach.connect.task.dto.TaskDto; +import com.opsbeach.connect.task.service.TaskService; +import com.opsbeach.sharedlib.response.SuccessResponse; + +import lombok.RequiredArgsConstructor; + +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; +import org.springframework.transaction.annotation.Transactional; +import org.springframework.web.bind.annotation.DeleteMapping; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; + +@RestController +@RequestMapping("v1/task") +@RequiredArgsConstructor +public class TaskController { + + private final TaskService taskService; + + @Transactional + @PostMapping + public SuccessResponse add(@RequestBody TaskDto taskdDto) { + return SuccessResponse.statusCreated(taskService.add(taskdDto)); + } + + @Transactional + @GetMapping + public SuccessResponse> getAll() { + return SuccessResponse.statusOk(taskService.getAll()); + } + + @DeleteMapping("{id}") + public void removeTaskFromScheduler(@PathVariable("id") Long id) { + 
taskService.delete(id); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/task/dto/ConnectDto.java b/connect/src/main/java/com/opsbeach/connect/task/dto/ConnectDto.java new file mode 100644 index 0000000..0d50c29 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/task/dto/ConnectDto.java @@ -0,0 +1,72 @@ +package com.opsbeach.connect.task.dto; + +import com.fasterxml.jackson.annotation.JsonProperty; +import com.opsbeach.connect.core.BaseDto; +import com.opsbeach.connect.core.enums.AuthType; +import com.opsbeach.connect.core.enums.ServiceType; +import com.opsbeach.connect.task.entity.Connect; + +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; +import lombok.experimental.SuperBuilder; + +@Getter +@SuperBuilder +@AllArgsConstructor +@NoArgsConstructor +public class ConnectDto extends BaseDto { + + @Setter + private String headers; + + @JsonProperty("service_type") + private ServiceType serviceType; + + @Setter + @JsonProperty("auth_type") + private AuthType authType; + + private String domain; + + @JsonProperty("user_email") + private String userEmail; + + @Setter + @JsonProperty("auth_token") + private String authToken; + + @Setter + @JsonProperty("refresh_token") + private String refreshToken; + + @JsonProperty("project_key") + private String projectKey; + + @JsonProperty("channel_id") + private String channelId; + + @JsonProperty("user_name") + private String userName; + + @JsonProperty("repo_organization") + private String repoOrganization; + + public Connect toDomin(ConnectDto connectDto) { + return Connect.builder().id(connectDto.getId()) + .clientId(connectDto.getClientId()) + .headers(connectDto.getHeaders()) + .authType(connectDto.getAuthType()) + .domain(connectDto.getDomain()) + .userEmail(connectDto.getUserEmail()) + .authToken(connectDto.getAuthToken()) + .refreshToken(connectDto.getRefreshToken()) + .serviceType(connectDto.getServiceType()) + 
.projectKey(connectDto.getProjectKey()) + .channelId(connectDto.getChannelId()) + .userName(connectDto.getUserName()) + .repoOrganization(connectDto.getRepoOrganization()) + .build(); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/task/dto/TaskDto.java b/connect/src/main/java/com/opsbeach/connect/task/dto/TaskDto.java new file mode 100644 index 0000000..36db769 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/task/dto/TaskDto.java @@ -0,0 +1,55 @@ +package com.opsbeach.connect.task.dto; + +import java.time.LocalDateTime; + +import com.fasterxml.jackson.annotation.JsonProperty; +import com.opsbeach.connect.core.BaseDto; +import com.opsbeach.connect.core.enums.ServiceType; +import com.opsbeach.connect.core.enums.TaskType; +import com.opsbeach.connect.task.entity.Task; + +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; +import lombok.Builder.Default; +import lombok.experimental.SuperBuilder; + +@Getter +@SuperBuilder +@AllArgsConstructor +@NoArgsConstructor +public class TaskDto extends BaseDto { + + @JsonProperty("task_type") + private TaskType taskType; + + @JsonProperty("service_type") + private ServiceType serviceType; + + private Long connectId; + + private String url; + + @Setter + @Default + private long executionInterval = 3600000; + + @Setter + @JsonProperty("last_sync_date") + private LocalDateTime lastSyncDate; + + public Task toDomin(TaskDto taskDto) { + return Task.builder().id(taskDto.getId()) + .clientId(taskDto.getClientId()) + .createdAt(taskDto.getCreatedAt()) + .updatedAt(taskDto.getUpdatedAt()) + .serviceType(taskDto.getServiceType()) + .taskType(taskDto.getTaskType()) + .connectId(taskDto.getConnectId()) + .url(taskDto.getUrl()) + .executionInterval(taskDto.getExecutionInterval()) + .lastSyncDate(taskDto.getLastSyncDate()) + .build(); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/task/entity/Connect.java 
// --- connect/src/main/java/com/opsbeach/connect/task/entity/Connect.java ---
package com.opsbeach.connect.task.entity;

import com.opsbeach.connect.core.BaseModel;
import com.opsbeach.connect.core.enums.AuthType;
import com.opsbeach.connect.core.enums.ServiceType;
import com.opsbeach.connect.core.utils.Constants;
import com.opsbeach.connect.task.dto.ConnectDto;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.EnumType;
import jakarta.persistence.Enumerated;
import jakarta.persistence.Table;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import lombok.experimental.SuperBuilder;
import org.hibernate.annotations.ColumnTransformer;

/**
 * Persistent record of a third-party service connection. Credential-bearing
 * columns are encrypted at rest via pgcrypto's pgp_sym_encrypt /
 * pgp_sym_decrypt, wired in through @ColumnTransformer (the columns are bytea
 * in V1__Init.sql, which also creates the pgcrypto extension).
 *
 * NOTE(review): the symmetric key (Constants.AES_KEY) is a compile-time
 * constant inlined into the generated SQL — confirm the key-management and
 * rotation strategy; a vault-backed secret would be safer.
 */
@Entity
@Table
@Getter
@SuperBuilder
@AllArgsConstructor
@NoArgsConstructor
public class Connect extends BaseModel {

    // Serialized HTTP headers captured during credential validation.
    private String headers;

    @Enumerated(EnumType.STRING)
    @Column(name = "service_type")
    private ServiceType serviceType;

    @Enumerated(EnumType.STRING)
    @Column(name = "auth_type")
    private AuthType authType;

    // Base URL of the target service instance; encrypted at rest.
    @ColumnTransformer(
        read = "pgp_sym_decrypt(domain,'"+Constants.AES_KEY+"')",
        write = "pgp_sym_encrypt(?,'"+Constants.AES_KEY+"')"
    )
    private String domain;

    @ColumnTransformer(
        read = "pgp_sym_decrypt(user_email,'"+Constants.AES_KEY+"')",
        write = "pgp_sym_encrypt(?,'"+Constants.AES_KEY+"')"
    )
    @Column(name = "user_email")
    private String userEmail;

    @ColumnTransformer(
        read = "pgp_sym_decrypt(auth_token,'"+Constants.AES_KEY+"')",
        write = "pgp_sym_encrypt(?,'"+Constants.AES_KEY+"')"
    )
    @Column(name = "auth_token")
    private String authToken;

    @ColumnTransformer(
        read = "pgp_sym_decrypt(refresh_token,'"+Constants.AES_KEY+"')",
        write = "pgp_sym_encrypt(?,'"+Constants.AES_KEY+"')"
    )
    @Column(name = "refresh_token")
    private String refreshToken;

    @ColumnTransformer(
        read = "pgp_sym_decrypt(project_key,'"+Constants.AES_KEY+"')",
        write = "pgp_sym_encrypt(?,'"+Constants.AES_KEY+"')"
    )
    @Column(name = "project_key")
    private String projectKey;

    @ColumnTransformer(
        read = "pgp_sym_decrypt(channel_id,'"+Constants.AES_KEY+"')",
        write = "pgp_sym_encrypt(?,'"+Constants.AES_KEY+"')"
    )
    @Column(name = "channel_id")
    private String channelId;

    @Column(name = "user_name")
    private String userName;

    // Mutable post-connect: set via ConnectService.addRepoOrganization.
    @Setter
    @Column(name = "repo_organization")
    private String repoOrganization;

    /**
     * Maps the given entity to its API DTO.
     * NOTE(review): uses the parameter, not `this` — callers rely on that.
     */
    public ConnectDto toDto(Connect connect) {
        return ConnectDto.builder().id(connect.getId())
                .clientId(connect.getClientId())
                .headers(connect.getHeaders())
                .authType(connect.getAuthType())
                .domain(connect.getDomain())
                .userEmail(connect.getUserEmail())
                .authToken(connect.getAuthToken())
                .refreshToken(connect.getRefreshToken())
                .serviceType(connect.getServiceType())
                .projectKey(connect.getProjectKey())
                .channelId(connect.getChannelId())
                .userName(connect.getUserName())
                .repoOrganization(connect.getRepoOrganization())
                .build();
    }
}

// --- connect/src/main/java/com/opsbeach/connect/task/entity/Task.java ---
package com.opsbeach.connect.task.entity;

import com.opsbeach.connect.core.BaseModel;
import com.opsbeach.connect.core.enums.ServiceType;
import com.opsbeach.connect.core.enums.TaskType;
import com.opsbeach.connect.task.dto.TaskDto;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.EnumType;
import jakarta.persistence.Enumerated;
import jakarta.persistence.Table;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import lombok.experimental.SuperBuilder;

import java.time.LocalDateTime;

/** Persistent record of a scheduled integration task (see "task" table in V1__Init.sql). */
@Entity
@Table
@Getter
@SuperBuilder
@NoArgsConstructor
@AllArgsConstructor
public class Task extends BaseModel{
    @Enumerated(EnumType.STRING)
    @Column(name = "task_type")
    private TaskType taskType;
    @Enumerated(EnumType.STRING)
    @Column(name = "service_type")
    private ServiceType serviceType;
    // FK to connect.id (not enforced in DDL shown).
    @Column(name = "connect_id")
    private Long connectId;
    private String url;
    // Period in milliseconds (DB default 3600000).
    @Column(name = "execution_interval")
    private long executionInterval;
    @Setter
    @Column(name = "last_sync_date")
    private LocalDateTime lastSyncDate;

    /** Maps the given entity to its API DTO (uses the parameter, not `this`). */
    public TaskDto toDto(Task task) {
        return TaskDto.builder().id(task.getId())
                .clientId(task.getClientId())
                .createdAt(task.getCreatedAt())
                .updatedAt(task.getUpdatedAt())
                .serviceType(task.getServiceType())
                .taskType(task.getTaskType())
                .url(task.getUrl())
                .connectId(task.getConnectId())
                .executionInterval(task.getExecutionInterval())
                .lastSyncDate(task.getLastSyncDate())
                .build();
    }
}

// --- connect/src/main/java/com/opsbeach/connect/task/repository/ConnectRepository.java ---
package com.opsbeach.connect.task.repository;

import java.util.Optional;

import com.opsbeach.connect.core.BaseRepository;
import com.opsbeach.connect.core.enums.ServiceType;
import com.opsbeach.connect.task.entity.Connect;

/**
 * Spring Data repository for Connect rows.
 * NOTE(review): generic arguments on BaseRepository and Optional were lost
 * in extraction; raw types mirror the visible source.
 */
public interface ConnectRepository extends BaseRepository {

    // At most one connection is expected per (service type, client) pair.
    Optional findByServiceTypeAndClientId(ServiceType serviceType, Long clientId);
}
mode 100644 index 0000000..4a0a311 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/task/repository/TaskRepository.java @@ -0,0 +1,7 @@ +package com.opsbeach.connect.task.repository; + +import com.opsbeach.connect.core.BaseRepository; +import com.opsbeach.connect.task.entity.Task; + +public interface TaskRepository extends BaseRepository { +} diff --git a/connect/src/main/java/com/opsbeach/connect/task/service/ConnectService.java b/connect/src/main/java/com/opsbeach/connect/task/service/ConnectService.java new file mode 100644 index 0000000..2ec7108 --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/task/service/ConnectService.java @@ -0,0 +1,276 @@ +package com.opsbeach.connect.task.service; + +import java.util.Base64; +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.List; + +import org.springframework.beans.factory.annotation.Value; +import org.springframework.http.HttpHeaders; +import org.springframework.stereotype.Service; +import org.springframework.util.ObjectUtils; + +import com.fasterxml.jackson.databind.JsonNode; +import com.opsbeach.connect.core.enums.AuthType; +import com.opsbeach.connect.core.enums.ServiceType; +import com.opsbeach.connect.core.enums.TaskType; +import com.opsbeach.connect.core.specification.IdSpecifications; +import com.opsbeach.connect.core.utils.Constants; +import com.opsbeach.connect.schemata.validate.Status; +import com.opsbeach.connect.task.dto.ConnectDto; +import com.opsbeach.connect.task.dto.TaskDto; +import com.opsbeach.connect.task.entity.Connect; +import com.opsbeach.connect.task.repository.ConnectRepository; +import com.opsbeach.sharedlib.exception.ErrorCode; +import com.opsbeach.sharedlib.exception.InvalidDataException; +import com.opsbeach.sharedlib.exception.RecordNotFoundException; +import com.opsbeach.sharedlib.exception.UnAuthorizedException; +import 
com.opsbeach.sharedlib.response.ResponseMessage; +import com.opsbeach.sharedlib.security.SecurityUtil; +import com.opsbeach.sharedlib.service.App2AppService; + +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import net.minidev.json.JSONObject; + +@Slf4j +@Service +@RequiredArgsConstructor +public class ConnectService { + + private final ConnectRepository connectRepository; + + private final TaskService taskService; + + private final App2AppService app2AppService; + + private final ResponseMessage responseMessage; + + private final IdSpecifications connectSpecifications; + + @Value("${pager-duty.incidents-url}") + private String listIncidentUrl; + + @Value("${pager-duty.services-url}") + private String listServiceUrl; + + @Value("${pager-duty.incident-metrics-url}") + private String listIncidentMetricsUrl; + + @Value("${pager-duty.log-entry-url}") + private String listLogEntryUrl; + + @Value("${jira.create-ticket-uri}") + private String jiraCreateTicketUri; + + @Value("${jira.get-tickets-uri}") + private String jiraGetTicketsUri; + + @Value("${jira.get-project-uri}") + private String jiraGetProjectUri; + + @Value("${zendesk.ticket-count-uri}") + private String zendeskTicketCountUri; + + @Value("${zendesk.create-ticket-uri}") + private String zendeskCreateTicketUri; + + @Value("${zendesk.get-tickets-uri}") + private String zendeskGetTicketUri; + + @Value("${slack.post-message-url}") + private String slackPostMessageUrl; + + public ConnectDto connect(ConnectDto connectDto) { + addMetricsTasks(); + var serviceType = connectDto.getServiceType(); + switch (serviceType) { + case PAGER_DUTY -> { connectDto = pagerdutyValidation(connectDto); addPagerdutyTask(connectDto.getId()); return connectDto; } + case JIRA -> { connectDto = jiraValidation(connectDto); addJiraTask(connectDto); return connectDto; } + case ZENDESK -> { connectDto = zendeskValidation(connectDto); addZendeskTask(connectDto); return connectDto; } + case SLACK -> { connectDto = 
slackValidation(connectDto); addSlackTask(connectDto); return connectDto; } + // case GITHUB -> { return add(connectDto); } + default -> throw new InvalidDataException(ErrorCode.INVALID_SERVICE_TYPE, responseMessage.getErrorMessage(ErrorCode.INVALID_SERVICE_TYPE, serviceType.name())); + } + } + + public ConnectDto get(ServiceType serviceType) { + var connect = connectRepository.findOne(connectSpecifications.findByServiceType(serviceType)); + if (connect.isEmpty()) throw new RecordNotFoundException(ErrorCode.RECORD_NOT_FOUND, responseMessage.getErrorMessage(ErrorCode.RECORD_NOT_FOUND, serviceType.name())); + return connect.get().toDto(connect.get()); + } + + public Optional getModel(ServiceType serviceType, Long clientId) { + return connectRepository.findByServiceTypeAndClientId(serviceType, clientId); + } + + private void addMetricsTasks() { + var taskDto = taskService.getByType(ServiceType.METRICS, TaskType.INCIDENT_METRICS); + if (ObjectUtils.isEmpty(taskDto)) { + taskService.add(TaskDto.builder().serviceType(ServiceType.METRICS).taskType(TaskType.INCIDENT_METRICS).build()); + } + taskDto = taskService.getByType(ServiceType.METRICS, TaskType.TICKET_METRICS); + if (ObjectUtils.isEmpty(taskDto)) { + taskService.add(TaskDto.builder().serviceType(ServiceType.METRICS).taskType(TaskType.TICKET_METRICS).build()); + } + } + + public ConnectDto get(Long id) { + var connect = getModel(id); + return connect.toDto(connect); + } + + public Connect getModel(Long id) { + return connectRepository.findById(id).orElseThrow(() -> new RecordNotFoundException(ErrorCode.RECORD_NOT_FOUND_ID, responseMessage.getErrorMessage(ErrorCode.RECORD_NOT_FOUND_ID, id.toString(), Constants.CONNECT))); + } + + public List getAll() { + var connects = connectRepository.findAll(); + return !ObjectUtils.isEmpty(connects) ? 
connects.stream().map(connects.get(0)::toDto).collect(Collectors.toList()) : List.of(); + } + + public ConnectDto update(ConnectDto connectDto) { + get(connectDto.getId()); + var serviceType = connectDto.getServiceType(); + switch (serviceType) { + case PAGER_DUTY -> { return pagerdutyValidation(connectDto); } + case JIRA -> { return jiraValidation(connectDto); } + case GITHUB -> { + var connect = connectRepository.save(connectDto.toDomin(connectDto)); + return connect.toDto(connect); + } + default -> throw new InvalidDataException(ErrorCode.INVALID_ISSUE_TYPE, "Invalid Type"); + } + } + + public ServiceType[] getAllServiceType() { + return ServiceType.values(); + } + + public String addRepoOrganization(Long id, String repoOrganization) { + var connect = getModel(id); + connect.setRepoOrganization(repoOrganization); + connectRepository.save(connect); + return Status.SUCCESS.name(); + } + + public Map checkConnect() { + Map checkConnections = new HashMap<>(); + var connectMap = connectRepository.findAll().stream().collect(Collectors.toMap(Connect::getServiceType, Function.identity())); + for (ServiceType type : getAllServiceType()) { + if (ObjectUtils.isEmpty(connectMap.get(type))) { + checkConnections.put(type.name(), null); + } else { + checkConnections.put(type.name(), connectMap.get(type).getId()); + } + } + return checkConnections; + } + + public ConnectDto add(ConnectDto connectDto) { + var connect = connectDto.toDomin(connectDto); + connectRepository.save(connect); + return connect.toDto(connect); + } + + // token validation method for given PagerDuty connect details + private ConnectDto pagerdutyValidation(ConnectDto connectDto) { + Map httpHeaders = new HashMap<>(); + httpHeaders.put(HttpHeaders.ACCEPT, Constants.PAGER_DUTY_ACCEPT); + httpHeaders.put(HttpHeaders.CONTENT_TYPE, Constants.PAGER_DUTY_CONTENT_TYPE); + httpHeaders.put(HttpHeaders.AUTHORIZATION, AuthType.TOKEN.getKey().concat(connectDto.getAuthToken())); + app2AppService.httpGet(listIncidentUrl, 
app2AppService.setHeaders(httpHeaders, null), String.class); + log.info("Token validation for {} of client id '{}' is success", ServiceType.PAGER_DUTY, SecurityUtil.getClientId()); + httpHeaders.remove(HttpHeaders.AUTHORIZATION); + connectDto.setAuthType(AuthType.TOKEN); + connectDto.setHeaders(new JSONObject(httpHeaders).toJSONString()); + return add(connectDto); + } + + private void addPagerdutyTask(Long connectId) { + taskService.add(TaskDto.builder().taskType(TaskType.INCIDENTS).serviceType(ServiceType.PAGER_DUTY).connectId(connectId).url(listIncidentUrl).build()); + taskService.add(TaskDto.builder().taskType(TaskType.SERVICES).serviceType(ServiceType.PAGER_DUTY).connectId(connectId).url(listServiceUrl).build()); + taskService.add(TaskDto.builder().taskType(TaskType.INCIDENT_METRICS).serviceType(ServiceType.PAGER_DUTY).connectId(connectId).url(listIncidentMetricsUrl).build()); + taskService.add(TaskDto.builder().taskType(TaskType.INCIDENT_LOG_ENTRY).serviceType(ServiceType.PAGER_DUTY).connectId(connectId).url(listLogEntryUrl).build()); + } + + // token validation method for given Jira connect details + private ConnectDto jiraValidation(ConnectDto connectDto) { + var url = connectDto.getDomain().concat(jiraGetProjectUri).concat(connectDto.getProjectKey()); + String encoder = Base64.getEncoder().encodeToString(connectDto.getUserEmail().concat(":").concat(connectDto.getAuthToken()).getBytes()); + Map headers = new HashMap<>(); + headers.put(HttpHeaders.AUTHORIZATION, AuthType.BASIC.getKey().concat(" ").concat(encoder)); + app2AppService.httpGet(url, app2AppService.setHeaders(headers, null), String.class); + log.info("Token validation for {} of client id '{}' is success", ServiceType.JIRA, SecurityUtil.getClientId()); + connectDto.setAuthType(AuthType.BASIC); + return add(connectDto); + } + + // create task to create ticket in Jira software. 
+ private void addJiraTask(ConnectDto connectDto) { + var url = connectDto.getDomain().concat(jiraCreateTicketUri); + taskService.add(TaskDto.builder().taskType(TaskType.CREATE_TICKET).serviceType(ServiceType.JIRA).connectId(connectDto.getId()).url(url).build()); + url = connectDto.getDomain().concat(jiraGetTicketsUri); + taskService.add(TaskDto.builder().taskType(TaskType.GET_TICKETS).serviceType(ServiceType.JIRA).connectId(connectDto.getId()).url(url).build()); + } + + // token validation method for given Zendesk connect details + private ConnectDto zendeskValidation(ConnectDto connectDto) { + var zendeskUrl = connectDto.getDomain().concat(zendeskTicketCountUri); + Map headers = new HashMap<>(); + headers.put(HttpHeaders.ACCEPT, Constants.ACCEPT); + if (connectDto.getAuthType().equals(AuthType.BASIC)) { + String encoder = Base64.getEncoder().encodeToString(connectDto.getUserEmail().concat("/token:").concat(connectDto.getAuthToken()).getBytes()); + headers.put(HttpHeaders.AUTHORIZATION, connectDto.getAuthType().getKey().concat(" ").concat(encoder)); + } + if (connectDto.getAuthType().equals(AuthType.BEARER)) { + headers.put(HttpHeaders.AUTHORIZATION, connectDto.getAuthType().getKey().concat(" ").concat(connectDto.getAuthToken())); + } + app2AppService.httpGet(zendeskUrl, app2AppService.setHeaders(headers, null), String.class); + log.info("Token validation for {} of client id '{}' is success", ServiceType.ZENDESK, SecurityUtil.getClientId()); + headers.put(HttpHeaders.CONTENT_TYPE, Constants.CONTENT_TYPE); + headers.remove(HttpHeaders.AUTHORIZATION); + connectDto.setHeaders(new JSONObject(headers).toString()); + return add(connectDto); + } + + // create task to create ticket in Zendesk software. 
+ private void addZendeskTask(ConnectDto connectDto) { + var url = connectDto.getDomain().concat(zendeskCreateTicketUri); + taskService.add(TaskDto.builder().taskType(TaskType.CREATE_TICKET).serviceType(ServiceType.ZENDESK).connectId(connectDto.getId()).url(url).build()); + url = connectDto.getDomain().concat(zendeskGetTicketUri); + taskService.add(TaskDto.builder().taskType(TaskType.GET_TICKETS).serviceType(ServiceType.ZENDESK).connectId(connectDto.getId()).url(url).build()); + } + // Credentials validation for slack + private ConnectDto slackValidation(ConnectDto connectDto) { + var headers = Map.of(HttpHeaders.AUTHORIZATION, connectDto.getAuthType().getKey().concat(" ").concat(connectDto.getAuthToken())); + Map message = new HashMap<>(); + message.put("channel", connectDto.getChannelId()); + message.put("text", "Hello :wave: \nOpsbeach Credential Validation"); + // var responseObject = JsonUtil.convertJsonIntoObject(app2AppService.sendRequest(slackPostMessageUrl, HttpMethod.POST, app2AppService.setHeaders(headers, message)), JsonNode.class); + var responseObject = app2AppService.httpPost(slackPostMessageUrl, app2AppService.setHeaders(headers, message), JsonNode.class); + if (!responseObject.get("ok").asBoolean()) slackException(responseObject.get("error").asText(), connectDto); + log.info("Token validation for {} of client id '{}' is success", ServiceType.SLACK, SecurityUtil.getClientId()); + return add(connectDto); + } + + private void slackException(String message, ConnectDto connectDto) { + if (message.equals("invalid_auth")) { + throw new UnAuthorizedException(ErrorCode.ACCESS_TOKEN_INVALID, responseMessage.getErrorMessage(ErrorCode.ACCESS_TOKEN_INVALID, connectDto.getAuthToken())); + } + if (message.equals("channel_not_found")) { + throw new InvalidDataException(ErrorCode.CHANNEL_NOT_FOUND, responseMessage.getErrorMessage(ErrorCode.CHANNEL_NOT_FOUND, connectDto.getChannelId())); + } + if (message.equals("not_in_channel")) { + throw new 
InvalidDataException(ErrorCode.NOT_IN_CHANNEL, responseMessage.getErrorMessage(ErrorCode.NOT_IN_CHANNEL, "Not a Member in the channel id - ".concat(connectDto.getChannelId()))); + } + throw new InvalidDataException(ErrorCode.SOMETHING_WENT_WRONG, responseMessage.getErrorMessage(ErrorCode.SOMETHING_WENT_WRONG, message)); + } + + private void addSlackTask(ConnectDto connectDto) { + taskService.add(TaskDto.builder().taskType(TaskType.POST_MESSAGE).serviceType(ServiceType.SLACK).connectId(connectDto.getId()).url(slackPostMessageUrl).build()); + } +} diff --git a/connect/src/main/java/com/opsbeach/connect/task/service/TaskService.java b/connect/src/main/java/com/opsbeach/connect/task/service/TaskService.java new file mode 100644 index 0000000..08268ef --- /dev/null +++ b/connect/src/main/java/com/opsbeach/connect/task/service/TaskService.java @@ -0,0 +1,102 @@ +package com.opsbeach.connect.task.service; + +import java.util.List; +import java.util.stream.Collectors; + +import com.opsbeach.connect.core.enums.ServiceType; +import com.opsbeach.connect.core.enums.TaskType; +import com.opsbeach.connect.core.specification.IdSpecifications; +import com.opsbeach.connect.core.utils.Constants; +import com.opsbeach.connect.core.utils.TaskUtils; +import com.opsbeach.connect.scheduler.SchedulerTaskService; +import com.opsbeach.connect.task.dto.TaskDto; +import com.opsbeach.connect.task.entity.Task; +import com.opsbeach.connect.task.repository.TaskRepository; +import com.opsbeach.sharedlib.exception.ErrorCode; +import com.opsbeach.sharedlib.exception.InvalidDataException; +import com.opsbeach.sharedlib.exception.RecordNotFoundException; +import com.opsbeach.sharedlib.response.ResponseMessage; + +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Lazy; +import org.springframework.data.jpa.domain.Specification; +import org.springframework.stereotype.Service; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +@Service +public class 
TaskService { + + private final SchedulerTaskService schedulerTaskService; + + private final TaskRepository taskRepository; + + private final ResponseMessage responseMessage; + + private final IdSpecifications taskSpecifications; + + @Value("${scheduler.enabled:false}") + private boolean schedulerEnabled; + + public TaskService(@Lazy SchedulerTaskService schedulerTaskService, TaskRepository taskRepository, + ResponseMessage responseMessage, IdSpecifications taskSpecifications) { + this.schedulerTaskService = schedulerTaskService; + this.taskRepository = taskRepository; + this.responseMessage = responseMessage; + this.taskSpecifications = taskSpecifications; + } + + public TaskDto get(Long id) { + var task = taskRepository.findById(id).orElseThrow(() -> new RecordNotFoundException(ErrorCode.RECORD_NOT_FOUND_ID, responseMessage.getErrorMessage(ErrorCode.RECORD_NOT_FOUND_ID, id.toString(), Constants.TASK))); + return task.toDto(task); + } + + public TaskDto add(TaskDto taskDto) { + Task taskResponse = taskRepository.save(taskDto.toDomin(taskDto)); + // adding task to scheduler + if (schedulerEnabled) { + schedulerTaskService.addTaskToScheduler(taskResponse, + triggerContext -> TaskUtils.findNextExecutionTime(triggerContext, taskResponse).toInstant()); + } + return taskResponse.toDto(taskResponse); + } + + public TaskDto update(TaskDto taskDto) { + var task = taskDto.toDomin(taskDto); + return task.toDto(taskRepository.save(task)); + } + + public List getAll() { + Task task = new Task(); + return taskRepository.findAll().stream().map(task::toDto).collect(Collectors.toList()); + } + + public List getAllForScheduler() { + return taskRepository.findAll(); + } + + public TaskDto getByType(ServiceType serviceType, TaskType taskType) { + Specification baseSpecification = taskSpecifications.findByTaskType(taskType).and(taskSpecifications.findByServiceType(serviceType)); + var task = taskRepository.findOne(baseSpecification).orElse(null); + return task == null ? 
null : task.toDto(task); + } + + public TaskDto getByTypes(List serviceTypes, TaskType taskType) { + Specification baseSpecification = taskSpecifications.findByTaskType(taskType).and(taskSpecifications.findByServiceTypes(serviceTypes)); + var task = taskRepository.findOne(baseSpecification).orElseThrow(() -> new RecordNotFoundException(ErrorCode.RECORD_NOT_FOUND, responseMessage.getErrorMessage(ErrorCode.RECORD_NOT_FOUND, Constants.TASK.concat(" - ").concat(serviceTypes.toString())))); + return task.toDto(task); + } + + public void taskValidation(TaskDto taskDto, TaskType taskType, ServiceType serviceType) { + if(!taskType.equals(taskDto.getTaskType()) || !serviceType.equals(taskDto.getServiceType())){ + log.info(responseMessage.getErrorMessage(ErrorCode.INVALID_ID, Constants.TASK)); + throw new InvalidDataException(ErrorCode.INVALID_ID, responseMessage.getErrorMessage(ErrorCode.INVALID_ID, Constants.TASK)); + } + } + + public void delete(Long id) { + schedulerTaskService.removeTaskFromScheduler(id); + taskRepository.deleteById(id); + } +} diff --git a/connect/src/main/resources/application-url.yml b/connect/src/main/resources/application-url.yml new file mode 100644 index 0000000..26af2f2 --- /dev/null +++ b/connect/src/main/resources/application-url.yml @@ -0,0 +1,52 @@ +application: + web: + baseURL: https://api.aldefi.io/ + user: + baseURL: https://api.aldefi.io/user/v1/ + get-client-url: https://api.aldefi.io/user/v1/client/{id} + update-onboard-status: https://api.aldefi.io/user/v1/client/{id}?isOnboarded={isOnboarded} + analytics: + baseURL: https://api.aldefi.io/analytics/v1/ +jira: + get-user-uri: /rest/api/3/user/search + create-ticket-uri: /rest/api/3/issue + get-tickets-uri: /rest/api/3/search + get-project-uri: /rest/api/3/project/ +pager-duty: + incidents-url: https://api.pagerduty.com/incidents + services-url: https://api.pagerduty.com/services + incident-metrics-url: https://api.pagerduty.com/analytics/raw/incidents + log-entry-url: 
https://api.pagerduty.com/log_entries +zendesk: + create-ticket-uri: /api/v2/tickets + ticket-count-uri: /api/v2/tickets/count + get-tickets-uri: /api/v2/search +slack: + post-message-url: https://slack.com/api/chat.postMessage +github: + construct-file-path: https://github.com/{repoFullName}/tree/{branch}/{filePath} + construct-pr-url: https://github.com/{repoFullName}/pull/{prNumber} + access_token: https://github.com/login/oauth/access_token + user: https://api.github.com/user + user_repos: https://api.github.com/search/repositories?q=user:{userId} + user_orgs: https://api.github.com/user/orgs + organization_repos: https://api.github.com/orgs/{orgName}/repos + process_event_audit: https://api.aldefi.io/connect/v1/event-audit/process?eventAuditId={eventAuditId} + tarball: https://api.github.com/repos/{FullName}/tarball/{branchName} + refresh-token: https://github.com/login/oauth/access_token + redirect-url: https://api.aldefi.io/connect/v1/github/signin/callback?smClientId={smClientId} + repos-redirect-url: https://schemata.aldefi.io/repos + login-url: https://github.com/login/oauth/authorize?scope=user&client_id={clientID}&redirect_uri={redirectURI} + download_file_url: https://api.github.com/repos/{owner}/{repo}/contents/{path} + create_branch_url: https://api.github.com/repos/{owner}/{repo}/git/refs + get_branch_info_url: https://api.github.com/repos/{owner}/{repo}/git/refs/heads/{branchName} + create_tree_object_url: https://api.github.com/repos/{owner}/{repo}/git/trees + create_commit_url: https://api.github.com/repos/{owner}/{repo}/git/commits + commit_file_url: https://api.github.com/repos/{owner}/{repo}/contents/{filePath} + create_pr_url: https://api.github.com/repos/{owner}/{repo}/pulls + repo-details: https://api.github.com/repos/{owner}/{repo} + create-repo-authenticated-user: https://api.github.com/user/repos + get-user-details: https://api.github.com/users/{userName} + push-commit-to-branch: 
https://api.github.com/repos/{owner}/{repo}/git/refs/heads/{branchName} + create-pr-comment: https://api.github.com/repos/{owner}/{repo}/issues/{pr_number}/comments + delete-comment: https://api.github.com/repos/{owner}/{repo}/issues/comments/{comment_id} \ No newline at end of file diff --git a/connect/src/main/resources/application.yaml b/connect/src/main/resources/application.yaml new file mode 100644 index 0000000..b9f2af8 --- /dev/null +++ b/connect/src/main/resources/application.yaml @@ -0,0 +1,80 @@ +spring: + datasource: + url: jdbc:postgresql://34.27.156.232:5432/opsbeach?serverTimeZone=UTC&characterEncoding=UTF-8 + driver-class-name: org.postgresql.Driver + username: ${POSTGRES_USER} + password: ${POSTGRES_PASS} + hikari: + schema: analytics + neo4j: + uri: bolt://35.238.222.192:7687 + authentication: + username: ${NEO4J_USER} + password: ${NEO4J_PASS} + data: + neo4j: + database: neo4j + jpa: + hibernate: + ddl-auto: validate + properties: + hibernate: + default_schema: analytics + dialect: org.hibernate.dialect.PostgreSQLDialect + ddl-auto: none + application: + name: analytics + #redis: + # host: localhost + # port: 6379 + threading-future-timeout: 10000 + user: + base-url: https://api.aldefi.io/user/v1/auth/ + client-register: client/register + user-register: user/register + user: user + client: client + user-login: user/login + jwt-add: jwt + audit: audit + jwt-access: access + github: + client-id: ${GH_CL_ID} + client-secret: ${GH_CL_SECRET} + gcloud: + repo-bucket: "schematalabs-github-boostrap" + file-upload-bucket: "schematalabs-file-upload-bootstrap" + project-id: prodenv1 + location-id: us-central1 + queue-id: schematalabs-bootstrap-github + smtp: + username: ${SMTP_USER_NAME} + password: ${SMTP_PASSWORD} + security: + oauth2: + resource: + + client: + registration: + github: + clientId: ${GH_CL_ID} + clientSecret: ${GH_CL_SECRET} +scheduler: + enabled: true +server: + port: 7081 + servlet: + contextPath: /connect + home-path: 
/connect/bootstrap/ + repo-storage-path: /home/surya-kumar/lakebyte/repos +flyway: + url: jdbc:postgresql://34.27.156.232:5432/opsbeach + schemas: PUBLIC + # NOTE(review): the app writes to schema 'analytics' (see spring.datasource.hikari.schema and hibernate default_schema) while Flyway targets PUBLIC — confirm this mismatch is intended. + user: ${POSTGRES_USER} + password: ${POSTGRES_PASS} + +#disable neo4j health check for now +management: + health: + neo4j: + enabled: false \ No newline at end of file diff --git a/connect/src/main/resources/db/migration/V1__Init.sql b/connect/src/main/resources/db/migration/V1__Init.sql new file mode 100644 index 0000000..7237b0d --- /dev/null +++ b/connect/src/main/resources/db/migration/V1__Init.sql @@ -0,0 +1,261 @@ +create schema if not exists analytics; +-- IF NOT EXISTS keeps the migration idempotent on databases where pgcrypto is pre-provisioned +create extension if not exists pgcrypto; + +-- NOTE(review): client_id is declared bigserial in every table below, but it is a plain reference column, not a generated key; bigint is almost certainly intended — each bigserial silently creates an unused sequence. +CREATE TABLE task ( + id bigserial PRIMARY KEY, + created_at timestamp, + updated_at timestamp, + created_by bigint, + updated_by bigint, + is_deleted boolean DEFAULT false, + client_id bigserial NOT NULL, + task_type varchar(512) NOT NULL, + service_type varchar(512) NOT NULL, + connect_id bigint, + url varchar(512), + execution_interval bigint DEFAULT 3600000, + last_sync_date timestamp without time zone +); + +CREATE TABLE connect ( + id bigserial PRIMARY KEY, + created_at timestamp, + updated_at timestamp, + created_by bigint, + updated_by bigint, + is_deleted boolean DEFAULT false, + client_id bigserial NOT NULL, + service_type varchar(512) NOT NULL, + auth_type varchar(512) NOT NULL, + headers character varying COLLATE pg_catalog."default", + project_key bytea, + domain bytea, + user_email bytea, + auth_token bytea NOT NULL, + refresh_token bytea, + user_name character varying COLLATE pg_catalog."default", + repo_organization character varying COLLATE pg_catalog."default", + channel_id bytea +); + +CREATE TABLE sla ( + id bigserial PRIMARY KEY, + created_at timestamp, + updated_at timestamp, + created_by bigint, + updated_by bigint, + is_deleted boolean DEFAULT false, + client_id bigserial NOT NULL, + type character varying COLLATE pg_catalog."default", + sla_time bigint +); + +INSERT INTO sla(created_at, updated_at, 
created_by, updated_by, client_id, type, sla_time) +VALUES ('2022-03-19 00:00:00', '2022-03-19 00:00:00', 1, 1, 0, 'GITHUB', 86400); + +CREATE TABLE client_repo +( + id bigserial PRIMARY KEY, + created_at timestamp, + updated_at timestamp, + created_by bigint, + updated_by bigint, + is_deleted boolean DEFAULT false, + client_id bigserial NOT NULL, + name character varying COLLATE pg_catalog."default", + owner character varying COLLATE pg_catalog."default", + full_name character varying COLLATE pg_catalog."default", + status character varying COLLATE pg_catalog."default", + default_branch character varying COLLATE pg_catalog."default", + repo_type varchar(25) NOT NULL, + repository_source character varying COLLATE pg_catalog."default", + folder_path character varying COLLATE pg_catalog."default", + connect_id bigint, + CONSTRAINT client_repo_connect_id_fkey FOREIGN KEY (connect_id) REFERENCES connect(id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +); + +CREATE TABLE event_audit +( + id bigserial PRIMARY KEY, + created_at timestamp, + updated_at timestamp, + created_by bigint, + updated_by bigint, + is_deleted boolean DEFAULT false, + client_id bigserial NOT NULL, + client_name character varying COLLATE pg_catalog."default", + type character varying COLLATE pg_catalog."default", + event_id bigint NOT NULL, + status character varying COLLATE pg_catalog."default", + error character varying COLLATE pg_catalog."default", + initiated_by character varying COLLATE pg_catalog."default" +); + +CREATE TABLE domain +( + id bigserial PRIMARY KEY, + created_at timestamp, + updated_at timestamp, + created_by bigint, + updated_by bigint, + is_deleted boolean DEFAULT false, + client_id bigserial NOT NULL, + client_repo_id bigint, + name character varying COLLATE pg_catalog."default", + node_id bigint NOT NULL, + CONSTRAINT domain_client_repo_id_fkey FOREIGN KEY (client_repo_id) REFERENCES client_repo(id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +); + 
+CREATE TABLE workflow +( + id bigserial PRIMARY KEY, + created_at timestamp, + updated_at timestamp, + created_by bigint, + updated_by bigint, + is_deleted boolean DEFAULT false, + client_id bigserial NOT NULL, + status character varying COLLATE pg_catalog."default", + domain_id bigint, + node_id bigint, + schema_name character varying COLLATE pg_catalog."default", + stack_holders character varying COLLATE pg_catalog."default", + purpose character varying COLLATE pg_catalog."default", + creator character varying COLLATE pg_catalog."default", + title character varying COLLATE pg_catalog."default", + additional_reference character varying COLLATE pg_catalog."default", + rank decimal, + CONSTRAINT workflow_domain_id_fkey FOREIGN KEY (domain_id) REFERENCES domain(id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +); + +CREATE TABLE activity +( + id bigserial PRIMARY KEY, + created_at timestamp, + updated_at timestamp, + created_by bigint, + updated_by bigint, + is_deleted boolean DEFAULT false, + client_id bigserial NOT NULL, + workflow_id bigint NOT NULL, + type character varying COLLATE pg_catalog."default", + source_node_id bigint, + target_node_id bigint, + CONSTRAINT activity_workflow_id_fkey FOREIGN KEY (workflow_id) REFERENCES workflow(id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +); + +CREATE TABLE pull_request +( + id bigserial PRIMARY KEY, + created_at timestamp, + updated_at timestamp, + created_by bigint, + updated_by bigint, + is_deleted boolean DEFAULT false, + client_id bigserial NOT NULL, + number character varying COLLATE pg_catalog."default" NOT NULL, + workflow_id bigint, + client_repo_id bigint NOT NULL, + status character varying COLLATE pg_catalog."default", + source_branch character varying COLLATE pg_catalog."default", + target_branch character varying COLLATE pg_catalog."default", + sha character varying COLLATE pg_catalog."default", + url character varying COLLATE pg_catalog."default", + validation_status 
varchar(10), + error_message character varying COLLATE pg_catalog."default", + issue_comment_id bigint, + CONSTRAINT pull_request_workflow_id_fkey FOREIGN KEY (workflow_id) REFERENCES workflow(id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION, + CONSTRAINT pull_request_client_repo_id_fkey FOREIGN KEY (client_repo_id) REFERENCES client_repo(id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +); + +CREATE TABLE comment +( + id bigserial PRIMARY KEY, + created_at timestamp, + updated_at timestamp, + created_by bigint, + updated_by bigint, + is_deleted boolean DEFAULT false, + client_id bigserial NOT NULL, + node_id bigint, + comments character varying COLLATE pg_catalog."default", + type character varying COLLATE pg_catalog."default", + pull_request_id bigint NOT NULL, + commentable_id bigint, + is_resolved boolean, + CONSTRAINT comment_pull_request_id FOREIGN KEY (pull_request_id) REFERENCES pull_request(id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION, + CONSTRAINT comment_commentable_id FOREIGN KEY (commentable_id) REFERENCES comment(id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +); + +CREATE TABLE schema_file_audit +( + id bigserial PRIMARY KEY, + created_at timestamp, + updated_at timestamp, + created_by bigint, + updated_by bigint, + is_deleted boolean DEFAULT false, + client_id bigserial NOT NULL, + name character varying COLLATE pg_catalog."default", + client_repo_id bigint, + file_type character varying COLLATE pg_catalog."default", + path character varying COLLATE pg_catalog."default", + root_node_id bigint, + pull_request_id bigint, + checksum character varying COLLATE pg_catalog."default", + CONSTRAINT schema_file_audit_client_repo_id_fkey FOREIGN KEY (client_repo_id) REFERENCES client_repo(id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION, + CONSTRAINT schema_file_audit_pull_request_id_fkey FOREIGN KEY (pull_request_id) REFERENCES pull_request(id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO 
ACTION +); + +CREATE TABLE model +( + id bigserial PRIMARY KEY, + created_at timestamp, + updated_at timestamp, + created_by bigint, + updated_by bigint, + is_deleted boolean DEFAULT false, + client_id bigserial NOT NULL, + type character varying COLLATE pg_catalog."default", + name character varying COLLATE pg_catalog."default", + name_space character varying COLLATE pg_catalog."default", + path character varying COLLATE pg_catalog."default", + node_id bigint NOT NULL, + domain_id bigint NOT NULL, + client_repo_id bigint, + schema_file_audit_id bigint, + pull_request_id bigint, + checksum character varying COLLATE pg_catalog."default", + CONSTRAINT model_domain_id_fkey FOREIGN KEY (domain_id) REFERENCES domain(id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION, + CONSTRAINT model_client_repo_id_fkey FOREIGN KEY (client_repo_id) REFERENCES client_repo(id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION, + CONSTRAINT model_schema_file_audit_id_fkey FOREIGN KEY (schema_file_audit_id) REFERENCES schema_file_audit(id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION, + CONSTRAINT model_pull_request_id_fkey FOREIGN KEY (pull_request_id) REFERENCES pull_request(id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +); diff --git a/connect/src/main/resources/schemata/protobuf/schemata.proto b/connect/src/main/resources/schemata/protobuf/schemata.proto new file mode 100644 index 0000000..93466f6 --- /dev/null +++ b/connect/src/main/resources/schemata/protobuf/schemata.proto @@ -0,0 +1,590 @@ +syntax = "proto3"; + +package org.schemata.schema; + +import "google/protobuf/descriptor.proto"; + +option java_package = "org.schemata.schema"; +option java_outer_classname = "SchemataBuilder"; + +// MessageType captures the type of the stream. There are two types of stream. +enum SchemaType { + // This is an invalid state. If the entity defined as unknown the validator should throw an exception. 
+ UNKNOWN = 0; + //Entity streams can be mutated in the downstream services. Entity streams are often used to represent the current + //state of the entity. In the classical DW concepts Entities typically represent the dimensions. + // Entity must have a primary key field. + ENTITY = 1; + // Event streams are typically immutable in nature. Event streams are often used to represent the state change of an Entity. + // In the classical DW concepts Event streams represent the facts. Event streams will not have a primary key field. + EVENT = 2; +} + +enum EventType { + // Invalid Event Type. If the message type is event, set it to either LIFECYCLE, ACTIVITY, or AGGREGATED + NONE = 0; + // LIFECYCLE event captures the state changes of an Entity. (e.g.) User created, User deleted et al. + LIFECYCLE = 1; + // ACTIVITY event captures the events that resulted from one Entity changing the state of another Entity. + // (e.g.) User A purchases Product B. The ACTIVITY event is often the result of a business transaction. + ACTIVITY = 2; + // AGGREGATED event captures the computed metrics over a specified window of time. (e.g.) Number of views by a User for a Product. + AGGREGATED = 3; +} + +enum ActivityType { + CREATED = 0; + DELETED = 1; + UPDATED = 2; +} + +enum TimeUnit { + SECONDS = 0; + MINUTES = 1; + HOURS = 2; +} + +// CoreMetadata is the set of attributes that apply to both the Message & Field +message CoreMetadata { + // Mandatory Metadata: description of the entity + string description = 50001; + // Optional Metadata: additional comments about the entity + string comment = 50002; + // Optional Metadata: Any related entity that has "hierarchy" or "has a" relationships. + string see_also = 50003; + // Optional Metadata: Additional link reference for further reading. + // It could be a Confluence page, an ADR or RFC, or a Slack message link. 
+ string reference = 50004; +} + + +extend google.protobuf.MessageOptions { + + // message.description is Mandatory Metadata + CoreMetadata message_core = 60001; + // Mandatory Metadata: owner of the entity. Usually it is the team name. + string owner = 60002; + // Mandatory Metadata: domain = 'core' indicates the entity is common across all the domains. + // Other possible domains are `sales`, `marketing`, `product`, etc. + string domain = 60003; + // Mandatory Metadata: define the type of the message. + SchemaType schema_type = 60004; + // Status of the entity. You can have `testing`, `production` or `staging` depending on the lifecycle of the schema definition. + string status = 60005; + // Slack or Teams channel name to communicate with the team which owns this entity + string team_channel = 60006; + // Slack or Teams channel name to alert for any validation errors. + string alert_channel = 60007; + // Type of the event. Set if the Type = 'EVENT' + EventType event_type = 60008; + // Compliance Owner approves which team should have access to this Schema. + string compliance_owner = 60009; + // Compliance Owner Slack / Teams channel name + string compliance_channel = 600010; + // subscribers: add the list of subscribers of the message. + Subscribers subscribers = 600011; + + string quality_rule_base = 600012; + + // string quality_rule_sql = 600013; + + // string quality_rule_cel = 600014; + + string email = 600013; + + string channel = 600014; +} + +message Subscribers { + repeated string name = 1; +} + +enum ClassificationType { + //Identifiers used by developers to track users for advertising purposes. These include Google Play Advertising IDs, + // Amazon Advertising IDs, Apple's identifierForAdvertising (IDFA), and Apple's identifierForVendor (IDFV). + ADVERTISING_ID = 0; + // An age measured in months or years. + AGE = 1; + // A credit card number is 12 to 19 digits long. They are used for payment transactions globally. 
+ CREDIT_CARD_NUMBER = 2; + + // A credit card track number is a variable length alphanumeric string. It is used to store key cardholder information. + CREDIT_CARD_TRACK_NUMBER = 3; + + // A date. This infoType includes most date formats, including the names of common world holidays. + DATE = 4; + + + // A date that is identified by context as a date of birth. Note: Not recommended for use during latency sensitive operations. + DATE_OF_BIRTH = 5; + + // A domain name as defined by the DNS standard. + DOMAIN_NAME = 6; + + // An email address identifies the mailbox that emails are sent to or from. The maximum length of the domain name is 255 characters, and the maximum length of the local-part is 64 characters. + EMAIL_ADDRESS = 7; + + // A person’s ethnic group. + ETHNIC_GROUP = 8; + + // A common female name. Note: Not recommended for use during latency sensitive operations. + FEMALE_NAME = 9; + + // A first name is defined as the first part of a PERSON_NAME. Note: Not recommended for use during latency sensitive operations. + FIRST_NAME = 10; + + // A person’s gender identity. + GENDER = 11; + + // Alphanumeric and special character strings that may be personally identifying but do not belong to a well-defined category, such as user IDs or medical record numbers. + GENERIC_ID = 12; + + // An International Bank Account Number (IBAN) is an internationally agreed-upon method for identifying bank accounts defined by the International Standard of Organization (ISO) 13616:2007 standard. The European Committee for Banking Standards (ECBS) created ISO 13616:2007. An IBAN consists of up to 34 alphanumeric characters, including elements such as a country code or account number. + IBAN_CODE = 13; + + // An HTTP cookie is a standard way of storing data on a per website basis. This detector will find headers containing these cookies. + HTTP_COOKIE = 14; + + // An Integrated Circuit Card Identifier (ICCID) is used to uniquely identify each SIM card. 
It includes information, such as the country the card is active in and the mobile network code. + ICCID_NUMBER = 15; + + // The International Classification of Diseases, Ninth Revision, Clinical Modification (ICD-9-CM) lexicon is used to assign diagnostic and procedure codes associated with inpatient, outpatient, and physician office use in the United States. The US National Center for Health Statistics (NCHS) created the ICD-9-CM lexicon. It is based on the ICD-9 lexicon, but provides for more morbidity detail. The ICD-9-CM lexicon is updated annually on October 1. + ICD9_CODE = 16; + + // Like ICD-9-CM codes, the International Classification of Diseases, Tenth Revision, Clinical Modification (ICD-10-CM) lexicon is a series of diagnostic codes. The World Health Organization (WHO) publishes the ICD-10-CM lexicon to describe causes of morbidity and mortality. + ICD10_CODE = 17; + + // An International Mobile Equipment Identity (IMEI) hardware identifier, used to identify mobile phones. + IMEI_HARDWARE_ID = 18; + + // An International Mobile Subscriber Identity (IMEI) identifier, used to identify users on a mobile network. + IMSI_ID = 19; + + // An Internet Protocol (IP) address (either IPv4 or IPv6). + IP_ADDRESS = 20; + + // A last name is defined as the last part of a PERSON_NAME. + LAST_NAME = 21; + + + // A physical address or location. + LOCATION = 22; + + // A media access control address (MAC address), which is an identifier for a network adapter. + MAC_ADDRESS = 23; + // A local media access control address (MAC address), which is an identifier for a network adapter. + MAC_ADDRESS_LOCAL = 24; + //A common male name. + MALE_NAME = 25; + + // Terms that commonly refer to a person's medical condition or health. + MEDICAL_TERM = 26; + + // A name of a chain store, business or organization. 
+ ORGANIZATION_NAME = 27; + + + // A passport number that matches passport numbers for the following countries: Australia, Canada, China, France, Germany, Japan, Korea, Mexico, The Netherlands, Poland, Singapore, Spain, Sweden, Taiwan, United Kingdom, and the United States. + PASSPORT = 28; + + // A full person name, which can include first names, middle names or initials, and last names. Note: Not recommended for use during latency sensitive operations. + PERSON_NAME = 29; + + // A telephone number. + PHONE_NUMBER = 30; + + // A street address. Note: Not recommended for use during latency sensitive operations. + STREET_ADDRESS = 31; + + // A SWIFT code is the same as a Bank Identifier Code (BIC). It's a unique identification code for a particular bank. These codes are used when transferring money between banks, particularly for international wire transfers. Banks also use the codes for exchanging other messages. + SWIFT_CODE = 32; + // A timestamp of a specific time of day. + TIME = 33; + + // A Uniform Resource Locator (URL). + URL = 34; + + // A vehicle identification number (VIN) is a unique 17-digit code assigned to every on-road motor vehicle. + VEHICLE_IDENTIFICATION_NUMBER = 35; + + // The infoType detectors in this section detect credentials and other secret data. + Credentials_And_Secrets = 36; + + + // An authentication token is a machine-readable way of determining whether a particular request has been authorized for a user. This detector currently identifies tokens that comply with OAuth or Bearer authentication. + AUTH_TOKEN = 37; + + // Amazon Web Services account access keys. + AWS_CREDENTIALS = 38; + + // Microsoft Azure certificate credentials for application authentication. + AZURE_AUTH_TOKEN = 39; + + // A basic authentication header is an HTTP header used to identify a user to a server. It is part of the HTTP specification in RFC 1945, section 11. + BASIC_AUTH_HEADER = 40; + + // An encryption key within configuration, code, or log text. 
+ ENCRYPTION_KEY = 41; + + // Google Cloud API key. An encrypted string that is used when calling Google Cloud APIs that don't need to access private user data. + GCP_API_KEY = 42; + + // Google Cloud service account credentials. Credentials that can be used to authenticate with Google API client libraries and service accounts. + GCP_CREDENTIALS = 43; + + // JSON Web Token. JSON Web Token in compact form. Represents a set of claims as a JSON object that is digitally signed using JSON Web Signature. + JSON_WEB_TOKEN = 44; + + // Clear text passwords in configs, code, and other text. + PASSWORD = 45; + + // A weakly hashed password is a method of storing a password that is easy to reverse engineer. The presence of such hashes often indicate that a system's security can be improved. + WEAK_PASSWORD_HASH = 46; + + // An XSRF token is an HTTP header that is commonly used to prevent cross-site scripting attacks. Cross-site scripting is a type of security vulnerability that can be exploited by malicious sites. + XSRF_TOKEN = 47; + + + // An Argentine Documento Nacional de Identidad (DNI), or national identity card, is used as the main identity document for citizens. + ARGENTINA_DNI_NUMBER = 100; + + + // An Australian driver's license number. + AUSTRALIA_DRIVERS_LICENSE_NUMBER = 200; + + // A 9-digit Australian Medicare account number is issued to permanent residents of Australia (except for Norfolk island). The primary purpose of this number is to prove Medicare eligibility to receive subsidized care in Australia. + AUSTRALIA_MEDICARE_NUMBER = 201; + + // An Australian passport number. + AUSTRALIA_PASSPORT = 202; + + // An Australian tax file number (TFN) is a number issued by the Australian Tax Office for taxpayer identification. Every taxpaying entity, such as an individual or an organization, is assigned a unique number. + AUSTRALIA_TAX_FILE_NUMBER = 203; + + + // A 12-digit Belgian national identity card number. 
+ BELGIUM_NATIONAL_ID_CARD_NUMBER = 300; + + + // The Brazilian Cadastro de Pessoas Físicas (CPF) number, or Natural Persons Register number, is an 11-digit number used in Brazil for taxpayer identification. + BRAZIL_CPF_NUMBER = 400; + + + // A Canadian bank account number. + CANADA_BANK_ACCOUNT = 500; + + // The British Columbia Personal Health Number (PHN) is issued to citizens, permanent residents, temporary workers, students, and other individuals who are entitled to health care coverage in the Province of British Columbia. + CANADA_BC_PHN = 501; + + // A driver's license number for each of the ten provinces in Canada (the three territories are currently not covered). + CANADA_DRIVERS_LICENSE_NUMBER = 502; + + // The Ontario Health Insurance Plan (OHIP) number is issued to citizens, permanent residents, temporary workers, students, and other individuals who are entitled to health care coverage in the Province of Ontario. + CANADA_OHIP = 503; + + // A Canadian passport number. + CANADA_PASSPORT = 504; + + // The Québec Health Insurance Number (also known as the RAMQ number) is issued to citizens, permanent residents, temporary workers, students, and other individuals who are entitled to health care coverage in the Province of Québec. + CANADA_QUEBEC_HIN = 505; + + // The Canadian Social Insurance Number (SIN) is the main identifier used in Canada for citizens, permanent residents, and people on work or study visas. With a Canadian SIN and mailing address, one can apply for health care coverage, driver's licenses, and other important services. + CANADA_SOCIAL_INSURANCE_NUMBER = 506; + + + // A Chilean Cédula de Identidad (CDI), or identity card, is used as the main identity document for citizens. + CHILE_CDI_NUMBER = 600; + + + // A Chinese resident identification number. + CHINA_RESIDENT_ID_NUMBER = 700; + + // A Chinese passport number. 
+ CHINA_PASSPORT = 701; + + // A Colombian Cédula de Ciudadanía (CDC), or citizenship card, is used as the main identity document for citizens. + COLOMBIA_CDC_NUMBER = 800; + + + // A Personal Identification Number (CPR, Det Centrale Personregister) is a national ID number in Denmark. It is used with public agencies such as health care and tax authorities. Banks and insurance companies also use it as a customer number. The CPR number is required for people who reside in Denmark, pay tax or own property there. + DENMARK_CPR_NUMBER = 900; + + + // The French Carte Nationale d'Identité Sécurisée (CNI or CNIS) is the French national identity card. It's an official identity document consisting of a 12-digit identification number. This number is commonly used when opening bank accounts and when paying by check. It can sometimes be used instead of a passport or visa within the European Union (EU) and in some other countries. + FRANCE_CNI = 1000; + + // The French Numéro d'Inscription au Répertoire (NIR) is a permanent personal identification number that's also known as the French social security number for services including healthcare and pensions. + FRANCE_NIR = 1001; + + // A French passport number. + FRANCE_PASSPORT = 1002; + + // The French tax identification number is a government-issued ID for all individuals paying taxes in France. + FRANCE_TAX_IDENTIFICATION_NUMBER = 1003; + + + // A Finnish personal identity code, a national government identification number for Finnish citizens used on identity cards, driver's licenses and passports. + FINLAND_NATIONAL_ID_NUMBER = 1100; + + + // A German driver's license number. + GERMANY_DRIVERS_LICENSE_NUMBER = 1200; + + // The German Personalausweis, or identity card, is used as the main identity document for citizens of Germany. + GERMANY_IDENTITY_CARD_NUMBER = 1201; + + // A German passport number. 
The format of a German passport number is 10 alphanumeric characters, chosen from numerals 0–9 and letters C, F, G, H, J, K, L, M, N, P, R, T, V, W, X, Y, Z. + GERMANY_PASSPORT = 1202; + + // An 11-digit German taxpayer identification number assigned to both natural-born and other legal residents of Germany for the purposes of recording tax payments. + GERMANY_TAXPAYER_IDENTIFICATION_NUMBER = 1203; + + // A German Schufa identification number. Schufa Holding AG is a German credit bureau whose aim is to protect clients from credit risk. + GERMANY_SCHUFA_ID = 1204; + + + // The 香港身份證, or Hong Kong identity card (HKIC), is used as the main identity document for citizens of Hong Kong. + HONG_KONG_ID_NUMBER = 1300; + + + // The Indian Aadhaar number is a 12-digit unique identity number obtained by residents of India, based on their biometric and demographic data. + INDIA_AADHAAR_INDIVIDUAL = 1400; + + // The Indian GST identification number (GSTIN) is a unique identifier required of every business in India for taxation. + INDIA_GST_INDIVIDUAL = 1401; + + // The Indian Personal Permanent Account Number (PAN) is a unique 10-digit alphanumeric identifier used for identification of individuals—particularly people who pay income tax. It's issued by the Indian Income Tax Department. The PAN is valid for the lifetime of the holder. + INDIA_PAN_INDIVIDUAL = 1402; + + + // An Indonesian Single Identity Number (Nomor Induk Kependudukan, or NIK) is the national identification number of Indonesia. The NIK is used as the basis for issuing Indonesian resident identity cards (Kartu Tanda Penduduk, or KTP), passports, driver's licenses and other identity documents. + INDONESIA_NIK_NUMBER = 1500; + + + // An Irish driving license number. + IRELAND_DRIVING_LICENSE_NUMBER = 1600; + + // Eircode is an Irish postal code that uniquely identifies an address. + IRELAND_EIRCODE = 1601; + + // An Irish (IE) passport number. 
+ IRELAND_PASSPORT = 1602; + + // The Irish Personal Public Service Number (PPS number, or PPSN) is a unique number for accessing social welfare benefits, public services, and information in Ireland. + IRELAND_PPSN = 1603; + + + // The Israel identity card number is issued to all Israeli citizens at birth by the Ministry of the Interior. Temporary residents are assigned a number when they receive temporary resident status. + ISRAEL_IDENTITY_CARD_NUMBER = 1700; + + + // An Italy fiscal code number is a unique 16-digit code assigned to Italian citizens as a form of identification. + ITALY_FISCAL_CODE = 1800; + + + // A Japanese bank account number. + JAPAN_BANK_ACCOUNT = 1900; + + // A Japanese driver's license number. + JAPAN_DRIVERS_LICENSE_NUMBER = 1901; + + // The Japanese national identification number—sometimes referred to as "My Number"—is a new national ID number as of January 2016. + JAPAN_INDIVIDUAL_NUMBER = 1902; + + // A Japanese passport number. The passport number consists of two alphabetic characters followed by seven digits. + JAPAN_PASSPORT = 1903; + + + // A Korean passport number. + KOREA_PASSPORT = 2000; + + // A South Korean Social Security number. + KOREA_RRN = 2001; + + + // The Mexico Clave Única de Registro de Población (CURP) number, or Unique Population Registry Code or Personal Identification Code number. The CURP number is an 18-character state-issued identification number assigned by the Mexican government to citizens or residents of Mexico and used for taxpayer identification. + MEXICO_CURP_NUMBER = 2100; + + // A Mexican passport number. + MEXICO_PASSPORT = 2101; + + + // A Dutch Burgerservicenummer (BSN), or Citizen's Service Number, is a state-issued identification number that's on driver's licenses, passports, and international ID cards. + NETHERLANDS_BSN_NUMBER = 2200; + + // A Dutch passport number. 
+ NETHERLANDS_PASSPORT = 2201; + + + // Norway‘s Fødselsnummer, National Identification Number, or Birth Number is assigned at birth, or on migration into the country. It is registered with the Norwegian Tax Office. + NORWAY_NI_NUMBER = 2300; + + + // A Paraguayan Cédula de Identidad Civil (CIC), or civil identity card, is used as the main identity document for citizens. + PARAGUAY_CIC_NUMBER = 2400; + + + // A Peruvian Documento Nacional de Identidad (DNI), or national identity card, is used as the main identity document for citizens. + PERU_DNI_NUMBER = 2500; + + + // The PESEL number is the national identification number used in Poland. It is mandatory for all permanent residents of Poland, and for temporary residents staying there longer than 2 months. It is assigned to just one person and cannot be changed. + POLAND_PESEL_NUMBER = 2600; + + // The Polish identity card number. is a government identification number for Polish citizens. Every citizen older than 18 years must have an identity card. The local Office of Civic Affairs issues the card, and each card has its own unique number. + POLAND_NATIONAL_ID_NUMBER = 2601; + + // A Polish passport number. Polish passport is an international travel document for Polish citizens. It can also be used as a proof of Polish citizenship. + POLAND_PASSPORT = 2602; + + + // A Portuguese Cartão de cidadão (CDC), or Citizen Card, is used as the main identity, Social Security, health services, taxpayer, and voter document for citizens. + PORTUGAL_CDC_NUMBER = 2700; + + + // A unique set of nine alpha-numeric characters on the Singapore National Registration Identity Card. + SINGAPORE_NATIONAL_REGISTRATION_ID_NUMBER = 2800; + + // A Singaporean passport number. + SINGAPORE_PASSPORT = 2801; + + + // A South Africa ID number. + SOUTH_AFRICA_ID_NUMBER = 2900; + + + // The Spanish Código de Identificación Fiscal (CIF) was the tax identification system used in Spain for legal entities until 2008. 
It was then replaced by the Número de Identificación Fiscal (NIF) for natural and juridical persons. + SPAIN_CIF_NUMBER = 3000; + + // A Spain national identity number. + SPAIN_DNI_NUMBER = 3001; + + // A Spanish driver's license number. + SPAIN_DRIVERS_LICENSE_NUMBER = 3002; + + // The Spanish Número de Identificación de Extranjeros (NIE) is an identification number for foreigners living or doing business in Spain. An NIE number is needed for key transactions such as opening a bank account, buying a car, or setting up a mobile phone contract. + SPAIN_NIE_NUMBER = 3003; + + // The Spanish Número de Identificación Fiscal (NIF) is a government identification number for Spanish citizens. An NIF number is needed for key transactions such as opening a bank account, buying a car, or setting up a mobile phone contract. + SPAIN_NIF_NUMBER = 3004; + + // A Spanish Ordinary Passport (Pasaporte Ordinario) number. There are 4 different types of passports in Spain. This detector is for the Ordinary Passport (Pasaporte Ordinario) type, which is issued for ordinary travel, such as vacations and business trips. + SPAIN_PASSPORT = 3005; + + // The Spanish Social Security number (Número de Afiliación a la Seguridad Social) is a 10-digit sequence that identifies a person in Spain for all interactions with the country's Social Security system. + SPAIN_SOCIAL_SECURITY_NUMBER = 3006; + + + // A Swedish Personal Identity Number (personnummer), a national government identification number for Swedish citizens. + SWEDEN_NATIONAL_ID_NUMBER = 3100; + + // A Swedish passport number. + SWEDEN_PASSPORT = 3101; + + + // A Taiwanese passport number. + TAIWAN_PASSPORT = 3200; + + + // The Thai บัตรประจำตัวประชาชนไทย, or identity card, is used as the main identity document for Thai nationals. + THAILAND_NATIONAL_ID_NUMBER = 3300; + + + // A unique Turkish personal identification number, assigned to every citizen of Turkey. 
+ TURKEY_ID_NUMBER = 3400; + + + // The Scotland Community Health Index Number (CHI number) is a 10-digit sequence used to uniquely identify a patient within National Health Service Scotland (NHS Scotland). + SCOTLAND_COMMUNITY_HEALTH_INDEX_NUMBER = 3500; + + // A driver's license number for the United Kingdom of Great Britain and Northern Ireland (UK). + UK_DRIVERS_LICENSE_NUMBER = 3501; + + // A National Health Service (NHS) number is the unique number allocated to a registered user of the three public health services in England, Wales, and the Isle of Man. + UK_NATIONAL_HEALTH_SERVICE_NUMBER = 3502; + + // The National Insurance number (NINO) is a number used in the United Kingdom (UK) in the administration of the National Insurance or social security system. It identifies people, and is also used for some purposes in the UK tax system. The number is sometimes referred to as NI No or NINO. + UK_NATIONAL_INSURANCE_NUMBER = 3503; + + // A United Kingdom (UK) passport number. + UK_PASSPORT = 3504; + + // A United Kingdom (UK) Unique Taxpayer Reference (UTR) number. This number, comprised of a string of 10 decimal digits, is an identifier used by the UK government to manage the taxation system. Unlike other identifiers, such as the passport number or social insurance number, the UTR is not listed on official identity cards. + UK_TAXPAYER_REFERENCE = 3505; + + + // An American Bankers' Committee on Uniform Security Identification Procedures (CUSIP) number is a 9-character alphanumeric code that identifies a North American financial security. + AMERICAN_BANKERS_CUSIP_ID = 3600; + + // Drug product name or active ingredient registered by the United States Food and Drug Administration (FDA). + FDA_CODE = 3601; + + // A United States Adoption Taxpayer Identification Number (ATIN) is a type of United States Tax Identification Number (TIN). 
An ATIN is issued by the Internal Revenue Service (IRS) to individuals who are in the process of legally adopting a US citizen or resident child. + US_ADOPTION_TAXPAYER_IDENTIFICATION_NUMBER = 3602; + + // The American Bankers Association (ABA) Routing Number (also called the transit number) is a nine-digit code. It's used to identify the financial institution that's responsible to credit or entitled to receive credit for a check or electronic transaction. + US_BANK_ROUTING_MICR = 3603; + + // A US Drug Enforcement Administration (DEA) number is assigned to a health care provider by the US DEA. It allows the health care provider to write prescriptions for controlled substances. The DEA number is often used as a general "prescriber number" that is a unique identifier for anyone who can prescribe medication. + US_DEA_NUMBER = 3604; + + // A driver's license number for the United States. Format can vary depending on the issuing state. + US_DRIVERS_LICENSE_NUMBER = 3605; + + // A United States Employer Identification Number (EIN) is also known as a Federal Tax Identification Number, and is used to identify a business entity. + US_EMPLOYER_IDENTIFICATION_NUMBER = 3606; + + // The US National Provider Identifier (NPI) is a unique 10-digit identification number issued to health care providers in the United States by the Centers for Medicare and Medicaid Services (CMS). The NPI has replaced the unique provider identification number (UPIN) as the required identifier for Medicare services. It's also used by other payers, including commercial healthcare insurers. + US_HEALTHCARE_NPI = 3607; + + // A United States Individual Taxpayer Identification Number (ITIN) is a type of Tax Identification Number (TIN), issued by the Internal Revenue Service (IRS). An ITIN is a tax processing number only available for certain nonresident and resident aliens, their spouses, and dependents who cannot get a Social Security Number (SSN). 
+ US_INDIVIDUAL_TAXPAYER_IDENTIFICATION_NUMBER = 3608; + + // A United States passport number. + US_PASSPORT = 3609; + + // A United States Preparer Taxpayer Identification Number (PTIN) is an identification number that all paid tax return preparers must use on US federal tax returns or claims for refund submitted to the US Internal Revenue Service (IRS). + US_PREPARER_TAXPAYER_IDENTIFICATION_NUMBER = 3610; + + // A United States Social Security number (SSN) is a 9-digit number issued to US citizens, permanent residents, and temporary residents. This detector will not match against numbers with all zeroes in any digit group (that is, 000-##-####, ###-00-####, or ###-##-0000), against numbers with 666 in the first digit group, or against numbers whose first digit is 9. + US_SOCIAL_SECURITY_NUMBER = 3611; + + // A United States state name. + US_STATE = 3612; + + // A US toll-free telephone number. + US_TOLLFREE_PHONE_NUMBER = 3613; + + // A vehicle identification number (VIN) is a unique 17-digit code assigned to every on-road motor vehicle in North America. + US_VEHICLE_IDENTIFICATION_NUMBER = 3614; + + + // A Uruguayan Cédula de Identidad (CDI), or identity card, is used as the main identity document for citizens. + URUGUAY_CDI_NUMBER = 3700; + + + // A Venezuelan Cédula de Identidad (CDI), or national identity card, is used as the main identity document for citizens. + VENEZUELA_CDI_NUMBER = 3800; + + +} + +extend google.protobuf.FieldOptions { + // message.description is a Mandatory Metadata + CoreMetadata field_core = 70001; + // Set true if the field contains classified data (Optional). + bool is_classified = 70002; + // Set the classification level if is_classified is true (This is Mandatory if is_classified set to true) + string classification_level = 70003; + // Specify the product type. product_type is a useful annotation to represent a field in a business perspective. 
+ // (e.g) user_id can be an INT field, but in the system design it could represent External Users rather than internal users. + string product_type = 70004; + // Set true if the field is a primary key. This must be true if the Schema type is Entity + bool is_primary_key = 70005; + // Type of the classification: Refer: https://cloud.google.com/dlp/docs/infotypes-reference + ClassificationType classification_type = 70006; + // Is PII field + bool is_pii = 70007; + // Set whether the field is deprecated or not + bool depricated = 70008; +} \ No newline at end of file diff --git a/connect/src/test/java/com/opsbeach/connect/github/service/ActivityServiceTest.java b/connect/src/test/java/com/opsbeach/connect/github/service/ActivityServiceTest.java new file mode 100644 index 0000000..81ade23 --- /dev/null +++ b/connect/src/test/java/com/opsbeach/connect/github/service/ActivityServiceTest.java @@ -0,0 +1,90 @@ +package com.opsbeach.connect.github.service; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.when; + +import java.util.List; +import java.util.Optional; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.ArgumentMatchers; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.springframework.data.jpa.domain.Specification; +import org.springframework.mock.web.MockHttpServletRequest; +import org.springframework.web.context.request.RequestContextHolder; +import org.springframework.web.context.request.ServletRequestAttributes; + +import com.opsbeach.connect.core.specification.IdSpecifications; +import com.opsbeach.connect.github.dto.ActivityDto; +import com.opsbeach.connect.github.entity.Activity; +import com.opsbeach.connect.github.repository.ActivityRepository; +import 
com.opsbeach.sharedlib.exception.RecordNotFoundException; +import com.opsbeach.sharedlib.response.ResponseMessage; + +public class ActivityServiceTest { + + @InjectMocks + private ActivityService activityService; + + @Mock + private ActivityRepository activityRepository; + + @Mock + private WorkflowService workflowService; + + @Mock + private ResponseMessage responseMessage; + + @Mock + private IdSpecifications activitySpecifications; + + @BeforeEach + public void initMock() { + MockitoAnnotations.openMocks(this); + MockHttpServletRequest request = new MockHttpServletRequest(); + RequestContextHolder.setRequestAttributes(new ServletRequestAttributes(request)); + } + + private ActivityDto getActivityDto() { + return ActivityDto.builder().id(1L).build(); + } + + @Test + public void addTest() { + var activityDto = getActivityDto(); + var activity = activityDto.toDomain(activityDto); + when(activityRepository.save(any())).thenReturn(activity); + var response = activityService.add(activityDto); + assertEquals(activityDto.getId(), response.getId()); + } + + @Test + public void getTest() { + var activityDto = getActivityDto(); + var activity = activityDto.toDomain(activityDto); + when(activityRepository.findById(activity.getId())).thenReturn(Optional.of(activity)); + var response = activityService.get(activity.getId()); + assertEquals(activityDto.getId(), response.getId()); + + assertThrows(RecordNotFoundException.class, () -> { activityService.get(2L); }); + } + + @Test + public void findAllByWorkflowIdTest() { + var activityDto = getActivityDto(); + var activity = activityDto.toDomain(activityDto); + when(activityRepository.findAll(ArgumentMatchers.>any())).thenReturn(List.of(activity)); + var response = activityService.findAllByWorkflowId(2L); + assertEquals(1, response.size()); + assertEquals(activityDto.getId(), response.get(0).getId()); + + when(activityRepository.findAll(ArgumentMatchers.>any())).thenReturn(List.of()); + response = 
activityService.findAllByWorkflowId(2L); + assertEquals(0, response.size()); + } +} diff --git a/connect/src/test/java/com/opsbeach/connect/github/service/ClientRepoServiceTest.java b/connect/src/test/java/com/opsbeach/connect/github/service/ClientRepoServiceTest.java new file mode 100644 index 0000000..cb51c77 --- /dev/null +++ b/connect/src/test/java/com/opsbeach/connect/github/service/ClientRepoServiceTest.java @@ -0,0 +1,361 @@ +package com.opsbeach.connect.github.service; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.ArgumentMatchers.anyMap; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.nio.file.StandardCopyOption; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +import org.apache.avro.SchemaParseException; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.ArgumentMatchers; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.mockito.Spy; +import org.springframework.data.jpa.domain.Specification; +import org.springframework.mock.web.MockHttpServletRequest; +import org.springframework.security.config.annotation.AlreadyBuiltException; +import org.springframework.security.core.Authentication; +import org.springframework.security.core.context.SecurityContext; +import 
org.springframework.security.core.context.SecurityContextHolder; +import org.springframework.test.util.ReflectionTestUtils; +import org.springframework.web.context.request.RequestContextHolder; +import org.springframework.web.context.request.ServletRequestAttributes; +import org.yaml.snakeyaml.Yaml; + +import com.fasterxml.jackson.core.exc.StreamReadException; +import com.fasterxml.jackson.databind.DatabindException; +import com.fasterxml.jackson.databind.node.JsonNodeFactory; +import com.opsbeach.connect.core.enums.ServiceType; +import com.opsbeach.connect.core.specification.IdSpecifications; +import com.opsbeach.connect.github.dto.GitHubDto; +import com.opsbeach.connect.github.entity.ClientRepo; +import com.opsbeach.connect.github.entity.EventAudit; +import com.opsbeach.connect.github.entity.ClientRepo.RepoSource; +import com.opsbeach.connect.github.entity.ClientRepo.RepoType; +import com.opsbeach.connect.github.repository.ClientRepoRepository; +import com.opsbeach.connect.schemata.entity.DomainNode; +import com.opsbeach.connect.schemata.processor.avro.AvroSchema; +import com.opsbeach.connect.schemata.processor.json.JsonSchema; +import com.opsbeach.connect.schemata.processor.protobuf.ProtoSchema; +import com.opsbeach.connect.schemata.service.DomainNodeService; +import com.opsbeach.connect.schemata.validate.Status; +import com.opsbeach.connect.task.dto.ConnectDto; +import com.opsbeach.connect.task.service.ConnectService; +import com.opsbeach.sharedlib.dto.ClientDto; +import com.opsbeach.sharedlib.dto.UserDto; +import com.opsbeach.sharedlib.exception.AlreadyExistException; +import com.opsbeach.sharedlib.exception.RecordNotFoundException; +import com.opsbeach.sharedlib.response.ResponseMessage; +import com.opsbeach.sharedlib.security.ApplicationConfig; +import com.opsbeach.sharedlib.service.App2AppService; +import com.opsbeach.sharedlib.service.GoogleCloudService; +import com.opsbeach.sharedlib.utils.StringUtil; + +import jakarta.persistence.EntityManager; +import 
jakarta.persistence.TypedQuery; +import jakarta.persistence.criteria.CriteriaBuilder; +import jakarta.persistence.criteria.CriteriaQuery; +import jakarta.persistence.criteria.Predicate; +import jakarta.persistence.criteria.Root; + +public class ClientRepoServiceTest { + + @InjectMocks + private ClientRepoService clientRepoService; + @Mock + private ClientRepoRepository clientRepoRepository; + @Mock + private EventAuditService eventAuditService; + @Mock + private GitHubService gitHubService; + @Mock + private ResponseMessage responseMessage; + @Mock + private App2AppService app2AppService; + @Spy + private IdSpecifications cIdSpecifications; + @Mock + private CriteriaBuilder criteriaBuilder; + @Mock + private CriteriaQuery criteriaQuery; + @Mock + private Root root; + @Mock + private TypedQuery typedQuery; + @Mock + private EntityManager entityManager; + @Mock + private DomainNodeService domainNodeService; + @Mock + private DomainService domainService; + @Mock + private ConnectService connectService; + @Mock + private ApplicationConfig applicationConfig; + @Mock + private GoogleCloudService googleCloudService; + @Mock + private AvroSchema avroSchema; + @Mock + private JsonSchema jsonSchema; + @Mock + private ProtoSchema protoSchema; + private Map gcloud; + private Object homePath; + @Mock + private ModelService modelService; + @Mock + private SchemaFileAuditService schemaFileAuditService; + + @BeforeEach + public void initMock() { + MockitoAnnotations.openMocks(this); + MockHttpServletRequest request = new MockHttpServletRequest(); + RequestContextHolder.setRequestAttributes(new ServletRequestAttributes(request)); + } + + @BeforeEach + public void init() throws StreamReadException, DatabindException, IOException { + InputStream inputStream = new FileInputStream(new File("src/test/resources/application-test.yaml")); + Yaml yaml = new Yaml(); + Map> data = yaml.load(inputStream); + gcloud = data.get("gcloud"); + homePath = data.get("home-path"); + } + + private void 
mockApplicationUser() { + UserDto userDto = mock(UserDto.class); + Authentication authentication = mock(Authentication.class); + SecurityContext securityContext = mock(SecurityContext.class); + when(securityContext.getAuthentication()).thenReturn(authentication); + SecurityContextHolder.setContext(securityContext); + when(SecurityContextHolder.getContext().getAuthentication().getPrincipal()).thenReturn(userDto); + } + + private ClientRepo createClientRepo() { + return ClientRepo.builder().id(1L).name("Table").fullName("full_name").build(); + } + + @Test + public void getTest() { + var clientRepo = createClientRepo(); + when(clientRepoRepository.findById(clientRepo.getId())).thenReturn(Optional.of(clientRepo)); + var response = clientRepoService.get(clientRepo.getId()); + assertEquals(response.getName(), clientRepo.getName()); + + when(clientRepoRepository.findById(clientRepo.getId())).thenThrow(RecordNotFoundException.class); + assertThrows(RecordNotFoundException.class, () -> clientRepoService.get(clientRepo.getId())); + } + + @Test + public void getAllTest() { + assertEquals(0, clientRepoService.getAll().size()); + + var clientRepo = createClientRepo(); + when(clientRepoRepository.findAll()).thenReturn(List.of(clientRepo)); + var response = clientRepoService.getAll(); + assertEquals(response.get(0).getName(), clientRepo.getName()); + } + + private GitHubDto createGitHubDto() { + var selectedRepos = Map.of("Selected Repo", RepoType.AVRO); + return GitHubDto.builder().connectId(1L).selectedRepos(selectedRepos) + .repos(List.of("opsconnect")).repoOwner("opsbeach").build(); + } + + @Test + public void addTest() { + var clientRepo = createClientRepo(); + mockEntityManager(); + when(typedQuery.getResultList()).thenReturn(List.of(clientRepo)); + when(typedQuery.getSingleResult()).thenReturn(clientRepo); + when(entityManager.createQuery(criteriaQuery)).thenReturn(typedQuery); + assertThrows(AlreadyExistException.class, () -> clientRepoService.add(createGitHubDto())); + + 
when(typedQuery.getResultList()).thenReturn(List.of()); + when(entityManager.createQuery(criteriaQuery)).thenReturn(typedQuery); + when(clientRepoRepository.save(any(ClientRepo.class))).thenReturn(clientRepo); + var eventAudit = EventAudit.builder().id(1L).build(); + mockApplicationUser(); + ReflectionTestUtils.setField(clientRepoService, "getClientUrl", "clientUrl"); + when(app2AppService.setHeaders(anyMap(), eq(null))).thenReturn(null); + when(app2AppService.httpGet(anyString(), eq(null), eq(ClientDto.class))).thenReturn(ClientDto.builder().name("client").build()); + when(eventAuditService.addAll(ArgumentMatchers.>any())).thenReturn(List.of(eventAudit)); + var response = clientRepoService.add(createGitHubDto()); + assertEquals(response, Status.SUCCESS.name()); + } + + private void mockEntityManager() { + when(entityManager.getCriteriaBuilder()).thenReturn(criteriaBuilder); + when(criteriaBuilder.createQuery(ClientRepo.class)).thenReturn(criteriaQuery); + when(criteriaQuery.from(ClientRepo.class)).thenReturn(root); + + when(criteriaQuery.select(root)).thenReturn(criteriaQuery); + when(criteriaQuery.where(any(Predicate.class))).thenReturn(criteriaQuery); + } + + @Test + public void getByFullNameTest() { + var clientRepo = createClientRepo(); + mockEntityManager(); + when(typedQuery.getResultList()).thenReturn(List.of(clientRepo)); + when(typedQuery.getSingleResult()).thenReturn(clientRepo); + + when(entityManager.createQuery(criteriaQuery)).thenReturn(typedQuery); + var response = clientRepoService.getByFullName("full_name"); + assertEquals(response.getName(), clientRepo.getName()); + + when(typedQuery.getResultList()).thenReturn(List.of()); + assertThrows(RecordNotFoundException.class, () -> clientRepoService.getByFullName("full_name")); + } + + @Test + public void getActiveRepoIdsTest() { + var clientRepo = createClientRepo(); + when(clientRepoRepository.findAll(ArgumentMatchers.>any())).thenReturn(List.of(clientRepo)); + var response = 
clientRepoService.getActiveRepoIds(); + assertEquals(response.get(0), clientRepo.getId()); + + when(clientRepoRepository.findAll(ArgumentMatchers.>any())).thenReturn(List.of()); + response = clientRepoService.getActiveRepoIds(); + assertEquals(response.size(), 0); + } + + @Test + public void updateStatusTest() { + var clientRepo = createClientRepo(); + when(clientRepoRepository.findById(anyLong())).thenReturn(Optional.of(clientRepo)); + when(clientRepoRepository.save(any(ClientRepo.class))).thenReturn(clientRepo); + var response = clientRepoService.updateStatus(1L, ClientRepo.Status.ACTIVE); + assertEquals(response.getName(), clientRepo.getName()); + } + + @Test + public void getRepoTypesTest() { + assertTrue(Arrays.equals(RepoType.values(), clientRepoService.getRepoTypes())); + } + + private ClientRepo getClientRepo() { + return ClientRepo.builder().id(1L).name("name").fullName("fullName").defaultBranch("default_branch").build(); + } + + private void mockCreateClientRepoMethod(ClientRepo clientRepo) { + when(clientRepoRepository.save(any(ClientRepo.class))).thenReturn(clientRepo); + mockApplicationUser(); + ReflectionTestUtils.setField(clientRepoService, "getClientUrl", "{id}"); + when(app2AppService.setHeaders(anyMap(), eq(null))).thenReturn(null); + when(app2AppService.httpGet(anyString(), eq(null), eq(ClientDto.class))).thenReturn(ClientDto.builder().id(1L).build()); + when(domainNodeService.addDomainNode(anyString(), anyLong(), anyLong())).thenReturn(DomainNode.builder().id(1L).build()); + } + + @Test + public void createNewRepoTest() { + var gitHubDto = GitHubDto.builder().repoOwner("owner").privateRepoName("repo").connectId(1L).build(); + assertThrows(AlreadyBuiltException.class , () -> clientRepoService.createNewRepo(gitHubDto)); + when(gitHubService.getRepoDetails(anyString(), anyString(), anyLong())).thenThrow(RecordNotFoundException.class); + var newRepo = JsonNodeFactory.instance.objectNode(); + newRepo.put("name", "name").put("full_name", 
"fullName").put("default_branch", "default_branch"); + newRepo.putObject("owner").put("login", "owner"); + when(gitHubService.createNewRepo(anyString(), anyString(), anyLong())).thenReturn(newRepo); + var clientRepo = getClientRepo(); + mockCreateClientRepoMethod(clientRepo); + var response = clientRepoService.createNewRepo(gitHubDto); + assertEquals(clientRepo.getName(), response.getName()); + } + + @Test + public void getSchemataRepoTest() { + var clientRepo = getClientRepo(); + when(clientRepoRepository.findOne(ArgumentMatchers.>any())).thenReturn(Optional.of(clientRepo)); + var response = clientRepoService.getSchemataRepo(); + assertEquals(clientRepo.getName(), response.get().getName()); + + when(clientRepoRepository.findOne(ArgumentMatchers.>any())).thenReturn(Optional.empty()); + var connectDto = ConnectDto.builder().id(1L).serviceType(ServiceType.GITHUB).build(); + when(connectService.get(ServiceType.GITHUB)).thenReturn(connectDto); + assertThrows(RecordNotFoundException.class, () -> clientRepoService.getSchemataRepo()); + + connectDto = ConnectDto.builder().id(1L).serviceType(ServiceType.GITHUB).repoOrganization("org").build(); + when(connectService.get(ServiceType.GITHUB)).thenReturn(connectDto); + var newRepo = JsonNodeFactory.instance.objectNode(); + newRepo.put("name", "name").put("full_name", "fullName").put("default_branch", "default_branch"); + newRepo.putObject("owner").put("login", "owner"); + when(gitHubService.getRepoDetails(anyString(), anyString(), any(ConnectDto.class))).thenReturn(newRepo); + mockCreateClientRepoMethod(clientRepo); + response = clientRepoService.getSchemataRepo(); + assertEquals(clientRepo.getName(), response.get().getName()); + + when(gitHubService.getRepoDetails(anyString(), anyString(), any(ConnectDto.class))).thenThrow(RecordNotFoundException.class); + when(gitHubService.createNewRepo(anyString(), anyString(), any(ConnectDto.class))).thenReturn(newRepo); + response = clientRepoService.getSchemataRepo(); + 
assertEquals(clientRepo.getName(), response.get().getName()); + } + + @Test + public void initialLoadingTest() throws IOException { + var eventAudit = EventAudit.builder().id(1L).eventId(1L).initiatedBy("initiatedBy") + .clientName("clientName").clientId(1L).build(); + mockClientRepoAndCreateZipFile(RepoType.AVRO, eventAudit); + mockApplicationUser(); + when(applicationConfig.getGcloud()).thenReturn(gcloud); + ReflectionTestUtils.setField(clientRepoService, "homePath", homePath); + ReflectionTestUtils.setField(clientRepoService, "updateOnboardStatusUrl", "updateOnboardStatusUrl"); + + var response = clientRepoService.initialLoading(eventAudit); + assertEquals(response.getId(), eventAudit.getId()); + + mockClientRepoAndCreateZipFile(RepoType.JSON, eventAudit); + response = clientRepoService.initialLoading(eventAudit); + assertEquals(response.getId(), eventAudit.getId()); + + mockClientRepoAndCreateZipFile(RepoType.PROTOBUF, eventAudit); + response = clientRepoService.initialLoading(eventAudit); + assertEquals(response.getId(), eventAudit.getId()); + + mockClientRepoAndCreateZipFile(RepoType.YAML, eventAudit); + response = clientRepoService.initialLoading(eventAudit); + assertEquals(response.getId(), eventAudit.getId()); + + mockClientRepoAndCreateZipFile(null, eventAudit); + assertThrows(SchemaParseException.class, () -> clientRepoService.initialLoading(eventAudit)); + } + + private void mockClientRepoAndCreateZipFile(RepoType repoType, EventAudit eventAudit) throws IOException { + var clientRepo = ClientRepo.builder().id(1L).connectId(1L).fullName("fullName").name("name") + .repoType(repoType).defaultBranch("defaultBranch").repositorySource(RepoSource.GITHUB).build(); + when(clientRepoRepository.findById(clientRepo.getId())).thenReturn(Optional.of(clientRepo)); + var repoFolderPath = StringUtil.constructStringEmptySeparator(homePath.toString(), eventAudit.getInitiatedBy(), "-", clientRepo.getName()); + new File(repoFolderPath).mkdir(); + 
Files.copy(Paths.get("src/test/resources/sampleRepo/repo-bucket.tar.gz"), Paths.get(repoFolderPath.concat("/repo-bucket.tar.gz")), StandardCopyOption.REPLACE_EXISTING); + } + + @Test + public void getModelTest() { + assertThrows(RecordNotFoundException.class, () -> clientRepoService.getModel(1L)); + } + + @Test + public void rollBackRecordsCreatedByIdTest() { + clientRepoService.rollBackRecordsCreatedById(1L); + } +} diff --git a/connect/src/test/java/com/opsbeach/connect/github/service/CommentServiceTest.java b/connect/src/test/java/com/opsbeach/connect/github/service/CommentServiceTest.java new file mode 100644 index 0000000..3aded26 --- /dev/null +++ b/connect/src/test/java/com/opsbeach/connect/github/service/CommentServiceTest.java @@ -0,0 +1,85 @@ +package com.opsbeach.connect.github.service; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.Mockito.when; + +import java.util.Optional; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.springframework.mock.web.MockHttpServletRequest; +import org.springframework.web.context.request.RequestContextHolder; +import org.springframework.web.context.request.ServletRequestAttributes; + +import com.opsbeach.connect.github.dto.CommentDto; +import com.opsbeach.connect.github.repository.CommentRepository; +import com.opsbeach.sharedlib.exception.RecordNotFoundException; +import com.opsbeach.sharedlib.response.ResponseMessage; + +public class CommentServiceTest { + + @InjectMocks + private CommentService commentService; + + @Mock + private CommentRepository commentRepository; + + @Mock + private PullRequestService pullRequestService; + + @Mock + private ResponseMessage responseMessage; 
+ + @BeforeEach + public void initMock() { + MockitoAnnotations.openMocks(this); + MockHttpServletRequest request = new MockHttpServletRequest(); + RequestContextHolder.setRequestAttributes(new ServletRequestAttributes(request)); + } + + private CommentDto getCommentDto() { + return CommentDto.builder().id(1L).build(); + } + + @Test + public void addTest() { + var commentDto = getCommentDto(); + var comment = commentDto.toDomain(getCommentDto()); + when(commentRepository.save(any())).thenReturn(comment); + var response = commentService.add(commentDto); + assertEquals(comment.getId(), response.getId()); + + commentDto = CommentDto.builder().id(2L).isResolved(Boolean.FALSE).commentableId(1L).build(); + when(commentRepository.findById(anyLong())).thenReturn(Optional.of(comment)); + response = commentService.add(commentDto); + assertEquals(comment.getCommentableId(), response.getCommentableId()); + } + + @Test + public void getTest() { + var commentDto = getCommentDto(); + var comment = commentDto.toDomain(getCommentDto()); + when(commentRepository.findById(1L)).thenReturn(Optional.of(comment)); + var request = commentService.get(1L); + assertEquals(comment.getId(), request.getId()); + + assertThrows(RecordNotFoundException.class, () -> { commentService.get(2L); }); + } + + @Test + public void updateIsResolvedTest() { + var commentDto = getCommentDto(); + var comment = commentDto.toDomain(getCommentDto()); + when(commentRepository.findById(1L)).thenReturn(Optional.of(comment)); + comment.setIsResolved(Boolean.TRUE); + when(commentRepository.save(any())).thenReturn(comment); + var response = commentService.updateIsResolved(1L, Boolean.TRUE); + assertEquals(Boolean.TRUE, response.isResolved()); + } +} diff --git a/connect/src/test/java/com/opsbeach/connect/github/service/DomainServiceTest.java b/connect/src/test/java/com/opsbeach/connect/github/service/DomainServiceTest.java new file mode 100644 index 0000000..a2fc85c --- /dev/null +++ 
b/connect/src/test/java/com/opsbeach/connect/github/service/DomainServiceTest.java @@ -0,0 +1,111 @@ +package com.opsbeach.connect.github.service; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.when; + +import java.util.List; +import java.util.Optional; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.ArgumentMatchers; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.springframework.data.jpa.domain.Specification; +import org.springframework.mock.web.MockHttpServletRequest; +import org.springframework.web.context.request.RequestContextHolder; +import org.springframework.web.context.request.ServletRequestAttributes; + +import com.opsbeach.connect.core.specification.IdSpecifications; +import com.opsbeach.connect.github.dto.DomainDto; +import com.opsbeach.connect.github.entity.ClientRepo; +import com.opsbeach.connect.github.entity.Domain; +import com.opsbeach.connect.github.repository.DomainRepository; +import com.opsbeach.sharedlib.exception.RecordNotFoundException; +import com.opsbeach.sharedlib.response.ResponseMessage; + +public class DomainServiceTest { + + @InjectMocks + private DomainService domainService; + + @Mock + private DomainRepository domainRepository; + + @Mock + private ResponseMessage responseMessage; + + @Mock + private IdSpecifications domainSpecifications; + + @BeforeEach + public void initMock() { + MockitoAnnotations.openMocks(this); + MockHttpServletRequest request = new MockHttpServletRequest(); + RequestContextHolder.setRequestAttributes(new ServletRequestAttributes(request)); + } + + private DomainDto getDomainDto() { + return DomainDto.builder().id(1L).name("Domain").build(); + } + + @Test + public void addTest() { + var domainDto = getDomainDto(); + var domain = 
domainDto.toDomain(domainDto); + when(domainRepository.save(any(Domain.class))).thenReturn(domain); + var response = domainService.add(domainDto); + assertEquals(domain.getName(), response.getName()); + } + + @Test + public void getTest() { + var domainDto = getDomainDto(); + var domain = domainDto.toDomain(domainDto); + when(domainRepository.findById(1L)).thenReturn(Optional.of(domain)); + var response = domainService.get(1L); + assertEquals(domain.getName(), response.getName()); + + assertThrows(RecordNotFoundException.class, () -> { domainService.get(2L); }); + } + + @Test + public void getAllTest() { + var domainDto = getDomainDto(); + var domain = domainDto.toDomain(domainDto); + when(domainRepository.findAll(ArgumentMatchers.>any())).thenReturn(List.of(domain)); + var response = domainService.getAll(null); + assertEquals(1, response.size()); + assertEquals(domain.getName(), response.get(0).getName()); + + when(domainRepository.findAll(ArgumentMatchers.>any())).thenReturn(List.of()); + response = domainService.getAll(1L); + assertEquals(0, response.size()); + } + + @Test + public void addDomainTest() { + var clientRepo = ClientRepo.builder().id(1L).clientId(2L).fullName("fullName").build(); + var domain = Domain.builder().id(1L).nodeId(1L).name(clientRepo.getFullName()).build(); + when(domainRepository.save(any(Domain.class))).thenReturn(domain); + var response = domainService.addDomain(clientRepo, 1L); + assertEquals(domain.getName(), response.getName()); + assertEquals(domain.getNodeId(), response.getNodeId()); + } + + @Test + public void getDefaultDomainTest() { + var domain = Domain.builder().id(1L).nodeId(1L).name("FullName").build(); + when(domainRepository.findOne(ArgumentMatchers.>any())).thenReturn(Optional.of(domain)); + var response = domainService.getDefaultDomain("FullName"); + assertEquals(domain.getName(), response.getName()); + } + + @Test + public void deleteByClientRepoIdTest() { + domainService.deleteByClientRepoId(1L); + } +} diff --git 
a/connect/src/test/java/com/opsbeach/connect/github/service/EventAuditServiceTest.java b/connect/src/test/java/com/opsbeach/connect/github/service/EventAuditServiceTest.java new file mode 100644 index 0000000..6e351e2 --- /dev/null +++ b/connect/src/test/java/com/opsbeach/connect/github/service/EventAuditServiceTest.java @@ -0,0 +1,167 @@ +package com.opsbeach.connect.github.service; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyList; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import java.util.List; +import java.util.Optional; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.ArgumentMatchers; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.springframework.data.jpa.domain.Specification; +import org.springframework.mock.web.MockHttpServletRequest; +import org.springframework.security.config.annotation.AlreadyBuiltException; +import org.springframework.security.core.Authentication; +import org.springframework.security.core.context.SecurityContext; +import org.springframework.security.core.context.SecurityContextHolder; +import org.springframework.test.util.ReflectionTestUtils; +import org.springframework.web.context.request.RequestContextHolder; +import org.springframework.web.context.request.ServletRequestAttributes; + +import com.opsbeach.connect.core.specification.IdSpecifications; +import com.opsbeach.connect.github.dto.EventAuditDto; +import com.opsbeach.connect.github.entity.EventAudit; +import com.opsbeach.connect.github.entity.EventAudit.Type; +import com.opsbeach.connect.github.repository.EventAuditRepository; +import 
com.opsbeach.connect.task.dto.ConnectDto; +import com.opsbeach.connect.task.service.ConnectService; +import com.opsbeach.sharedlib.dto.UserDto; +import com.opsbeach.sharedlib.exception.RecordNotFoundException; +import com.opsbeach.sharedlib.exception.SchemaParserException; +import com.opsbeach.sharedlib.response.ResponseMessage; +import com.opsbeach.sharedlib.service.GoogleCloudService; + +public class EventAuditServiceTest { + + @InjectMocks + private EventAuditService eventAuditService; + + @Mock + private EventAuditRepository eventAuditRepository; + + @Mock + private ConnectService connectService; + + @Mock + private ResponseMessage responseMessage; + + @Mock + private IdSpecifications eventAuditSpecifications; + + @Mock + private GoogleCloudService googleCloudService; + + @Mock + private ClientRepoService clientRepoService; + + @BeforeEach + public void initMock() { + MockitoAnnotations.openMocks(this); + MockHttpServletRequest request = new MockHttpServletRequest(); + RequestContextHolder.setRequestAttributes(new ServletRequestAttributes(request)); + } + + private void mockApplicationUser() { + UserDto userDto = mock(UserDto.class); + Authentication authentication = mock(Authentication.class); + SecurityContext securityContext = mock(SecurityContext.class); + when(securityContext.getAuthentication()).thenReturn(authentication); + SecurityContextHolder.setContext(securityContext); + when(SecurityContextHolder.getContext().getAuthentication().getPrincipal()).thenReturn(userDto); + } + + @Test + public void addTest() { + when(connectService.get(anyLong())).thenReturn(ConnectDto.builder().id(1L).build()); + var eventAuditDto = EventAuditDto.builder().id(1L).build(); + var eventAudit = eventAuditDto.toDomain(eventAuditDto); + when(eventAuditRepository.save(any(EventAudit.class))).thenReturn(eventAudit); + var response = eventAuditService.add(eventAuditDto); + assertEquals(eventAudit.getId(), response.getId()); + } + + @Test + public void getTest() { + var 
eventAudit = EventAudit.builder().id(1L).build(); + when(eventAuditRepository.findById(1L)).thenReturn(Optional.of(eventAudit)); + var response = eventAuditService.get(1L); + assertEquals(eventAudit.getId(), response.getId()); + + assertThrows(RecordNotFoundException.class, () -> { eventAuditService.get(2L); }); + } + + @Test + public void getAllTest() { + assertEquals(0, eventAuditService.getAll().size()); + var eventAudit = EventAudit.builder().id(1L).build(); + when(eventAuditRepository.findAll()).thenReturn(List.of(eventAudit)); + var response = eventAuditService.getAll(); + assertEquals(1, response.size()); + } + + @Test + public void getInitialLoadStatusTest() { + assertEquals(0, eventAuditService.getInitialLoadStatus().size()); + var eventAudit = EventAudit.builder().id(1L).status(EventAudit.Status.PENDING).build(); + when(eventAuditRepository.findAll(ArgumentMatchers.>any())).thenReturn(List.of(eventAudit)); + var response = eventAuditService.getInitialLoadStatus(); + assertEquals(1, response.size()); + assertEquals(eventAudit.getStatus(), response.get(0).getStatus()); + } + + @Test + public void addAllTest() { + var eventAudit = EventAudit.builder().id(1L).status(EventAudit.Status.PENDING).build(); + when(eventAuditRepository.saveAll(anyList())).thenReturn(List.of(eventAudit)); + var response = eventAuditService.addAll(List.of(eventAudit)); + assertEquals(1, response.size()); + assertEquals(eventAudit.getStatus(), response.get(0).getStatus()); + } + + @Test + public void updateStatusTest() { + var eventAudit = EventAudit.builder().id(1L).status(EventAudit.Status.PENDING).build(); + when(eventAuditRepository.findById(1L)).thenReturn(Optional.of(eventAudit)); + eventAudit.setStatus(EventAudit.Status.IN_PROGRESS); + when(eventAuditRepository.save(any(EventAudit.class))).thenReturn(eventAudit); + var response = eventAuditService.updateStatus(1L, EventAudit.Status.IN_PROGRESS); + assertEquals(EventAudit.Status.IN_PROGRESS, response.getStatus()); + + 
eventAudit.setStatus(EventAudit.Status.COMPLETED); + when(eventAuditRepository.save(any(EventAudit.class))).thenReturn(eventAudit); + response = eventAuditService.updateStatus(1L, EventAudit.Status.COMPLETED); + assertEquals(EventAudit.Status.COMPLETED, response.getStatus()); + } + + @Test + public void processEventAuditTest() { + var eventAudit = EventAudit.builder().id(1L).type(Type.REPOSITORY_INITIAL_PULL).clientId(1L) + .status(EventAudit.Status.COMPLETED).build(); + when(eventAuditRepository.findById(1L)).thenReturn(Optional.of(eventAudit)); + mockApplicationUser(); + assertThrows(AlreadyBuiltException.class, () -> eventAuditService.processEventAudit(1L)); + + eventAudit.setStatus(EventAudit.Status.PENDING); + when(eventAuditRepository.findById(1L)).thenReturn(Optional.of(eventAudit)); + ReflectionTestUtils.setField(eventAuditService, "clientRepoService", clientRepoService); + assertTrue(eventAuditService.processEventAudit(1L)); + + when(clientRepoService.initialLoading(any(EventAudit.class))).thenThrow(SchemaParserException.class); + assertTrue(eventAuditService.processEventAudit(1L)); + + eventAudit = EventAudit.builder().id(1L).type(Type.CSV_FILE_UPLOAD).clientId(1L) + .status(EventAudit.Status.PENDING).build(); + when(eventAuditRepository.findById(1L)).thenReturn(Optional.of(eventAudit)); + assertTrue(eventAuditService.processEventAudit(1L)); + } +} diff --git a/connect/src/test/java/com/opsbeach/connect/github/service/GitHubServiceTest.java b/connect/src/test/java/com/opsbeach/connect/github/service/GitHubServiceTest.java new file mode 100644 index 0000000..7bf8605 --- /dev/null +++ b/connect/src/test/java/com/opsbeach/connect/github/service/GitHubServiceTest.java @@ -0,0 +1,551 @@ +package com.opsbeach.connect.github.service; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static 
org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.ArgumentMatchers.anyMap; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.time.LocalDateTime; +import java.util.Base64; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.springframework.http.HttpEntity; +import org.springframework.http.HttpMethod; +import org.springframework.mock.web.MockHttpServletRequest; +import org.springframework.security.core.Authentication; +import org.springframework.security.core.context.SecurityContext; +import org.springframework.security.core.context.SecurityContextHolder; +import org.springframework.test.util.ReflectionTestUtils; +import org.springframework.web.context.request.RequestContextHolder; +import org.springframework.web.context.request.ServletRequestAttributes; +import org.springframework.web.util.UriComponentsBuilder; +import org.yaml.snakeyaml.Yaml; + +import com.fasterxml.jackson.core.exc.StreamReadException; +import com.fasterxml.jackson.databind.DatabindException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.JsonNodeFactory; +import com.opsbeach.connect.core.enums.ServiceType; +import com.opsbeach.connect.github.dto.GithubActionDto; +import com.opsbeach.connect.github.entity.ClientRepo; +import 
com.opsbeach.connect.github.entity.PullRequest; +import com.opsbeach.connect.github.entity.SchemaFileAudit; +import com.opsbeach.connect.github.entity.Workflow; +import com.opsbeach.connect.github.entity.ClientRepo.RepoType; +import com.opsbeach.connect.schemata.dto.SchemaValidationDto; +import com.opsbeach.connect.schemata.entity.Table; +import com.opsbeach.connect.schemata.service.TableService; +import com.opsbeach.connect.schemata.validate.Status; +import com.opsbeach.connect.task.dto.ConnectDto; +import com.opsbeach.connect.task.dto.TaskDto; +import com.opsbeach.connect.task.entity.Connect; +import com.opsbeach.connect.task.service.ConnectService; +import com.opsbeach.connect.task.service.TaskService; +import com.opsbeach.sharedlib.dto.ClientDto; +import com.opsbeach.sharedlib.dto.UserDto; +import com.opsbeach.sharedlib.exception.SchemaParserException; +import com.opsbeach.sharedlib.security.ApplicationConfig; +import com.opsbeach.sharedlib.service.App2AppService; +import com.opsbeach.sharedlib.service.GoogleCloudService; +import com.opsbeach.sharedlib.utils.Constants; + +public class GitHubServiceTest { + + @InjectMocks + private GitHubService gitHubService; + @Mock + private ApplicationConfig applicationConfig; + @Mock + private ConnectService connectService; + @Mock + private App2AppService app2AppService; + @Mock + private TaskService taskService; + @Mock + private ClientRepoService clientRepoService; + @Mock + private GoogleCloudService googleCloudService; + @Mock + private PullRequestService pullRequestService; + @Mock + private SchemaFileAuditService schemaFileAuditService; + @Mock + private TableService tableService; + @Mock + private WorkflowService workflowService; + + @BeforeEach + public void initMock() { + MockitoAnnotations.openMocks(this); + MockHttpServletRequest request = new MockHttpServletRequest(); + RequestContextHolder.setRequestAttributes(new ServletRequestAttributes(request)); + } + + private void mockApplicationUser() { + UserDto 
userDto = mock(UserDto.class); + Authentication authentication = mock(Authentication.class); + SecurityContext securityContext = mock(SecurityContext.class); + when(securityContext.getAuthentication()).thenReturn(authentication); + SecurityContextHolder.setContext(securityContext); + when(SecurityContextHolder.getContext().getAuthentication().getPrincipal()).thenReturn(userDto); + } + + private ObjectMapper mapper = new ObjectMapper(); + private JsonNode githubResponses; + private Map github; + private Map gcloud; + private String githubRedirectURI; + private String loginUrl; + private String reposRedirectURI; + private String accessTokenUrl; + private String userUrl; + private String repoDetailsUrl; + private String githubRefreshTokenUrl; + private String userOrgUrl; + private String clientReposUrl; + private String orgReposUrl; + private String tarballUrl; + private String constructPrUrl; + private String downloadFileUrl; + private String deleteComment; + private String createPrComment; + private String getBranchInfoUrl; + private String createTreeObjectUrl; + private String createCommitUrl; + private String createBranchUrl; + private String createPrUrl; + private String pushCommitToBranch; + private String githubFilePath; + private String createRepoForAuthenticatedUser; + private String getUserDetailsUrl; + + @BeforeEach + public void init() throws StreamReadException, DatabindException, IOException { + InputStream inputStream = new FileInputStream(new File("src/test/resources/github-response.json")); + githubResponses = mapper.readValue(inputStream, JsonNode.class); + + inputStream = new FileInputStream(new File("src/test/resources/application-test.yaml")); + Yaml yaml = new Yaml(); + Map> data = yaml.load(inputStream); + gcloud = data.get("gcloud"); + github = data.get("github"); + githubRedirectURI = github.get("redirect-url"); + loginUrl = github.get("login-url"); + reposRedirectURI = github.get("repos-redirect-url"); + accessTokenUrl = 
github.get("access_token"); + userUrl = github.get("user"); + repoDetailsUrl = github.get("repo-details"); + githubRefreshTokenUrl = github.get("refresh-token"); + userOrgUrl = github.get("user_orgs"); + clientReposUrl = github.get("user_repos"); + orgReposUrl = github.get("organization_repos"); + tarballUrl = github.get("tarball"); + constructPrUrl = github.get("construct-pr-url"); + downloadFileUrl = github.get("download_file_url"); + deleteComment = github.get("delete-comment"); + createPrComment = github.get("create-pr-comment"); + getBranchInfoUrl = github.get("get_branch_info_url"); + createTreeObjectUrl = github.get("create_tree_object_url"); + createCommitUrl = github.get("create_commit_url"); + createBranchUrl = github.get("create_branch_url"); + createPrUrl = github.get("create_pr_url"); + pushCommitToBranch = github.get("push-commit-to-branch"); + githubFilePath = github.get("construct-file-path"); + createRepoForAuthenticatedUser = github.get("create-repo-authenticated-user"); + getUserDetailsUrl = github.get("get-user-details"); + } + + private ConnectDto getConnectDto() { + return ConnectDto.builder().id(1L).clientId(0L).authToken("token").refreshToken("refreshToken").userName("owner").build(); + } + + private ClientRepo getClientRepo(RepoType repoType) { + return ClientRepo.builder().id(1L).fullName("owner/repo").defaultBranch("main").connectId(1L) + .clientId(1L).owner("owner").name("repo").repoType(repoType).build(); + } + + @Test + public void logInRedirectTest() { + mockApplicationUser(); + String uri = githubRedirectURI.replace("{smClientId}", "0"); + String url = loginUrl.replace("{clientID}", github.get(Constants.CLIENT_ID)).replace("{redirectURI}", uri); + when(applicationConfig.getGithub()).thenReturn(github); + ReflectionTestUtils.setField(gitHubService, "githubRedirectURI", githubRedirectURI); + ReflectionTestUtils.setField(gitHubService, "loginUrl", loginUrl); + var response = gitHubService.logInRedirect(); + System.out.println(response); 
+ assertEquals(response.getLoginRedirectURL(), url); + } + + @Test + public void getTokenTest() { + var smClientId = 0L; + var connect = Connect.builder().id(1L).clientId(0L).authToken("token").build(); + when(connectService.getModel(ServiceType.GITHUB, smClientId)).thenReturn(Optional.empty()); + ReflectionTestUtils.setField(gitHubService, "reposRedirectURI", reposRedirectURI); + var accessTokenResponse = githubResponses.get("access_token").asText(); + ReflectionTestUtils.setField(gitHubService, "accessTokenUrl", accessTokenUrl); + when(app2AppService.getHttpResponse(accessTokenUrl, HttpMethod.POST, null)).thenReturn(accessTokenResponse); + ReflectionTestUtils.setField(gitHubService, "userUrl", userUrl); + when(app2AppService.httpGet(userUrl, null, JsonNode.class)).thenReturn(githubResponses.get("user_details")); + var connectDto = getConnectDto(); + when(connectService.add(any(ConnectDto.class))).thenReturn(connectDto); + var result = reposRedirectURI + "?success=true" + "&connect_id=" + connectDto.getId(); + assertEquals(gitHubService.getToken("code", smClientId), result); + + accessTokenResponse = githubResponses.get("invalid_access_code").asText(); + when(app2AppService.getHttpResponse(accessTokenUrl, HttpMethod.POST, null)).thenReturn(accessTokenResponse); + assertEquals(gitHubService.getToken("code", smClientId), reposRedirectURI + "?success=false"); + + when(connectService.getModel(ServiceType.GITHUB, smClientId)).thenReturn(Optional.of(connect)); + assertEquals(gitHubService.getToken("code", smClientId), result); + } + + @Test + public void getDefaultBranchTest() { + var connectDto = getConnectDto(); + when(connectService.get(anyLong())).thenReturn(connectDto); + ReflectionTestUtils.setField(gitHubService, "repoDetailsUrl", repoDetailsUrl); + var url = repoDetailsUrl.replace("{owner}", "owner").replace("{repo}", "repo"); + when(app2AppService.httpGet(url, null, JsonNode.class)).thenReturn(githubResponses.get("repo_details")); + var response = 
gitHubService.getDefaultBranch("owner", "repo", 1L); + assertEquals(response, githubResponses.get("repo_details").get("default_branch").asText()); + + response = gitHubService.getDefaultBranch(null, "repo", 1L); + assertEquals(response, githubResponses.get("repo_details").get("default_branch").asText()); + } + + @Test + public void generateNewTokenTest() { + var taskDto = TaskDto.builder().id(1L).createdAt(LocalDateTime.now().minusHours(1)).connectId(1L).executionInterval(28800000).build(); + when(taskService.get(anyLong())).thenReturn(taskDto); + gitHubService.generateNewToken(1L); + + taskDto.setCreatedAt(LocalDateTime.now().minusHours(10)); + when(taskService.get(anyLong())).thenReturn(taskDto); + var connectDto = getConnectDto(); + when(connectService.get(anyLong())).thenReturn(connectDto); + ReflectionTestUtils.setField(gitHubService, "githubRefreshTokenUrl", githubRefreshTokenUrl); + when(applicationConfig.getGithub()).thenReturn(github); + var url = UriComponentsBuilder.fromUriString(githubRefreshTokenUrl) + .queryParam("refresh_token", connectDto.getRefreshToken()) + .queryParam("client_id", github.get(Constants.CLIENT_ID)) + .queryParam("client_secret", github.get(Constants.CLIENT_SECRET)) + .queryParam("grant_type", "refresh_token").buildAndExpand().toUriString(); + var accessTokenResponse = githubResponses.get("access_token").asText(); + when(app2AppService.getHttpResponse(url, HttpMethod.POST, null)).thenReturn(accessTokenResponse); + gitHubService.generateNewToken(1L); + } + + @Test + public void getUserOrganizationTest() { + when(connectService.get(anyLong())).thenReturn(getConnectDto()); + ReflectionTestUtils.setField(gitHubService, "userOrgUrl", userOrgUrl); + when(app2AppService.httpGetEntities(userOrgUrl, null, JsonNode.class)).thenReturn(List.of(githubResponses.get("user_details"))); + var response = gitHubService.getUserOrganization(1L); + assertEquals(response.size(), 1); + assertEquals(response.get(0), 
githubResponses.get("user_details").get("login").asText()); + } + + @Test + public void getReposTest() { + var connectDto = getConnectDto(); + when(connectService.get(anyLong())).thenReturn(connectDto); + ReflectionTestUtils.setField(gitHubService, "clientReposUrl", clientReposUrl); + var url = clientReposUrl.replace("{userId}", connectDto.getUserName()); + when(app2AppService.httpGet(url, null, JsonNode.class)).thenReturn(githubResponses.get("user_repos")); + var response = gitHubService.getRepos(1L, null); + assertEquals(response.getRepos().size(), 1); + assertEquals(response.getRepos().get(0), githubResponses.get("user_repos").get("items").get(0).get("name").asText()); + + ReflectionTestUtils.setField(gitHubService, "orgReposUrl", orgReposUrl); + url = orgReposUrl.replace("{orgName}", connectDto.getUserName()); + when(app2AppService.httpGetEntities(url, null, JsonNode.class)).thenReturn(List.of(githubResponses.get("repo_details"))); + response = gitHubService.getRepos(1L, connectDto.getUserName()); + assertEquals(response.getRepos().size(), 1); + assertEquals(response.getRepos().get(0), githubResponses.get("repo_details").get("name").asText()); + } + + private void mockDownloadTarballMethod(ClientRepo clientRepo, byte[] content) { + ReflectionTestUtils.setField(gitHubService, "tarballUrl", tarballUrl); + var url = tarballUrl.replace("{FullName}", clientRepo.getFullName()).replace("{branchName}", clientRepo.getDefaultBranch()); + when(app2AppService.restTemplateExchange(url, HttpMethod.GET, null, byte[].class)).thenReturn(content); + when(clientRepoService.getClient()).thenReturn(ClientDto.builder().id(1L).name("opsbeach").build()); + when(applicationConfig.getGcloud()).thenReturn(gcloud); + } + + @Test + public void downloadTarballTest() { + var clientRepo = getClientRepo(RepoType.AVRO); + var content = "Conetent".getBytes(); + mockDownloadTarballMethod(clientRepo, content); + var response = gitHubService.downloadTarball(clientRepo, getConnectDto()); + 
assertEquals(response, content); + } + + private void mockDownloadFileMethod(ClientRepo clientRepo, GithubActionDto githubActionDto, PullRequest pullRequest) { + ReflectionTestUtils.setField(gitHubService, "downloadFileUrl", downloadFileUrl); + var url = downloadFileUrl.replace("{owner}", clientRepo.getFullName().split("/")[0]) + .replace("{repo}", clientRepo.getFullName().split("/")[1]) + .replace("{path}", githubActionDto.getFilesChanged()); + url = UriComponentsBuilder.fromUriString(url).queryParam("ref", pullRequest.getSourceBranch()).toUriString(); + var fileContent = JsonNodeFactory.instance.objectNode().put("content", Base64.getEncoder().encode("Content".getBytes())); + when(app2AppService.httpGet(url, null, JsonNode.class)).thenReturn(fileContent); + } + + @Test + public void pullRequestActionTest() throws IOException { + var githubActionDto = GithubActionDto.builder().prName("prName").prNumber("1").repoName("owner/repo") + .sourceBranch("develop").targetBranch("main").sha("sha").status("open") + .raisedBy("user").filesChanged("filePath").build(); + var clientRepo = getClientRepo(RepoType.AVRO); + var connectDto = getConnectDto(); + mockApplicationUser(); + when(clientRepoService.getByFullName(anyString())).thenReturn(clientRepo); + when(connectService.get(anyLong())).thenReturn(connectDto); + ReflectionTestUtils.setField(gitHubService, "constructPrUrl", constructPrUrl); + var pullRequest = PullRequest.builder().id(1L).number("1").sourceBranch("develop").status(PullRequest.Status.OPEN).build(); + when(pullRequestService.addModel(any(PullRequest.class))).thenReturn(pullRequest); + when(pullRequestService.updateModel(any(PullRequest.class))).thenReturn(pullRequest); + mockDownloadFileMethod(clientRepo, githubActionDto, pullRequest); + + var response = mapper.convertValue(gitHubService.pullRequestAction(githubActionDto), Map.class); + assertTrue(mapper.convertValue(response.get("isMerge"), Boolean.class)); + + pullRequest.setSha("sha"); + 
when(pullRequestService.findByRepoIdAndNumber(anyLong(), anyString())).thenReturn(pullRequest); + response = mapper.convertValue(gitHubService.pullRequestAction(githubActionDto), Map.class); + assertTrue(mapper.convertValue(response.get("isMerge"), Boolean.class)); + + pullRequest.setStatus(PullRequest.Status.CLOSED); + response = mapper.convertValue(gitHubService.pullRequestAction(githubActionDto), Map.class); + assertTrue(mapper.convertValue(response.get("isMerge"), Boolean.class)); + + pullRequest.setSha("sha1"); + pullRequest.setStatus(PullRequest.Status.OPEN); + pullRequest.setValidationStatus(Status.SUCCESS); + clientRepo = getClientRepo(RepoType.PROTOBUF); + mockDownloadTarballMethod(clientRepo, "Content".getBytes()); + when(clientRepoService.getByFullName(anyString())).thenReturn(clientRepo); + response = mapper.convertValue(gitHubService.pullRequestAction(githubActionDto), Map.class); + assertTrue(mapper.convertValue(response.get("isMerge"), Boolean.class)); + + githubActionDto = GithubActionDto.builder().prName("prName").prNumber("1").repoName("owner/repo") + .sourceBranch("develop").targetBranch("main").sha("sha").status("closed") + .raisedBy("user").filesChanged("filePath").build(); + response = mapper.convertValue(gitHubService.pullRequestAction(githubActionDto), Map.class); + assertTrue(mapper.convertValue(response.get("isMerge"), Boolean.class)); + + githubActionDto = GithubActionDto.builder().prName("prName").prNumber("1").repoName("owner/repo") + .sourceBranch("develop").targetBranch("main").sha("sha").status("merged") + .raisedBy("user").filesChanged("filePath").build(); + assertEquals(mapper.convertValue(gitHubService.pullRequestAction(githubActionDto), String.class), "Changes Accepted."); + + pullRequest.setWorkflowId(1L); + ReflectionTestUtils.setField(gitHubService, "workflowService", workflowService); + assertEquals(mapper.convertValue(gitHubService.pullRequestAction(githubActionDto), String.class), "Changes Accepted."); + + 
assertThrows(IllegalArgumentException.class, () -> + gitHubService.pullRequestAction(GithubActionDto.builder().prNumber("1").repoName("owner/repo").status("opened").build())); + } + + @Test + public void validateSchemaTest() throws IllegalArgumentException, IOException { + var githubActionDto = GithubActionDto.builder().prName("prName").prNumber("1").repoName("owner/repo") + .sourceBranch("develop").targetBranch("main").sha("sha").status("closed") + .raisedBy("user").filesChanged("filePath").schemaValidationMessage("message").build(); + var clientRepo = getClientRepo(RepoType.PROTOBUF); + var connectDto = getConnectDto(); + mockApplicationUser(); + when(clientRepoService.getByFullName(anyString())).thenReturn(clientRepo); + when(connectService.get(anyLong())).thenReturn(connectDto); + ReflectionTestUtils.setField(gitHubService, "constructPrUrl", constructPrUrl); + var pullRequest = PullRequest.builder().id(1L).number("1").sourceBranch("develop").status(PullRequest.Status.OPEN).build(); + when(pullRequestService.addModel(any(PullRequest.class))).thenReturn(pullRequest); + when(pullRequestService.updateModel(any(PullRequest.class))).thenReturn(pullRequest); + ReflectionTestUtils.setField(gitHubService, "downloadFileUrl", downloadFileUrl); + var response = mapper.convertValue(gitHubService.validateSchema(githubActionDto), JsonNode.class); + assertTrue(response.get("status").asBoolean()); + + githubActionDto = GithubActionDto.builder().prName("prName").prNumber("1").repoName("owner/repo") + .sourceBranch("develop").targetBranch("main").sha("sha").status("open") + .raisedBy("user").filesChanged("filePath").build(); + pullRequest.setIssueCommentId(1L); + when(pullRequestService.findByRepoIdAndNumber(anyLong(), anyString())).thenReturn(pullRequest); + ReflectionTestUtils.setField(gitHubService, "deleteComment", deleteComment); + response = mapper.convertValue(gitHubService.validateSchema(githubActionDto), JsonNode.class); + assertTrue(response.get("status").asBoolean()); 
+ + var schemaValidationMessage = "Incompatible fields: Summary[filename=category.proto, schemaName=Category, fieldName=is_active, fieldType=TYPE_BOOL] "; + githubActionDto = GithubActionDto.builder().prName("prName").prNumber("1").repoName("owner/repo") + .sourceBranch("develop").targetBranch("main").sha("sha").status("open") + .raisedBy("user").schemaValidationMessage(schemaValidationMessage).filesChanged("filePath").build(); + ReflectionTestUtils.setField(gitHubService, "createPrComment", createPrComment); + when(app2AppService.setHeaders(anyMap(), any(JsonNode.class))).thenReturn(new HttpEntity("body")); + when(app2AppService.httpPost(anyString(), any(), any())).thenReturn(githubResponses.get("pr_comment")); + response = mapper.convertValue(gitHubService.validateSchema(githubActionDto), JsonNode.class); + assertFalse(response.get("status").asBoolean()); + + clientRepo = getClientRepo(RepoType.AVRO); + when(clientRepoService.getByFullName(anyString())).thenReturn(clientRepo); + githubActionDto = GithubActionDto.builder().prName("prName").prNumber("1").repoName("owner/repo") + .sourceBranch("develop").targetBranch("main").sha("sha").status("open") + .raisedBy("user").filesChanged("filePath").build(); + mockDownloadFileMethod(clientRepo, githubActionDto, pullRequest); + var table = Table.builder().id(1L).build(); + when(schemaFileAuditService.getTablesFromFileContent(any(byte[].class), anyBoolean(), anyString())).thenReturn(List.of(table)); + pullRequest.setIssueCommentId(null); + when(pullRequestService.findByRepoIdAndNumber(anyLong(), anyString())).thenReturn(pullRequest); + var schemaValidationDto = SchemaValidationDto.builder().status(true).build(); + when(tableService.schemaCompare(anyMap(), any(ClientRepo.class), anyLong())).thenReturn(schemaValidationDto); + + response = mapper.convertValue(gitHubService.validateSchema(githubActionDto), JsonNode.class); + assertTrue(response.get("status").asBoolean()); + + schemaValidationDto = 
SchemaValidationDto.builder().status(false).errorMessages(List.of()) + .errorMap(Map.of("filePath", Map.of("schema", List.of("Invalid Field")))).build(); + when(tableService.schemaCompare(anyMap(), any(ClientRepo.class), anyLong())).thenReturn(schemaValidationDto); + response = mapper.convertValue(gitHubService.validateSchema(githubActionDto), JsonNode.class); + assertFalse(response.get("status").asBoolean()); + + when(schemaFileAuditService.getTablesFromFileContent(any(byte[].class), anyBoolean(), anyString())).thenThrow(new SchemaParserException("Invalid Avro File")); + response = mapper.convertValue(gitHubService.validateSchema(githubActionDto), JsonNode.class); + assertFalse(response.get("status").asBoolean()); + } + + private void mockGetBranchInfoMethod(ClientRepo clientRepo) { + var url = getBranchInfoUrl.replace("{owner}", clientRepo.getOwner()).replace("{repo}", clientRepo.getName()).replace("{branchName}", clientRepo.getDefaultBranch()); + ReflectionTestUtils.setField(gitHubService, "getBranchInfoUrl", getBranchInfoUrl); + when(app2AppService.httpGet(url, null, JsonNode.class)).thenReturn(githubResponses.get("branch_info")); + } + + private void mockPushFilesToGitMethod(ClientRepo clientRepo) { + var url = createTreeObjectUrl.replace("{owner}", clientRepo.getOwner()).replace("{repo}", clientRepo.getName()); + ReflectionTestUtils.setField(gitHubService, "createTreeObjectUrl", createTreeObjectUrl); + when(app2AppService.httpPost(url, null, JsonNode.class)).thenReturn(githubResponses.get("create_tree_object")); + } + + private void mockCreateCommitMethod(ClientRepo clientRepo) { + var url = createCommitUrl.replace("{owner}", clientRepo.getOwner()).replace("{repo}", clientRepo.getName()); + ReflectionTestUtils.setField(gitHubService, "createCommitUrl", createCommitUrl); + when(app2AppService.httpPost(url, null, JsonNode.class)).thenReturn(githubResponses.get("create_commit")); + } + + private void mockPushCommitToBranchMethod(ClientRepo clientRepo) { + var 
url = pushCommitToBranch.replace("{owner}", clientRepo.getOwner()).replace("{repo}", clientRepo.getName()).replace("{branchName}", clientRepo.getDefaultBranch()); + ReflectionTestUtils.setField(gitHubService, "pushCommitToBranch", pushCommitToBranch); + when(app2AppService.httpPost(url, null, JsonNode.class)).thenReturn(githubResponses.get("push_commit_to_branch")); + } + + @Test + public void commitAndPushInMainBranchTest() { + var clientRepo = getClientRepo(RepoType.PROTOBUF); + var connectDto = getConnectDto(); + when(clientRepoService.get(anyLong())).thenReturn(clientRepo.toDto(clientRepo)); + when(connectService.get(anyLong())).thenReturn(connectDto); + mockGetBranchInfoMethod(clientRepo); + mockPushFilesToGitMethod(clientRepo); + mockCreateCommitMethod(clientRepo); + mockPushCommitToBranchMethod(clientRepo); + var response = gitHubService.commitAndPushInMainBranch(generateFileContentMap(clientRepo), "commit message"); + assertEquals(response, githubResponses.get("push_commit_to_branch").get("sha").asText()); + } + + private Map generateFileContentMap(ClientRepo clientRepo) { + var githubPath = githubFilePath.replace("{repoFullName}", clientRepo.getFullName()) + .replace("{branch}", clientRepo.getDefaultBranch()).replace("{filePath}", "src/product.proto"); + var schemaFileAudit = SchemaFileAudit.builder().id(1L).clientRepoId(clientRepo.getId()).path(githubPath).build(); + return Map.of(schemaFileAudit, "Content"); + } + + private void mockCreateBranchMethod(ClientRepo clientRepo) { + var url = createBranchUrl.replace("{owner}", clientRepo.getOwner()).replace("{repo}", clientRepo.getName()).replace("{branchName}", clientRepo.getDefaultBranch()); + ReflectionTestUtils.setField(gitHubService, "createBranchUrl", createBranchUrl); + when(app2AppService.httpPost(url, null, JsonNode.class)).thenReturn(githubResponses.get("branch_info")); + } + + private void mockRaisePrMethod(ClientRepo clientRepo) { + var url = createPrUrl.replace("{owner}", 
clientRepo.getOwner()).replace("{repo}", clientRepo.getName()); + ReflectionTestUtils.setField(gitHubService, "createPrUrl", createPrUrl); + when(app2AppService.httpPost(url, null, JsonNode.class)).thenReturn(githubResponses.get("pr_info")); + } + + @Test + public void commitAndRaisePrTest() { + var clientRepo = getClientRepo(RepoType.PROTOBUF); + var connectDto = getConnectDto(); + when(clientRepoService.get(anyLong())).thenReturn(clientRepo.toDto(clientRepo)); + when(connectService.get(anyLong())).thenReturn(connectDto); + mockGetBranchInfoMethod(clientRepo); + mockPushFilesToGitMethod(clientRepo); + mockCreateCommitMethod(clientRepo); + mockCreateBranchMethod(clientRepo); + mockRaisePrMethod(clientRepo); + var workflow = Workflow.builder().id(1L).title("title").purpose("purpose").build(); + ReflectionTestUtils.setField(gitHubService, "constructPrUrl", constructPrUrl); + var response = gitHubService.commitAndRaisePr(generateFileContentMap(clientRepo), workflow); + assertNull(response); + } + + @Test + public void createNewRepoTest() { + var clientRepo = getClientRepo(RepoType.PROTOBUF); + var connectDto = getConnectDto(); + when(connectService.get(anyLong())).thenReturn(connectDto); + ReflectionTestUtils.setField(gitHubService, "createRepoForAuthenticatedUser", createRepoForAuthenticatedUser); + when(app2AppService.setHeaders(anyMap(), any(JsonNode.class))).thenReturn(new HttpEntity("body")); + when(app2AppService.httpPost(anyString(), any(), any())).thenReturn(githubResponses.get("repo_details")); + var response = gitHubService.createNewRepo(null, clientRepo.getName(), 1L); + assertEquals(response.get("id").asInt(), githubResponses.get("repo_details").get("id").asInt()); + + var url = getUserDetailsUrl.replace("{userName}", clientRepo.getOwner()); + ReflectionTestUtils.setField(gitHubService, "getUserDetailsUrl", getUserDetailsUrl); + when(app2AppService.httpGet(url, null, JsonNode.class)).thenReturn(JsonNodeFactory.instance.objectNode().put("type", "user")); + 
response = gitHubService.createNewRepo(clientRepo.getOwner(), clientRepo.getName(), 1L); + assertEquals(response.get("id").asInt(), githubResponses.get("repo_details").get("id").asInt()); + + ReflectionTestUtils.setField(gitHubService, "orgReposUrl", orgReposUrl); + when(app2AppService.httpGet(url, null, JsonNode.class)).thenReturn(JsonNodeFactory.instance.objectNode().put("type", "Organization")); + response = gitHubService.createNewRepo(clientRepo.getOwner(), clientRepo.getName(), 1L); + assertEquals(response.get("id").asInt(), githubResponses.get("repo_details").get("id").asInt()); + } + + @Test + public void createPrCommentTest() { + var connectDto = getConnectDto(); + when(connectService.get(anyLong())).thenReturn(connectDto); + ReflectionTestUtils.setField(gitHubService, "createPrComment", createPrComment); + when(app2AppService.setHeaders(anyMap(), any(JsonNode.class))).thenReturn(new HttpEntity("body")); + when(app2AppService.httpPost(anyString(), any(), any())).thenReturn(githubResponses.get("pr_comment")); + var response = gitHubService.createPrComment("owner", "repo", "1", "message", 1L); + assertEquals(response.get("id").asInt(), githubResponses.get("pr_comment").get("id").asInt()); + } + + @Test + public void deleteCommentTest() { + var connectDto = getConnectDto(); + when(connectService.get(anyLong())).thenReturn(connectDto); + ReflectionTestUtils.setField(gitHubService, "deleteComment", deleteComment); + gitHubService.deleteComment("owner", "repo", 1L, 1L); + } +} diff --git a/connect/src/test/java/com/opsbeach/connect/github/service/ModelServiceTest.java b/connect/src/test/java/com/opsbeach/connect/github/service/ModelServiceTest.java new file mode 100644 index 0000000..e9a52c3 --- /dev/null +++ b/connect/src/test/java/com/opsbeach/connect/github/service/ModelServiceTest.java @@ -0,0 +1,294 @@ +package com.opsbeach.connect.github.service; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static 
org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyList; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import java.util.List; +import java.util.Optional; +import java.util.Set; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.ArgumentMatchers; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.mockito.Spy; +import org.springframework.data.jpa.domain.Specification; +import org.springframework.mock.web.MockHttpServletRequest; +import org.springframework.security.core.Authentication; +import org.springframework.security.core.context.SecurityContext; +import org.springframework.security.core.context.SecurityContextHolder; +import org.springframework.test.util.ReflectionTestUtils; +import org.springframework.web.context.request.RequestContextHolder; +import org.springframework.web.context.request.ServletRequestAttributes; + +import com.opsbeach.connect.core.specification.IdSpecifications; +import com.opsbeach.connect.github.dto.AutoCompleteModelDto; +import com.opsbeach.connect.github.dto.DomainDto; +import com.opsbeach.connect.github.entity.ClientRepo; +import com.opsbeach.connect.github.entity.Domain; +import com.opsbeach.connect.github.entity.Model; +import com.opsbeach.connect.github.entity.SchemaFileAudit; +import com.opsbeach.connect.github.entity.ClientRepo.RepoType; +import com.opsbeach.connect.github.repository.ModelRepository; +import com.opsbeach.connect.schemata.entity.Table; +import com.opsbeach.connect.schemata.service.TableService; +import com.opsbeach.sharedlib.dto.UserDto; +import com.opsbeach.sharedlib.exception.RecordNotFoundException; +import 
com.opsbeach.sharedlib.response.ResponseMessage; + +import jakarta.persistence.EntityManager; +import jakarta.persistence.TypedQuery; +import jakarta.persistence.criteria.CriteriaBuilder; +import jakarta.persistence.criteria.CriteriaDelete; +import jakarta.persistence.criteria.CriteriaQuery; +import jakarta.persistence.criteria.CriteriaUpdate; +import jakarta.persistence.criteria.Expression; +import jakarta.persistence.criteria.Path; +import jakarta.persistence.criteria.Predicate; +import jakarta.persistence.criteria.Root; + +public class ModelServiceTest { + + @InjectMocks + private ModelService modelService; + @Mock + private ModelRepository modelRepository; + @Spy + private IdSpecifications modelSpecifications; + @Mock + private ClientRepoService clientRepoService; + @Mock + private DomainService domainService; + @Mock + private TableService tableService; + @Mock + private CriteriaBuilder criteriaBuilder; + @Mock + private CriteriaQuery criteriaQuery; + @Mock + private Root root; + @Mock + private TypedQuery typedQuery; + @Mock + private EntityManager entityManager; + @Mock + private Expression expression; + @Mock + private CriteriaUpdate criteriaUpdate; + @Mock + private CriteriaDelete criteriaDelete; + @Mock + private ResponseMessage responseMessage; + + @BeforeEach + public void initMock() { + MockitoAnnotations.openMocks(this); + MockHttpServletRequest request = new MockHttpServletRequest(); + RequestContextHolder.setRequestAttributes(new ServletRequestAttributes(request)); + } + + private List getModels() { + return List.of(Model.builder().name("model").clientRepoId(1L).build()); + } + + private void mockEntityManager() { + when(entityManager.getCriteriaBuilder()).thenReturn(criteriaBuilder); + when(criteriaBuilder.createQuery(Model.class)).thenReturn(criteriaQuery); + when(criteriaQuery.from(Model.class)).thenReturn(root); + + when(criteriaQuery.select(root)).thenReturn(criteriaQuery); + 
when(criteriaQuery.where(any(Predicate.class))).thenReturn(criteriaQuery); + } + + private void mockApplicationUser() { + UserDto userDto = mock(UserDto.class); + Authentication authentication = mock(Authentication.class); + SecurityContext securityContext = mock(SecurityContext.class); + when(securityContext.getAuthentication()).thenReturn(authentication); + SecurityContextHolder.setContext(securityContext); + when(SecurityContextHolder.getContext().getAuthentication().getPrincipal()).thenReturn(userDto); + } + + @Test + public void addAllTest() { + var models = getModels(); + when(modelRepository.saveAll(ArgumentMatchers.>any())).thenReturn(models); + var response = modelService.addAll(models); + assertEquals(models.get(0).getName(), response.get(0).getName()); + } + + @Test + public void getAllTest() { + var models = getModels(); + when(modelRepository.findAll(ArgumentMatchers.>any())).thenReturn(models); + when(clientRepoService.getActiveRepoIds()).thenReturn(List.of(1L)); + var response = modelService.getAll(null, null, null); + assertEquals(models.get(0).getName(), response.get(0).getName()); + + when(modelRepository.findAll(ArgumentMatchers.>any())).thenReturn(List.of()); + response = modelService.getAll(1L, 2L, "path"); + assertEquals(0, response.size()); + } + + @Test + public void findBySchemaFileAuditTest() { + var models = getModels(); + when(modelRepository.findAll(ArgumentMatchers.>any())).thenReturn(models); + var response = modelService.findBySchemaFileAudit(1L); + assertEquals(models.get(0).getName(), response.get(0).getName()); + } + + @Test + public void addModelTest() { + var model = getModels().get(0); + when(modelRepository.save(any(Model.class))).thenReturn(model); + var response = modelService.addModel(model); + assertEquals(model.getName(), response.getName()); + } + + @Test + public void getModelTest() { + var model = getModels().get(0); + when(modelRepository.findById(1L)).thenReturn(Optional.of(model)); + var response = 
modelService.getModel(1L); + assertEquals(model.getName(), response.getName()); + + assertThrows(RecordNotFoundException.class, () -> modelService.getModel(2L)); + } + + @Test + public void getNodeIdsTest() { + var models = getModels(); + when(modelRepository.findAll(ArgumentMatchers.>any())).thenReturn(models); + var response = modelService.getNodeIds(); + assertEquals(models.get(0).getNodeId(), response.get(0)); + + when(modelRepository.findAll(ArgumentMatchers.>any())).thenReturn(List.of()); + response = modelService.getNodeIds(); + assertEquals(0, response.size()); + } + + @Test + @SuppressWarnings("unchecked") + public void findByFullNames() { + var models = getModels(); + mockEntityManager(); + Path path = mock(Path.class); + when(root.get("name")).thenReturn(path); + when(criteriaBuilder.concat(null, ".")).thenReturn(expression); + when(criteriaBuilder.concat(any(Expression.class), any(Path.class))).thenReturn(expression); + when(entityManager.createQuery(criteriaQuery)).thenReturn(typedQuery); + when(typedQuery.getResultList()).thenReturn(models); + var response = modelService.findByFullNames(Set.of("names"), 1L); + assertEquals(response.get(0).getName(), models.get(0).getName()); + + response = modelService.findByFullNames(List.of("names")); + assertEquals(response.get(0).getName(), models.get(0).getName()); + } + + @Test + public void updateModelSetPrIdToNullTest() { + when(entityManager.getCriteriaBuilder()).thenReturn(criteriaBuilder); + when(criteriaBuilder.createCriteriaUpdate(Model.class)).thenReturn(criteriaUpdate); + when(criteriaUpdate.from(Model.class)).thenReturn(root); + + when(criteriaUpdate.set("pullRequestId", null)).thenReturn(criteriaUpdate); + when(criteriaUpdate.where(any(Predicate.class))).thenReturn(criteriaUpdate); + when(entityManager.createQuery(criteriaUpdate)).thenReturn(typedQuery); + when(typedQuery.executeUpdate()).thenReturn(2); + assertEquals(modelService.updateModelSetPrIdToNull(3L), 2); + } + + @Test + public void 
deleteModelByPrIdTest() { + when(entityManager.getCriteriaBuilder()).thenReturn(criteriaBuilder); + when(criteriaBuilder.createCriteriaDelete(Model.class)).thenReturn(criteriaDelete); + when(criteriaDelete.from(Model.class)).thenReturn(root); + + when(criteriaDelete.where(any(Predicate.class))).thenReturn(criteriaDelete); + when(entityManager.createQuery(criteriaDelete)).thenReturn(typedQuery); + when(typedQuery.executeUpdate()).thenReturn(2); + assertEquals(modelService.deleteModelByPrId(3L), 2); + } + + @Test + public void findByNameLikeTest() { + mockApplicationUser(); + var autoCompleteModelDto = new AutoCompleteModelDto() { + @Override + public Long getNodeId() { + return 1L; + } + + @Override + public String getName() { + return "name"; + } + + @Override + public String getNameSpace() { + return "nameSpace"; + } + }; + when(modelRepository.findByNameLike(anyString(), anyLong(), anyString())).thenReturn(List.of(autoCompleteModelDto)); + var response = modelService.findByNameLike("name"); + assertEquals(response.get(0).getName(), autoCompleteModelDto.getName()); + } + + @Test + public void getTableRepoTypeTest() { + var models = getModels(); + when(modelRepository.findAll(ArgumentMatchers.>any())).thenReturn(models); + when(clientRepoService.getModel(anyLong())).thenReturn(ClientRepo.builder().repoType(RepoType.AVRO).build()); + var response = modelService.getByNodeId(2L); + assertEquals(response.getRepoType(), RepoType.AVRO); + } + + @Test + public void findModelByNameAndNameSpaceTest() { + var models = getModels(); + when(modelRepository.findAll(ArgumentMatchers.>any())).thenReturn(models); + var response = modelService.findModelByNameAndNameSpace("name", "nameSpace"); + assertEquals(response.get(0).getName(), models.get(0).getName()); + } + + @Test + public void createModelsTest() { + var schemaFileAudit = SchemaFileAudit.builder().id(1L).path("path").checksum("checksum").build(); + var domain = Domain.builder().id(1L).clientRepoId(2L).build(); + var table = 
Table.builder().id(1L).name("model").nameSpace("nameSpace").type("RECORD").build(); + when(modelRepository.saveAll(anyList())).thenReturn(getModels()); + var models = modelService.createModels(List.of(table), schemaFileAudit, domain); + assertEquals(models.get(0).getName(), table.getName()); + assertNull(models.get(0).getPullRequestId()); + } + + @Test + public void createModelTest() { + var schemaFileAudit = SchemaFileAudit.builder().id(1L).path("path").checksum("checksum").build(); + var domain = DomainDto.builder().id(1L).clientRepoId(2L).build(); + var table = Table.builder().id(1L).name("model").nameSpace("nameSpace").type("RECORD").build(); + when(domainService.getAll(anyLong())).thenReturn(List.of(domain)); + when(modelRepository.save(any(Model.class))).thenReturn(getModels().get(0)); + var model = modelService.createModel(schemaFileAudit, 1L, table); + assertEquals(model.getName(), table.getName()); + } + + @Test + public void deleteAllByClientRepoIdTest() { + modelService.deleteAllByClientRepoId(1L); + var models = getModels(); + when(modelRepository.findAll(ArgumentMatchers.>any())).thenReturn(models); + ReflectionTestUtils.setField(modelService, "tableService", tableService); + modelService.deleteAllByClientRepoId(1L); + } +} diff --git a/connect/src/test/java/com/opsbeach/connect/github/service/PullRequestServiceTest.java b/connect/src/test/java/com/opsbeach/connect/github/service/PullRequestServiceTest.java new file mode 100644 index 0000000..abd2d64 --- /dev/null +++ b/connect/src/test/java/com/opsbeach/connect/github/service/PullRequestServiceTest.java @@ -0,0 +1,256 @@ +package com.opsbeach.connect.github.service; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.Mockito.mock; +import 
static org.mockito.Mockito.when; + +import java.time.LocalDateTime; +import java.util.List; +import java.util.Optional; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.ArgumentMatchers; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.mockito.Spy; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.PageImpl; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Pageable; +import org.springframework.data.jpa.domain.Specification; +import org.springframework.mock.web.MockHttpServletRequest; +import org.springframework.security.core.Authentication; +import org.springframework.security.core.context.SecurityContext; +import org.springframework.security.core.context.SecurityContextHolder; +import org.springframework.web.context.request.RequestContextHolder; +import org.springframework.web.context.request.ServletRequestAttributes; + +import com.opsbeach.connect.core.enums.ServiceType; +import com.opsbeach.connect.core.specification.IdSpecifications; +import com.opsbeach.connect.github.dto.PullRequestDto; +import com.opsbeach.connect.github.entity.PullRequest; +import com.opsbeach.connect.github.entity.PullRequest.Status; +import com.opsbeach.connect.github.repository.PullRequestRepository; +import com.opsbeach.connect.metrics.dto.SlaDto; +import com.opsbeach.connect.metrics.service.SlaService; +import com.opsbeach.sharedlib.dto.UserDto; +import com.opsbeach.sharedlib.exception.RecordNotFoundException; +import com.opsbeach.sharedlib.response.ResponseMessage; + +import jakarta.persistence.EntityManager; +import jakarta.persistence.TypedQuery; +import jakarta.persistence.criteria.CriteriaBuilder; +import jakarta.persistence.criteria.CriteriaQuery; +import jakarta.persistence.criteria.Predicate; +import jakarta.persistence.criteria.Root; + +public class PullRequestServiceTest { + + 
@InjectMocks + private PullRequestService pullRequestService; + + @Mock + private PullRequestRepository pullRequestRepository; + + @Mock + private WorkflowService workflowService; + + @Mock + private ResponseMessage responseMessage; + + @Spy + private IdSpecifications pIdSpecifications; + + @Mock + private SlaService slaService; + + @Mock + private CriteriaBuilder criteriaBuilder; + + @Mock + private CriteriaQuery criteriaQuery; + + @Mock + private Root root; + + @Mock + private TypedQuery typedQuery; + + @Mock + private EntityManager entityManager; + + @BeforeEach + public void initMock() { + MockitoAnnotations.openMocks(this); + MockHttpServletRequest request = new MockHttpServletRequest(); + RequestContextHolder.setRequestAttributes(new ServletRequestAttributes(request)); + } + + private void mockApplicationUser() { + UserDto userDto = mock(UserDto.class); + Authentication authentication = mock(Authentication.class); + SecurityContext securityContext = mock(SecurityContext.class); + when(securityContext.getAuthentication()).thenReturn(authentication); + SecurityContextHolder.setContext(securityContext); + when(SecurityContextHolder.getContext().getAuthentication().getPrincipal()).thenReturn(userDto); + } + + private void mockEntityManager() { + when(entityManager.getCriteriaBuilder()).thenReturn(criteriaBuilder); + when(criteriaBuilder.createQuery(PullRequest.class)).thenReturn(criteriaQuery); + when(criteriaQuery.from(PullRequest.class)).thenReturn(root); + + when(criteriaQuery.select(root)).thenReturn(criteriaQuery); + when(criteriaQuery.where(any(Predicate.class))).thenReturn(criteriaQuery); + } + + private PullRequestDto getPullRequestDto() { + return PullRequestDto.builder().id(1L).workflowId(2L).build(); + } + + @Test + public void addModelTest() { + var pullRequest = PullRequest.builder().id(1L).workflowId(2L).build(); + when(pullRequestRepository.save(any(PullRequest.class))).thenReturn(pullRequest); + var response = 
pullRequestService.addModel(pullRequest); + assertEquals(pullRequest.getId(), response.getId()); + } + + @Test + public void getTest() { + var pullRequestDto = getPullRequestDto(); + var pullRequest = pullRequestDto.toDomain(pullRequestDto); + when(pullRequestRepository.findById(pullRequestDto.getId())).thenReturn(Optional.of(pullRequest)); + var response = pullRequestService.get(pullRequest.getId()); + assertEquals(pullRequestDto.getId(), response.getId()); + + assertThrows(RecordNotFoundException.class, () -> { pullRequestService.get(2L); }); + } + + @Test + public void updateStatusTest() { + var pullRequestDto = getPullRequestDto(); + var pullRequest = pullRequestDto.toDomain(pullRequestDto); + when(pullRequestRepository.findById(anyLong())).thenReturn(Optional.of(pullRequest)); + when(pullRequestRepository.save(any())).thenReturn(pullRequest); + var response = pullRequestService.updateStatus(1L, PullRequest.Status.OPEN); + assertEquals(pullRequestDto.getId(), response.getId()); + } + + @Test + public void findByRepoIdAndNumberTest() { + var pullRequestDto = getPullRequestDto(); + var pullRequest = pullRequestDto.toDomain(pullRequestDto); + when(pullRequestRepository.findOne(ArgumentMatchers.>any())).thenReturn(Optional.of(pullRequest)); + var response = pullRequestService.findByRepoIdAndNumber(1L, "404"); + assertEquals(pullRequest.getNumber(), response.getNumber()); + + when(pullRequestRepository.findOne(ArgumentMatchers.>any())).thenReturn(Optional.empty()); + response = pullRequestService.findByRepoIdAndNumber(1L, "404"); + assertNull(pullRequestService.findByRepoIdAndNumber(1L, "404")); + } + + @Test + public void updateModel() { + var pullRequestDto = getPullRequestDto(); + var pullRequest = pullRequestDto.toDomain(pullRequestDto); + when(pullRequestRepository.findById(pullRequestDto.getId())).thenReturn(Optional.of(pullRequest)); + when(pullRequestRepository.save(any(PullRequest.class))).thenReturn(pullRequest); + var response = 
pullRequestService.updateModel(pullRequest); + assertEquals(pullRequestDto.getId(), response.getId()); + } + + @Test + public void getAllTest() { + var pullRequestDto = getPullRequestDto(); + var pullRequest = pullRequestDto.toDomain(pullRequestDto); + Page pageList = new PageImpl<>(List.of(pullRequest)); + when(pullRequestRepository.findAll(any(Pageable.class))).thenReturn(pageList); + var response = pullRequestService.getAll(Pageable.ofSize(3)); + assertEquals(pullRequestDto.getId(), response.get(0).getId()); + assertEquals(pageList.getSize(), response.size()); + + when(pullRequestRepository.findAll(any(PageRequest.class))).thenReturn(new PageImpl<>(List.of())); + response = pullRequestService.getAll(Pageable.ofSize(3)); + assertEquals(0, response.size()); + } + + @Test + public void getCountWithWorkflowTest() { + when(pullRequestRepository.count(ArgumentMatchers.>any())).thenReturn(2L); + assertEquals(2L, pullRequestService.getCountWithWorkflow(1L)); + } + + @Test + public void getCountTest() { + when(pullRequestRepository.count(ArgumentMatchers.>any())).thenReturn(10L); + var response = pullRequestService.getCount(null, LocalDateTime.now()); + assertEquals(10L, response); + + when(pullRequestRepository.count(ArgumentMatchers.>any())).thenReturn(5L); + response = pullRequestService.getCount(Status.OPEN, LocalDateTime.now()); + assertEquals(5L, response); + response = pullRequestService.getCount(Status.OPEN, null); + assertEquals(5L, response); + } + + @Test + public void prCountBySlaTimeTest() { + when(slaService.getByType(ServiceType.GITHUB)).thenReturn(SlaDto.builder().slaTime(123).type(ServiceType.GITHUB).build()); + when(pullRequestRepository.count(ArgumentMatchers.>any())).thenReturn(5L); + mockApplicationUser(); + assertEquals(5L, pullRequestService.prCountBySlaTime(false)); + assertEquals(5L, pullRequestService.prCountBySlaTime(true)); + } + + @Test + public void getDashboardMetricsTest() { + var pullRequests = List.of( + 
PullRequest.builder().id(1L).createdAt(LocalDateTime.now().minusDays(3)).status(PullRequest.Status.MERGED).updatedAt(LocalDateTime.now().minusHours(4)).workflowId(2L).build(), + PullRequest.builder().id(1L).createdAt(LocalDateTime.now().minusDays(3)).status(PullRequest.Status.OPEN).updatedAt(LocalDateTime.now().minusHours(4)).workflowId(2L).build(), + PullRequest.builder().id(1L).createdAt(LocalDateTime.now().minusDays(3)).status(PullRequest.Status.REOPENED).updatedAt(LocalDateTime.now().minusDays(3).plusHours(3)).workflowId(2L).build(), + PullRequest.builder().id(1L).createdAt(LocalDateTime.now().minusDays(9)).status(PullRequest.Status.MERGED).updatedAt(LocalDateTime.now().minusHours(4)).workflowId(2L).build(), + PullRequest.builder().id(1L).createdAt(LocalDateTime.now().minusDays(9)).status(PullRequest.Status.OPEN).updatedAt(LocalDateTime.now().minusDays(4)).workflowId(2L).build() + ); + when(slaService.getByType(ServiceType.GITHUB)).thenReturn(SlaDto.builder().slaTime(123).type(ServiceType.GITHUB).build()); + mockApplicationUser(); + mockEntityManager(); + when(entityManager.createQuery(criteriaQuery)).thenReturn(typedQuery); + when(typedQuery.getResultList()).thenReturn(pullRequests); + var response = pullRequestService.getDashboardMetrics(); + assertEquals(2L, response.getOpenPrsCount()); + assertEquals(1L, response.getClosePrsCount()); + assertEquals(100, response.getOpenPrPercent()); + assertEquals(50, response.getTotalPrPercent()); + + when(typedQuery.getResultList()).thenReturn(List.of()); + response = pullRequestService.getDashboardMetrics(); + + pullRequests = List.of( + PullRequest.builder().id(1L).createdAt(LocalDateTime.now().minusDays(3)).status(PullRequest.Status.MERGED).updatedAt(LocalDateTime.now().minusHours(4)).workflowId(2L).build(), + PullRequest.builder().id(1L).createdAt(LocalDateTime.now().minusDays(3)).status(PullRequest.Status.OPEN).updatedAt(LocalDateTime.now().minusHours(4)).workflowId(2L).build(), + 
PullRequest.builder().id(1L).createdAt(LocalDateTime.now().minusDays(3)).status(PullRequest.Status.REOPENED).updatedAt(LocalDateTime.now().minusDays(3).plusHours(3)).workflowId(2L).build() + ); + when(typedQuery.getResultList()).thenReturn(pullRequests); + response = pullRequestService.getDashboardMetrics(); + } + + @Test + public void getSlaMeanCountTest() { + var pullRequests = List.of( + PullRequest.builder().id(1L).createdAt(LocalDateTime.now().minusDays(3)).status(PullRequest.Status.MERGED).updatedAt(LocalDateTime.now().minusHours(4)).workflowId(2L).build(), + PullRequest.builder().id(1L).createdAt(LocalDateTime.now().minusDays(3)).status(PullRequest.Status.OPEN).updatedAt(LocalDateTime.now().minusHours(4)).workflowId(2L).build(), + PullRequest.builder().id(1L).createdAt(LocalDateTime.now()).status(PullRequest.Status.CLOSED).updatedAt(LocalDateTime.now().minusDays(3).plusHours(3)).workflowId(2L).build(), + PullRequest.builder().id(1L).createdAt(LocalDateTime.now().minusDays(9)).status(PullRequest.Status.MERGED).updatedAt(LocalDateTime.now().minusHours(4)).workflowId(2L).build(), + PullRequest.builder().id(1L).createdAt(LocalDateTime.now()).status(PullRequest.Status.OPEN).updatedAt(LocalDateTime.now().minusDays(4)).workflowId(2L).build() + ); + var response = pullRequestService.getSlaMeanCount(pullRequests, 123); + assertEquals(3, response.get("slaTimeExceded")); + } +} diff --git a/connect/src/test/java/com/opsbeach/connect/github/service/SchemaFileAuditServiceTest.java b/connect/src/test/java/com/opsbeach/connect/github/service/SchemaFileAuditServiceTest.java new file mode 100644 index 0000000..ce2b92c --- /dev/null +++ b/connect/src/test/java/com/opsbeach/connect/github/service/SchemaFileAuditServiceTest.java @@ -0,0 +1,415 @@ +package com.opsbeach.connect.github.service; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; 
+import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyList; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.ArgumentMatchers.anyMap; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.ArgumentMatchers; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.mockito.Spy; +import org.springframework.data.jpa.domain.Specification; +import org.springframework.mock.web.MockHttpServletRequest; +import org.springframework.test.util.ReflectionTestUtils; +import org.springframework.web.context.request.RequestContextHolder; +import org.springframework.web.context.request.ServletRequestAttributes; +import org.yaml.snakeyaml.Yaml; + +import com.fasterxml.jackson.core.exc.StreamReadException; +import com.fasterxml.jackson.databind.DatabindException; +import com.opsbeach.connect.core.specification.IdSpecifications; +import com.opsbeach.connect.github.entity.ClientRepo; +import com.opsbeach.connect.github.entity.Domain; +import com.opsbeach.connect.github.entity.Model; +import com.opsbeach.connect.github.entity.PullRequest; +import com.opsbeach.connect.github.entity.SchemaFileAudit; +import com.opsbeach.connect.github.entity.ClientRepo.RepoSource; +import com.opsbeach.connect.github.entity.ClientRepo.RepoType; +import com.opsbeach.connect.github.repository.SchemaFileAuditRepository; +import com.opsbeach.connect.schemata.entity.Field; +import com.opsbeach.connect.schemata.entity.Table; +import com.opsbeach.connect.schemata.enums.SchemaType; +import 
com.opsbeach.connect.schemata.processor.avro.AvroSchema; +import com.opsbeach.connect.schemata.processor.json.JsonSchema; +import com.opsbeach.connect.schemata.processor.protobuf.ProtoSchema; +import com.opsbeach.connect.schemata.service.DomainNodeService; +import com.opsbeach.connect.schemata.service.OrganizationService; +import com.opsbeach.connect.schemata.service.TableService; +import com.opsbeach.sharedlib.exception.RecordNotFoundException; +import com.opsbeach.sharedlib.exception.SchemaParserException; +import com.opsbeach.sharedlib.response.ResponseMessage; +import com.opsbeach.sharedlib.service.GoogleCloudService; + +import jakarta.persistence.EntityManager; +import jakarta.persistence.TypedQuery; +import jakarta.persistence.criteria.CriteriaBuilder; +import jakarta.persistence.criteria.CriteriaDelete; +import jakarta.persistence.criteria.CriteriaUpdate; +import jakarta.persistence.criteria.Predicate; +import jakarta.persistence.criteria.Root; + +public class SchemaFileAuditServiceTest { + + @InjectMocks + private SchemaFileAuditService schemaFileAuditService; + @Mock + private SchemaFileAuditRepository schemaFileAuditRepository; + @Mock + private EventAuditService eventAuditService; + @Mock + private DomainNodeService domainNodeService; + @Mock + private DomainService domainService; + @Mock + private ModelService modelService; + @Mock + private OrganizationService organizationService; + @Mock + private GoogleCloudService googleCloudService; + @Spy + private IdSpecifications scmFileAuditpecifications; + @Mock + private TableService tableService; + @Mock + private JsonSchema jsonSchema; + @Mock + private AvroSchema avroSchema; + @Mock + private CriteriaBuilder criteriaBuilder; + @Mock + private CriteriaUpdate criteriaUpdate; + @Mock + private CriteriaDelete criteriaDelete; + @Mock + private Root root; + @Mock + private TypedQuery typedQuery; + @Mock + private EntityManager entityManager; + @Mock + private ResponseMessage responseMessage; + @Mock + private 
ProtoSchema protoSchema; + + @BeforeEach + public void initMock() { + MockitoAnnotations.openMocks(this); + MockHttpServletRequest request = new MockHttpServletRequest(); + RequestContextHolder.setRequestAttributes(new ServletRequestAttributes(request)); + } + + private Object homePath; + private String githubFilePath; + + @BeforeEach + public void init() throws StreamReadException, DatabindException, IOException { + InputStream inputStream = new FileInputStream(new File("src/test/resources/application-test.yaml")); + Yaml yaml = new Yaml(); + Map> data = yaml.load(inputStream); + homePath = data.get("home-path"); + githubFilePath = data.get("github").get("construct-file-path"); + } + + private SchemaFileAudit getSchemaFileAudit() { + return SchemaFileAudit.builder().id(1L).name("avro_testing").fileType("avsc").path("path").build(); + } + + @Test + public void createSchemaFileAuditWhileInitialLoadingTest() { + var schemaFileAudit = getSchemaFileAudit(); + var filePath = "src/test/resources/schema_1/avro/avro_testing.avsc"; + when(schemaFileAuditRepository.save(any(SchemaFileAudit.class))).thenReturn(schemaFileAudit); + ReflectionTestUtils.setField(schemaFileAuditService, "homePath", homePath); + var clientRepo = ClientRepo.builder().id(1L) + .fullName("fullName") + .defaultBranch("main") + .repositorySource(RepoSource.GITHUB) + .build(); + var response = schemaFileAuditService.createSchemaFileAuditWhileInitialLoading(filePath, clientRepo, 1L); + assertEquals(response.getName(), schemaFileAudit.getName()); + } + + @Test + public void createSchemaFileAuditForNewFileTest() { + var schemaFileAudit = getSchemaFileAudit(); + var clientRepo = ClientRepo.builder().id(1L).repoType(RepoType.AVRO).fullName("fullName").defaultBranch("main").build(); + var table = Table.builder().id(1L).name("product").nameSpace("com.acme").build(); + var filePath = "src/test/resources/schema_1/avro"; + 
when(schemaFileAuditRepository.save(any(SchemaFileAudit.class))).thenReturn(schemaFileAudit); + ReflectionTestUtils.setField(schemaFileAuditService, "githubFilePath", githubFilePath); + var response = schemaFileAuditService.createSchemaFileAuditForNewFile(clientRepo, table, filePath); + assertEquals(response.getName(), schemaFileAudit.getName()); + + clientRepo = ClientRepo.builder().id(1L).repoType(RepoType.PROTOBUF).fullName("fullName").defaultBranch("main").build(); + response = schemaFileAuditService.createSchemaFileAuditForNewFile(clientRepo, table, filePath.concat("/")); + assertEquals(response.getName(), schemaFileAudit.getName()); + + clientRepo = ClientRepo.builder().id(1L).repoType(RepoType.JSON).fullName("fullName").defaultBranch("main").build(); + when(tableService.addTable(any(Table.class))).thenReturn(table); + response = schemaFileAuditService.createSchemaFileAuditForNewFile(clientRepo, table, null); + assertEquals(response.getName(), schemaFileAudit.getName()); + } + + @Test + public void getTablesFromFileContentTest() throws IOException { + ReflectionTestUtils.setField(schemaFileAuditService, "avroSchema", avroSchema); + var response = schemaFileAuditService.getTablesFromFileContent(null, null, "avsc"); + assertEquals(0, response.size()); + + ReflectionTestUtils.setField(schemaFileAuditService, "jsonSchema", jsonSchema); + response = schemaFileAuditService.getTablesFromFileContent(null, null, "json"); + assertEquals(0, response.size()); + + assertNull(schemaFileAuditService.getTablesFromFileContent(null, null, "proto")); + } + + private static final String PULL_REQUEST_ID = "pullRequestId"; + + @Test + public void updateSchemaFileAuditSetPrIdToNullTest() { + when(entityManager.getCriteriaBuilder()).thenReturn(criteriaBuilder); + when(criteriaBuilder.createCriteriaUpdate(SchemaFileAudit.class)).thenReturn(criteriaUpdate); + when(criteriaUpdate.from(SchemaFileAudit.class)).thenReturn(root); + + when(criteriaBuilder.equal(root.get(PULL_REQUEST_ID), 
1L)).thenReturn(mock(Predicate.class)); + when(criteriaUpdate.where(any(Predicate.class))).thenReturn(criteriaUpdate); + when(criteriaUpdate.set(PULL_REQUEST_ID, null)).thenReturn(criteriaUpdate); + when(entityManager.createQuery(criteriaUpdate)).thenReturn(typedQuery); + when(typedQuery.executeUpdate()).thenReturn(0); + + var response = schemaFileAuditService.updateSchemaFileAuditSetPrIdToNull(1L); + assertEquals(response, 0); + } + + @Test + public void deleteSchemaFileAuditByPrIdTest() { + when(entityManager.getCriteriaBuilder()).thenReturn(criteriaBuilder); + when(criteriaBuilder.createCriteriaDelete(SchemaFileAudit.class)).thenReturn(criteriaDelete); + when(criteriaDelete.from(SchemaFileAudit.class)).thenReturn(root); + + when(criteriaBuilder.equal(root.get(PULL_REQUEST_ID), 1L)).thenReturn(mock(Predicate.class)); + when(criteriaDelete.where(any(Predicate.class))).thenReturn(criteriaDelete); + when(entityManager.createQuery(criteriaDelete)).thenReturn(typedQuery); + when(typedQuery.executeUpdate()).thenReturn(0); + + var response = schemaFileAuditService.deleteSchemaFileAuditByPrId(1L); + assertEquals(response, 0); + } + + @Test + public void getModelTest() { + var schemaFileAudit = getSchemaFileAudit(); + when(schemaFileAuditRepository.findById(1L)).thenReturn(Optional.of(schemaFileAudit)); + var response = schemaFileAuditService.getModel(1L); + assertEquals(response.getName(), schemaFileAudit.getName()); + + assertThrows(RecordNotFoundException.class, () -> schemaFileAuditService.getModel(2L)); + } + + @Test + public void getByModelNodeIdTest() { + var schemaFileAudit = getSchemaFileAudit(); + when(schemaFileAuditRepository.findByModelNodeId(1L)).thenReturn(List.of(schemaFileAudit)); + var response = schemaFileAuditService.getByModelNodeId(1L); + assertEquals(response.size(), 1); + assertEquals(response.get(0).getName(), schemaFileAudit.getName()); + } + + @Test + public void getByModelNodeIdsTest() { + var schemaFileAudit = getSchemaFileAudit(); + 
when(schemaFileAuditRepository.findByModelNodeIds(List.of(1L))).thenReturn(List.of(schemaFileAudit)); + var response = schemaFileAuditService.getByModelNodeIds(List.of(1L)); + assertEquals(response.size(), 1); + assertEquals(response.get(0).getName(), schemaFileAudit.getName()); + } + + @Test + public void getSchemaFileAuditTest() { + assertNull(schemaFileAuditService.getSchemaFileAudit("path1")); + + var schemaFileAudit = getSchemaFileAudit(); + when(schemaFileAuditRepository.findOne(ArgumentMatchers.>any())).thenReturn(Optional.of(schemaFileAudit)); + var response = schemaFileAuditService.getSchemaFileAudit("path1"); + assertEquals(response.getName(), schemaFileAudit.getName()); + } + + @Test + public void getSchemaFileAuditTwoTest() { + assertNull(schemaFileAuditService.getSchemaFileAudit("path1", 1L)); + + var schemaFileAudit = getSchemaFileAudit(); + when(schemaFileAuditRepository.findOne(ArgumentMatchers.>any())).thenReturn(Optional.of(schemaFileAudit)); + var response = schemaFileAuditService.getSchemaFileAudit("path1", 1L); + assertEquals(response.getName(), schemaFileAudit.getName()); + } + + @Test + public void getAllTest() { + var schemaFileAudit = getSchemaFileAudit(); + when(schemaFileAuditRepository.findAll(ArgumentMatchers.>any())).thenReturn(List.of(schemaFileAudit)); + var response = schemaFileAuditService.getAll(null); + assertEquals(1, response.size()); + assertEquals(schemaFileAudit.getName(), response.get(0).getName()); + + when(schemaFileAuditRepository.findAll(ArgumentMatchers.>any())).thenReturn(List.of()); + response = schemaFileAuditService.getAll(1L); + assertEquals(0, response.size()); + } + + @Test + public void saveDeltaForProtoSchemaTest() { + var schemaFileAudit = getSchemaFileAudit(); + var clientRepo = ClientRepo.builder().id(1L).repoType(RepoType.AVRO).fullName("fullName").defaultBranch("main").build(); + var pullRequest = PullRequest.builder().id(1L).sourceBranch("develop").build(); + String[] filePaths = 
{"src/test/resources/schema_1/avro/avro_testing.avsc", "src/test/resources/schema_1/json/product.json"}; + var table = Table.builder().id(1L).name("product").nameSpace("com.acme").fields(List.of(Field.builder().id(1L).build())).build(); + var fileTables = Map.of(filePaths[0], List.of(table), filePaths[1], List.of(table)); + ReflectionTestUtils.setField(schemaFileAuditService, "protoSchema", protoSchema); + when(protoSchema.getTablesOfFilePaths(filePaths, clientRepo, pullRequest.getSourceBranch())).thenReturn(fileTables); + var domain = Domain.builder().id(1L).name(clientRepo.getFullName()).build(); + when(domainService.getDefaultDomain(anyString())).thenReturn(domain); + ReflectionTestUtils.setField(schemaFileAuditService, "githubFilePath", githubFilePath); + var model = Model.builder().id(1L).nodeId(table.getId()).name(table.getName()).nameSpace(table.getNameSpace()).build(); + when(tableService.findDeltaForFields(anyMap(), anyLong(), any(RepoType.class))).thenReturn(List.of(table, table)); + + schemaFileAudit = SchemaFileAudit.builder().id(1L).name("avro_testing").fileType("avsc").path("path").rootNodeId(1L).build(); + when(schemaFileAuditRepository.findOne(ArgumentMatchers.>any())).thenReturn(Optional.of(schemaFileAudit)); + when(modelService.findBySchemaFileAudit(anyLong())).thenReturn(List.of(model)); + when(tableService.findDeltaForTable(any(Table.class), anyLong(), anyLong())).thenReturn(table); + var response = schemaFileAuditService.saveDeltaForProtoSchema(filePaths, clientRepo, pullRequest); + assertEquals(response.size(), 2); + } + + @Test + public void saveDeltaTest() throws IOException { + var schemaFileAudit = getSchemaFileAudit(); + var clientRepo = ClientRepo.builder().id(1L).repoType(RepoType.AVRO).fullName("fullName").defaultBranch("main").build(); + var pullRequest = PullRequest.builder().id(1L).sourceBranch("develop").build(); + String[] filePaths = {"src/test/resources/schema_1/avro/avro_testing.avsc", 
"src/test/resources/schema_1/json/product.avsc"}; + var table = Table.builder().id(1L).name("product").nameSpace("com.acme").fields(List.of(Field.builder().id(1L).build())).build(); + var fileTables = Map.of(filePaths[0], table.toString().getBytes(), filePaths[1], table.toString().getBytes()); + ReflectionTestUtils.setField(schemaFileAuditService, "avroSchema", avroSchema); + when(avroSchema.getTables(table.toString().getBytes(), Boolean.FALSE)).thenReturn(List.of(table)); + var domain = Domain.builder().id(1L).name(clientRepo.getFullName()).build(); + when(domainService.getDefaultDomain(anyString())).thenReturn(domain); + when(schemaFileAuditRepository.save(any(SchemaFileAudit.class))).thenReturn(schemaFileAudit); + ReflectionTestUtils.setField(schemaFileAuditService, "githubFilePath", githubFilePath); + when(tableService.saveNewTableWithDelta(any(Table.class), anyLong())).thenReturn(table); + var model = Model.builder().id(1L).nodeId(table.getId()).name(table.getName()).nameSpace(table.getNameSpace()).build(); + when(modelService.createModel(any(Table.class), any(SchemaFileAudit.class), any(Domain.class), anyLong())).thenReturn(model); + when(modelService.addModel(any(Model.class))).thenReturn(model); + when(tableService.findDeltaForFields(anyMap(), anyLong(), any(RepoType.class))).thenReturn(List.of(table, table)); + var response = schemaFileAuditService.saveDelta(fileTables, clientRepo, pullRequest.getId()); + assertEquals(response.size(), 2); + } + + @Test + public void saveDeltaTestFail() { + var clientRepo = ClientRepo.builder().id(1L).repoType(RepoType.AVRO).fullName("fullName").defaultBranch("main").build(); + String[] filePaths = {"src/test/resources/schema_1/avro/avro_testing.avsc", "src/test/resources/schema_1/json/product.avsc"}; + var table = Table.builder().id(1L).name("product").nameSpace("com.acme").fields(List.of(Field.builder().id(1L).build())).build(); + var fileTables = Map.of(filePaths[0], table.toString().getBytes(), filePaths[1], 
table.toString().getBytes()); + ReflectionTestUtils.setField(schemaFileAuditService, "avroSchema", avroSchema); + try { + when(avroSchema.getTables(table.toString().getBytes(), Boolean.FALSE)).thenThrow(IOException.class); + } catch (IOException e) { + e.printStackTrace(); + } + assertThrows(SchemaParserException.class, () -> schemaFileAuditService.saveDelta(fileTables, clientRepo, 1L)); + } + + private Table getTable() { + var nameField = Field.builder().id(1L).name("name").dataType("string").isPrimitiveType(Boolean.TRUE).rowNumber(1).isUserChanged(Boolean.FALSE).isDeleted(Boolean.FALSE).build(); + var scoreFieldDelete = Field.builder().id(2L).name("score").dataType("int").isPrimitiveType(Boolean.TRUE).rowNumber(2).prId(1L).isUserChanged(Boolean.TRUE).isDeleted(Boolean.TRUE).build(); + var scoreFieldUpdated = Field.builder().id(3L).name("score").dataType("int").isPrimitiveType(Boolean.TRUE).rowNumber(2).prId(1L).isUserChanged(Boolean.TRUE).isDeleted(Boolean.FALSE).build(); + + List fields = new ArrayList<>(); + fields.add(nameField); fields.add(scoreFieldDelete); fields.add(scoreFieldUpdated); + return Table.builder().id(1L).name("student").modifiedTable(Table.builder().id(2L).name("student").owner("core").build()) + .isUserChanged(Boolean.TRUE).isDeleted(Boolean.TRUE).fields(fields).build(); + } + + @Test + public void generateFileContentOfSchemaTest() { + ReflectionTestUtils.setField(schemaFileAuditService, "protoSchema", protoSchema); + assertEquals(0, schemaFileAuditService.generateFileContentOfSchema(10L, RepoType.PROTOBUF).size()); + + var table = getTable(); + var schemaFileAudit = SchemaFileAudit.builder().id(1L).name("avro_testing").fileType("avsc").path("path") + .rootNodeId(table.getId()).build(); + ReflectionTestUtils.setField(schemaFileAuditService, "avroSchema", avroSchema); + when(schemaFileAuditRepository.findByModelNodeId(anyLong())).thenReturn(List.of(schemaFileAudit)); + when(tableService.findAllById(anyList())).thenReturn(List.of(table)); + var 
response = schemaFileAuditService.generateFileContentOfSchema(table.getId(), RepoType.AVRO); + assertEquals(response.size(), 1); + } + + @Test + public void generateFileContentOfSchemaTest2() { + var table = getTable(); + var schemaFileAudit = SchemaFileAudit.builder().id(1L).name("Json_testing").fileType("json").path("path") + .rootNodeId(table.getId()).build(); + ReflectionTestUtils.setField(schemaFileAuditService, "jsonSchema", jsonSchema); + when(schemaFileAuditRepository.findByModelNodeIds(anyList())).thenReturn(List.of(schemaFileAudit)); + when(tableService.findAllById(anyList())).thenReturn(List.of(table)); + var response = schemaFileAuditService.generateFileContentOfSchema(List.of(table.getId())); + assertEquals(response.size(), 1); + } + + @Test + public void filterFieldsTest() { + var scoreFieldDelete = Field.builder().id(1L).name("score").dataType("int").isPrimitiveType(Boolean.TRUE).rowNumber(1).isUserChanged(Boolean.TRUE).isDeleted(Boolean.TRUE).build(); + var scoreFieldUpdated = Field.builder().id(2L).name("score").dataType("int").isPrimitiveType(Boolean.TRUE).rowNumber(1).isUserChanged(Boolean.TRUE).isDeleted(Boolean.FALSE).build(); + + var arrayField = Field.builder().id(3L).rowNumber(2).name("arrayField").dataType("array").isPrimitiveType(Boolean.FALSE).items("string").build(); + var mapField = Field.builder().id(4L).name("mapFiled").dataType("map").rowNumber(3).isUserChanged(Boolean.FALSE).isDeleted(Boolean.FALSE).values("string").isPrimitiveType(Boolean.TRUE).build(); + var unionField = Field.builder().id(5L).name("unionfield").dataType("union").rowNumber(4).isPrimitiveType(Boolean.FALSE).isUserChanged(Boolean.FALSE).isDeleted(Boolean.FALSE).unionTypes(List.of(Field.builder().id(8L).rowNumber(4).dataType("null").isPrimitiveType(Boolean.TRUE).build(), Field.builder().id(9L).rowNumber(4).dataType("int").isPrimitiveType(Boolean.TRUE).build())).isPrimitiveType(Boolean.FALSE).build(); + var tableField = 
Field.builder().id(6L).name("tablefiled").dataType("Marks").rowNumber(5).isPrimitiveType(Boolean.FALSE).isUserChanged(Boolean.FALSE).isDeleted(Boolean.FALSE).contain(Table.builder().id(10L).name("Marks").schemaType(SchemaType.ENTITY).fields(List.of(Field.builder().id(11L).rowNumber(1).name("Marks").dataType("string").isPrimitiveType(Boolean.TRUE).build())).build()).isPrimitiveType(Boolean.FALSE).isUserChanged(Boolean.TRUE).isDeleted(Boolean.TRUE).prId(1L).build(); + + var arrayOfArray = Field.builder().id(7L).name("arrayOfArray").dataType("array").rowNumber(2).isPrimitiveType(Boolean.FALSE).isUserChanged(Boolean.FALSE).isDeleted(Boolean.FALSE).items("array").arrayField(arrayField).build(); + var arrayOfUnion = Field.builder().id(8L).name("ArrayOfUnion").dataType("array").rowNumber(3).isPrimitiveType(Boolean.FALSE).isUserChanged(Boolean.FALSE).isDeleted(Boolean.FALSE).items("union").unionTypes(unionField.getUnionTypes()).build(); + var arrayOfObject = Field.builder().id(9L).name("ArrayOfObject").dataType("array").rowNumber(4).isPrimitiveType(Boolean.FALSE).isUserChanged(Boolean.FALSE).isDeleted(Boolean.FALSE).items("table").contain(tableField.getContain()).build(); + var arrayOfMap = Field.builder().id(10L).name("ArrayOfMap").dataType("array").rowNumber(5).isPrimitiveType(Boolean.FALSE).isUserChanged(Boolean.FALSE).isDeleted(Boolean.FALSE).items("map").mapField(mapField).build(); + + var mapOfArray = Field.builder().id(11L).name("mapOfArray").dataType("map").rowNumber(6).isPrimitiveType(Boolean.FALSE).isUserChanged(Boolean.FALSE).isDeleted(Boolean.FALSE).values("array").arrayField(arrayField).build(); + var mapOfUnion = Field.builder().id(12L).name("MapOfUnion").dataType("map").rowNumber(7).isPrimitiveType(Boolean.FALSE).isUserChanged(Boolean.FALSE).isDeleted(Boolean.FALSE).values("union").unionTypes(unionField.getUnionTypes()).build(); + var mapOfObject = 
Field.builder().id(13L).name("MapOfObject").dataType("map").rowNumber(8).isPrimitiveType(Boolean.FALSE).isUserChanged(Boolean.FALSE).isDeleted(Boolean.FALSE).values("table").contain(tableField.getContain()).build(); + var mapOfMap = Field.builder().id(14L).name("MapOfMap").dataType("map").rowNumber(9).isPrimitiveType(Boolean.FALSE).isUserChanged(Boolean.FALSE).isDeleted(Boolean.FALSE).values("map").mapField(mapField).build(); + + List fields = new ArrayList<>(); + fields.add(scoreFieldDelete); fields.add(scoreFieldUpdated); fields.add(unionField); + fields.add(arrayOfUnion); fields.add(arrayOfObject); fields.add(arrayOfMap); fields.add(arrayOfArray); + fields.add(mapOfUnion); fields.add(mapOfObject); fields.add(mapOfMap); fields.add(mapOfArray); + + var table = Table.builder().id(12L).name("student").schemaType(SchemaType.ENTITY) + .isUserChanged(Boolean.FALSE).isDeleted(Boolean.FALSE).fields(fields).build(); + + var response = schemaFileAuditService.filterFields(table); + assertEquals(response.getFields().size(), fields.size()-1); + } + + @Test + public void deleteAllByClientRepoIdTest() { + schemaFileAuditService.deleteAllByClientRepoId(1L); + } +} diff --git a/connect/src/test/java/com/opsbeach/connect/github/service/WorkflowServiceTest.java b/connect/src/test/java/com/opsbeach/connect/github/service/WorkflowServiceTest.java new file mode 100644 index 0000000..25d3237 --- /dev/null +++ b/connect/src/test/java/com/opsbeach/connect/github/service/WorkflowServiceTest.java @@ -0,0 +1,435 @@ +package com.opsbeach.connect.github.service; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.ArgumentMatchers.anyMap; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import 
java.util.ArrayList; +import java.util.List; +import java.util.Optional; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.springframework.mock.web.MockHttpServletRequest; +import org.springframework.security.core.Authentication; +import org.springframework.security.core.context.SecurityContext; +import org.springframework.security.core.context.SecurityContextHolder; +import org.springframework.test.util.ReflectionTestUtils; +import org.springframework.web.context.request.RequestContextHolder; +import org.springframework.web.context.request.ServletRequestAttributes; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.opsbeach.connect.github.dto.WorkflowDto; +import com.opsbeach.connect.github.dto.WorkflowDto.FieldDto; +import com.opsbeach.connect.github.dto.WorkflowDto.TableDto; +import com.opsbeach.connect.github.entity.ClientRepo; +import com.opsbeach.connect.github.entity.Model; +import com.opsbeach.connect.github.entity.PullRequest; +import com.opsbeach.connect.github.entity.SchemaFileAudit; +import com.opsbeach.connect.github.entity.Workflow; +import com.opsbeach.connect.github.entity.ClientRepo.RepoType; +import com.opsbeach.connect.github.repository.WorkflowRepository; +import com.opsbeach.connect.schemata.entity.Field; +import com.opsbeach.connect.schemata.entity.Table; +import com.opsbeach.connect.schemata.repository.FieldRepostory; +import com.opsbeach.connect.schemata.repository.TableRepository; +import com.opsbeach.connect.schemata.service.TableService; +import com.opsbeach.connect.schemata.validate.FieldValidator; +import com.opsbeach.connect.schemata.validate.Result; +import com.opsbeach.connect.schemata.validate.SchemaValidator; +import com.opsbeach.connect.schemata.validate.Status; +import com.opsbeach.sharedlib.dto.UserDto; +import com.opsbeach.sharedlib.exception.AlreadyExistException; +import 
com.opsbeach.sharedlib.exception.BadRequestException; +import com.opsbeach.sharedlib.exception.RecordNotFoundException; +import com.opsbeach.sharedlib.response.ResponseMessage; + +public class WorkflowServiceTest { + + @InjectMocks + private WorkflowService workflowService; + @Mock + private DomainService domainService; + @Mock + private WorkflowRepository workflowRepository; + @Mock + private ResponseMessage responseMessage; + @Mock + private TableRepository tableRepository; + @Mock + private ClientRepoService clientRepoService; + @Mock + private SchemaFileAuditService schemaFileAuditService; + @Mock + private GitHubService gitHubService; + @Mock + private SchemaValidator schemaValidator; + @Mock + private FieldValidator fieldValidator; + @Mock + private FieldRepostory fieldRepostory; + @Mock + private ModelService modelService; + + @BeforeEach + public void initMock() { + MockitoAnnotations.openMocks(this); + MockHttpServletRequest request = new MockHttpServletRequest(); + RequestContextHolder.setRequestAttributes(new ServletRequestAttributes(request)); + } + + private void mockApplicationUser() { + UserDto userDto = mock(UserDto.class); + Authentication authentication = mock(Authentication.class); + SecurityContext securityContext = mock(SecurityContext.class); + when(securityContext.getAuthentication()).thenReturn(authentication); + SecurityContextHolder.setContext(securityContext); + when(SecurityContextHolder.getContext().getAuthentication().getPrincipal()).thenReturn(userDto); + } + + private WorkflowDto getWorkflowDto() { + return WorkflowDto.builder().id(1L).domainId(2L).status(Workflow.Status.PR_CLOSED).rank(0).build(); + } + + private Workflow getWorkflow() { + return Workflow.builder().id(1L).domainId(2L).status(Workflow.Status.PR_CLOSED).rank(0).build(); + } + + @Test + public void addTest() { + var workflowDto = getWorkflowDto(); + var workflow = workflowDto.toDomain(workflowDto); + when(workflowRepository.save(any())).thenReturn(workflow); + var 
response = workflowService.add(workflowDto); + assertEquals(workflowDto.getDomainId(), response.getDomainId()); + } + + @Test + public void getTest() { + var workflowDto = getWorkflowDto(); + var workflow = workflowDto.toDomain(workflowDto); + when(workflowRepository.findById(workflowDto.getId())).thenReturn(Optional.of(workflow)); + var response = workflowService.get(workflowDto.getId()); + assertEquals(workflowDto.getDomainId(), response.getDomainId()); + + assertThrows(RecordNotFoundException.class, () -> { workflowService.get(2L); }); + } + + @Test + public void getAllTest() { + var workflowDto = getWorkflowDto(); + var workflow = workflowDto.toDomain(workflowDto); + when(workflowRepository.findAll()).thenReturn(List.of(workflow)); + var response = workflowService.getAll(); + assertEquals(1, response.size()); + assertEquals(workflowDto.getDomainId(), response.get(0).getDomainId()); + + when(workflowRepository.findAll()).thenReturn(List.of()); + response = workflowService.getAll(); + assertEquals(0, response.size()); + } + + @Test + public void updateStatusTest() { + var workflow = getWorkflow(); + when(workflowRepository.findById(workflow.getId())).thenReturn(Optional.of(workflow)); + when(workflowRepository.save(any(Workflow.class))).thenReturn(workflow); + var response = workflowService.updateStatus(1L, Workflow.Status.PR_CLOSED); + assertEquals(response.getStatus(), Workflow.Status.PR_CLOSED); + } + + private class TableAndFieldChanges { + + public static Table tableOld() { + var fieldId = Field.builder().id(1L).name("id").dataType("int").description("primary key").isPrimitiveType(Boolean.TRUE).build(); + var fieldName = Field.builder().id(2L).name("name").dataType("string").description("Name of the product").isPrimitiveType(Boolean.TRUE).build(); + // var fieldPrice = Field.builder().id(3L).name("price").dataType("long").description("Price of the Product").build(); + List fields = new ArrayList<>(3); + fields.add(fieldId); fields.add(fieldName); 
//fields.add(fieldPrice); + return Table.builder().id(1L).name("product").nameSpace("com.acme").description("Product schema") + .owner("core").domain("E-commerce").fields(fields).build(); + } + + public static Table tableNew() { + var fieldIdOld = Field.builder().id(1L).name("id").dataType("int").description("primary key").isPrimitiveType(Boolean.TRUE).build(); + var fieldIdNew = Field.builder().id(1L).name("id").dataType("int").description("Unique Identifier of the Product").isUserChanged(Boolean.TRUE).isPrimitiveType(Boolean.TRUE).build(); + var fieldName = Field.builder().id(2L).name("name").dataType("string").description("Name of the product").isUserChanged(Boolean.FALSE).isPrimitiveType(Boolean.TRUE).build(); + var fieldPrice = Field.builder().id(3L).name("price").dataType("long").description("Price of the Product").isUserChanged(Boolean.TRUE).build(); + List fields = new ArrayList<>(3); + fields.add(fieldIdOld); fields.add(fieldIdNew); fields.add(fieldName); fields.add(fieldPrice); + return Table.builder().id(1L).name("product").nameSpace("com.acme").description("Product schema") + .isUserChanged(Boolean.TRUE).owner("core").domain("E-commerce").fields(fields).build(); + } + + public static TableDto workflowTable() { + var fieldId = new FieldDto(1L, "id", "int", "Unique Identifier of the Product", Boolean.FALSE, Boolean.FALSE, Boolean.FALSE, null); + var fieldName = new FieldDto(2L, "name", "string", "Name of the product", Boolean.FALSE, Boolean.FALSE, Boolean.FALSE, null); + var fieldPrice = new FieldDto(3L, "price", "string", "Price of the Product", Boolean.FALSE, Boolean.FALSE, Boolean.FALSE, null); + List fieldDtos = new ArrayList<>(3); + fieldDtos.add(fieldId); fieldDtos.add(fieldName); fieldDtos.add(fieldPrice); + return new TableDto(1L, "com.acme", "product", null, "Product schema", "core", "core", null, null, null, null, "Active", fieldDtos, null, null, null); + } + + } + + private class FieldMapping { + + public static Table tableOld() { + var fieldId 
= Field.builder().id(1L).name("id").dataType("int").description("primary key").isPrimitiveType(Boolean.TRUE).build(); + var fieldName = Field.builder().id(2L).name("name").dataType("string").description("Name of the product").isPrimitiveType(Boolean.TRUE).build(); + var fieldFilterId = Field.builder().id(3L).name("filter_id").dataType("long").description("Primary Key of Filter Table").isPrimitiveType(Boolean.TRUE).build(); + List fields = new ArrayList<>(); + fields.add(fieldId); fields.add(fieldName); fields.add(fieldFilterId); + return Table.builder().id(1L).name("product").nameSpace("com.acme").description("Product schema") + .owner("core").domain("E-commerce").jsonSchemaId("jsonSchemaId").fields(fields).build(); + } + + public static Table tableBrand() { + var fieldId = Field.builder().id(1L).name("id").dataType("int").description("Unique Identifier of the Brand").isPrimitiveType(Boolean.TRUE).build(); + var fieldName = Field.builder().id(2L).name("name").dataType("string").description("Name of the Brand").isUserChanged(Boolean.FALSE).isPrimitiveType(Boolean.TRUE).build(); + List fields = new ArrayList<>(); + fields.add(fieldId); fields.add(fieldName); + return Table.builder().id(1L).name("brand").nameSpace("com.acme").description("Brand schema") + .owner("core").domain("E-commerce").jsonSchemaId("jsonSchemaId").fields(fields).build(); + } + + public static Table tableNew() { + var fieldId = Field.builder().id(1L).name("id").dataType("int").description("Unique Identifier of the Product").isPrimitiveType(Boolean.TRUE).build(); + var fieldName = Field.builder().id(2L).name("name").dataType("string").description("Name of the product").isUserChanged(Boolean.FALSE).isPrimitiveType(Boolean.TRUE).build(); + var fieldFilterId = Field.builder().id(3L).name("filter_id").dataType("long").description("Primary Key of Filter Table").isPrimitiveType(Boolean.TRUE) + .referenceField(Field.builder().id(1L).name("id").dataType("long").build()).build(); + var fieldUserId = 
Field.builder().id(4L).name("user_id").dataType("long").description("Primary Key of User Table").isPrimitiveType(Boolean.TRUE) + .referenceField(Field.builder().id(4L).name("id").dataType("long").build()).build(); + var fieldBrand = Field.builder().id(5L).name("brand").dataType("com.acme.brand").description("Brand Of the Product").isUserChanged(Boolean.TRUE) + .isPrimitiveType(Boolean.FALSE).contain(tableBrand()).build(); + List fields = new ArrayList<>(); + fields.add(fieldId); fields.add(fieldName); fields.add(fieldFilterId); fields.add(fieldUserId); fields.add(fieldBrand); + return Table.builder().id(1L).name("product").nameSpace("com.acme").description("Product schema") + .owner("core").domain("E-commerce").fields(fields).build(); + } + + public static TableDto workflowTable() { + var fieldId = new FieldDto(1L, "id", "int", "Unique Identifier of the Product", Boolean.FALSE, Boolean.FALSE, Boolean.FALSE, null); + var fieldName = new FieldDto(2L, "name", "string", "Name of the product", Boolean.FALSE, Boolean.FALSE, Boolean.FALSE, null); + var fieldFilter = new FieldDto(3L, "filter_id", "long", "Primary Key of Filter Table", Boolean.FALSE, Boolean.FALSE, Boolean.FALSE, 1L); + var fieldPrice = new FieldDto(null, "user_id", "long", "Primary Key of User Table", Boolean.FALSE, Boolean.FALSE, Boolean.FALSE, 4L); + var fieldBrand = new FieldDto(null, "brand", "com.acme.brand", "Brand of the Product", Boolean.FALSE, Boolean.FALSE, Boolean.FALSE, null); + List fieldDtos = new ArrayList<>(); + fieldDtos.add(fieldId); fieldDtos.add(fieldName); fieldDtos.add(fieldFilter); fieldDtos.add(fieldPrice); fieldDtos.add(fieldBrand); + return new TableDto(1L, "com.acme", "product", null, "Product schema", "core", "E-commerce", null, null, null, null, "Active", fieldDtos, null, null, null); + } + } + + @Test + public void saveAndRaisePrTest() { + var table = TableAndFieldChanges.tableOld(); + var workflowDto = WorkflowDto.builder().table(TableAndFieldChanges.workflowTable()).build(); 
+ var clientRepo = ClientRepo.builder().id(1L).name("name").fullName("fullName").connectId(1L).build(); + when(clientRepoService.getModel(anyLong())).thenReturn(clientRepo); + var workflow = Workflow.builder().id(1L).status(Workflow.Status.NEW).title("changes-in-table-"+workflowDto.getTable().name()) + .purpose("modifying schema "+workflowDto.getTable().name()).build(); + when(workflowRepository.save(any(Workflow.class))).thenReturn(workflow); + TableService tableService = new TableService(tableRepository, fieldRepostory, responseMessage, null, null, null, null); + ReflectionTestUtils.setField(workflowService, "tableService", tableService); + when(tableRepository.findById(workflowDto.getTable().id())).thenReturn(Optional.of(table)); + when(schemaValidator.apply(any(Table.class))).thenReturn(new Result(null, null, Status.SUCCESS, null)); + when(fieldValidator.apply(any(Field.class))).thenReturn(new Result(null, null, Status.SUCCESS, null)); + var tableNew = TableAndFieldChanges.tableNew(); + when(tableRepository.save(any(Table.class))).thenReturn(tableNew); + var pullRequest = PullRequest.builder().id(1L).workflowId(workflow.getId()).build(); + when(gitHubService.commitAndRaisePr(anyMap(), any(Workflow.class))).thenReturn(pullRequest); + var response = new ObjectMapper().convertValue(workflowService.saveAndRaisePr(workflowDto, clientRepo.getId()), WorkflowDto.class); + assertEquals(response.getTitle(), workflow.getTitle()); + + mockApplicationUser(); + table = FieldMapping.tableOld(); + workflowDto = WorkflowDto.builder().table(FieldMapping.workflowTable()).build(); + when(tableRepository.findById(workflowDto.getTable().id())).thenReturn(Optional.of(table)); + when(fieldRepostory.findById(1L)).thenReturn(Optional.of(Field.builder().id(1L).name("id").dataType("long").build())); + when(fieldRepostory.findById(4L)).thenReturn(Optional.of(Field.builder().id(4L).name("id").dataType("long").build())); + when(tableRepository.findByNameAndNameSpaceAndClientId(anyString(), 
anyString(), anyLong())).thenReturn(FieldMapping.tableBrand()); + tableNew = FieldMapping.tableNew(); + when(tableRepository.save(any(Table.class))).thenReturn(tableNew); + response = new ObjectMapper().convertValue(workflowService.saveAndRaisePr(workflowDto, clientRepo.getId()), WorkflowDto.class); + assertEquals(response.getTitle(), workflow.getTitle()); + } + + private class NewTableCreation { + + public static Table tableNew() { + var fieldIdOld = Field.builder().id(1L).name("id").dataType("int").description("primary key").isPrimitiveType(Boolean.TRUE).build(); + var fieldName = Field.builder().id(2L).name("name").dataType("string").description("Name of the product").isUserChanged(Boolean.FALSE).isPrimitiveType(Boolean.TRUE).build(); + var fieldPrice = Field.builder().id(3L).name("price").dataType("long").description("Price of the Product").isUserChanged(Boolean.TRUE).build(); + List fields = new ArrayList<>(3); + fields.add(fieldIdOld); fields.add(fieldName); fields.add(fieldPrice); + return Table.builder().id(1L).name("product").nameSpace("com.acme").description("Product schema") + .isUserChanged(Boolean.TRUE).owner("core").domain("E-commerce").fields(fields).build(); + } + + public static TableDto workflowTable() { + var fieldId = new FieldDto(null, "id", "int", "Unique Identifier of the Product", Boolean.FALSE, Boolean.FALSE, Boolean.FALSE, null); + var fieldName = new FieldDto(null, "name", "string", "Name of the product", Boolean.FALSE, Boolean.FALSE, Boolean.FALSE, null); + var fieldPrice = new FieldDto(null, "price", "string", "Price of the Product", Boolean.FALSE, Boolean.FALSE, Boolean.FALSE, null); + List fieldDtos = new ArrayList<>(3); + fieldDtos.add(fieldId); fieldDtos.add(fieldName); fieldDtos.add(fieldPrice); + return new TableDto(null, "com.acme", "product", null, "Product schema", "core", "E-commerce", null, null, null, null, "Active", fieldDtos, null, null, null); + } + } + + @Test + public void saveAndRaisePrNewTableTest() { + var table = 
NewTableCreation.tableNew(); + var workflowDto = WorkflowDto.builder().table(NewTableCreation.workflowTable()).build(); + var clientRepo = ClientRepo.builder().id(1L).name("name").fullName("fullName").repoType(RepoType.PROTOBUF).connectId(1L).build(); + when(clientRepoService.getModel(anyLong())).thenReturn(clientRepo); + var workflow = Workflow.builder().id(1L).status(Workflow.Status.NEW).title("changes-in-table-"+workflowDto.getTable().name()) + .purpose("modifying schema "+workflowDto.getTable().name()).build(); + when(workflowRepository.save(any(Workflow.class))).thenReturn(workflow); + when(schemaValidator.apply(any(Table.class))).thenReturn(new Result(null, null, Status.SUCCESS, null)); + when(fieldValidator.apply(any(Field.class))).thenReturn(new Result(null, null, Status.SUCCESS, null)); + TableService tableService = new TableService(tableRepository, fieldRepostory, responseMessage, null, null, null, null); + ReflectionTestUtils.setField(workflowService, "tableService", tableService); + when(tableRepository.save(any(Table.class))).thenReturn(table); + var schemaFileAudit = SchemaFileAudit.builder().id(1L).clientRepoId(clientRepo.getId()).build(); + when(schemaFileAuditService.createSchemaFileAuditForNewFile(any(ClientRepo.class), any(Table.class), anyString())).thenReturn(schemaFileAudit); + var model = Model.builder().id(1L).schemaFileAuditId(schemaFileAudit.getId()).name(table.getName()).nameSpace(table.getNameSpace()).nodeId(table.getId()).build(); + when(modelService.createModel(any(SchemaFileAudit.class), anyLong(), any(Table.class))).thenReturn(model); + var pullRequest = PullRequest.builder().id(1L).workflowId(workflow.getId()).build(); + when(gitHubService.commitAndRaisePr(anyMap(), any(Workflow.class))).thenReturn(pullRequest); + var response = new ObjectMapper().convertValue(workflowService.saveAndRaisePr(workflowDto, clientRepo.getId()), WorkflowDto.class); + assertEquals(response.getTitle(), workflow.getTitle()); + + clientRepo = 
ClientRepo.builder().id(1L).name("name").fullName("fullName").repoType(RepoType.JSON).connectId(1L).build(); + when(clientRepoService.getModel(anyLong())).thenReturn(clientRepo); + response = new ObjectMapper().convertValue(workflowService.saveAndRaisePr(workflowDto, clientRepo.getId()), WorkflowDto.class); + assertEquals(response.getTitle(), workflow.getTitle()); + } + + private void fieldNameChangedTest() { + var fieldIdOld = Field.builder().id(1L).name("id").dataType("int").build(); + var tableOld = Table.builder().id(1L).name("product").nameSpace("com.acme").fields(List.of(fieldIdOld)).build(); + when(tableRepository.findById(anyLong())).thenReturn(Optional.of(tableOld)); + var fieldDto = new FieldDto(1L, "ID", "int", null, null, null, null, null); + var tableDto = new TableDto(1L, "com.acme", "product", null, null, null, null, null, null, null, null, null, List.of(fieldDto), null, null, null); + var workflowDto1 = WorkflowDto.builder().table(tableDto).build(); + assertThrows(BadRequestException.class, () -> workflowService.saveAndRaisePr(workflowDto1, 1L)); + } + + private void fieldDatatypeChangedTest() { + var fieldIdOld = Field.builder().id(1L).name("id").dataType("int").build(); + var tableOld = Table.builder().id(1L).name("product").nameSpace("com.acme").fields(List.of(fieldIdOld)).build(); + when(tableRepository.findById(anyLong())).thenReturn(Optional.of(tableOld)); + var fieldDto = new FieldDto(1L, "id", "long", null, null, null, null, null); + var tableDto = new TableDto(1L, "com.acme", "product", null, null, null, null, null, null, null, null, null, List.of(fieldDto), null, null, null); + var workflowDto = WorkflowDto.builder().table(tableDto).build(); + assertThrows(BadRequestException.class, () -> workflowService.saveAndRaisePr(workflowDto, 1L)); + } + + private void newFieldWithNameAlreayExistsTest() { + var fieldIdOld = Field.builder().id(1L).name("id").dataType("int").build(); + var tableOld = 
Table.builder().id(1L).name("product").nameSpace("com.acme").fields(List.of(fieldIdOld)).build(); + when(tableRepository.findById(anyLong())).thenReturn(Optional.of(tableOld)); + var fieldDto = new FieldDto(1L, "id", "int", null, null, null, null, null); + var fieldDto2 = new FieldDto(null, "id", "long", null, null, null, null, null); + var tableDto = new TableDto(1L, "com.acme", "product", null, null, null, null, null, null, null, null, null, List.of(fieldDto, fieldDto2), null, null, null); + var workflowDto = WorkflowDto.builder().table(tableDto).build(); + when(fieldValidator.apply(any(Field.class))).thenReturn(new Result(null, null, Status.SUCCESS, null)); + assertThrows(AlreadyExistException.class, () -> workflowService.saveAndRaisePr(workflowDto, 1L)); + } + + private void fieldMetadataEmptyTest() { + var fieldIdOld = Field.builder().id(1L).name("id").dataType("int").build(); + var tableOld = Table.builder().id(1L).name("product").nameSpace("com.acme").fields(List.of(fieldIdOld)).build(); + when(tableRepository.findById(anyLong())).thenReturn(Optional.of(tableOld)); + var fieldDto = new FieldDto(1L, "id", "int", null, null, null, null, null); + var tableDto = new TableDto(1L, "com.acme", "product", null, null, null, null, null, null, null, null, null, List.of(fieldDto), null, null, null); + var workflowDto = WorkflowDto.builder().table(tableDto).build(); + when(fieldValidator.apply(any(Field.class))).thenReturn(new Result(null, null, Status.ERROR, List.of("Empty desc"))); + assertThrows(BadRequestException.class, () -> workflowService.saveAndRaisePr(workflowDto, 1L)); + } + + private void tableNameSpaceChangedTest() { + var fieldIdOld = Field.builder().id(1L).name("id").dataType("int").build(); + var tableOld = Table.builder().id(1L).name("product").nameSpace("com.acme").fields(List.of(fieldIdOld)).build(); + when(tableRepository.findById(anyLong())).thenReturn(Optional.of(tableOld)); + var fieldDto = new FieldDto(1L, "id", "int", null, null, null, null, 
null); + var tableDto = new TableDto(1L, "com.xcme", "product", null, null, null, null, null, null, null, null, null, List.of(fieldDto), null, null, null); + var workflowDto = WorkflowDto.builder().table(tableDto).build(); + assertThrows(BadRequestException.class, () -> workflowService.saveAndRaisePr(workflowDto, 1L)); + } + + private void tableNameChangedTest() { + var fieldIdOld = Field.builder().id(1L).name("id").dataType("int").build(); + var tableOld = Table.builder().id(1L).name("product").nameSpace("com.acme").fields(List.of(fieldIdOld)).build(); + when(tableRepository.findById(anyLong())).thenReturn(Optional.of(tableOld)); + var fieldDto = new FieldDto(1L, "id", "int", null, null, null, null, null); + var tableDto = new TableDto(1L, "com.acme", "products", null, null, null, null, null, null, null, null, null, List.of(fieldDto), null, null, null); + var workflowDto = WorkflowDto.builder().table(tableDto).build(); + assertThrows(BadRequestException.class, () -> workflowService.saveAndRaisePr(workflowDto, 1L)); + } + + private void tableAlreadyChangedTest() { + var fieldIdOld = Field.builder().id(1L).name("id").dataType("int").build(); + var tableOld = Table.builder().id(1L).name("product").nameSpace("com.acme").isUserChanged(Boolean.TRUE).fields(List.of(fieldIdOld)).build(); + when(tableRepository.findById(anyLong())).thenReturn(Optional.of(tableOld)); + var fieldDto = new FieldDto(1L, "id", "int", null, null, null, null, null); + var tableDto = new TableDto(1L, "com.acme", "product", null, null, null, null, null, null, null, null, null, List.of(fieldDto), null, null, null); + var workflowDto = WorkflowDto.builder().table(tableDto).build(); + assertThrows(BadRequestException.class, () -> workflowService.saveAndRaisePr(workflowDto, 1L)); + } + + private void fieldAlreadyChangedTest() { + var fieldIdOld = Field.builder().id(1L).name("id").dataType("int").isUserChanged(Boolean.TRUE).build(); + var tableOld = 
Table.builder().id(1L).name("product").nameSpace("com.acme").fields(List.of(fieldIdOld)).build(); + when(tableRepository.findById(anyLong())).thenReturn(Optional.of(tableOld)); + var fieldDto = new FieldDto(1L, "id", "int", null, null, null, null, null); + var tableDto = new TableDto(1L, "com.acme", "product", null, null, null, null, null, null, null, null, null, List.of(fieldDto), null, null, null); + var workflowDto = WorkflowDto.builder().table(tableDto).build(); + assertThrows(BadRequestException.class, () -> workflowService.saveAndRaisePr(workflowDto, 1L)); + } + + private void newTableAlreadyExistsTest() { + when(modelService.findModelByNameAndNameSpace("com.acme", "product")).thenReturn(List.of(Model.builder().build())); + var fieldDto = new FieldDto(null, "id", "int", null, null, null, null, null); + var tableDto = new TableDto(null, "com.acme", "product", null, null, null, null, null, null, null, null, null, List.of(fieldDto), null, null, null); + var workflowDto = WorkflowDto.builder().table(tableDto).build(); + assertThrows(AlreadyExistException.class, () -> workflowService.saveAndRaisePr(workflowDto, 1L)); + } + + private void tableMetadataEmptyTest() { + when(schemaValidator.apply(any(Table.class))).thenReturn(new Result(null, null, Status.ERROR, List.of("Empty desc"))); + var fieldDto = new FieldDto(null, "id", "int", null, null, null, null, null); + var tableDto = new TableDto(null, "com.acme", "product", null, null, null, null, null, null, null, null, null, List.of(fieldDto), null, null, null); + var workflowDto = WorkflowDto.builder().table(tableDto).build(); + assertThrows(BadRequestException.class, () -> workflowService.saveAndRaisePr(workflowDto, 1L)); + } + + @Test + public void saveAndRaisePrFailTest() { + var clientRepo = ClientRepo.builder().id(1L).name("name").fullName("fullName").repoType(RepoType.PROTOBUF).connectId(1L).build(); + when(clientRepoService.getModel(anyLong())).thenReturn(clientRepo); + 
when(workflowRepository.save(any(Workflow.class))).thenReturn(Workflow.builder().id(1L).build()); + TableService tableService = new TableService(tableRepository, fieldRepostory, responseMessage, null, null, null, null); + ReflectionTestUtils.setField(workflowService, "tableService", tableService); + when(schemaValidator.apply(any(Table.class))).thenReturn(new Result(null, null, Status.SUCCESS, null)); + + fieldNameChangedTest(); + fieldDatatypeChangedTest(); + newFieldWithNameAlreayExistsTest(); + fieldMetadataEmptyTest(); + tableNameChangedTest(); + tableNameSpaceChangedTest(); + tableAlreadyChangedTest(); + fieldAlreadyChangedTest(); + tableMetadataEmptyTest(); + newTableAlreadyExistsTest(); + } +} diff --git a/connect/src/test/java/com/opsbeach/connect/metrics/service/SlaServiceTest.java b/connect/src/test/java/com/opsbeach/connect/metrics/service/SlaServiceTest.java new file mode 100644 index 0000000..751f241 --- /dev/null +++ b/connect/src/test/java/com/opsbeach/connect/metrics/service/SlaServiceTest.java @@ -0,0 +1,113 @@ +package com.opsbeach.connect.metrics.service; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.when; + +import java.util.List; +import java.util.Optional; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.ArgumentMatchers; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.mockito.Spy; +import org.springframework.data.jpa.domain.Specification; +import org.springframework.mock.web.MockHttpServletRequest; +import org.springframework.web.context.request.RequestContextHolder; +import org.springframework.web.context.request.ServletRequestAttributes; + +import com.opsbeach.connect.core.enums.ServiceType; +import com.opsbeach.connect.core.specification.IdSpecifications; +import com.opsbeach.connect.core.utils.Constants; +import 
com.opsbeach.connect.metrics.dto.SlaDto; +import com.opsbeach.connect.metrics.entity.Sla; +import com.opsbeach.connect.metrics.repository.SlaRepository; +import com.opsbeach.sharedlib.exception.AlreadyExistException; +import com.opsbeach.sharedlib.exception.RecordNotFoundException; +import com.opsbeach.sharedlib.response.ResponseMessage; +import com.opsbeach.sharedlib.response.SuccessCode; + +public class SlaServiceTest { + + @InjectMocks + private SlaService slaService; + + @Mock + private SlaRepository slaRepository; + + @Mock + private ResponseMessage responseMessage; + + @Spy + private IdSpecifications slaSpecifications; + + @BeforeEach + public void initMock() { + MockitoAnnotations.openMocks(this); + MockHttpServletRequest request = new MockHttpServletRequest(); + RequestContextHolder.setRequestAttributes(new ServletRequestAttributes(request)); + } + + private SlaDto getSlaDto() { + return SlaDto.builder().id(1L).type(ServiceType.PAGER_DUTY).build(); + } + + @Test + public void getAllTest() { + List slas = List.of(Sla.builder().type(ServiceType.PAGER_DUTY).build()); + when(slaRepository.findAll()).thenReturn(slas); + assertEquals(1, slaService.getAll().size()); + when(slaRepository.findAll()).thenReturn(null); + assertEquals(0, slaService.getAll().size()); + } + + @Test + public void updateTest() { + var slaDto = getSlaDto(); + var sla = slaDto.toDomin(slaDto); + when(slaRepository.findById(1L)).thenReturn(Optional.of(sla)); + when(slaRepository.save(ArgumentMatchers.any(Sla.class))).thenReturn(sla); + var response = slaService.update(slaDto); + assertEquals(slaDto.getType(), response.getType()); + } + + @Test + public void deleteTest() { + var slaDto = getSlaDto(); + var sla = slaDto.toDomin(slaDto); + when(slaRepository.findById(1L)).thenReturn(Optional.of(sla)); + when(slaRepository.save(ArgumentMatchers.any(Sla.class))).thenReturn(sla); + var response = slaService.delete(1L); + assertEquals(responseMessage.getSuccessMessage(SuccessCode.DELETED, 
Constants.SLA), response); + + assertThrows(RecordNotFoundException.class, () -> { slaService.delete(2L); }); + } + + @Test + public void addTest() { + var slaDto = getSlaDto(); + var sla = slaDto.toDomin(slaDto); + when(slaRepository.findOne(ArgumentMatchers.>any())).thenReturn(Optional.empty()); + when(slaRepository.save(ArgumentMatchers.any(Sla.class))).thenReturn(sla); + var response = slaService.add(slaDto); + assertEquals(slaDto.getType(), response.getType()); + + when(slaRepository.findOne(ArgumentMatchers.>any())).thenReturn(Optional.of(sla)); + assertThrows(AlreadyExistException.class, () -> { slaService.add(slaDto); }); + } + + @Test + public void getByTypeTest() { + var slaDto = getSlaDto(); + var sla = slaDto.toDomin(slaDto); + when(slaRepository.findOne(ArgumentMatchers.>any())).thenReturn(Optional.of(sla)); + var response = slaService.getByType(ServiceType.PAGER_DUTY); + assertEquals(slaDto.getType(), response.getType()); + + when(slaRepository.findOne(ArgumentMatchers.>any())).thenReturn(Optional.empty()); + assertThrows(RecordNotFoundException.class, () -> { slaService.getByType(ServiceType.GITHUB); }); + } +} diff --git a/connect/src/test/java/com/opsbeach/connect/scheduler/SchedulerTaskServiceTest.java b/connect/src/test/java/com/opsbeach/connect/scheduler/SchedulerTaskServiceTest.java new file mode 100644 index 0000000..d0f2bca --- /dev/null +++ b/connect/src/test/java/com/opsbeach/connect/scheduler/SchedulerTaskServiceTest.java @@ -0,0 +1,95 @@ +package com.opsbeach.connect.scheduler; + +import com.opsbeach.connect.task.entity.Task; +import com.opsbeach.sharedlib.exception.InvalidDataException; +import com.opsbeach.sharedlib.response.ResponseMessage; +import com.opsbeach.sharedlib.utils.DateUtil; +import com.opsbeach.connect.core.enums.ServiceType; +import com.opsbeach.connect.core.enums.TaskType; +import com.opsbeach.connect.github.service.GitHubService; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import 
org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.springframework.scheduling.TaskScheduler; +import org.springframework.scheduling.Trigger; +import org.springframework.test.context.junit.jupiter.SpringExtension; + +import java.time.LocalDateTime; +import java.util.Date; +import java.util.List; +import java.util.concurrent.ScheduledFuture; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.any; + +@ExtendWith(SpringExtension.class) +public class SchedulerTaskServiceTest { + @Mock + TaskScheduler scheduler; + + SchedulerTaskService schedulerTaskService; + + @Mock + ScheduledFuture t; + + @Mock + ResponseMessage responseMessage; + + @Mock + GitHubService gitHubService; + + @BeforeEach + public void setup() { + schedulerTaskService = new SchedulerTaskService(scheduler, responseMessage, gitHubService); + } + + @Test + public void shouldReturnEmptyTask() { + List taskIds = schedulerTaskService.getTaskIds(); + assertThat(taskIds).isEmpty(); + } + + @Test + public void shouldReturnTask() { + var date = DateUtil.convertLocalDateTimeToDate(LocalDateTime.now().plusSeconds(5)); + Trigger trigger = triggerContext -> date.toInstant(); + Task task = Task.builder().id(1L).build(); + Mockito.doReturn(t).when(scheduler).schedule( + any(Runnable.class), any(Trigger.class)); + schedulerTaskService.addTaskToScheduler(task, trigger); + List taskIds = schedulerTaskService.getTaskIds(); + assertThat(taskIds).isNotEmpty(); + assertEquals(1L, taskIds.stream().findFirst().get()); + } + + @Test + public void sendRequestTest() { + assertThrows(InvalidDataException.class, () -> schedulerTaskService.sendRequest(ServiceType.FRESH_DESK, null, null)); + + schedulerTaskService.sendRequest(ServiceType.GITHUB, TaskType.RENEWAL_ACCESS_TOKEN, 1L); + 
assertThrows(InvalidDataException.class, () -> schedulerTaskService.sendRequest(ServiceType.GITHUB, TaskType.CREATE_TICKET, 1L)); + } + + @Test + public void shouldRemoveTask() { + Trigger trigger = triggerContext -> new Date().toInstant(); + Task task = Task.builder().id(1L).build(); + Mockito.doReturn(t).when(scheduler).schedule( + any(Runnable.class), any(Trigger.class)); + schedulerTaskService.addTaskToScheduler(task, trigger); + List taskIds = schedulerTaskService.getTaskIds(); + assertThat(taskIds).isNotEmpty(); + schedulerTaskService.removeTaskFromScheduler(task.getId()); + List removedIds = schedulerTaskService.getTaskIds(); + assertThat(removedIds).isEmpty(); + + schedulerTaskService.removeTaskFromScheduler(2L); + } + + +} diff --git a/connect/src/test/java/com/opsbeach/connect/scheduler/SyncSchedulerTest.java b/connect/src/test/java/com/opsbeach/connect/scheduler/SyncSchedulerTest.java new file mode 100644 index 0000000..480a6ff --- /dev/null +++ b/connect/src/test/java/com/opsbeach/connect/scheduler/SyncSchedulerTest.java @@ -0,0 +1,52 @@ +package com.opsbeach.connect.scheduler; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doNothing; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import java.util.List; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.springframework.mock.web.MockHttpServletRequest; +import org.springframework.scheduling.Trigger; +import org.springframework.test.util.ReflectionTestUtils; +import org.springframework.web.context.request.RequestContextHolder; +import org.springframework.web.context.request.ServletRequestAttributes; + +import com.opsbeach.connect.task.entity.Task; +import com.opsbeach.connect.task.service.TaskService; + +public class SyncSchedulerTest { + + @InjectMocks + private SyncScheduler syncScheduler; 
+ + @Mock + private TaskService taskService; + + @Mock + private SchedulerTaskService schedulerTaskService; + + @BeforeEach + public void initMock() { + MockitoAnnotations.openMocks(this); + MockHttpServletRequest request = new MockHttpServletRequest(); + RequestContextHolder.setRequestAttributes(new ServletRequestAttributes(request)); + } + + @Test + public void configureTasksTest() { + syncScheduler.configureTasks(null); + List tasks = List.of(Task.builder().id(1L).build()); + when(taskService.getAllForScheduler()).thenReturn(tasks); + ReflectionTestUtils.setField(syncScheduler, "schedulerEnabled", true); + var schedulerTaskService = mock(SchedulerTaskService.class); + doNothing().when(schedulerTaskService).addTaskToScheduler(any(Task.class), any(Trigger.class)); + syncScheduler.configureTasks(null); + } +} diff --git a/connect/src/test/java/com/opsbeach/connect/schemata/graph/SchemaGraphTest.java b/connect/src/test/java/com/opsbeach/connect/schemata/graph/SchemaGraphTest.java new file mode 100644 index 0000000..baf6166 --- /dev/null +++ b/connect/src/test/java/com/opsbeach/connect/schemata/graph/SchemaGraphTest.java @@ -0,0 +1,111 @@ +package com.opsbeach.connect.schemata.graph; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; + +import org.apache.commons.collections4.SetUtils; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; + +import com.google.protobuf.Descriptors; +import com.opsbeach.connect.schemata.entity.Field; +import com.opsbeach.connect.schemata.entity.Table; +import com.opsbeach.connect.schemata.processor.protobuf.ProtoFileDescriptorSetLoader; +import 
com.opsbeach.connect.schemata.processor.protobuf.ProtoProcessor; +import com.opsbeach.sharedlib.exception.SchemaNotFoundException; + +public class SchemaGraphTest { + + static SchemaGraph graph; + + @BeforeAll + static void setUp() + throws IOException, Descriptors.DescriptorValidationException { + var stream = new FileInputStream(new File("src/test/resources/schema_1/descriptors/entities.desc")); + var protoFileDescriptorLoader = new ProtoFileDescriptorSetLoader(stream); + var parser = new ProtoProcessor(); + var schemaList = parser.parse(protoFileDescriptorLoader.loadDescriptors(), new HashMap<>()); + graph = new SchemaGraph(schemaList); + } + + @Test + public void testWithInvalidSchema() { + Assertions.assertThrows(SchemaNotFoundException.class, () -> graph.getSchema("User"), + "Schema not found was expected"); + } + + @Test + public void testWithValidSchema() { + var response = graph.getSchema("org.entities.College"); + assertEquals("College", response.getName()); + } + + @Test + public void getSchemataScoreTest() { + var response = graph.getSchemataScore("org.entities.Person"); + System.out.println(response); + assertEquals(0, response); + response = graph.getSchemataScore("org.entities.Department"); + System.out.println(response); + assertEquals(0.4, response); + response = graph.getSchemataScore("org.entities.College"); + System.out.println(response); + assertEquals(0, response); + response = graph.getSchemataScore("org.entities.City"); + System.out.println(response); + assertEquals(0, response); + response = graph.getSchemataScore("org.entities.Country"); + System.out.println(response); + assertEquals(0, response); + } + + @Test + public void getVertexPageRankScoreTest() { + assertEquals(0.3114570188661495, graph.getVertexPageRankScore("org.entities.Country")); + } + + @Test + public void getAllEntityVertexTest() { + var response = graph.getAllEntityVertex(); + assertEquals("Department", response.iterator().next().getName()); + } + + @Test + public void 
incomingVertexOfTest() { + var incomingVertex = graph.incomingVertexOf("org.entities.Department"); + var expectedVertex = Set.of("Person"); + var actualVertex = incomingVertex.stream().map(Table::getName).collect(Collectors.toSet()); + assertEquals(1, incomingVertex.size()); + assertTrue(SetUtils.isEqualSet(actualVertex, expectedVertex)); + } + + @Test + public void outgoingVertexOfTest() { + var outgoingVertex = graph.outgoingEntityVertexOf("org.entities.Person"); + var expectedVertex = Set.of("Department"); + var actualVertex = outgoingVertex.stream().map(Table::getName).collect(Collectors.toSet()); + assertEquals(1, outgoingVertex.size()); + assertTrue(SetUtils.isEqualSet(actualVertex, expectedVertex)); + } + + @Test + public void buildEdgeFail() { + var fields = List.of( + Field.builder().id(1L).name("name").dataType("string").isPrimitiveType(Boolean.TRUE).isDeleted(Boolean.FALSE).build(), + Field.builder().id(2L).name("Filter").dataType("com.acme.filter").isPrimitiveType(Boolean.FALSE).isDeleted(Boolean.TRUE).build(), + Field.builder().id(2L).name("Brand").dataType("com.acme.brand").isPrimitiveType(Boolean.FALSE).isDeleted(Boolean.FALSE).build() + ); + var table = Table.builder().name("product").nameSpace("com.acme").fields(fields).build(); + assertThrows(SchemaNotFoundException.class, () -> new SchemaGraph(List.of(table))); + } +} diff --git a/connect/src/test/java/com/opsbeach/connect/schemata/processor/avro/AvroSchemaTest.java b/connect/src/test/java/com/opsbeach/connect/schemata/processor/avro/AvroSchemaTest.java new file mode 100644 index 0000000..b85e6d6 --- /dev/null +++ b/connect/src/test/java/com/opsbeach/connect/schemata/processor/avro/AvroSchemaTest.java @@ -0,0 +1,147 @@ +package com.opsbeach.connect.schemata.processor.avro; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static 
org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.when; + +import java.io.IOException; +import java.util.List; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.springframework.mock.web.MockHttpServletRequest; +import org.springframework.web.context.request.RequestContextHolder; +import org.springframework.web.context.request.ServletRequestAttributes; + +import com.opsbeach.connect.github.entity.ClientRepo; +import com.opsbeach.connect.github.entity.Domain; +import com.opsbeach.connect.github.service.DomainService; +import com.opsbeach.connect.github.service.ModelService; +import com.opsbeach.connect.github.service.SchemaFileAuditService; +import com.opsbeach.connect.schemata.entity.DomainNode; +import com.opsbeach.connect.schemata.entity.Field; +import com.opsbeach.connect.schemata.entity.Table; +import com.opsbeach.connect.schemata.enums.SchemaType; +import com.opsbeach.connect.schemata.service.DomainNodeService; +import com.opsbeach.connect.schemata.service.TableService; +import com.opsbeach.sharedlib.exception.FileNotFoundException; +import com.opsbeach.sharedlib.exception.InvalidDataException; +import com.opsbeach.sharedlib.exception.RecordNotFoundException; +import com.opsbeach.sharedlib.response.ResponseMessage; + +public class AvroSchemaTest { + + @InjectMocks + private AvroSchema avroSchema; + + @Mock + private TableService tableService; + + @Mock + private ResponseMessage responseMessage; + + @Mock + private DomainNodeService domainNodeService; + + @Mock + private DomainService domainService; + + @Mock + private ModelService modelService; + + @Mock + private SchemaFileAuditService schemaFileAuditService; + + @BeforeEach + public void initMock() { + MockitoAnnotations.openMocks(this); + 
MockHttpServletRequest request = new MockHttpServletRequest(); + RequestContextHolder.setRequestAttributes(new ServletRequestAttributes(request)); + } + + private final String FILE_PATH = "src/test/resources/schema_1/avro/avro_testing.avsc"; + + private Table getTable() { + var primitiveField = Field.builder().id(1L).name("score").dataType("int").isPrimitiveType(Boolean.TRUE).isPii(null).isClassified(null).deprecated(null).build(); + var arrayField = Field.builder().id(2L).name("arrayfield").dataType("array").items("array").arrayField(Field.builder().id(3L).name("arrayField").dataType("array").items("string").build()).isPrimitiveType(Boolean.FALSE).build(); + var mapField = Field.builder().id(4L).name("mapFiled").dataType("map").values("map").mapField(Field.builder().id(5L).name("mapField").dataType("map").values("string").build()).isPrimitiveType(Boolean.FALSE).build(); + var unionField = Field.builder().id(6L).name("unionfield").dataType("union").unionTypes(List.of(Field.builder().id(7L).dataType("null").isPrimitiveType(Boolean.TRUE).build(), Field.builder().id(8L).dataType("int").isPrimitiveType(Boolean.TRUE).build())).isPrimitiveType(Boolean.FALSE).build(); + var tableField = Field.builder().id(9L).name("tablefiled").dataType("Marks").contain(Table.builder().id(10L).name("Marks").schemaType(null).fields(List.of(Field.builder().id(11L).name("Marks").dataType("string").isPrimitiveType(Boolean.TRUE).build())).build()).isPrimitiveType(Boolean.FALSE).build(); + String[] enums = {"active","deactive"}; + var enumField = Field.builder().id(12L).name("enumField").dataType("enum").symbols(enums).build(); + var arrayOfUnion = Field.builder().id(13L).name("ArrayOfUnion").dataType("array").items("union").unionTypes(unionField.getUnionTypes()).build(); + var arrayOfEnum = Field.builder().id(14L).name("ArrayOfEnum").dataType("array").items("enum").symbols(enums).build(); + var arrayOfObject = 
Field.builder().id(15L).name("ArrayOfObject").dataType("array").items("table").contain(tableField.getContain()).build(); + var fixedField = Field.builder().id(16L).name("fixedField").dataType("fixed").size(10).build(); + var enumField2 = Field.builder().id(17L).name("enumField2").dataType("enum").defaultValue("active").symbols(enums).build(); + var arrayOfMap = Field.builder().id(18L).name("arrayOfMap").dataType("array").items("map").mapField(Field.builder().id(19L).dataType("map").values("string").build()).build(); + var arrayOfFixed = Field.builder().id(19L).name("arrayOfFixed").dataType("array").items("fixed").size(10).build(); + var mapOfArray = Field.builder().id(20L).name("mapOfArray").dataType("map").values("array").arrayField(arrayField).build(); + var mapOfUnion = Field.builder().id(21L).name("mapOfUnion").dataType("map").values("union").unionTypes(unionField.getUnionTypes()).build(); + var mapOfEnum = Field.builder().id(22L).name("mapOfEnum").dataType("map").values("enum").symbols(enums).build(); + var mapOfObject = Field.builder().id(23L).name("mapOfObject").dataType("map").values("table").contain(tableField.getContain()).build(); + var mapOfFixed = Field.builder().id(24L).name("mapOfFixed").dataType("map").values("fixed").size(10).build(); + var intField = Field.builder().id(25L).name("intField").dataType("int").defaultValue("0").isPrimitiveType(Boolean.TRUE).build(); + var floatField = Field.builder().id(25L).name("floatField").dataType("float").defaultValue("0.0").isPrimitiveType(Boolean.TRUE).build(); + var longField = Field.builder().id(25L).name("longField").dataType("long").defaultValue("0").isPrimitiveType(Boolean.TRUE).build(); + var doubleField = Field.builder().id(25L).name("doubleField").dataType("double").defaultValue("0.0").isPrimitiveType(Boolean.TRUE).build(); + var unionOfUnion = Field.builder().id(26L).name("unionOfUnion").dataType("union").isPrimitiveType(Boolean.FALSE).unionTypes(List.of(fixedField)).build(); + + var fields = 
List.of(primitiveField, arrayField, unionField, mapField, tableField, enumField, arrayOfUnion, arrayOfEnum, arrayOfObject, fixedField, enumField2, + arrayOfMap, arrayOfFixed, mapOfArray, mapOfUnion, mapOfEnum, mapOfObject, mapOfFixed, intField, floatField, longField, doubleField, unionOfUnion); + return Table.builder().id(12L).name("student").schemaType(SchemaType.ENTITY).fields(fields).complianceOwner("compliance owner") + .owner("owner").domain("domain").channel("channel").email("email").status("active") + .qualityRuleBase("base rule").subscribers(enums).build(); + } + + @Test + public void getTablesByPathTest() throws IOException { + when(tableService.addTable(any(Table.class))).thenReturn(Table.builder().id(1L).build()); + when(tableService.findByNameAndNameSpace("mailing_address", "eu.driver.model.sim.support")).thenReturn(Table.builder().id(2L).build()); + var response = avroSchema.getTables(FILE_PATH, Boolean.TRUE); + assertEquals(1L, response.get(response.size() - 1).getId()); + + response = avroSchema.getTables(FILE_PATH, Boolean.FALSE); + assertNull(response.get(response.size() - 1).getId()); + + assertThrows(InvalidDataException.class, () -> { avroSchema.getTables("/avro/avro_testing.avcc", Boolean.TRUE); }); + } + + @Test + public void getFileContentTest() { + var response = avroSchema.getFileContent(getTable()); + System.out.println(response); + } + + @Test + public void parseFolderTest() { + var folderPath = "src/test/resources/schema_1"; + var clientRepo = ClientRepo.builder().id(2L).clientId(1L).defaultBranch("branch").fullName("fullName").build(); + when(domainNodeService.addDomainNode(anyString(), anyLong(), anyLong())).thenReturn(DomainNode.builder().id(2L).build()); + when(domainService.addModel(any(Domain.class))).thenReturn(Domain.builder().id(1L).build()); + when(tableService.addTable(any(Table.class))).thenReturn(Table.builder().id(1L).build()); + when(tableService.findByNameAndNameSpace("mailing_address", 
"eu.driver.model.sim.support")).thenReturn(Table.builder().id(2L).build()); + avroSchema.parseFolder(folderPath, clientRepo); + + when(tableService.findByNameAndNameSpace("mailing_address", "eu.driver.model.sim.support")).thenThrow(RecordNotFoundException.class); + assertThrows(FileNotFoundException.class, () -> avroSchema.parseFolder(folderPath, clientRepo)); + } + + @Test + public void gettablesByContentTest() throws IOException { + var content = avroSchema.getFileContent(getTable()).getBytes(); + when(tableService.addTable(any(Table.class))).thenReturn(Table.builder().id(1L).build()); + when(tableService.findByNameAndNameSpace("mailing_address", "eu.driver.model.sim.support")).thenReturn(Table.builder().id(2L).build()); + var response = avroSchema.getTables(content, Boolean.FALSE); + assertEquals(2, response.size()); + assertNull(response.get(response.size() - 1).getId()); + } +} + diff --git a/connect/src/test/java/com/opsbeach/connect/schemata/processor/json/JsonSchemaTest.java b/connect/src/test/java/com/opsbeach/connect/schemata/processor/json/JsonSchemaTest.java new file mode 100644 index 0000000..96a2579 --- /dev/null +++ b/connect/src/test/java/com/opsbeach/connect/schemata/processor/json/JsonSchemaTest.java @@ -0,0 +1,156 @@ +package com.opsbeach.connect.schemata.processor.json; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.when; + +import java.io.IOException; +import java.util.List; + +import org.apache.avro.SchemaParseException; +import org.hibernate.tool.schema.extract.spi.SchemaExtractionException; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.InjectMocks; +import 
org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.springframework.mock.web.MockHttpServletRequest; +import org.springframework.web.context.request.RequestContextHolder; +import org.springframework.web.context.request.ServletRequestAttributes; + +import com.opsbeach.connect.github.entity.ClientRepo; +import com.opsbeach.connect.github.entity.Domain; +import com.opsbeach.connect.github.service.DomainService; +import com.opsbeach.connect.github.service.ModelService; +import com.opsbeach.connect.github.service.SchemaFileAuditService; +import com.opsbeach.connect.schemata.entity.DomainNode; +import com.opsbeach.connect.schemata.entity.Field; +import com.opsbeach.connect.schemata.entity.Table; +import com.opsbeach.connect.schemata.enums.SchemaType; +import com.opsbeach.connect.schemata.service.DomainNodeService; +import com.opsbeach.connect.schemata.service.TableService; +import com.opsbeach.sharedlib.exception.InvalidDataException; +import com.opsbeach.sharedlib.response.ResponseMessage; + +public class JsonSchemaTest { + + @InjectMocks + private JsonSchema jsonSchema; + + @Mock + private TableService tableService; + + @Mock + private DomainNodeService domainNodeService; + + @Mock + private DomainService domainService; + + @Mock + private ModelService modelService; + + @Mock + private ResponseMessage responseMessage; + + @Mock + private SchemaFileAuditService schemaFileAuditService; + + @BeforeEach + public void initMock() { + MockitoAnnotations.openMocks(this); + MockHttpServletRequest request = new MockHttpServletRequest(); + RequestContextHolder.setRequestAttributes(new ServletRequestAttributes(request)); + } + + private Table getTable() { + final var JSON_SCHEMA_ID = "https://example.com/product/schema.json"; + var primitiveField = Field.builder().id(1L).name("score").dataType("int").isPrimitiveType(Boolean.TRUE).isPii(null).isClassified(null).deprecated(null).build(); + var arrayField = 
Field.builder().id(2L).name("arrayfield").dataType("array").items("array").arrayField(Field.builder().id(3L).name("arrayField").dataType("array").items("string").build()).isPrimitiveType(Boolean.FALSE).build(); + var unionField = Field.builder().id(6L).name("unionfield").dataType("union").unionTypes(List.of(Field.builder().id(7L).dataType("null").isPrimitiveType(Boolean.TRUE).build(), Field.builder().id(8L).dataType("int").isPrimitiveType(Boolean.TRUE).build())).isPrimitiveType(Boolean.FALSE).build(); + String[] requiredFields = {"Marks"}; + var tableField = Field.builder().id(9L).name("tablefiled").dataType("Marks").contain(Table.builder().id(10L).name("Marks").schemaType(null).jsonSchemaId(JSON_SCHEMA_ID).fields(List.of(Field.builder().id(11L).name("Marks").dataType("string").isPrimitiveType(Boolean.TRUE).build())).requiredFields(requiredFields).build()).isPrimitiveType(Boolean.FALSE).build(); + String[] enums = {"active","deactive"}; + var enumField = Field.builder().id(12L).name("enumField").dataType("enum").symbols(enums).build(); + var arrayOfUnion = Field.builder().id(13L).name("ArrayOfUnion").dataType("array").items("union").unionTypes(unionField.getUnionTypes()).build(); + var arrayOfEnum = Field.builder().id(14L).name("ArrayOfEnum").dataType("array").items("enum").symbols(new String[0]).build(); + var arrayOfObject = Field.builder().id(15L).name("ArrayOfObject").dataType("array").items("table").contain(tableField.getContain()).build(); + var enumField2 = Field.builder().id(17L).name("enumField2").dataType("enum").defaultValue("active").symbols(enums).build(); + var intField = Field.builder().id(25L).name("intField").dataType("int").defaultValue("0").isPrimitiveType(Boolean.TRUE).build(); + var floatField = Field.builder().id(25L).name("floatField").dataType("float").defaultValue("0.0").isPrimitiveType(Boolean.TRUE).build(); + var longField = Field.builder().id(25L).name("longField").dataType("long").defaultValue("0").isPrimitiveType(Boolean.TRUE).build(); 
+ var doubleField = Field.builder().id(25L).name("doubleField").dataType("double").defaultValue("0.0").isPrimitiveType(Boolean.TRUE).build(); + var unionOfUnion = Field.builder().id(26L).name("unionOfUnion").dataType("union").isPrimitiveType(Boolean.FALSE).unionTypes(List.of(primitiveField)).build(); + + var fields = List.of(primitiveField, arrayField, unionField, tableField, enumField, arrayOfUnion, arrayOfEnum, arrayOfObject, + enumField2, intField, floatField, longField, doubleField, unionOfUnion); + return Table.builder().id(12L).name("student").schemaType(SchemaType.ENTITY).fields(fields).complianceOwner("compliance owner") + .owner("owner").domain("domain").channel("channel").email("email").status("active") + .qualityRuleBase("base rule").subscribers(enums).jsonSchemaId(JSON_SCHEMA_ID).build(); + } + + final String FILE_PATH1 = "src/test/resources/schema_1/json/productListFiltered.json"; + final String FILE_PATH2 = "src/test/resources/schema_1/json/product.json"; + + @Test + public void buildSchemaTest() throws IOException { + when(tableService.addTable(any(Table.class))).thenReturn(Table.builder().id(1L).name("table").build()); + when(tableService.addField(any(Field.class))).thenReturn(Field.builder().id(1L).name("product").build()); + var response = jsonSchema.getTables(FILE_PATH1, Boolean.FALSE); + assertEquals(3, response.size()); + assertNull(response.get(0).getId()); + response = jsonSchema.getTables(FILE_PATH2, Boolean.FALSE); + assertEquals("brand", response.get(0).getName()); + assertEquals(3, response.size()); + assertNull(response.get(0).getId()); + } + + @Test + public void getFileContentTest() throws IOException { + var table = getTable(); + var response = jsonSchema.getFileContent(table); + System.out.println(response); + + var filterTable = Table.builder().id(2L).name("filter").nameSpace("schemas.browsing.product_list_filtered") + .fields(List.of(Field.builder().id(1L).name("id").dataType("interger").build())).build(); + var field = 
Field.builder().id(1L).name("filter").dataType("schemas.browsing.product_list_filtered.filter") + .jsonSchemaRefId("#/definitions/filter").contain(filterTable).build(); + var field2 = Field.builder().id(2L).name("brand").dataType("schemas.product").jsonSchemaRefId("/schemas/product").contain(filterTable).build(); + var arrayField = Field.builder().id(1L).name("filters").dataType("array").items("schemas.browsing.product_list_filtered.filter") + .contain(filterTable).jsonSchemaRefId("#/definitions/filter").build(); + var arrayField2 = Field.builder().id(2L).name("brands").dataType("array").items("schemas.product").jsonSchemaRefId("/schemas/product").contain(filterTable).build(); + table.setFields(List.of(field, field2, arrayField, arrayField2)); + response = jsonSchema.getFileContent(table); + System.out.println(response); + } + + @Test + public void getTablesByContentTest() throws IOException { + var content = jsonSchema.getFileContent(getTable()).getBytes(); + var response = jsonSchema.getTables(content, Boolean.FALSE); + assertEquals(2, response.size()); + assertNull(response.get(response.size() - 1).getId()); + + assertThrows(SchemaExtractionException.class, () -> jsonSchema.getTables("{ \"type\": \"object\" }".getBytes(), Boolean.FALSE)); + assertThrows(SchemaExtractionException.class, () -> jsonSchema.getTables("{ \"$schema\": \"http://json-schema.org/draft-07/schema#\" }".getBytes(), Boolean.FALSE)); + assertThrows(SchemaExtractionException.class, () -> jsonSchema.getTables("{ \"$schema\": \"http://json-schema.org/draft/schema#\" }".getBytes(), Boolean.FALSE)); + assertThrows(InvalidDataException.class, () -> jsonSchema.getTables("{ \"$schema\": \"http://json-schema.org/draft-07/schema#\", \"$id\": \"invalid id\" }".getBytes(), Boolean.FALSE)); + } + + @Test + public void parseFolderTest() { + var folderPath = "src/test/resources/schema_1"; + var clientRepo = ClientRepo.builder().id(2L).clientId(1L).defaultBranch("branch").fullName("fullName").build(); + 
when(domainNodeService.addDomainNode(anyString(), anyLong(), anyLong())).thenReturn(DomainNode.builder().id(2L).build()); + when(domainService.addModel(any(Domain.class))).thenReturn(Domain.builder().id(1L).build()); + when(tableService.addTable(any(Table.class))).thenReturn(Table.builder().id(1L).name("table").build()); + when(tableService.addField(any(Field.class))).thenReturn(Field.builder().id(1L).name("product").build()); + jsonSchema.parseFolder(folderPath, clientRepo); + + when(tableService.addTable(any(Table.class))).thenThrow(SchemaParseException.class); + assertThrows(SchemaParseException.class, () -> jsonSchema.parseFolder(folderPath, clientRepo)); + } +} diff --git a/connect/src/test/java/com/opsbeach/connect/schemata/processor/protobuf/ProtoSchemaTest.java b/connect/src/test/java/com/opsbeach/connect/schemata/processor/protobuf/ProtoSchemaTest.java new file mode 100644 index 0000000..b924356 --- /dev/null +++ b/connect/src/test/java/com/opsbeach/connect/schemata/processor/protobuf/ProtoSchemaTest.java @@ -0,0 +1,182 @@ +package com.opsbeach.connect.schemata.processor.protobuf; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyList; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.when; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.nio.file.StandardCopyOption; +import java.util.List; +import java.util.Map; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.springframework.mock.web.MockHttpServletRequest; +import 
org.springframework.test.util.ReflectionTestUtils; +import org.springframework.web.context.request.RequestContextHolder; +import org.springframework.web.context.request.ServletRequestAttributes; +import org.yaml.snakeyaml.Yaml; + +import com.fasterxml.jackson.core.exc.StreamReadException; +import com.fasterxml.jackson.databind.DatabindException; +import com.opsbeach.connect.github.entity.ClientRepo; +import com.opsbeach.connect.github.entity.Domain; +import com.opsbeach.connect.github.entity.Model; +import com.opsbeach.connect.github.entity.SchemaFileAudit; +import com.opsbeach.connect.github.service.ClientRepoService; +import com.opsbeach.connect.github.service.DomainService; +import com.opsbeach.connect.github.service.ModelService; +import com.opsbeach.connect.github.service.SchemaFileAuditService; +import com.opsbeach.connect.schemata.entity.DomainNode; +import com.opsbeach.connect.schemata.entity.Field; +import com.opsbeach.connect.schemata.entity.Table; +import com.opsbeach.connect.schemata.enums.SchemaType; +import com.opsbeach.connect.schemata.service.DomainNodeService; +import com.opsbeach.connect.schemata.service.TableService; +import com.opsbeach.sharedlib.dto.ClientDto; +import com.opsbeach.sharedlib.exception.SchemaParserException; +import com.opsbeach.sharedlib.security.ApplicationConfig; +import com.opsbeach.sharedlib.service.GoogleCloudService; +import com.opsbeach.sharedlib.utils.StringUtil; + +public class ProtoSchemaTest { + + @InjectMocks + private ProtoSchema protoSchema; + @Mock + private DomainNodeService domainNodeService; + @Mock + private DomainService domainService; + @Mock + private TableService tableService; + @Mock + private SchemaFileAuditService schemaFileAuditService; + @Mock + private ModelService modelService; + @Mock + private ClientRepoService clientRepoService; + @Mock + private ApplicationConfig applicationConfig; + @Mock + private GoogleCloudService googleCloudService; + + @BeforeEach + public void initMock() { + 
MockitoAnnotations.openMocks(this); + MockHttpServletRequest request = new MockHttpServletRequest(); + RequestContextHolder.setRequestAttributes(new ServletRequestAttributes(request)); + } + + private Map gcloud; + private Object homePath; + + @BeforeEach + public void init() throws StreamReadException, DatabindException, IOException { + InputStream inputStream = new FileInputStream(new File("src/test/resources/application-test.yaml")); + Yaml yaml = new Yaml(); + Map> data = yaml.load(inputStream); + gcloud = data.get("gcloud"); + homePath = data.get("home-path"); + } + + @Test + public void parseFolderTest() { + var folderPath = "src/test/resources/schema_1/protobuf"; + var clientRepo = ClientRepo.builder().id(2L).clientId(1L).defaultBranch("branch").fullName("fullName").build(); + when(domainNodeService.addDomainNode(anyString(), anyLong(), anyLong())).thenReturn(DomainNode.builder().id(2L).build()); + when(domainService.addModel(any(Domain.class))).thenReturn(Domain.builder().id(1L).build()); + when(tableService.addTables(anyList())).thenAnswer(invocation -> invocation.getArgument(0)); + protoSchema.parseFolder(folderPath, clientRepo); + assertThrows(SchemaParserException.class, () -> protoSchema.parseFolder("src/test/resources/schema_1/protobuf/repository", clientRepo)); + } + + private Table getTableUser() { + var id = Field.builder().id(2L).name("id").dataType("INT32").description("Primary Key").isPii(Boolean.FALSE).isClassified(Boolean.FALSE).deprecated(Boolean.FALSE).build(); + var timeZone = Field.builder().id(1L).name("timezone").dataType("STRING").description("preferred time zone for the user").isPii(Boolean.FALSE).isClassified(Boolean.FALSE).deprecated(Boolean.FALSE).build(); + String[] symbols = {"EMAIL", "SOCIAL_MEDIA"}; + var origin = Field.builder().id(3L).name("origin").dataType("ENUM").description("origin source of the campaign") + .enumFilePath("main/schema/cam.proto").enumName("CampaignOrigin").enumPackage("com.schemata") + 
.isPii(Boolean.FALSE).isClassified(Boolean.FALSE).deprecated(Boolean.FALSE).symbols(symbols).build(); + var is_active = Field.builder().id(4L).name("is_active").dataType("BOOL").description("email of user").isPii(Boolean.FALSE).isClassified(Boolean.FALSE).deprecated(Boolean.FALSE).build(); + symbols[0] = "VIEW,READ_REVIEW"; symbols[1] = "VIEW_DESCRIPTION"; + var activityType = Field.builder().id(5L).name("activity_type").dataType("ENUM").description("Type of the user activity") + .enumFilePath("main/schema/schemata/protobuf/schemata.proto").enumName("ActivityType").enumPackage("org.schemata.schema") + .isPii(Boolean.FALSE).isClassified(Boolean.FALSE).deprecated(Boolean.FALSE).symbols(symbols).build(); + var fields = List.of(id, timeZone, origin, is_active, activityType); + String[] subscribers = {"Customer Support Team", "sales team"}; + return Table.builder().id(1L).name("User").nameSpace("com.schemata").schemaType(SchemaType.ENTITY).fields(fields).complianceOwner("compliance owner") + .owner("owner").domain("domain").channel("channel").email("email").status("active") + .qualityRuleBase("base rule").subscribers(subscribers).build(); + } + + private Table getTableUserActivityAggregate() { + var user = Field.builder().id(1L).name("user").dataType("com.schemata.User").description("User entity reference").isPii(Boolean.FALSE).isClassified(Boolean.FALSE).deprecated(Boolean.FALSE).build(); + String[] symbols = {"VIEW,READ_REVIEW","VIEW_DESCRIPTION"}; + var activityType = Field.builder().id(2L).name("activity_type").dataType("ENUM").description("Type of the user activity") + .enumFilePath("main/schema/user.proto").enumName("UserActivityType").enumPackage("com.schemata") + .isPii(Boolean.FALSE).isClassified(Boolean.FALSE).deprecated(Boolean.FALSE).symbols(symbols).build(); + var product = Field.builder().id(3L).name("product").dataType("com.schemata.Product").description("Product entity 
reference").isPii(Boolean.FALSE).isClassified(Boolean.FALSE).deprecated(Boolean.FALSE).build(); + var timestamp = Field.builder().id(4L).name("timestamp").dataType("google.protobuf.Timestamp").description("timestamp").build(); + symbols[0] = "READ_REVIEW"; symbols[1] = "VIEW_DESCRIPTION"; + var productType = Field.builder().id(2L).name("product_type").dataType("ENUM").description("Type of the Product") + .enumFilePath("main/schema/product/product.proto").enumName("ProductType").enumPackage("org.schemata") + .isPii(Boolean.FALSE).isClassified(Boolean.FALSE).deprecated(Boolean.FALSE).symbols(symbols).build(); + var fields = List.of(user, product, activityType, timestamp, productType); + return Table.builder().id(2L).name("UserActivityAggregate").nameSpace("com.schemata").schemaType(null) + .fields(fields).owner("owner").domain("domain").status(null).build(); + } + + @Test + public void generateSchemaTest() { + var userTable = getTableUser(); + var userActivityAggregateTable = getTableUserActivityAggregate(); + var userFilePath = "https://github.com/opsbeach/schemata_protobuf/tree/main/src/main/schema/user.proto"; + var userSchemaFileAudit = SchemaFileAudit.builder().id(1L).path(userFilePath).build(); + when(schemaFileAuditService.getByModelNodeId(userTable.getId())).thenReturn(List.of(userSchemaFileAudit)); + var userModel = Model.builder().id(1L).nodeId(userTable.getId()).build(); + var userActivityAggregateModel = Model.builder().id(2L).nodeId(userActivityAggregateTable.getId()).build(); + when(modelService.findBySchemaFileAudit(userSchemaFileAudit.getId())).thenReturn(List.of(userModel, userActivityAggregateModel)); + when(tableService.findAllById(anyList())).thenReturn(List.of(userTable, userActivityAggregateTable)); + when(schemaFileAuditService.filterFields(any(Table.class))).thenAnswer(invocation -> invocation.getArgument(0)); + + when(tableService.findByNameAndNameSpace("User", "com.schemata")).thenReturn(userTable); + 
when(schemaFileAuditService.getByModelNodeId(userTable.getId())).thenReturn(List.of(userSchemaFileAudit)); + + var productFilePath = "https://github.com/opsbeach/schemata_protobuf/tree/main/src/main/schema/Product/product.proto"; + var productSchemaFileAudit = SchemaFileAudit.builder().id(1L).path(productFilePath).build(); + when(tableService.findByNameAndNameSpace("Product", "com.schemata")).thenReturn(Table.builder().id(108L).build()); + when(schemaFileAuditService.getByModelNodeId(108L)).thenReturn(List.of(productSchemaFileAudit)); + var response = protoSchema.generateSchema(userTable.getId()); + assertEquals(1, response.values().size()); + } + + @Test + public void getTablesOfFilePathsTest() throws IOException { + when(clientRepoService.getClient()).thenReturn(ClientDto.builder().name("opsbeach").build()); + var clientRepo = ClientRepo.builder().id(2L).clientId(1L).defaultBranch("branch").fullName("fullName").build(); + String[] filePaths = {"src/main/schema/Product/product.proto", "/src/org/schemata/protobuf/schemata.proto"}; + createZipFile(clientRepo); + when(applicationConfig.getGcloud()).thenReturn(gcloud); + ReflectionTestUtils.setField(protoSchema, "homePath", homePath); + var response = protoSchema.getTablesOfFilePaths(filePaths, clientRepo, "develop"); + assertEquals(2, response.get(filePaths[0]).size()); + } + + private void createZipFile(ClientRepo clientRepo) throws IOException { + var repoFolderPath = StringUtil.constructStringEmptySeparator(homePath.toString(), clientRepo.getFullName(), "-delta"); + new File(repoFolderPath).mkdir(); + Files.copy(Paths.get("src/test/resources/sampleRepo/proto-repo.tar.gz"), Paths.get(repoFolderPath.concat("/repo.tar.gz")), StandardCopyOption.REPLACE_EXISTING); + } +} diff --git a/connect/src/test/java/com/opsbeach/connect/schemata/service/DomainNodeServiceTest.java b/connect/src/test/java/com/opsbeach/connect/schemata/service/DomainNodeServiceTest.java new file mode 100644 index 0000000..b5c8b69 --- /dev/null +++ 
b/connect/src/test/java/com/opsbeach/connect/schemata/service/DomainNodeServiceTest.java @@ -0,0 +1,158 @@ +package com.opsbeach.connect.schemata.service; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyList; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.Mockito.when; + +import java.util.List; +import java.util.Optional; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.springframework.mock.web.MockHttpServletRequest; +import org.springframework.web.context.request.RequestContextHolder; +import org.springframework.web.context.request.ServletRequestAttributes; + +import com.opsbeach.connect.schemata.dto.SchemaVisualizerDto; +import com.opsbeach.connect.schemata.entity.DomainNode; +import com.opsbeach.connect.schemata.entity.Field; +import com.opsbeach.connect.schemata.entity.Organization; +import com.opsbeach.connect.schemata.entity.Table; +import com.opsbeach.connect.schemata.enums.SchemaType; +import com.opsbeach.connect.schemata.repository.DomainNodeRepository; +import com.opsbeach.sharedlib.exception.RecordNotFoundException; +import com.opsbeach.sharedlib.response.ResponseMessage; + +public class DomainNodeServiceTest { + + @InjectMocks + private DomainNodeService domainNodeService; + + @Mock + private DomainNodeRepository domainNodeRepository; + + @Mock + private OrganizationService organizationService; + + @Mock + private TableService tableService; + + @Mock + private ResponseMessage responseMessage; + + @BeforeEach + public void initMock() { + MockitoAnnotations.openMocks(this); + MockHttpServletRequest request = new MockHttpServletRequest(); + 
RequestContextHolder.setRequestAttributes(new ServletRequestAttributes(request)); + } + + private Table getTable() { + var primitiveField = Field.builder().id(1L).name("score").dataType("int").isPrimitiveType(Boolean.TRUE).build(); + var arrayField = Field.builder().id(2L).name("array field").dataType("array").items("array").arrayField(Field.builder().id(3L).name("array field").dataType("array").items("string").build()).isPrimitiveType(Boolean.FALSE).build(); + var mapField = Field.builder().id(4L).name("map filed").dataType("map").values("map").mapField(Field.builder().id(5L).name("map field").dataType("map").values("string").build()).isPrimitiveType(Boolean.FALSE).build(); + var unionField = Field.builder().id(6L).name("union field").dataType("union").unionTypes(List.of(Field.builder().id(7L).dataType(null).build(), Field.builder().id(8L).dataType("int").build())).isPrimitiveType(Boolean.FALSE).build(); + var tableField = Field.builder().id(9L).name("table filed").dataType("Marks").contain(Table.builder().id(10L).name("Marks").schemaType(SchemaType.ENTITY).fields(List.of(Field.builder().id(11L).dataType("string").build())).build()).isPrimitiveType(Boolean.FALSE).build(); + var fields = List.of(primitiveField, arrayField, unionField, mapField, tableField); + return Table.builder().id(12L).name("student").schemaType(SchemaType.ENTITY).fields(fields).build(); + } + + private DomainNode getDomain() { + return DomainNode.builder().id(1L).name("analytics").tables(List.of(getTable())).build(); + } + + @Test + public void addTest() { + var domain = getDomain(); + var organization = Organization.builder().id(2L).name("OpsBeach").domains(List.of()).build(); + when(organizationService.get(anyLong())).thenReturn(organization); + when(organizationService.update(any(Organization.class))).thenReturn(organization); + var response = domainNodeService.add(domain, organization.getId()); + assertEquals(response.getName(), domain.getName()); + 
organization.setDomains(List.of(domain)); + when(organizationService.get(anyLong())).thenReturn(organization); + response = domainNodeService.add(domain, organization.getId()); + assertEquals(response.getName(), domain.getName()); + } + + @Test + public void getTestFail() { + var domain = getDomain(); + when(domainNodeRepository.findById(1L)).thenReturn(Optional.of(domain)); + var response = domainNodeService.get(1L); + assertEquals(response.getId(), domain.getId()); + assertThrows(RecordNotFoundException.class, () -> { domainNodeService.get(2L); }); + } + + @Test + public void updateTest() { + var domain = getDomain(); + when(domainNodeRepository.findById(anyLong())).thenReturn(Optional.of(domain)); + when(domainNodeRepository.save(domain)).thenReturn(domain); + var response = domainNodeService.update(domain); + assertEquals(response.getName(), domain.getName()); + + } + + @Test + public void getAllTest() { + var organization = Organization.builder().domains(List.of()).build(); + when(organizationService.get(anyLong())).thenReturn(organization); + var response = domainNodeService.getAll(1L); + assertEquals(0, response.size()); + } + + @Test + public void getTablesByDominTest() { + var domain = getDomain(); + when(domainNodeRepository.findById(anyLong())).thenReturn(Optional.of(domain)); + var response = domainNodeService.getTablesByDominId(1L); + assertEquals(domain.getTables().get(0).getName(), response.get(0).getName()); + domain.setTables(List.of()); + when(domainNodeRepository.findById(anyLong())).thenReturn(Optional.of(domain)); + response = domainNodeService.getTablesByDominId(1L); + assertEquals(0, response.size()); + } + + @Test + public void getSchemaVisualizerByDomainTest() { + var domainNode = getDomain(); + when(domainNodeRepository.findById(anyLong())).thenReturn(Optional.of(domainNode)); + when(tableService.buildSchemaVisualizerDto(anyList())).thenReturn(SchemaVisualizerDto.builder().build()); + var response = 
domainNodeService.getSchemaVisualizerByDomain(1L); + assertNull(response.getTables()); + } + + @Test + public void getSchemaScoreTest() { + var table = getTable(); + var domain = getDomain(); + domain.setTables(List.of(table, table.getFields().get(4).getContain())); + when(domainNodeRepository.findById(anyLong())).thenReturn(Optional.of(domain)); + var response = domainNodeService.getSchemaScore(1L, table.getName()); + assertEquals(1.0, response); + } + + @Test + public void addDomainNodeTest() { + var organization = Organization.builder().id(2L).name("OpsBeach").domains(List.of()).build(); + when(organizationService.getByClientId(anyLong())).thenReturn(organization); + when(organizationService.get(anyLong())).thenReturn(organization); + when(organizationService.update(any(Organization.class))).thenReturn(organization); + + var response = domainNodeService.addDomainNode("domain", 1L, 2L); + assertEquals("domain", response.getName()); + } + + @Test + public void deleteByClientRepoIdTest() { + domainNodeService.deleteByClientRepoId(1L); + } +} diff --git a/connect/src/test/java/com/opsbeach/connect/schemata/service/OrganizationServiceTest.java b/connect/src/test/java/com/opsbeach/connect/schemata/service/OrganizationServiceTest.java new file mode 100644 index 0000000..d8d599b --- /dev/null +++ b/connect/src/test/java/com/opsbeach/connect/schemata/service/OrganizationServiceTest.java @@ -0,0 +1,83 @@ +package com.opsbeach.connect.schemata.service; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.Mockito.when; + +import java.util.List; +import java.util.Optional; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import 
org.springframework.mock.web.MockHttpServletRequest; +import org.springframework.web.context.request.RequestContextHolder; +import org.springframework.web.context.request.ServletRequestAttributes; + +import com.opsbeach.connect.schemata.entity.Organization; +import com.opsbeach.connect.schemata.repository.OrganizationRepository; +import com.opsbeach.sharedlib.exception.RecordNotFoundException; +import com.opsbeach.sharedlib.response.ResponseMessage; + +public class OrganizationServiceTest { + + @InjectMocks + private OrganizationService organizationService; + + @Mock + private OrganizationRepository organizationRepository; + + @Mock + private ResponseMessage responseMessage; + + @BeforeEach + public void initMock() { + MockitoAnnotations.openMocks(this); + MockHttpServletRequest request = new MockHttpServletRequest(); + RequestContextHolder.setRequestAttributes(new ServletRequestAttributes(request)); + } + + private Organization getOrg() { + return Organization.builder().id(1L).name("OpsBeach").build(); + } + + @Test + public void addTest() { + var organization = getOrg(); + when(organizationRepository.save(any(Organization.class))).thenAnswer(invocation -> invocation.getArgument(0)); + var response = organizationService.add(1L, organization.getName()); + assertEquals(response.getClinetId(), 1L); + when(organizationRepository.findByClinetId(anyLong())).thenReturn(organization); + response = organizationService.add(1L, organization.getName()); + assertEquals(response.getName(), organization.getName()); + } + + @Test + public void updateTest() { + var organization = getOrg(); + when(organizationRepository.findById(anyLong())).thenReturn(Optional.of(organization)); + when(organizationRepository.save(any(Organization.class))).thenReturn(organization); + assertEquals(organizationService.update(organization).getName(), organization.getName()); + } + + @Test + public void getTestFail() { + var organization = getOrg(); + 
when(organizationRepository.findById(1L)).thenReturn(Optional.of(organization)); + var response = organizationService.get(1L); + assertEquals(response.getId(), organization.getId()); + assertThrows(RecordNotFoundException.class, () -> { organizationService.get(2L); }); + } + + @Test + public void getAllTest() { + var organization = getOrg(); + when(organizationRepository.findAll()).thenReturn(List.of(organization)); + var response = organizationService.getAll(); + assertEquals(organization.getName(), response.get(0).getName()); + } +} diff --git a/connect/src/test/java/com/opsbeach/connect/schemata/service/RedshiftServiceTest.java b/connect/src/test/java/com/opsbeach/connect/schemata/service/RedshiftServiceTest.java new file mode 100644 index 0000000..be9a9ad --- /dev/null +++ b/connect/src/test/java/com/opsbeach/connect/schemata/service/RedshiftServiceTest.java @@ -0,0 +1,67 @@ +package com.opsbeach.connect.schemata.service; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.when; + +import java.util.List; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.springframework.mock.web.MockHttpServletRequest; +import org.springframework.web.context.request.RequestContextHolder; +import org.springframework.web.context.request.ServletRequestAttributes; + +import com.opsbeach.connect.schemata.dto.RedshiftDto; +import com.opsbeach.connect.schemata.entity.DomainNode; +import com.opsbeach.connect.schemata.entity.Table; +import com.opsbeach.connect.schemata.repository.SchemaRepository; + +public class RedshiftServiceTest { + + @InjectMocks + private RedshiftService redshiftService; + + @Mock + private SchemaRepository schemaRepository; + 
+ @Mock + private TableService tableService; + + @Mock + private DomainNodeService domainService; + + @BeforeEach + public void initMock() { + MockitoAnnotations.openMocks(this); + MockHttpServletRequest request = new MockHttpServletRequest(); + RequestContextHolder.setRequestAttributes(new ServletRequestAttributes(request)); + } + + private List getSchemas() { + return List.of( + RedshiftDto.builder().tableCatalog("OpsBeach").tableSchema("analytics").tableName("task").columnName("id").dataType("bigint").isNullable("NO").columnDefault(1L).build(), + RedshiftDto.builder().tableCatalog("OpsBeach").tableSchema("analytics").tableName("task").columnName("name").dataType("varchar").isNullable("NO").build() + ); + } + + @Test + public void getSchemaTest() { + var domain = DomainNode.builder().id(1L).name("analytics").build(); + var redshiftDtos = getSchemas(); + when(domainService.get(anyLong())).thenReturn(domain); + when(schemaRepository.getSchemaByName(anyString())).thenReturn(redshiftDtos); + when(domainService.update(any(DomainNode.class))).thenReturn(domain); + var response = redshiftService.getSchema(1L); + assertEquals(response.getName(), domain.getName()); + domain.setTables(List.of(Table.builder().id(1L).build())); + when(domainService.get(anyLong())).thenReturn(domain); + response = redshiftService.getSchema(1L); + assertEquals(response.getName(), domain.getName()); + } +} diff --git a/connect/src/test/java/com/opsbeach/connect/schemata/service/TableServiceTest.java b/connect/src/test/java/com/opsbeach/connect/schemata/service/TableServiceTest.java new file mode 100644 index 0000000..42a3312 --- /dev/null +++ b/connect/src/test/java/com/opsbeach/connect/schemata/service/TableServiceTest.java @@ -0,0 +1,563 @@ +package com.opsbeach.connect.schemata.service; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static 
org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assumptions.assumeTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyCollection; +import static org.mockito.ArgumentMatchers.anyList; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.ArgumentMatchers.anyMap; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.MockitoAnnotations; +import org.neo4j.driver.Values; +import org.neo4j.driver.internal.InternalNode; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.neo4j.core.Neo4jClient; +import org.springframework.data.neo4j.core.Neo4jClient.RecordFetchSpec; +import org.springframework.data.neo4j.core.Neo4jClient.RunnableSpec; +import org.springframework.data.neo4j.core.Neo4jClient.UnboundRunnableSpec; +import org.springframework.mock.web.MockHttpServletRequest; +import org.springframework.security.core.Authentication; +import org.springframework.security.core.context.SecurityContext; +import org.springframework.security.core.context.SecurityContextHolder; +import org.springframework.test.util.ReflectionTestUtils; +import org.springframework.web.context.request.RequestContextHolder; +import 
org.springframework.web.context.request.ServletRequestAttributes; +import org.springframework.web.multipart.MultipartFile; + +import com.opsbeach.connect.github.entity.ClientRepo; +import com.opsbeach.connect.github.entity.Model; +import com.opsbeach.connect.github.entity.SchemaFileAudit; +import com.opsbeach.connect.github.entity.ClientRepo.RepoType; +import com.opsbeach.connect.github.service.ClientRepoService; +import com.opsbeach.connect.github.service.GitHubService; +import com.opsbeach.connect.github.service.ModelService; +import com.opsbeach.connect.github.service.SchemaFileAuditService; +import com.opsbeach.connect.schemata.dto.SchemaValidationDto; +import com.opsbeach.connect.schemata.entity.Field; +import com.opsbeach.connect.schemata.entity.Table; +import com.opsbeach.connect.schemata.enums.SchemaType; +import com.opsbeach.connect.schemata.repository.FieldRepostory; +import com.opsbeach.connect.schemata.repository.TableRepository; +import com.opsbeach.connect.schemata.validate.SchemaValidator; +import com.opsbeach.connect.schemata.validate.Status; +import com.opsbeach.sharedlib.dto.UserDto; +import com.opsbeach.sharedlib.exception.InvalidDataException; +import com.opsbeach.sharedlib.exception.RecordNotFoundException; +import com.opsbeach.sharedlib.response.ResponseMessage; +import com.opsbeach.sharedlib.utils.StringUtil; + +public class TableServiceTest { + + @InjectMocks + private TableService tableService; + @Mock + private TableRepository tableRepository; + @Mock + private FieldRepostory fieldRepostory; + @Mock + private DomainNodeService domainNodeService; + @Mock + private ResponseMessage responseMessage; + @Mock + private ModelService modelService; + @Mock + private SchemaValidator schemaValidator; + @Mock + private SchemaFileAuditService schemaFileAuditService; + @Mock + private Neo4jClient neo4jClient; + @Mock + private UnboundRunnableSpec unboundRunnableSpec; + @Mock + private RunnableSpec runnableSpec; + + @Mock + private RecordFetchSpec> 
recordFetchSpec; + @Mock + private ClientRepoService clientRepoService; + + @BeforeEach + public void initMock() { + MockitoAnnotations.openMocks(this); + MockHttpServletRequest request = new MockHttpServletRequest(); + RequestContextHolder.setRequestAttributes(new ServletRequestAttributes(request)); + } + + private void mockApplicationUser() { + UserDto userDto = mock(UserDto.class); + Authentication authentication = mock(Authentication.class); + SecurityContext securityContext = mock(SecurityContext.class); + when(securityContext.getAuthentication()).thenReturn(authentication); + SecurityContextHolder.setContext(securityContext); + when(SecurityContextHolder.getContext().getAuthentication().getPrincipal()).thenReturn(userDto); + } + + private Table getTable() { + var primitiveField = Field.builder().id(1L).name("score").dataType("int").isPrimitiveType(Boolean.TRUE).build(); + var arrayField = Field.builder().id(2L).name("array field").dataType("array").items("array").arrayField(Field.builder().id(3L).name("array field").dataType("array").items("string").build()).isPrimitiveType(Boolean.FALSE).build(); + var mapField = Field.builder().id(4L).name("map filed").dataType("map").values("map").mapField(Field.builder().id(5L).name("map field").dataType("map").values("string").build()).isPrimitiveType(Boolean.FALSE).build(); + var unionField = Field.builder().id(6L).name("union field").dataType("union").unionTypes(List.of(Field.builder().id(7L).dataType(null).isPrimitiveType(Boolean.TRUE).build(), Field.builder().id(8L).dataType("int").isPrimitiveType(Boolean.TRUE).build())).isPrimitiveType(Boolean.FALSE).build(); + var tableField = Field.builder().id(9L).name("table filed").dataType("com.acme.Marks").contain(Table.builder().id(10L).name("Marks").nameSpace("com.acme").isUserChanged(Boolean.FALSE).isDeleted(Boolean.FALSE).schemaType(SchemaType.ENTITY).fields(List.of(Field.builder().id(11L).dataType("string").build())).build()).isPrimitiveType(Boolean.FALSE).build(); + var 
fields = List.of(primitiveField, arrayField, unionField, mapField, tableField); + return Table.builder().id(12L).name("student").nameSpace("org").schemaType(SchemaType.ENTITY).fields(fields).build(); + } + + private Table getModifiedTable() { + var table = getTable(); + table.setModifiedTable(Table.builder().description("desc").owner("owner").domain("domain").isUserChanged(Boolean.TRUE).isDeleted(Boolean.FALSE).build()); + table.setIsUserChanged(Boolean.TRUE); table.setIsDeleted(Boolean.TRUE); + var name = Field.builder().id(12L).name("name").dataType("string").isUserChanged(Boolean.TRUE).isDeleted(Boolean.TRUE).isPrimitiveType(Boolean.TRUE).build(); + var nameModified = Field.builder().id(12L).name("name").dataType("string").isUserChanged(Boolean.TRUE).isDeleted(Boolean.FALSE).isPii(Boolean.TRUE).isPrimitiveType(Boolean.TRUE).build(); + var referedField = Field.builder().id(13L).name("college_id").dataType("int").isUserChanged(Boolean.FALSE).isUserChanged(Boolean.FALSE).referenceField(Field.builder().id(14L).name("id").dataType("int").build()).build(); + List fields = new ArrayList<>(table.getFields()); + fields.add(name); fields.add(referedField); + fields.add(nameModified); + table.setFields(fields); + return table; + } + + @Test + public void addTableTest() { + var table = getTable(); + when(tableRepository.save(table)).thenReturn(table); + var response = tableService.addTable(table); + assertEquals(table.getName(), response.getName()); + assertEquals(table.getFields().get(0).getDataType(), response.getFields().get(0).getDataType()); + } + + @Test + public void addTablesTest() { + var table = getTable(); + var tables = List.of(table); + when(tableRepository.saveAll(tables)).thenReturn(tables); + var response = tableService.addTables(tables); + assertEquals(table.getName(), response.get(0).getName()); + assertEquals(table.getFields().get(0).getDataType(), response.get(0).getFields().get(0).getDataType()); + } + + @Test + public void getTableTest() { + var table 
= getTable(); + when(tableRepository.findById(table.getId())).thenReturn(Optional.of(table)); + when(modelService.getByNodeId(anyLong())).thenReturn(ClientRepo.builder().id(1L).repoType(RepoType.AVRO).build()); + var response = tableService.get(table.getId()); + assertEquals(table.getName(), response.getName()); + + table = getModifiedTable(); + when(tableRepository.findById(table.getId())).thenReturn(Optional.of(table)); + response = tableService.get(table.getId()); + assertEquals(table.getName(), response.getName()); + + assertThrows(RecordNotFoundException.class, () -> { tableService.get(3L); }); + } + + @Test + public void getSchemaVisualizerDtoAndParsingTest() { + var table = getModifiedTable(); + when(tableRepository.getTableIdsConnectedToTable(anyLong())).thenReturn(new HashSet<>()); + when(fieldRepostory.getTableIdOfField(anyLong())).thenReturn(1L); + when(tableRepository.findAllById(anyCollection())).thenReturn(List.of(table)); + when(tableRepository.findById(1L)).thenReturn(Optional.of(getTable())); + var responseSchemaVisualizerDto = tableService.getSchemaVisualizer(1L); + assertEquals(table.getName(), responseSchemaVisualizerDto.getTables().get(0).getName()); + assertEquals(table.getId(), responseSchemaVisualizerDto.getLinks().get(0).get("source")); + + // var tableResponse = tableService.parseSchemaVisualizerDto(table.getId(), responseSchemaVisualizerDto, SchemaVisualizerDto.Purpose.SUBMIT); + // assertEquals(table.getId(), tableResponse.get(0).getId()); + // assertEquals(table.getFields().size(), tableResponse.get(0).getFields().size()); + } + + // @Test + // public void computeScoresTest() { + // var fields = List.of(Field.builder().id(1L).name("score").dataType("int").isPrimitiveType(Boolean.TRUE).build()); + // var table = Table.builder().id(12L).name("student").nameSpace("org").schemaType(SchemaType.ENTITY).fields(fields).build(); + // when(tableRepository.getTableIdsConnectedToTable(anyLong())).thenReturn(new HashSet<>()); + // 
when(tableRepository.findAllById(anyCollection())).thenReturn(List.of(table)); + // var responseSchemaVisualizerDto = tableService.getSchemaVisualizer(1L); + + // table.setFields(fields); + // when(modelService.getNodeIds()).thenReturn(List.of(table.getId())); + // when(tableRepository.findAllById(List.of(table.getId()))).thenReturn(List.of(table)); + // var score = tableService.computeScores(responseSchemaVisualizerDto, table.getId()); + // assertEquals(0.0, score.get("student")); + // } + + @Test + public void getAllSchemaVisualizerDtoTest() { + var table = getTable(); + when(modelService.getNodeIds()).thenReturn(List.of(table.getId())); + when(tableRepository.findAllById(List.of(table.getId()))).thenReturn(List.of(table)); + var response = tableService.getSchemaVisualizerForAll(); + assertEquals(2, response.getTables().size()); + } + + @Test + public void findByNameAndNameSpaceTest() { + var table = getTable(); + mockApplicationUser(); + when(tableRepository.findByNameAndNameSpaceAndClientId(anyString(), anyString(), anyLong())).thenReturn(table); + var response = tableService.findByNameAndNameSpace("table", "nameSpace"); + assertEquals(table.getName(), response.getName()); + } + + @Test + public void getTableFilterOptionsTest() { + var owners = List.of("owner"); + var domains = List.of("domain"); + mockApplicationUser(); + when(tableRepository.getAllOwner(anyLong())).thenReturn(owners); + when(tableRepository.getAllDomain(anyLong())).thenReturn(domains); + when(tableRepository.getAllSubscribers(anyLong())).thenReturn(List.of("[\"subscriber\"]")); + var response = tableService.getTableFilterOptions(); + assertEquals(response.owners().get(0), owners.get(0)); + assertEquals(response.domains().get(0), domains.get(0)); + assertTrue(response.subscribers().contains("subscriber")); + } + + @Test + @SuppressWarnings("unchecked") + public void getAllTest() { + // Neo4j DB elementId is changed from "2" to "1:2:3". 
That format is why the mock nodes below use composite element IDs. + Object aNode = new InternalNode(1L, "2:2:1", null, Map.of("name", Values.value("table1"), "owner", Values.value("owner1"))); + Object bNode = new InternalNode(2L, "2:2:2", null, Map.of("name", Values.value("table2"), "owner", Values.value("owner2"))); + Collection> value1 = List.of(Map.of("a", aNode, "b", bNode)); + Collection> value2 = List.of(); + Mockito.when(neo4jClient.query(Mockito.anyString())).thenReturn(unboundRunnableSpec); + Mockito.when(unboundRunnableSpec.bindAll(anyMap())).thenReturn(runnableSpec); + Mockito.when(runnableSpec.fetch()).thenReturn(recordFetchSpec); + Mockito.when(recordFetchSpec.all()).thenReturn(value1, value2); + var response = tableService.getAll(List.of("owner", "default"), List.of("domain", "default"), List.of("subscribers", "default"), PageRequest.of(0, 4)); + assertEquals("table1", response.get("tables").get(0).get("name").asText()); + assertEquals("owner2", response.get("tables").get(0).get("owner").asText()); + assertEquals(0, response.get("total").asInt()); + + response = tableService.getAll(List.of(), List.of(), List.of(), PageRequest.of(0, 4)); + assertEquals(0, response.get("tables").size()); + + Collection> value = List.of(Map.of("a", aNode)); + Mockito.when(recordFetchSpec.all()).thenReturn(value, value2); + response = tableService.getAll(List.of("owner"), List.of("domain"), List.of("subscribers"), PageRequest.of(0, 4)); + assertEquals("table1", response.get("tables").get(0).get("name").asText()); + assertEquals("owner1", response.get("tables").get(0).get("owner").asText()); + assertEquals(0, response.get("total").asInt()); + } + + @Test + public void addFieldsTest() { + var field = Field.builder().id(1L).name("score").dataType("int").isPrimitiveType(Boolean.TRUE).build(); + when(fieldRepostory.saveAll(anyList())).thenReturn(List.of(field)); + var response = tableService.addFields(List.of(field)); + assertEquals(response.get(0).getName(), field.getName()); + } + + @Test + public void addFieldTest() { + var 
field = Field.builder().id(1L).name("score").dataType("int").isPrimitiveType(Boolean.TRUE).build(); + when(fieldRepostory.save(any(Field.class))).thenReturn(field); + var response = tableService.addField(field); + assertEquals(response.getName(), field.getName()); + } + + @Test + public void getFieldModelTest() { + var field = Field.builder().id(1L).name("score").dataType("int").isPrimitiveType(Boolean.TRUE).build(); + when(fieldRepostory.findById(1L)).thenReturn(Optional.of(field)); + var response = tableService.getFieldModel(1L); + assertEquals(response.getName(), field.getName()); + + assertThrows(RecordNotFoundException.class, () -> { tableService.getFieldModel(3L); }); + } + + @Test + public void computeScoresTest() { + var table = Table.builder().id(1L).name("product").nameSpace("com.acme").fields(List.of(Field.builder().id(1L).name("id").dataType("int").build())).build(); + when(modelService.getNodeIds()).thenReturn(List.of(table.getId())); + when(tableRepository.findAllById(List.of(table.getId()))).thenReturn(List.of(table)); + var response = tableService.computeScores(List.of(table)); + assertEquals(response.get(table.getName()), 0.0); + } + + @Test + public void getFieldDataTypesTest() { + when(modelService.getByNodeId(anyLong())).thenReturn(ClientRepo.builder().repoType(RepoType.AVRO).build(), + ClientRepo.builder().repoType(RepoType.JSON).build(), + ClientRepo.builder().repoType(RepoType.PROTOBUF).build(), + ClientRepo.builder().repoType(RepoType.YAML).build()); + var response = tableService.getFieldDataTypes(1L); + assertEquals(response, List.of("string", "int", "float", "double", "long", "bytes", "boolean", "null")); + response = tableService.getFieldDataTypes(2L); + assertEquals(response, List.of("string", "number" ,"integer" ,"boolean", "null")); + response = tableService.getFieldDataTypes(1L); + assertEquals(response, List.of("double", "float", "int32", "int64", "uint32", "uint64", "sint32", "sint64", "fixed32", "fixed64", "sfixed32", "sfixed64", 
"bool", "string", "bytes")); + response = tableService.getFieldDataTypes(1L); + assertEquals(response, List.of()); + } + + @Test + public void findDeltaForTableTest() { + var table = getTable(); + when(tableRepository.findById(anyLong())).thenReturn(Optional.of(table)); + var response = tableService.findDeltaForTable(table, table.getId(), 2L); + assertNull(response.getModifiedTable()); + + var tableNew = Table.builder().description("des").build(); + table.setModifiedTable(tableNew); + when(tableRepository.save(any(Table.class))).thenReturn(table); + response = tableService.findDeltaForTable(tableNew, table.getId(), 2L); + assertNotNull(response.getModifiedTable()); + } + + @Test + public void saveNewTableWithDeltaTest() { + var table = getTable(); + table.setPrId(1L); + when(tableRepository.save(any(Table.class))).thenReturn(table); + var response = tableService.saveNewTableWithDelta(table, 1L); + assertNotNull(response.getPrId()); + mockApplicationUser(); + when(tableRepository.findByNameAndNameSpaceAndClientId(anyString(), anyString(), anyLong())).thenReturn(getTable()); + response = tableService.saveNewTableWithDelta(table, 1L); + assertNull(response.getPrId()); + } + + @Test + public void findDeltaForFieldsTest() { + var table = getTable(); + List fields = new ArrayList<>(); + fields.add(Field.builder().name("score").dataType("int").description("des").isPrimitiveType(Boolean.TRUE).isPii(Boolean.TRUE).build()); + fields.add(Field.builder().name("name").dataType("string").isPrimitiveType(Boolean.TRUE).isPii(Boolean.TRUE).build()); + fields.addAll(table.getFields()); + var mapOfTable = Field.builder().name("mapOfTable").dataType("map").values("com.product").isPrimitiveType(Boolean.FALSE).contain(table).build(); + var arrayOfMap = Field.builder().name("arrayOfMap").dataType("array").items("map").isPrimitiveType(Boolean.FALSE).mapField(mapOfTable).build(); + var arrayOfTable = 
Field.builder().name("arrayOfTable").dataType("array").items("com.product").isPrimitiveType(Boolean.FALSE).contain(table).build(); + var mapOfArray = Field.builder().name("mapOfArray").dataType("map").values("array").isPrimitiveType(Boolean.FALSE).arrayField(arrayOfTable).build(); + var arrayOfUnion = Field.builder().name("arrayOfUnion").dataType("array").items("union").isPrimitiveType(Boolean.FALSE).unionTypes(List.of(arrayOfMap)).build(); + var mapOfUnion = Field.builder().name("arrayOfUnion").dataType("map").values("union").isPrimitiveType(Boolean.FALSE).unionTypes(List.of(mapOfArray)).build(); + fields.add(arrayOfUnion); fields.add(mapOfUnion); + var tableWithNewFields = Map.of(StringUtil.constructStringEmptySeparator(table.getNameSpace(),".",table.getName()), fields); + + table.setFields(List.of(Field.builder().id(2L).name("age").dataType("int").isPrimitiveType(Boolean.TRUE).build(), + Field.builder().name("name").dataType("string").isPrimitiveType(Boolean.TRUE).isPii(Boolean.TRUE).build(), + Field.builder().id(1L).name("score").dataType("int").isPrimitiveType(Boolean.TRUE).build())); + mockApplicationUser(); + when(tableRepository.findByNameAndNameSpaceAndClientId(anyString(), anyString(), anyLong())).thenReturn(table); + when(tableRepository.save(any(Table.class))).thenReturn(table); + var response = tableService.findDeltaForFields(tableWithNewFields, 1L, RepoType.AVRO); + assertEquals(response.get(0).getName(), table.getName()); + } + + @Test + public void compareFieldTest() { + var field1 = Field.builder().dataType("int").description("desc").build(); + var field2 = Field.builder().dataType("long").description("desc").build(); + assertFalse(tableService.compareField(field1, field1, RepoType.AVRO)); + assertTrue(tableService.compareField(field1, field2, RepoType.PROTOBUF)); + assertFalse(tableService.compareField(field1, field2, RepoType.AVRO)); + field1.setDescription("desc1"); + assertTrue(tableService.compareField(field1, field2, RepoType.AVRO)); + 
field1.setDescription("desc"); + field1.setIsPii(Boolean.FALSE); field2.setIsPii(Boolean.TRUE); + assertTrue(tableService.compareField(field1, field2, RepoType.AVRO)); + field1.setIsPii(Boolean.FALSE); field2.setIsPii(Boolean.FALSE); + field1.setIsClassified(Boolean.FALSE); field2.setIsClassified(Boolean.TRUE); + assertTrue(tableService.compareField(field1, field2, RepoType.AVRO)); + field1.setIsClassified(Boolean.FALSE); field2.setIsClassified(Boolean.FALSE); + field1.setDeprecated(Boolean.FALSE); field2.setDeprecated(Boolean.TRUE); + assertTrue(tableService.compareField(field1, field2, RepoType.AVRO)); + field1.setDeprecated(null); field2.setDeprecated(Boolean.TRUE); + assertTrue(tableService.compareField(field1, field2, RepoType.AVRO)); + field1.setDeprecated(Boolean.FALSE); field2.setDeprecated(null); + assertTrue(tableService.compareField(field1, field2, RepoType.AVRO)); + field1.setDeprecated(null); field2.setDeprecated(null); + assertFalse(tableService.compareField(field1, field2, RepoType.AVRO)); + } + + @Test + public void compareTableTest() { + var table1 = Table.builder().description("desc").owner("owner").build(); + var table2 = Table.builder().description("desc").owner("owner1").build(); + assumeTrue(tableService.compareTable(table1, table2)); + table2.setOwner(table1.getOwner()); + table1.setComplianceOwner("owner"); table2.setComplianceOwner("owner1"); + assumeTrue(tableService.compareTable(table1, table2)); + table2.setComplianceOwner(table1.getComplianceOwner()); + table1.setChannel("channel"); table2.setChannel("channel1"); + assumeTrue(tableService.compareTable(table1, table2)); + table2.setChannel(table1.getChannel()); + table1.setEmail("email"); table2.setEmail("email2"); + assumeTrue(tableService.compareTable(table1, table2)); + table2.setEmail(table1.getEmail()); + table1.setQualityRuleBase("email"); table2.setQualityRuleBase("email2"); + assumeTrue(tableService.compareTable(table1, table2)); + table2.setEmail(table1.getEmail()); + 
table1.setQualityRuleBase("email"); table2.setQualityRuleBase("email2"); + assumeTrue(tableService.compareTable(table1, table2)); + table2.setQualityRuleBase(table1.getQualityRuleBase()); + table1.setQualityRuleCel("email"); table2.setQualityRuleCel("email2"); + assumeTrue(tableService.compareTable(table1, table2)); + table2.setQualityRuleCel(table1.getQualityRuleCel()); + table1.setQualityRuleSql("email"); table2.setQualityRuleSql("email2"); + assumeTrue(tableService.compareTable(table1, table2)); + String[] subs1 = {"marketing"}; + String[] subs2 = {"sales"}; + table1.setSubscribers(subs1); table2.setSubscribers(subs2); + assumeTrue(tableService.compareTable(table1, table2)); + } + + @Test + public void revertChangesTest() { + var tables = List.of(Table.builder().id(1L).isDeleted(Boolean.TRUE).isUserChanged(Boolean.TRUE).build(), + Table.builder().id(1L).isDeleted(Boolean.FALSE).isUserChanged(Boolean.TRUE).build(), + Table.builder().id(1L).isDeleted(Boolean.TRUE).isUserChanged(Boolean.FALSE).build()); + var fields = List.of(Field.builder().id(1L).isDeleted(Boolean.TRUE).isUserChanged(Boolean.TRUE).build(), + Field.builder().id(1L).isDeleted(Boolean.FALSE).isUserChanged(Boolean.TRUE).build(), + Field.builder().id(1L).isDeleted(Boolean.TRUE).isUserChanged(Boolean.FALSE).build()); + when(fieldRepostory.findByPrId(anyLong())).thenReturn(fields); + when(tableRepository.findByPrId(anyLong())).thenReturn(tables); + ReflectionTestUtils.setField(tableService, "schemaFileAuditService", schemaFileAuditService); + assertTrue(tableService.revertChanges(10L, 2L)); + } + + @Test + public void acceptChangesTest() { + ReflectionTestUtils.setField(tableService, "schemaFileAuditService", schemaFileAuditService); + assertTrue(tableService.acceptChanges(10L, 2L)); + + var tables = List.of(Table.builder().id(1L).isDeleted(Boolean.TRUE).isUserChanged(Boolean.TRUE).modifiedTable(Table.builder().build()).build(), + 
Table.builder().id(1L).isDeleted(Boolean.FALSE).isUserChanged(Boolean.TRUE).build(), + Table.builder().id(1L).isDeleted(Boolean.TRUE).isUserChanged(Boolean.FALSE).build()); + var fields = List.of(Field.builder().id(1L).isDeleted(Boolean.TRUE).isUserChanged(Boolean.TRUE).build(), + Field.builder().id(1L).isDeleted(Boolean.FALSE).isUserChanged(Boolean.TRUE).build(), + Field.builder().id(1L).isDeleted(Boolean.TRUE).isUserChanged(Boolean.FALSE).build()); + when(fieldRepostory.findByPrId(anyLong())).thenReturn(fields); + when(tableRepository.findByPrId(anyLong())).thenReturn(tables); + assertTrue(tableService.acceptChanges(10L, 2L)); + } + + @Test + public void schemaCompareTest() { + when(schemaValidator.schemaCompare(anyMap(), any(), anyLong())).thenReturn(SchemaValidationDto.builder().status(true).build()); + var response = tableService.schemaCompare(new HashMap(), ClientRepo.builder().build(), 1L); + assertTrue(response.getStatus()); + } + + @Test + public void deleteByIdsTest() { + var table = getTable(); + when(tableRepository.findAllById(anyList())).thenReturn(List.of(table)); + tableService.deleteByIds(List.of(1L)); + } + public class CustomMultipartFile implements MultipartFile { + + private final String name; + private final String originalFilename; + private final String contentType; + private final byte[] content; + + public CustomMultipartFile(String name, String originalFilename, String contentType, byte[] content) { + this.name = name; + this.originalFilename = originalFilename; + this.contentType = contentType; + this.content = content; + } + + @Override + public String getName() { + return this.name; + } + + @Override + public String getOriginalFilename() { + return this.originalFilename; + } + + @Override + public String getContentType() { + return this.contentType; + } + + @Override + public boolean isEmpty() { + return this.content.length == 0; + } + + @Override + public long getSize() { + return this.content.length; + } + + @Override + public byte[] 
getBytes() throws IOException { + return this.content; + } + + @Override + public InputStream getInputStream() throws IOException { + return new java.io.ByteArrayInputStream(this.content); + } + + @Override + public void transferTo(java.io.File dest) throws IOException, IllegalStateException { + try (java.io.FileOutputStream fos = new java.io.FileOutputStream(dest)) { + fos.write(this.content); + } + } + } + + @Test + public void uploadCsvToGitTest() throws IOException { + var filePath = "src/test/resources/schema_1/csv/sample.csv"; + byte[] content = Files.readAllBytes(new File(filePath).toPath()); + var multipartFile = new CustomMultipartFile("sample.csv", "sample.cav", "text/csv", content); + var clientRepo = ClientRepo.builder().id(2L).name("schemata").repoType(RepoType.AVRO).fullName("opsbeach/schemata").build(); + when(clientRepoService.getSchemataRepo()).thenReturn(Optional.of(clientRepo)); + when(modelService.findByFullNames(anyList())).thenReturn(List.of(Model.builder().id(1L).name("brand").nameSpace("org.company.ecommerce").build())); + when(tableRepository.saveAll(anyList())).thenAnswer(invocate -> invocate.getArgument(0)); + ReflectionTestUtils.setField(tableService, "schemaFileAuditService", schemaFileAuditService); + ReflectionTestUtils.setField(tableService, "gitHubService", Mockito.mock(GitHubService.class)); + when(schemaFileAuditService.createSchemaFileAuditForNewFile(any(ClientRepo.class), any(Table.class), anyString())).thenReturn(SchemaFileAudit.builder().id(1L).build()); + var response = tableService.uploadCsvToGit(List.of(multipartFile)); + assertEquals(Status.SUCCESS.name(), response.toString()); + + var models = List.of( + Model.builder().id(1L).name("brand").nameSpace("org.company.ecommerce").build(), + Model.builder().id(1L).name("promotion_reviewed").nameSpace("org.amce.ecommerce.promotion").build() + ); + when(modelService.findByFullNames(anyList())).thenReturn(models); + response = tableService.uploadCsvToGit(List.of(multipartFile)); + 
assertEquals(Status.SUCCESS.name(), response.toString()); + + var tableContent = new String(""" + table_name_space,table_name,table_description,owner,domain,column_name,data_type,column_description,is_pii,is_classified + org.company.ecommerce,brand,The table contains all the Brand information and related fields.,#team-brand,sales,id,int,The primary identification of the brand,FALSE,FALSE + """); + assertThrows(InvalidDataException.class, () -> tableService.uploadCsvToGit(List.of(new CustomMultipartFile("sample.csv", "sample.cav", "text/csv", tableContent.getBytes())))); + assertThrows(InvalidDataException.class, () -> tableService.uploadCsvToGit(List.of(new CustomMultipartFile("sample.csv", "sample.cav", "text/pdf", tableContent.getBytes())))); + } +} \ No newline at end of file diff --git a/connect/src/test/java/com/opsbeach/connect/task/service/ConnectServiceTest.java b/connect/src/test/java/com/opsbeach/connect/task/service/ConnectServiceTest.java new file mode 100644 index 0000000..eaf34b0 --- /dev/null +++ b/connect/src/test/java/com/opsbeach/connect/task/service/ConnectServiceTest.java @@ -0,0 +1,278 @@ +package com.opsbeach.connect.task.service; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.when; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.InputStream; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.ArgumentMatchers; +import org.mockito.InjectMocks; +import 
org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.mockito.Spy; +import org.springframework.data.jpa.domain.Specification; +import org.springframework.http.HttpEntity; +import org.springframework.mock.web.MockHttpServletRequest; +import org.springframework.test.util.ReflectionTestUtils; +import org.springframework.web.context.request.RequestContextHolder; +import org.springframework.web.context.request.ServletRequestAttributes; +import org.yaml.snakeyaml.Yaml; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.JsonNodeFactory; +import com.opsbeach.connect.core.enums.AuthType; +import com.opsbeach.connect.core.enums.ServiceType; +import com.opsbeach.connect.core.enums.TaskType; +import com.opsbeach.connect.core.specification.IdSpecifications; +import com.opsbeach.connect.schemata.validate.Status; +import com.opsbeach.connect.task.dto.ConnectDto; +import com.opsbeach.connect.task.dto.TaskDto; +import com.opsbeach.connect.task.entity.Connect; +import com.opsbeach.connect.task.repository.ConnectRepository; +import com.opsbeach.sharedlib.exception.InvalidDataException; +import com.opsbeach.sharedlib.exception.RecordNotFoundException; +import com.opsbeach.sharedlib.exception.UnAuthorizedException; +import com.opsbeach.sharedlib.response.ResponseMessage; +import com.opsbeach.sharedlib.service.App2AppService; + +public class ConnectServiceTest { + + @InjectMocks + private ConnectService connectService; + + @Mock + private ConnectRepository connectRepository; + + @Mock + private ResponseMessage responseMessage; + + @Mock + private App2AppService app2AppService; + + @Mock + private TaskService taskService; + + @Spy + private IdSpecifications connectSpecifications; + + private Map zendesk = new HashMap<>(); + + private Map jira = new HashMap<>(); + + private Map pagerduty = new HashMap<>(); + + private Map slack = new HashMap<>(); + + private Object userEmail; + + @BeforeEach + public void initMock() { + 
MockitoAnnotations.openMocks(this); + MockHttpServletRequest request = new MockHttpServletRequest(); + RequestContextHolder.setRequestAttributes(new ServletRequestAttributes(request)); + } + + @BeforeEach + public void init() throws FileNotFoundException { + InputStream inputStream = new FileInputStream(new File("src/test/resources/application-test.yaml")); + Yaml yaml = new Yaml(); + Map> data = yaml.load(inputStream); + zendesk = data.get("zendesk"); + pagerduty = data.get("pager-duty"); + jira = data.get("jira"); + userEmail = data.get("user-email"); + slack = data.get("slack"); + } + + @Test + public void connectTestPass() { + + // step to Pagerduty credential validation method testcase + var connectDto = ConnectDto.builder().serviceType(ServiceType.PAGER_DUTY).authToken(pagerduty.get("token")).authType(AuthType.TOKEN).userEmail(userEmail.toString()).build(); + ReflectionTestUtils.setField(connectService, "listIncidentUrl", pagerduty.get("incidents-url")); + ReflectionTestUtils.setField(connectService, "listServiceUrl", pagerduty.get("services-url")); + ReflectionTestUtils.setField(connectService, "listIncidentMetricsUrl", pagerduty.get("incident-metrics-url")); + ReflectionTestUtils.setField(connectService, "listLogEntryUrl", pagerduty.get("log-entry-url")); + var connect = connectDto.toDomin(connectDto); + when(connectRepository.save(connect)).thenReturn(connect); + var response = connectService.connect(connectDto); + assertEquals(connect.getServiceType(), response.getServiceType()); + + // step to Jira credential validation method testcase + connectDto = ConnectDto.builder().serviceType(ServiceType.JIRA).authToken(jira.get("token")).domain(jira.get("domin")).authType(AuthType.BASIC).projectKey("OP").userEmail(userEmail.toString()).build(); + ReflectionTestUtils.setField(connectService, "jiraGetProjectUri", jira.get("get-project-uri")); + ReflectionTestUtils.setField(connectService, "jiraCreateTicketUri", jira.get("create-ticket-uri")); + 
ReflectionTestUtils.setField(connectService, "jiraGetTicketsUri", jira.get("get-tickets-uri")); + connect = connectDto.toDomin(connectDto); + when(connectRepository.save(connect)).thenReturn(connect); + response = connectService.connect(connectDto); + assertEquals(connect.getServiceType(), response.getServiceType()); + + // step to zendesk credential validation method testcase + connectDto = ConnectDto.builder().serviceType(ServiceType.ZENDESK).authToken(zendesk.get("token")).domain(zendesk.get("domin")).authType(AuthType.BASIC).userEmail(userEmail.toString()).build(); + ReflectionTestUtils.setField(connectService, "zendeskTicketCountUri", zendesk.get("ticket-count-uri")); + ReflectionTestUtils.setField(connectService, "zendeskCreateTicketUri", zendesk.get("create-ticket-uri")); + ReflectionTestUtils.setField(connectService, "zendeskGetTicketUri", zendesk.get("get-tickets-uri")); + connect = connectDto.toDomin(connectDto); + when(connectRepository.save(connect)).thenReturn(connect); + response = connectService.connect(connectDto); + assertEquals(connect.getServiceType(), response.getServiceType()); + + // step to zendesk credential validation method testcase FOR BEARER AUTH TYPE + connectDto = ConnectDto.builder().serviceType(ServiceType.ZENDESK).authToken(zendesk.get("token")).domain(zendesk.get("domin")).authType(AuthType.BEARER).userEmail(userEmail.toString()).build(); + connect = connectDto.toDomin(connectDto); + when(connectRepository.save(connect)).thenReturn(connect); + response = connectService.connect(connectDto); + assertEquals(connect.getServiceType(), response.getServiceType()); + + when(taskService.getByType(ServiceType.METRICS, TaskType.INCIDENT_METRICS)).thenReturn(TaskDto.builder().build()); + when(taskService.getByType(ServiceType.METRICS, TaskType.TICKET_METRICS)).thenReturn(TaskDto.builder().build()); + + // step to zendesk credential validation method testcase + connectDto = 
ConnectDto.builder().serviceType(ServiceType.SLACK).authToken(slack.get("token")).authType(AuthType.BEARER).channelId(slack.get("channelId")).build(); + ReflectionTestUtils.setField(connectService, "slackPostMessageUrl", slack.get("post-message-url")); + var responseMessage = JsonNodeFactory.instance.objectNode().put("ok", true); + when(app2AppService.httpPost(anyString(), ArgumentMatchers.>any(), eq(JsonNode.class))).thenReturn(responseMessage); + connect = connectDto.toDomin(connectDto); + when(connectRepository.save(connect)).thenReturn(connect); + response = connectService.connect(connectDto); + assertEquals(connect.getServiceType(), response.getServiceType()); + } + + @Test + public void addSlackTestFail() { + var connectDto = ConnectDto.builder().serviceType(ServiceType.SLACK).authToken(slack.get("token")).authType(AuthType.BEARER).channelId(slack.get("channel-id")).build(); + ReflectionTestUtils.setField(connectService, "slackPostMessageUrl", slack.get("post-message-url")); + + var responseMessage = JsonNodeFactory.instance.objectNode().put("ok", true).put("error", "invalid_auth"); + when(app2AppService.httpPost(anyString(), ArgumentMatchers.>any(), eq(JsonNode.class))).thenReturn(responseMessage); + when(connectRepository.save(any(Connect.class))).thenReturn(connectDto.toDomin(connectDto)); + var response = connectService.connect(connectDto); + assertEquals(connectDto.getServiceType(), response.getServiceType()); + + responseMessage = JsonNodeFactory.instance.objectNode().put("ok", false).put("error", "invalid_auth"); + when(app2AppService.httpPost(anyString(), ArgumentMatchers.>any(), eq(JsonNode.class))).thenReturn(responseMessage); + assertThrows(UnAuthorizedException.class, () -> { connectService.connect(connectDto); }); + + responseMessage.put("error", "channel_not_found"); + when(app2AppService.httpPost(anyString(), ArgumentMatchers.>any(), eq(JsonNode.class))).thenReturn(responseMessage); + assertThrows(InvalidDataException.class, () -> { 
connectService.connect(connectDto); }); + + responseMessage.put("error", "not_in_channel"); + when(app2AppService.httpPost(anyString(), ArgumentMatchers.>any(), eq(JsonNode.class))).thenReturn(responseMessage); + assertThrows(InvalidDataException.class, () -> { connectService.connect(connectDto); }); + + responseMessage.put("error", "something_went_wrong"); + when(app2AppService.httpPost(anyString(), ArgumentMatchers.>any(), eq(JsonNode.class))).thenReturn(responseMessage); + assertThrows(InvalidDataException.class, () -> { connectService.connect(connectDto); }); + + } + + @Test + public void addConnectTestFail() { + var connectDto = ConnectDto.builder().serviceType(ServiceType.FRESH_DESK).build(); + assertThrows(InvalidDataException.class, () -> { connectService.connect(connectDto); }); + } + + @Test + public void getTest() { + assertThrows(RecordNotFoundException.class, () -> connectService.get(ServiceType.GITHUB)); + + var connect = Connect.builder().serviceType(ServiceType.FRESH_DESK).build(); + when(connectRepository.findOne(ArgumentMatchers.>any())).thenReturn(Optional.of(connect)); + var response = connectService.get(ServiceType.FRESH_DESK); + assertEquals(ServiceType.FRESH_DESK, response.getServiceType()); + } + + @Test + public void getModelTest() { + var response = connectService.getModel(ServiceType.GITHUB, 1L); + assertTrue(response.isEmpty()); + } + + @Test + public void updateTestPass() { + var connectDto = ConnectDto.builder().id(1L).serviceType(ServiceType.PAGER_DUTY).authToken("12345").authType(AuthType.TOKEN).userEmail(userEmail.toString()).build(); + var connect = connectDto.toDomin(connectDto); + when(connectRepository.findById(1L)).thenReturn(Optional.of(connect)); + ReflectionTestUtils.setField(connectService, "listIncidentUrl", pagerduty.get("incidents-url")); + ReflectionTestUtils.setField(connectService, "listServiceUrl", pagerduty.get("services-url")); + ReflectionTestUtils.setField(connectService, "listIncidentMetricsUrl", 
pagerduty.get("incident-metrics-url")); + ReflectionTestUtils.setField(connectService, "listLogEntryUrl", pagerduty.get("log-entry-url")); + when(connectRepository.save(connect)).thenReturn(connect); + var response = connectService.update(connectDto); + assertEquals(connect.getServiceType(), response.getServiceType()); + connectDto = ConnectDto.builder().id(1L).serviceType(ServiceType.JIRA).authToken(jira.get("token")).domain(jira.get("domin")).projectKey("OP").authType(AuthType.BASIC).userEmail(userEmail.toString()).build(); + ReflectionTestUtils.setField(connectService, "jiraGetProjectUri", jira.get("get-project-uri")); + connect = connectDto.toDomin(connectDto); + when(connectRepository.save(connect)).thenReturn(connect); + response = connectService.update(connectDto); + assertEquals(connect.getServiceType(), response.getServiceType()); + + connectDto = ConnectDto.builder().id(1L).serviceType(ServiceType.GITHUB).build(); + connect = connectDto.toDomin(connectDto); + when(connectRepository.save(any())).thenReturn(connect); + response = connectService.update(connectDto); + assertEquals(connect.getServiceType(), response.getServiceType()); + } + + @Test + public void updateTestFail() { + var connectDto = ConnectDto.builder().id(1L).serviceType(ServiceType.ZOHO).authToken("12345").authType(AuthType.TOKEN).userEmail(userEmail.toString()).build(); + var connect = connectDto.toDomin(connectDto); + when(connectRepository.findById(1L)).thenReturn(Optional.of(connect)); + assertThrows(InvalidDataException.class, () -> { connectService.update(connectDto); }); + var connectDto1 = ConnectDto.builder().id(2L).build(); + assertThrows(RecordNotFoundException.class, () -> { connectService.update(connectDto1); }); + } + + @Test + public void getAllTest() { + when(connectRepository.findAll()).thenReturn(List.of(Connect.builder().id(1L).build())); + var response = connectService.getAll(); + assertEquals(1, response.size()); + when(connectRepository.findAll()).thenReturn(List.of()); 
+ response = connectService.getAll(); + assertEquals(0, response.size()); + } + + @Test + public void getAllServiceTypeTest() { + var response = connectService.getAllServiceType(); + assertEquals(ServiceType.values().length, response.length); + } + + @Test + public void checkConnectByServiceTypeTest() { + var connect = Connect.builder().id(1L).serviceType(ServiceType.ZOHO).authToken("12345").authType(AuthType.TOKEN).userEmail(userEmail.toString()).build(); + when(connectRepository.findAll()).thenReturn(List.of(connect)); + var response = connectService.checkConnect(); + assertEquals(connect.getId(), response.get(ServiceType.ZOHO.name())); + assertNull(response.get(ServiceType.GITHUB.name())); + } + + @Test + public void addRepoOrganizationTest() { + var connectDto = ConnectDto.builder().id(1L).serviceType(ServiceType.PAGER_DUTY).authToken("12345").authType(AuthType.TOKEN).userEmail(userEmail.toString()).build(); + var connect = connectDto.toDomin(connectDto); + when(connectRepository.findById(1L)).thenReturn(Optional.of(connect)); + when(connectRepository.save(any(Connect.class))).thenReturn(connect); + var response = connectService.addRepoOrganization(1L, "opsbeach"); + assertEquals(response, Status.SUCCESS.name()); + } +} diff --git a/connect/src/test/java/com/opsbeach/connect/task/service/TaskServiceTest.java b/connect/src/test/java/com/opsbeach/connect/task/service/TaskServiceTest.java new file mode 100644 index 0000000..b111fb7 --- /dev/null +++ b/connect/src/test/java/com/opsbeach/connect/task/service/TaskServiceTest.java @@ -0,0 +1,166 @@ +package com.opsbeach.connect.task.service; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doNothing; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import 
java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; + +import com.opsbeach.connect.core.enums.ServiceType; +import com.opsbeach.connect.core.enums.TaskType; +import com.opsbeach.connect.core.specification.IdSpecifications; +import com.opsbeach.connect.scheduler.SchedulerTaskService; +import com.opsbeach.connect.task.dto.TaskDto; +import com.opsbeach.connect.task.entity.Task; +import com.opsbeach.connect.task.repository.TaskRepository; +import com.opsbeach.sharedlib.exception.InvalidDataException; +import com.opsbeach.sharedlib.exception.RecordNotFoundException; +import com.opsbeach.sharedlib.response.ResponseMessage; +import com.opsbeach.sharedlib.service.App2AppService; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.ArgumentMatchers; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.mockito.Spy; +import org.springframework.data.jpa.domain.Specification; +import org.springframework.mock.web.MockHttpServletRequest; +import org.springframework.scheduling.Trigger; +import org.springframework.test.util.ReflectionTestUtils; +import org.springframework.web.context.request.RequestContextHolder; +import org.springframework.web.context.request.ServletRequestAttributes; + +public class TaskServiceTest { + + @InjectMocks + private TaskService taskService; + + @Mock + private TaskRepository taskRepository; + + @Mock + private ResponseMessage responseMessage; + + @Spy + private IdSpecifications taskSpecifications; + + @Mock + private SchedulerTaskService schedulerTaskService; + + @Mock + private App2AppService app2AppService; + + @BeforeEach + public void initMock() { + MockitoAnnotations.openMocks(this); + MockHttpServletRequest request = new MockHttpServletRequest(); + RequestContextHolder.setRequestAttributes(new ServletRequestAttributes(request)); + } + + private List getTasks() { + return 
List.of(TaskDto.builder().id(1L).taskType(TaskType.INCIDENTS).serviceType(ServiceType.PAGER_DUTY) + .connectId(1L).clientId(1L).build()); + } + + @Test + public void getTest() { + var taskDto = getTasks().get(0); + var task = taskDto.toDomin(taskDto); + when(taskRepository.findById(1L)).thenReturn(Optional.of(task)); + var response = taskService.get(1L); + assertEquals(task.getTaskType(), response.getTaskType()); + + assertThrows(RecordNotFoundException.class, () -> { taskService.get(2L); }); + } + + @Test + public void addTest() { + var taskDto = getTasks().get(0); + var task = taskDto.toDomin(taskDto); + ReflectionTestUtils.setField(taskService, "schedulerEnabled", true); + when(taskRepository.save(ArgumentMatchers.any())).thenReturn(task); + var schedulerTaskService = mock(SchedulerTaskService.class); + doNothing().when(schedulerTaskService).addTaskToScheduler(any(Task.class), any(Trigger.class)); + var response = taskService.add(taskDto); + assertEquals(task.getTaskType(), response.getTaskType()); + } + + @Test + public void updateTest() { + var taskDto = getTasks().get(0); + var task = taskDto.toDomin(taskDto); + when(taskRepository.save(ArgumentMatchers.any())).thenReturn(task); + var response = taskService.update(taskDto); + assertEquals(task.getTaskType(), response.getTaskType()); + } + + @Test + public void getAllTest() { + var taskDtos = getTasks(); + var tasks = taskDtos.stream().map(taskDtos.get(0)::toDomin).collect(Collectors.toList()); + when(taskRepository.findAll()).thenReturn(tasks); + var response = taskService.getAll(); + assertEquals(tasks.size(), response.size()); + assertEquals(tasks.get(0).getConnectId(), response.get(0).getConnectId()); + } + + @Test + public void getAllForSchedulerTest() { + var taskDtos = getTasks(); + var tasks = taskDtos.stream().map(taskDtos.get(0)::toDomin).collect(Collectors.toList()); + when(taskRepository.findAll()).thenReturn(tasks); + var response = taskService.getAllForScheduler(); + assertEquals(tasks.size(), 
response.size()); + assertEquals(tasks.get(0).getConnectId(), response.get(0).getConnectId()); + } + + @Test + public void getByTypePass() { + var taskDto = getTasks().get(0); + var task = taskDto.toDomin(taskDto); + when(taskRepository.findOne(ArgumentMatchers.>any())).thenReturn(Optional.of(task)); + var response = taskService.getByType(ServiceType.JIRA, TaskType.CREATE_TICKET); + assertEquals(task.getTaskType(), response.getTaskType()); + when(taskRepository.findOne(ArgumentMatchers.>any())).thenReturn(Optional.empty()); + assertNull(taskService.getByType(ServiceType.JIRA, TaskType.CREATE_TICKET)); + } + + @Test + public void getByTypesPass() { + var taskDto = getTasks().get(0); + var task = taskDto.toDomin(taskDto); + when(taskRepository.findOne(ArgumentMatchers.>any())).thenReturn(Optional.of(task)); + var response = taskService.getByTypes(List.of(ServiceType.JIRA, ServiceType.ZENDESK), TaskType.CREATE_TICKET); + assertEquals(task.getTaskType(), response.getTaskType()); + } + + @Test + public void getByTypesFail() { + assertThrows(RecordNotFoundException.class, () -> { taskService.getByTypes(List.of(ServiceType.JIRA, ServiceType.ZENDESK), TaskType.CREATE_TICKET); }); + } + + @Test + public void taskValidationTestPass() { + var task = getTasks().get(0); + taskService.taskValidation(task, TaskType.INCIDENTS, ServiceType.PAGER_DUTY); + } + + @Test + public void taskValidationTestFail() { + var task = getTasks().get(0); + assertThrows(InvalidDataException.class, () -> taskService.taskValidation(task, TaskType.SERVICES, ServiceType.PAGER_DUTY)); + } + + @Test + public void deleteTest() { + taskService.delete(1L); + } +} diff --git a/connect/src/test/resources/application-test.yaml b/connect/src/test/resources/application-test.yaml new file mode 100644 index 0000000..6a38f19 --- /dev/null +++ b/connect/src/test/resources/application-test.yaml @@ -0,0 +1,60 @@ +jira: + get-user-uri: /rest/api/3/user/search + create-ticket-uri: /rest/api/3/issue + domin: 
https://opsbeach.atlassian.net + token: 2nVLwSeK90V4JILPS53 + get-tickets-uri: /rest/api/3/search + get-project-uri: /rest/api/3/project/ +pager-duty: + incidents-url: https://api.pagerduty.com/incidents + services-url: https://api.pagerduty.com/services + incident-metrics-url: https://api.pagerduty.com/analytics/raw/incidents + log-entry-url: https://api.pagerduty.com/log_entries + token: E3ZPKdZk9Lpk8zHzcm +zendesk: + create-ticket-uri: /api/v2/tickets + ticket-count-uri: /api/v2/tickets/count + domin: https://obscura.zendesk.com/ + token: V4JI2KE5E53PKdZk9Lpk + get-tickets-uri: /api/v2/tickets +slack: + post-message-url: https://slack.com/api/chat.postMessage + token: xoxb-2nVLwSeKdZk9LpkS53 + channel-id: L045AB79DZ +user-email: abc@gmail.com +gcloud: + repo-bucket: "repo-bucket" + file-upload-bucket: "file-upload-bootstrap" + project-id: code-12345 + location-id: center + queue-id: queue-123 +home-path: src/test/resources/ +github: + client-id: client_id + client-secret: client_secret + construct-file-path: https://github.com/{repoFullName}/tree/{branch}/{filePath} + construct-pr-url: https://github.com/{repoFullName}/pull/{prNumber} + access_token: https://github.com/login/oauth/access_token + user: https://api.github.com/user + user_repos: https://api.github.com/search/repositories?q=user:{userId} + user_orgs: https://api.github.com/user/orgs + organization_repos: https://api.github.com/orgs/{orgName}/repos + process_event_audit: https://d3ca-136-185-1-251.ngrok-free.app/connect/v1/event-audit/process?eventAuditId={eventAuditId} + tarball: https://api.github.com/repos/{FullName}/tarball/{branchName} + refresh-token: https://github.com/login/oauth/access_token + redirect-url: https://api.aldefi.io/connect/v1/github/signin/callback?smClientId={smClientId} + repos-redirect-url: https://schemata.aldefi.io/repos + login-url: https://github.com/login/oauth/authorize?scope=user&client_id={clientID}&redirect_uri={redirectURI} + download_file_url: 
https://api.github.com/repos/{owner}/{repo}/contents/{path} + create_branch_url: https://api.github.com/repos/{owner}/{repo}/git/refs + get_branch_info_url: https://api.github.com/repos/{owner}/{repo}/git/refs/heads/{branchName} + create_tree_object_url: https://api.github.com/repos/{owner}/{repo}/git/trees + create_commit_url: https://api.github.com/repos/{owner}/{repo}/git/commits + commit_file_url: https://api.github.com/repos/{owner}/{repo}/contents/{filePath} + create_pr_url: https://api.github.com/repos/{owner}/{repo}/pulls + repo-details: https://api.github.com/repos/{owner}/{repo} + create-repo-authenticated-user: https://api.github.com/user/repos + get-user-details: https://api.github.com/users/{userName} + push-commit-to-branch: https://api.github.com/repos/{owner}/{repo}/git/refs/heads/{branchName} + create-pr-comment: https://api.github.com/repos/{owner}/{repo}/issues/{pr_number}/comments + delete-comment: https://api.github.com/repos/{owner}/{repo}/issues/comments/{comment_id} \ No newline at end of file diff --git a/connect/src/test/resources/github-response.json b/connect/src/test/resources/github-response.json new file mode 100644 index 0000000..a37c4c4 --- /dev/null +++ b/connect/src/test/resources/github-response.json @@ -0,0 +1,28 @@ +{ + "access_token": "access_token=token&expires_in=28800&refresh_token=token&refresh_token_expires_in=15811200&scope=&token_type=bearer", + "invalid_access_code": "error=bad_verification_code", + "user_details": { "login": "user_name" }, + "repo_details": { + "id": 1234, + "node_id": "R_Grb", + "name": "repo", + "full_name": "owner/repo", + "private": true, + "owner": { "login": "owner" }, + "default_branch": "main" + }, + "user_repos": { + "items": [ { "name": "repoName" } ] + }, + "pr_comment": { "id": "1" }, + "branch_info": { + "object": { "sha": "df933fse93HHe0" } + }, + "create_tree_object": { "sha": "df933fse93HHe0" }, + "create_commit": { "sha": "df933fse93HHe0" }, + "push_commit_to_branch": { "sha": 
"df933fse93HHe0" }, + "pr_info": { + "number": 1, + "head": { "sha": "df933fse93HHe0" } + } +} \ No newline at end of file diff --git a/connect/src/test/resources/schema_1/avro/avro_testing.avsc b/connect/src/test/resources/schema_1/avro/avro_testing.avsc new file mode 100644 index 0000000..88367ac --- /dev/null +++ b/connect/src/test/resources/schema_1/avro/avro_testing.avsc @@ -0,0 +1,954 @@ +{ + "name": "EntityAggregation", + "namespace": "eu.driver.model.sim.support", + "doc": "The entity aggregation message is a wrapper message containing a map with all entities that require an aggregated update. The values of this map would be the defined entity messages, with all described properties to be optional allowing the message to only contain the relevant changed properties. *Copyright (C) 2019-2020 XVR Simulation B.V., Delft, The Netherlands, Martijn Hendriks . This file is licensed under the MIT license : https://github.com/DRIVER-EU/avro-schemas/blob/master/LICENSE*", + "type": "record", + "desc": "This is the description of the Brand table", + "comment": "The comment added after thought", + "see_also": "db.brand MySQL table", + "owner": "Platform", + "domain": "Core", + "schema_type": "ENTITY", + "team_channel": "#team-platform", + "alert_channel": "#alerts-platform", + "event_type": "model", + "fields": [ + { + "name": "id", + "doc": "Unique case-insensitive identifier of the aggregation update", + "type": "string" + }, + { + "name" : "address", + "type" : { + "type" : "record", + "name" : "mailing_address", + "fields" : [ + {"name" : "street", + "type" : "string", + "default" : "NONE"}, + + {"name" : "city", + "type" : "string", + "default" : "NONE"}, + + {"name" : "state_prov", + "type" : "string", + "default" : "NONE"}, + + {"name" : "country", + "type" : "string", + "default" : "NONE"}, + + {"name" : "zip", + "type" : "string", + "default" : "NONE"} + ] + } + }, + { + "name": "map", + "doc": "Map containing key-value pairs, all with unique keys: key – 
unique case-insensitive identifier of the entity; value – entity message where all properties are optional", + "type": { + "type": "map", + "values": [ + { + "name": "AggregatedItem", + "namespace": "eu.driver.model.sim.support", + "type": "record", + "schema_type": "ENTITY", + "fields": [ + { + "name": "location", + "doc": "Optional location of the item", + "type": [ + "null", + { + "name": "Location", + "namespace": "eu.driver.model.sim.support", + "doc": "A location is defined as a WGS84-based standard representation of a location on earth", + "type": "record", + "schema_type": "ENTITY", + "fields": [ + { + "name": "latitude", + "doc": "In decimal degrees, ranging from [-90, 90] where 0 is the equator", + "type": "double" + }, + { + "name": "longitude", + "doc": "In decimal degrees, ranging from (-180, 180] where 0 is the Prime Meridian (line going through the geographic north, Greenwich, and the geographic south)", + "type": "double" + }, + { + "name": "altitude", + "doc": "Optional in meters, where 0 is the surface of the WGS84-based ellipsoid, or another agreed upon common ground level (specified inside the configuration guidelines). A positive number indicates a location outside the ellipsoid (or above the ground level), while a negative number indicates a location inside the ellipsoid (or below the ground level). If an altitude is not provided, it is presumed that the location is at the ground level of the provided latitude and longitude coordinates", + "type": ["null", "double"], + "default": null + } + ] + } + ], + "default": null + }, + { + "name": "orientation", + "doc": "Optional orientation of the item", + "type": [ + "null", + { + "name": "Orientation", + "namespace": "eu.driver.model.sim.support", + "doc": "An orientation is defined in the aviation axes conventions representation. 
It is a left-handed item-centric reference system, with in default initial state its heading/yaw-axis pointing away from the centre of the WGS84-based ellipsoid, its pitch-axis pointing to the right, and its roll/bank-axis pointing forward", + "type": "record", + "schema_type": "ENTITY", + "fields": [ + { + "name": "yaw", + "doc": "In decimal degrees, ranging from [0, 360) where 0 is pointing towards the geographic north. The yaw value is applied in a clockwise rotation over the item’s heading/yaw-axis. A yaw value of 90 makes the item face east, while a yaw of 270 makes it face west", + "type": "double" + }, + { + "name": "pitch", + "doc": "In decimal degrees, ranging from [-90, 90] where 0 is perpendicular to the line crossing the item’s location and the centre of the WGS84-based ellipsoid. The pitch value is applied in a counter-clockwise rotation over the item’s pitch-axis. A pitch value of 45 makes the item face 45 degrees upwards, while a pitch of -20 makes it face 20 degrees downwards", + "type": "double" + }, + { + "name": "roll", + "doc": "In decimal degrees, ranging from (-180, 180] where 0 is perpendicular to the line crossing the item’s location and the centre of the WGS84-based ellipsoid. The roll value is applied in a clockwise rotation over the item’s roll/bank-axis. A roll value of 45 makes the item roll 45 degrees to the right, while a roll of -50 makes it roll 50 degrees to the left", + "type": "double" + } + ] + } + ], + "default": null + }, + { + "name": "velocity", + "doc": "Optional velocity of the item", + "type": [ + "null", + { + "name": "Velocity", + "namespace": "eu.driver.model.sim.support", + "doc": "A velocity is defined in the aviation axes conventions representation of a velocity vector. 
It is a left-handed item-centric reference system, with in default initial state its heading/yaw-axis pointing away from the centre of the WGS84-based ellipsoid, its pitch-axis pointing to the right, and its roll/bank-axis pointing forward", + "type": "record", + "schema_type": "ENTITY", + "fields": [ + { + "name": "yaw", + "doc": "In decimal degrees, ranging from [0, 360) where 0 is pointing towards the geographic north. The yaw value is applied in a clockwise rotation over the item’s heading/yaw-axis. A yaw value of 90 makes the item face east, while a yaw of 270 makes it face west", + "type": "double" + }, + { + "name": "magic", + "type": { + "type": "fixed", + "name": "Magic", + "size": 4 + } + }, + { + "name": "pitch", + "doc": "In decimal degrees, ranging from [-90, 90] where 0 is perpendicular to the line crossing the item’s location and the centre of the WGS84-based ellipsoid. The pitch value is applied in a counter-clockwise rotation over the item’s pitch-axis. A pitch value of 45 makes the item face 45 degrees upwards, while a pitch of -20 makes it face 20 degrees downwards", + "type": "double" + }, + { + "name": "magnitude", + "doc": "In meter per seconds, ranging from [0, infinity) where 0 is standing still relative to the earth’s rotation", + "type": "double" + } + ] + } + ], + "default": null + }, + { + "name": "name", + "doc": "Optional name of the item", + "type": ["null", "string"], + "default": null + }, + { + "name": "description", + "doc": "Optional description of the item", + "type": ["null", "string"], + "default": null + }, + { + "name": "type", + "doc": "Optional type of the item", + "type": ["null", "string"], + "default": null + }, + { + "name": "owner", + "doc": "Optional unique case-insensitive identifier of the connected application owning the item", + "type": ["null", "string"], + "default": null + }, + { + "name": "timestamp", + "doc": "Optional UNIX Epoch time in milliseconds marking the time the update was performed", + "type": 
["null", "long"], + "logicalType": "timestamp-millis", + "default": null + }, + { + "name": "coordinates_map_fixed", + "type": { + "type": "map", + "values": { + "type": "fixed", + "name": "brand", + "size": 4 + } + } + }, + { + "name": "tags", + "doc": "Optional map containing item specific information: key – unique name of the specific property; value – value of that property", + "type": [ + "null", + { + "type": "map", + "values": "string" + } + ], + "default": null + }, + { + "name": "children", + "doc": "Optional list of item identifiers that belong to this item", + "type": [ + "null", + { + "type": "array", + "items": "string" + } + ], + "default": null + } + ] + }, + { + "name": "AggregatedFeatureCollection", + "namespace": "eu.driver.model.sim.support", + "type": "record", + "schema_type": "ENTITY", + "fields": [ + { + "name": "type", + "type": { + "name": "FeatureCollectionType", + "namespace": "eu.driver.model.sim.support.geojson", + "doc": "Type of the feature collection (as defined by the GeoJSON standard): FeatureCollection – a collection of multiple GeoJSON features; Feature– a single GeoJSON feature (not used within this CWA); Geometry – a single geometric GeoJSON object (not used within this CWA). 
In this CWA only the FeatureCollection option is used for easier processing", + "type": "enum", + "symbols": [ "FeatureCollection" ] + }, + "default": "FeatureCollection" + }, + { + "name": "features", + "type": [ + "null", + { + "type": "array", + "items": { + "name": "AggregatedFeature", + "namespace": "eu.driver.model.sim.support.geojson", + "type": "record", + "schema_type": "ENTITY", + "fields": [ + { + "name": "type", + "type": { + "type": "enum", + "name": "FeatureType", + "namespace": "eu.driver.model.sim.support.geojson", + "doc": "Type of the feature (as defined by the GeoJSON standard): Feature – a single GeoJSON feature", + "symbols": [ "Feature" ] + }, + "default": "Feature" + }, + { + "name": "geometry", + "doc": "The geometry (as defined by the GeoJSON standard): Point – a point; MultiPoint – a collection of points; LineString – a collection of points forming a line; MultiLineString – a collection of lines; Polygon – a collection of points forming an area; MultiPolygon – a collection of areas; GeometryCollection – a collection of any of the types above", + "type": [ + "null", + { + "name": "Point", + "namespace": "eu.driver.model.sim.support.geojson.geometry", + "doc": "Describes a point geometry", + "type": "record", + "schema_type": "ENTITY", + "fields": [ + { + "name": "type", + "type": { + "name": "PointType", + "namespace": "eu.driver.model.sim.support.geojson.geometry", + "type": "enum", + "symbols": [ "Point" ] + }, + "default": "Point" + }, + { + "name": "coordinates", + "type": { + "type": "array", + "items": "double" + } + }, + { + "name": "coordinates_array_enum", + "type": { + "type": "array", + "items": { + "type": "enum", + "name": "Simple", + "doc": "A simple name (attribute) and no namespace attribute: inherit the null namespace of the enclosing type 'Example'. 
The fullname is 'Simple'.", + "symbols": ["a", "b"], + "default": "a" + } + } + }, + { + "name": "coordinates_array_fixed", + "type": { + "type": "array", + "items": { + "type": "fixed", + "name": "Magic", + "size": 4 + } + } + } + ] + }, + { + "name": "MultiPoint", + "namespace": "eu.driver.model.sim.support.geojson.geometry", + "doc": "Describes a collection of points geometry", + "type": "record", + "schema_type": "ENTITY", + "fields": [ + { + "name": "type", + "type": { + "name": "MultiPointType", + "namespace": "eu.driver.model.sim.support.geojson.geometry", + "type": "enum", + "symbols": [ "MultiPoint" ] + }, + "default": "MultiPoint" + }, + { + "name": "coordinates", + "type": { + "type": "array", + "items": { + "type": "array", + "items": "double" + } + } + }, + { + "name": "coordinates_map_array_record", + "type": { + "type": "map", + "values": { + "type": "map", + "values": { + "name": "OffsetFetchRequest", + "type": "record", + "schema_type": "ENTITY", + "fields": [ + { + "name": "topic", + "type": "string" + }, + { + "name": "offset", + "type": ["null", "int"], + "default": null + }, + { + "name": "partition", + "type": ["null", "int"], + "default": null + } + ] + } + } + } + } + ] + }, + { + "name": "LineString", + "namespace": "eu.driver.model.sim.support.geojson.geometry", + "doc": "Describes a collection of points forming a line geometry", + "type": "record", + "schema_type": "ENTITY", + "fields": [ + { + "name": "type", + "type": { + "name": "LineStringType", + "namespace": "eu.driver.model.sim.support.geojson.geometry", + "type": "enum", + "symbols": [ "LineString" ] + }, + "default": "LineString" + }, + { + "name": "coordinates", + "type": { + "type": "array", + "items": { + "type": "array", + "items": "double" + } + } + } + ] + }, + { + "name": "MultiLineString", + "namespace": "eu.driver.model.sim.support.geojson.geometry", + "doc": "Describes a collection of lines geometry", + "type": "record", + "schema_type": "ENTITY", + "fields": [ + { + 
"name": "type", + "type": { + "name": "MultiLineStringType", + "namespace": "eu.driver.model.sim.support.geojson.geometry", + "type": "enum", + "symbols": [ "MultiLineString" ] + }, + "default": "MultiLineString" + }, + { + "name": "coordinates", + "type": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "array", + "items": "double" + } + } + } + } + ] + }, + { + "name": "Polygon", + "namespace": "eu.driver.model.sim.support.geojson.geometry", + "doc": "Describes a collection of points forming an area geometry", + "type": "record", + "schema_type": "ENTITY", + "fields": [ + { + "name": "type", + "type": { + "name": "PolygonType", + "namespace": "eu.driver.model.sim.support.geojson.geometry", + "type": "enum", + "symbols": [ "Polygon" ] + }, + "default": "Polygon" + }, + { + "name": "coordinates", + "type": { + "type": "array", + "items": { + "type": "array", + "items": "double" + } + } + } + ] + }, + { + "name": "MultiPolygon", + "namespace": "eu.driver.model.sim.support.geojson.geometry", + "doc": "Describes a collection of areas geometry", + "type": "record", + "schema_type": "ENTITY", + "fields": [ + { + "name": "type", + "type": { + "name": "MultiPolygonType", + "namespace": "eu.driver.model.sim.support.geojson.geometry", + "type": "enum", + "symbols": [ "MultiPolygon" ] + }, + "default": "MultiPolygon" + }, + { + "name": "coordinates", + "type": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "array", + "items": "double" + } + } + } + } + ] + } + ], + "default": null + }, + { + "name": "properties", + "doc": "Optional feature specific properties (as defined by the GeoJSON standard)", + "type": [ + "null", + { + "name": "Properties", + "namespace": "eu.driver.model.sim.support.geojson", + "type": "record", + "schema_type": "ENTITY", + "fields": [ + { + "name": "id", + "doc": "Unique case-insensitive identifier of the feature", + "type": "string" + }, + { + "name": "name", + "doc": "Optional name of the 
feature", + "type": ["null", "string"], + "default": null + }, + { + "name": "description", + "doc": "Optional description of the feature", + "type": ["null", "string"], + "default": null + }, + { + "name": "type", + "doc": "Optional type of the feature", + "type": ["null", "string"], + "default": null + }, + { + "name": "tags", + "doc": "Optional map containing feature specific information: key – unique name of the specific property; value – value of that property", + "type": [ + "null", + { + "type": "map", + "values": "string" + } + ], + "default": null + }, + { + "name": "orientation", + "doc": "Optional orientation of the feature", + "type": ["null", "eu.driver.model.sim.support.Orientation"], + "default": null + }, + { + "name": "entities", + "doc": "Optional list of entity identifiers that are at this feature", + "type": [ + "null", + { + "type": "array", + "items": "string" + } + ], + "default": null + }, + { + "name": "address", + "doc": "Optional address information of the feature", + "type": [ + "null", + { + "name": "Address", + "namespace": "eu.driver.model.sim.support", + "type": "record", + "schema_type": "ENTITY", + "fields": [ + { + "name": "street", + "doc": "Optional street name", + "type": ["null", "string"], + "default": null + }, + { + "name": "houseNumber", + "doc": "Optional house number", + "type": ["null", "int"], + "default": null + }, + { + "name": "houseLetter", + "doc": "Optional house letter", + "type": ["null", "string"], + "default": null + }, + { + "name": "postalCode", + "doc": "Optional postal code", + "type": ["null", "string"], + "default": null + }, + { + "name": "city", + "doc": "Optional name of the city", + "type": ["null", "string"], + "default": null + }, + { + "name": "state", + "doc": "Optional name of the state or province", + "type": ["null", "string"], + "default": null + }, + { + "name": "country", + "doc": "Optional name of the country", + "type": ["null", "string"], + "default": null + }, + { + "name": "tags", + 
"doc": "Optional map containing address specific information: key – unique name of the specific property; value – value of that property", + "type": [ + "null", + { + "type": "map", + "values": "string" + } + ], + "default": null + } + ] + } + ], + "default": null + }, + { + "name": "title", + "doc": "Optional title of the feature (as defined by the simplestyle specification)", + "type": ["null", "string"], + "default": null + }, + { + "name": "markerSize", + "doc": "Optional size of the style marker (as defined by the simplestyle specification)", + "type": [ + "null", + { + "name": "MarkerSize", + "namespace": "eu.driver.model.sim.support", + "doc": "small – small marker size; medium – medium marker size; large – large marker size", + "type": "enum", + "symbols": [ "small", "medium", "large" ] + } + ], + "default": null + }, + { + "name": "markerSymbol", + "doc": "Optional symbol to display in the center of the style marker (as defined by the simplestyle specification). Allowed values include an Icon ID, an integer 0 through 9, a lowercase character a through z", + "type": ["null", "string"], + "default": null + }, + { + "name": "markerColor", + "doc": "Optional color of the style marker (as defined by the simplestyle specification). This value should be a string-encoded hexadecimal value for the red, green and blue intensity of the desired color (in that order)", + "type": ["null", "string"], + "default": null + }, + { + "name": "stroke", + "doc": "Optional color of a line as part of a GeoJSON geometry (as defined by the simplestyle specification). 
This value should be a string-encoded hexadecimal value for the red, green and blue intensity of the desired color (in that order)", + "type": ["null", "string"], + "default": null + }, + { + "name": "strokeOpacity", + "doc": "Optional opacity of the line as part of a GeoJSON geometry (as defined by the simplestyle specification), ranging from [0, 1]", + "type": ["null", "float"], + "default": null + }, + { + "name": "strokeWidth", + "doc": "The width of the line as part of a GeoJSON geometry (as defined by the simplestyle specification), ranging from [0, infinity)", + "type": ["null", "float"], + "default": null + }, + { + "name": "fill", + "doc": "Optional color of the GeoJSON geometry (as defined by the simplestyle specification). This value should be a string-encoded hexadecimal value for the red, green and blue intensity of the desired color (in that order)", + "type": ["null", "string"], + "default": null + }, + { + "name": "fillOpacity", + "doc": "Optional opacity of the GeoJSON geometry (as defined by the simplestyle specification), ranging from [0, 1]", + "type": ["null", "float"], + "default": null + } + ] + } + ], + "default": null + }, + { + "name": "bbox", + "doc": "Optional bounding box around the feature in the following order [west, south, east, north]. Length(bbox)=2xn, where n is the number of dimensions represented in the contained geometry, with all axes of the most south-westerly point followed by all axes of the more north-easterly point. The axes order of a bbox follows the axes order of the geometry. 
The bbox values define shapes with edges that follow lines of constant longitude, latitude, and elevation", + "type": [ + "null", + { + "type": "array", + "items": "double" + } + ], + "default": null + } + ] + } + } + ], + "default": null + }, + { + "name": "name", + "doc": "Optional name of the feature collection", + "type": ["null", "string"], + "default": null + }, + { + "name": "description", + "doc": "Optional description of the feature collection", + "type": ["null", "string"], + "default": null + }, + { + "name": "owner", + "doc": "Optional unique case-insensitive identifier of the connected application owning the feature collection", + "type": ["null", "string"], + "default": null + }, + { + "name": "timestamp", + "doc": "Optional UNIX Epoch time in milliseconds marking the time the update was performed", + "type": ["null", "long"], + "logicalType": "timestamp-millis", + "default": null + }, + { + "name": "tags", + "doc": "Optional map containing feature collection specific information: key – unique name of the specific property; value – value of that property", + "type": [ + "null", + { + "type": "map", + "values": "string" + } + ], + "default": null + }, + { + "name": "bbox", + "doc": "Optional bounding box around the feature collection in the following order [west, south, east, north]. Length(bbox)=2xn, where n is the number of dimensions represented in the contained geometries, with all axes of the most south-westerly point followed by all axes of the more north-easterly point. The axes order of a bbox follows the axes order of the geometries. 
The bbox values define shapes with edges that follow lines of constant longitude, latitude, and elevation", + "type": [ + "null", + { + "type": "array", + "items": "double" + } + ], + "default": null + } + ] + }, + { + "name": "AggregatedPost", + "namespace": "eu.driver.model.sim.support", + "type": "record", + "schema_type": "ENTITY", + "fields": [ + { + "name": "body", + "doc": "Body text of the post", + "type": ["null", "string"], + "default": null + }, + { + "name": "header", + "doc": "Optional header information of the post", + "type": [ + "null", + { + "name": "AggregatedHeader", + "namespace": "eu.driver.model.sim.support", + "type": "record", + "schema_type": "ENTITY", + "fields": [ + { + "name": "from", + "doc": "Sender of the post", + "type": ["null", "string"], + "default": null + }, + { + "name": "date", + "doc": "UNIX Epoch time in milliseconds marking the time the post was published/updated", + "type": ["null", "long"], + "logicalType": "timestamp-millis", + "default": null + }, + { + "name": "to", + "doc": "Optional list of recipients of the post", + "type": [ + "null", + { + "type": "array", + "items": "string" + } + ], + "default": null + }, + { + "name": "cc", + "doc": "Optional list of recipients in carbon copy of the post", + "type": [ + "null", + { + "type": "map", + "values": { + "type": "array", + "items": "string" + } + } + ], + "default": null + }, + { + "name": "bcc", + "doc": "Optional list of recipients in blind carbon copy of the post", + "type": [ + "null", + { + "type": "array", + "items": { + "type": "map", + "values": "bytes" + } + } + ], + "default": null + }, + { + "name": "subject", + "doc": "Optional subject of the post", + "type": ["null", "string"], + "default": null + }, + { + "name": "intro", + "doc": "Optional introductory text of the post", + "type": ["null", "string"], + "default": null + }, + { + "name": "attachments", + "doc": "Optional map of (references to) attachments inside the post: key – unique reference to the 
attachment (e.g. URI) or complete string-encoded attachment; value – media type of the attachment (e.g. .pdf, .png, .zip)", + "type": [ + "null", + { + "type": "map", + "values": { + "type": "enum", + "name": "Simple", + "doc": "A simple name (attribute) and no namespace attribute: inherit the null namespace of the enclosing type 'Example'. The fullname is 'Simple'.", + "symbols": ["a", "b"], + "default": "a" + } + } + ], + "default": null + }, + { + "name": "location", + "doc": "Optional location of the sender of the post", + "type": ["null", "eu.driver.model.sim.support.Location"], + "default": null + } + ] + } + ], + "default": null + }, + { + "name": "name", + "doc": "Optional name of the post", + "type": ["null", "string"], + "default": null + }, + { + "name": "type", + "doc": "Optional type of the post", + "type": ["null", "string"], + "default": null + }, + { + "name": "owner", + "doc": "Optional unique case-insensitive identifier of the connected application owning the post", + "type": ["null", "string"], + "default": null + }, + { + "name": "timestamp", + "doc": "Optional UNIX Epoch time in milliseconds marking the time the update was performed", + "type": ["null", "long"], + "logicalType": "timestamp-millis", + "default": null + }, + { + "name": "tags", + "doc": "Optional map containing post specific information: key – unique name of the specific property; value – value of that property", + "type": [ + "null", + { + "type": "map", + "values": "string" + } + ], + "default": null + } + ] + } + ] + } + }, + { + "name": "timestamp", + "doc": "Optional UNIX Epoch time in milliseconds marking the time the aggregated update was performed", + "type": ["null", "long"], + "logicalType": "timestamp-millis", + "default": null + } + ] + } + \ No newline at end of file diff --git a/connect/src/test/resources/schema_1/csv/sample.csv b/connect/src/test/resources/schema_1/csv/sample.csv new file mode 100644 index 0000000..3e119bb --- /dev/null +++ 
b/connect/src/test/resources/schema_1/csv/sample.csv @@ -0,0 +1,8 @@ +table_namespace,table_name,table_description,owner,domain,column_name,data_type,column_description,is_pii,is_classified +org.company.ecommerce,brand,The table contains all the Brand information and related fields.,#team-brand,sales,id,int,The primary identification of the brand,FALSE,FALSE +org.company.ecommerce,brand,The table contains all the Brand information and related fields.,#team-brand,sales,name,string,The name of the brand,FALSE,FALSE +org.amce.ecommerce.promotion,promotion_reviewed,This event is triggered whenever a customer reviews a product. The following properties are supported by this event,#team-promotion,core,product_id,string,Contains the ID of the product being reviewed,FALSE,FALSE +org.amce.ecommerce.promotion,promotion_reviewed,This event is triggered whenever a customer reviews a product. The following properties are supported by this event,#team-promotion,core,review_id,string,Contains the unique ID of the review being posted by the customer,FALSE,FALSE +org.amce.ecommerce.promotion,promotion_reviewed,This event is triggered whenever a customer reviews a product. The following properties are supported by this event,#team-promotion,core,review_body,string,Contains the body or the content of the review posted by the customer,FALSE,FALSE +org.amce.ecommerce.promotion,promotion_reviewed,This event is triggered whenever a customer reviews a product. The following properties are supported by this event,#team-promotion,core,rating,long,Contains the rating of the product added by the customer in the review,FALSE,FALSE +org.amce.ecommerce.promotion,promotion_reviewed,This event is triggered whenever a customer reviews a product. 
The following properties are supported by this event,#team-promotion,core,author,string,Contains the author name,,FALSE diff --git a/connect/src/test/resources/schema_1/descriptors/entities.desc b/connect/src/test/resources/schema_1/descriptors/entities.desc new file mode 100644 index 0000000000000000000000000000000000000000..f06aa5807d86e8f557b135811e455b17ebdf3388 GIT binary patch literal 14021 zcmc&*Ym6jUb*AUtw|jePYG!wPXJ@>#z4q>U?VUAaY~yi6sIIP=DQ~}gt9u`oqDpOd z&1~B}-9vZJ+GFJhQlf~UNHGq?;24CgXd#FsKyVZYL?-ZqkSK{0D~gi*Am9YZ8wA9l z{K$9it*WkmOlI=K#Ikq0&OPVcbI(1md+s&+@*`|%tJ&PDyO(xa%}(>i?&hVc+pe@~ zJDp~0S-{k3i6Os$k;<`|wOZZH1c;W~QQywSHUUgVmPXIVR_Rh5l3oNeM5IUnWXx0+;=J5J?EXUk2F2(bUD-oaSa-EkXLw^7+kjzWqk4?)y> z*v!uEje4zOhtL?%#BuCeLB9_*IWfXhkSyfm;QeQI4cd^_Sq#uEV_@vUYUYc|~GCfuu1hn);pbAeF$ zIP<6=%z#dPxNB;Y`&dd&y5@kjm06bT@2< zbow&GZDu<;1zAT<3G?PkL_O))(I#Pkn! zjY|gZ#o^2Aw<$`MqD;8o+j^&-oPl&xJR!l7z^SKxZ-kAJGSq1nD_t)dcBW8HXALEy zCKwZdwQNBvDI>wRnJ+!~Hf1y*C`-Ut7~%dEWgNZ|mki8nMg}Aky%69`sz+E<;G_$M ztTGkwv`XB}uPf1j=Q=NxiwX-kazEJ!R&aU__+$d_|ULY-kp z#EjM&i3N=5J`f~T0xp*1GSdS9hMLmZxG+PQdm^i)jjUZLmdrw4%R+Ynj2n-YO>ShM zzYwfwXeGc#Qx!HhET9f`p^z<5kri7Rv<1Pd-xgBe9AUFV>Xf0A{4^Vvwk*@^-2>)E zjD%(P=Kx6;E;VF094s}6MZb{h&kO{AnI9h5Y4#_9N7&2& zqeBPdpO3KQe##lD>TXD+?d*ocN4h>QHF$cc3dAP#t14JJ`gw+qv2F z%c{ZLl@r5#`sN;0INM8Eht!3#z#WkEmqyspq0EO0y2x0qv9sHewU<0(LM1j5_9N~KH_Te)LEsZyAJIo11^QmNP6M#pYCRb0h_!G`8$Mz3Z@~jmjSW%v{Fv*-i z!5ZC`U8y_mwkU2WO+>j$V+9)0{Soy6HY+H$k+5nzb=RhYVLQpflFblC&O;!H+9(Sb z1=*I{a9d8twcU5^V%@gWsMgd+L+79&>KPl_3L_6BEEduH6(>nD4N&F$RW8Kk=qrc;5^Pk5I&nYY{u z_OzNj9!94Fx;LOM!~83@+i-5cVw@I!w3DX<@>r*}i&I3UVu(RajHwscO!LMQ6={(+ z*lgCGPTmQED4(VQVUI-tR__My_D!d?BZ{9ktldrCB~c}uY9Ik7k7z$t+wAxp&k3}c zfO*``GleuET){^>FVGTzhv!A%MwsL;a?-m6S{%UcjBjJd2npfQsX84egnh5TCbc7E zh`}xu5MwzsH6eOs#^g_n4E%R{$wFpYHtXVQ_ zUe1@yoS}?{3j0ULrp_woQm>A%MDIyW{V8@F8WU}mw%f6vsuen1#D zUsTuI&JOIi)7lgD`4pc5z@YD+$T11poKm9rGpexEH%3`p9B62p(2b$vPle7J;*gnG zIWv%y!r03?rLxrtSsZgoNsSda&Xe$Ap*`+>sq1XwiMHTPxhFBS_lJIpC&}`omS7%z 
z)Uxvhp3}12BUewbvAXljUT@M9pu=tgI8cMGH`NHxo3{n2OKe=|OJ&T{nDQ2NijC<7 zo`FNa1Ee19qG{;xiPQsZLMnlrB~Sy1o=1 zY5pFXI`v9~#X@y89Xp+Rz4?^w)N6>?(lnR=X#_d!*8L=ehjWZ86RD3xm=YAwzVdqA zoa&&VZvK-HGxcJGCA>^OT)fl&ui|}ugdORrVf*%YU+Lb(W@=S;d#8yub7jx2ySLDf zNr|8_*i`J9VcG1`Wr=Kc*31~WVxeT@_3L&ye>q>cn&(Q0y+C^t-M+|_excOkY?xT^ z>{&LQFCZr(F&b-YhF-E{-y>KA`W|5~dtsE#4r!GkjLSO_d1$^k5XFbkEK|);M1Rq{ zLP0GAEj2o|%^K=?c?OrqnCUKDocYvynYz<#*E+RZ)ROm`Pjsvq!=;HgVm3MfcEjCr z`mpH)Gs+czB)~@fQf=Zw#`Q#?+!5h136l(|6q8lFl#;q$aUPQ}iGuEvv$fTtX?;fW zT#^XD0&)IPHs$Mq8et9S1f`Q`x{jb?oFV`ziR!;v+ZL@jaT7AKG=gSUtkw=1psA0c zVClVtz}aQ0-b8^*cPH`&?EJvq^pY%RJ$xQe&i`42P5EG?+p!&|b5pPzPmd@OF1`tl zTC#yii@Pfcr*Eop>#kE3ho0v4Hg>ozTA(P0J-F_H=`O6(a%%M+T-w$sSNwqhd6k{; z`BqVd;y_i!C5<-$U2#PDaStKmjr+8zS0ijj90#cM4{m63j5&=)vlCX=peYZ~Eo%W0 z4>6eEW~@81uiB8O-% zxBIs@t#oTgamUx*h&=v|ty<^i?u}*OU)pNcaU0UTXBF+y%Ec|WadE2|x^;b|Td%$m ziM%v2x?W6wVq|_@T+=QK*0_XS=I&TH1D6N#WOnDopf`IjD2g!>>u zSYlIc7e~E%yP5ne4`YPmKGM|yB8)REiu0R`^B&G?uX!k=ybAF*WiI1t%j7|CW znU1k2j?{9Fbeu6eV0Ym@#w?*Tsz%iqoAh!Bf;H+m1E-bJb;M|#Y>X@TLU>NIsBsH! zB?wU82wYuL0~wa%VDug+zU z_AMkt#>g7fu#E_biWxyRa^Vu2D%G}KME4F2ScYE6XDm#PkDQq=Q$t$Fx=|=|3$lLi zBvZvf6j5G=B{3a3!&>Hw!AmCE&Fi+A0gFjCO8xB!JV%8r8M9>T8qWlEr@FwBVR(sa z`elD~lnPyvQW>M>HvO^-Yj)b?r42OZ(dy3>a+;a9^BN9+6NF>nJ8Ns13`b`jjd|3^ zMrk8&>b7jXi{AViu(F1v6218~lUpVE&1i$xjeG`GvFOUg5s%4vFleOx`tWSbU@et%l4dJvmQ^h9lCnUv6%B3`yd791KE(}KuD^qK zpbMJ7k;5=bflQ}pMb=ew4W>Y9T%uhT$M@4@E@hroQb?4GSBwo4Ru3hRzpOzxTBFbT z3=-{ez2%BxgBp2g(b<+;!9`(Pxs#^Q%B2mvRJd&9mAj}!(ym%InHq7^vX*j|l&?Kr zhSEc*=V;|=&BBfg=!T(@DV(Q;pc>@Y#qEfF*|@IUP3vFRu{T9i04~rp1PD)a4^ci! 
zI=X74J$df+^>~#|aTm!st{U29n^a36n1i*UEYm8kS$xf-x`YQRa~g+1mdw0nXY!`6 zj4Ep6C9MoaYgrQ`+=LricGlF5ycKN5S0i}Ga-xeScO1+24uO9hL5Zn$$AgXjS|4Fa zyJoMM$0usdI23B<@=-1-4TC8w~HHjD2{|0d@x^hWfkL*BOJn==jP9daIpNJZ<26MaZku80ryruzF`D~Qbig2`UU#360yYPNSn`GuQ(6**!HZ%>q@pZ+_lo1}^VCWXj z$>DWG+R)ICLPJF)g3!>9{UO9Z3ykw-kjVZ%M%uy!s!%6cEhWf4_k1Nz8|EV!CKu81 zOL0<=4-FLbM{#7kcmOV1dF9J-6g?+Ri)*;V9jff#I6{gtOIr3i{MVvrFXnhpf~@RO zjTY47fd87H4U_}>_}afEkiw5kl&&&-GyV4jnY$bfWd9EVyj(zOA|jqhCVw%FXcv`0 z&hyHv(QxMu1FTQBpX&+@YhsmU7%c)qM49~{dg*BhLaY}sim$Q2))q5TXG26P{q zA@!nFcvZWO9rGvo?-??kqQrStFNpmu8WwiI!~ggk z1>K??8RAI4IVbpelzwXc#GK^EOXYRirEWy{<)gUExSI+r4SGW~@_iF!ph*cpB+0I+ z|L0NCV;4VI`FD?!#;gKmNo@@k4*85P^=rpSEmo0=E8Pr2_3u+X;ekWx=X1x&OeFR` zX7c$EW!?;e=pXcegL%GioXkct3RkEN1xouE${;{^ZuI^Ip(ntG z*ESzm5PGs~wC%Em{d2{yEl>yqZ>dI{MWDi>sXV`+zLnhzcqoPiS{c5A4=xCOT3vm9 z$X7y<8;Bi9SO>6p!C%!r_WbKY$&kt_(JAP~1xj1^EETfZH5ApTh(g8ohZiWfWB7ls zoIkQa5uwb>GPHF|m+y-$!Fkkpyo^lMQ`Ic4Zgn3cS8!xk2cJmvUb7|t;WawE$5u`+ z4`~i8hmZZ-f3Qrz)rZ)4^C?`?A3Vc1#0*g`@mC*a6V>LnQ)?Vd`Av_~BUr7Ve&9Ha}k2dh&0KF}@elUZ7 z@)*R3OqW!vYwUEo)SL)5}9bECCIb$Ev9KPcTJKZHMkRh!Wnc4*%e(_)Xb9v!E(;cb$5b zaD~AaY~lOPy-9AY&> zr2SSS25B|o$LbiA7D>H(j7=2X7M?GoBP}&6-max*-}`)O`n@kFV>@DWi7s}$o3D2Y z<Tczgb?U3aN_^|geTUrWx?7B0OSOOnaQpHD?NZKBD>(PgjIg{aZ= z{a`W=CdZnaXtUOw`jQx3a;jCl`ENf9vcG+KbZo<1-+1ZylqBWyFT6Dy!{eupc#a-h zN%rcttHbnNnBz3&SOm7GSFxt0=H^o8rt3+_1v4=9)g&HK3^5I%!*mKv?*P+z&l-kt z^f|45AoAYZi_5^iP`X)bFX3OBH`ML-McKvcH4M@NE>N6#?&ajE5-<^)$r?y2Zgt7o z#9KjdXzp&^>^ygSabmsIe5!Nvh1-i`6jaG0K6869afY(u#htp-!FFta{`R7BMkKg) J0Q3F!e*nGGrU3u| literal 0 HcmV?d00001 diff --git a/connect/src/test/resources/schema_1/json/product.json b/connect/src/test/resources/schema_1/json/product.json new file mode 100644 index 0000000..bc53b1a --- /dev/null +++ b/connect/src/test/resources/schema_1/json/product.json @@ -0,0 +1,86 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://example.com/product/schema.json", + "title": "Product", + "description": "A product from Acme's catalog", + 
"type": "object", + "properties": { + "productId": { + "description": "The unique identifier for a product", + "type": "integer" + }, + "productName": { + "description": "Name of the product", + "type": "string" + }, + "type": { + "type": "string", + "enum": ["ESSENTIAL", "SNAKS"] + }, + "price": { + "description": "The price of the product", + "type": "number", + "exclusiveMinimum": 0 + }, + "rating": { + "type": "array", + "items": { + "type": "string", + "enum": ["one", "two", "three"] + } + }, + "tags": { + "description": "Tags for the product", + "type": "array", + "items": { + "anyOf": [ + { + "type": "null" + }, + { + "type": "string", + "enum": ["faasi"] + } + ] + }, + "minItems": 1, + "uniqueItems": true + }, + "brand": { + "description": "Brands for the product", + "type": "array", + "items": { + "title": "Brand", + "type": "object", + "properties": { + "name": { + "type": "string" + } + }, + "required": ["name"] + }, + "minItems": 1, + "uniqueItems": true + }, + "dimensions": { "$ref": "#/definitions/dimension"}, + "address" : { "$ref": "http://example.com/schemas/address.json"} + }, + "required": [ "productId", "productName", "price", "tags" ], + "definitions": { + "dimension": { + "title": "Dimension", + "type": "object", + "properties": { + "length": { + "type": "number" + }, + "width": { + "type": "number" + }, + "height": { + "type": "number" + } + } + } + } +} \ No newline at end of file diff --git a/connect/src/test/resources/schema_1/json/productListFiltered.json b/connect/src/test/resources/schema_1/json/productListFiltered.json new file mode 100644 index 0000000..c020bc9 --- /dev/null +++ b/connect/src/test/resources/schema_1/json/productListFiltered.json @@ -0,0 +1,90 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "http://example.com/schemas/browsing/productListFiltered.json", + "title": "ProductListFiltered", + "description": "This event is triggered whenever a visitor filters a list or category of products on your 
website or app. The following properties are supported by this event", + "type": "object", + "properties": { + "listId": { + "type": "string", + "description": "Contains the name of the product list being viewed" + }, + "category": { + "type": ["string", "null"], + "description": "Contains the category of the product being viewed" + }, + "filters": { + "type": "array", + "description": "Contain the product filters that the customer has applied", + "items": { "$ref": "#/definitions/filter"} + }, + "products": { + "type": "array", + "description": "Contains the array of products displayed in the product list", + "items": { "$ref": "http://example.com/schemas/product.json"} + }, + "tag": { + "description": "Tags for the product", + "oneOf": [ + { + "type": "null" + }, + { + "type": "string", + "enum": ["faasi"] + } + ], + "uniqueItems": true + }, + "options": { + "description": "Options of the product", + "type": "array", + "items": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "string" + } + ] + } + }, + "dimension": { + "title": "Dimension", + "type": "object", + "properties": { + "length": { + "type": "number" + }, + "width": { + "type": "number" + }, + "height": { + "type": "number" + } + } + }, + "brands": { + "type": "array", + "items": { "type": "string" } + } + }, + "required": ["listId", "category", "filters", "products"], + "definitions": { + "filter": { + "type": "object", + "properties": { + "type": { + "type": "string", + "description": "Contains the ID of the filter type that the customer is using" + }, + "value": { + "type": "string", + "description": "Contains the ID of the selection chosen by the customer" + } + }, + "required": ["type", "value"] + } + } +} \ No newline at end of file diff --git a/connect/src/test/resources/schema_1/protobuf/repository/src/main/schema/Brand/brand.desc b/connect/src/test/resources/schema_1/protobuf/repository/src/main/schema/Brand/brand.desc new file mode 100644 index 
0000000000000000000000000000000000000000..86bedcf04aeb84f0961ea4e3fd306687f024ef08 GIT binary patch literal 90269 zcmd?SdwgA2dFQRO&pEnm$+CRxB#z=FJIckE*mB|!0uGSM(y@IKS<)Oy3C;{C$I`K^ zMAi{HQXDhPTUtt?w8NAFfpoY{DFcOeXj;m2U>JIx&p>a}4#PV$r8B&gP%drCr2~{w z-uL@^)>?ZXU0kQ{ka_=j^?~@Tz1Moyv!3;=XFa#IcKoUz$;ZijqcOi&KfJusSZ$nH zJ9~Jp-ke=oSYBO&g+fpemEY;2nVs2ko)qahsT5SqI?nV;?>d$s_vSRZ$Fs-beJ68AM5Yb&$$nc2o%eP&_lY-4wq0l0n> zfJv)3OuY)#s=cmZD3`8`+nX1cR%?&#?lBJWrGbCb9&g`(`IanpcU(NHQNm#2XW1HMQ`S>9}{LuJrP&fo%&=+_-Lx(+4vqxg=h?sy(6B7}Bi& z<#F$N@|1SP#pY^lWz_^-vFZw?zBrFCGbvZB>SyT(vH|C9z_)BdcRl2diw5DqGlQVzShaSgYvL}ocf8RKkgjT{9*8eW{c)!)lev1>+R}0f#+s|2 zt*tGt&OBOMT&p9V)wst$ykE~s*T>y%RV}awK4#LhSal1nq9^M9;bvngu%>md2iE;v z8FM+oeeKr6+sISeDoste!%ux}arX|e>#X*=$F4zbap1#ITo5rz+vD!3(-Y;Hk@4wc zqvgI_sW*=8!ST`Yp{c$oEmy{-p8K4>eA-~zJt(wRxZ_A)k+ss}090OF9-&G5`X|)s zC~b?otj@9V@zK7{6z1eqwK8_1uPcRlqB=f3(HEx>PL(Gohfb9Hy3-oRPEVC5`+8Ck zLW?f8rCpTArcd?tmUhJ3tjEdYLW>&PH#`EV`2QNT)g{I#MzS6nMl-hW+L(KY|T3g z-moPJY!Wt-ap1kV_{yzS$}MqtQQUsMzIv_^)T+&U*C~6wf!U@i}P$)3(Jf385xGn-Ek{sJ5=M8uON(?sD;;Bv-$c`eWkWqpQ*oW zjd9P^mgZ*8)tcva?-GDx(e7OJYE?Yp>y@qA(9+yVDwU4LR~i&(2mxni&(&uinOR#s z`@G%PwDxZpLKo@`tI9MrN|SL<1Iy(6!pp(Av0{5@?~%=-sfB`rqNEy}!X7zVoSZ0^ zN2=Wc%JIfZ35J?)qy+9Zs&&mb0(l|U?Cgw<8QWCzg%Q`>-Peljj@elo?82;HaZT34 z6&bNw8`|a@IL1b~u_-++U)a)!0 z-0rJdtII(5FG_>3|LjbCsdfgAsjcv%x%+ynTv%OM!-|-lE!)c>dsjLT?`WKPc-C>` z3}DVKJhuBL8gx~+>xJ>-ggq|p2X^yZZDrY%pC;U1-+i;I>Yhsnvd3Bx%?k@>R|CYo zR;}Bfd1(9MK2gG!La4USs`c``wO#BEH|o@M&s+38i+&$M{Q*eD>FE>W@@vnttKm5ay^BzXQ?siD`(r! 
zkgr-=&%P4)X~S0bMY}#Otz~}ODj~O(Y zCcH5+e`aNCn336g=P&Mz2yFdy4h7l{(|X@OX8E1;Gk%bex%}l!sXp19!&ljtE*TDH&+wiryr#rKGph zr;e4YebM@bQ|K!WG+Bjml7u;a*4BGV*8{(ni@RI&YMGt2#l^;jncCt465FvxY>$TY zQ?wIq98B-_>$PYEZ(s&S3kepz|A~+ab~gpDE^oM6B?VHitBn9tYqy*)$Hib%1HUt#Q0Qs zZ20ud^w@o4;}48g`&z(TwP$hJPsDv2kt$skZ-o{sduP0TY@9ujjj??EczJkg()m5o zLE!gjU1dL=k9Th2DnpFBn20l)4{eg-TeKOJ&M<+0(T7mbg`m>X>cZIt^m%84J2Pf` z_S|f~(!sd2+-NSWE<7rid~iO=tSJyQ@oy?it7+|}`h0CeZCNn+YF~Jh)<%DsYY>pB zdla;Hw^&3E}e@F6kNqlTm)?2 zrny<)WN_5iucj^ge?J#@hR1BTh2`4nIRjTb7WL(-R@QTL$t5H$u`BmnW$Cz!_1c^n zJ&p6{nc=3nKv9SN^I`v3v9Q&Z+QQYqkvPQ`I;X{oW=O0LbUys6#bP}->40-*nV9A|GXNjCN= z7br4o)b=S1clSv4oZK&G>hlXr-f!#*?B3>v7jc{${@6duVX-yNZ=Bv^eVOiPF}~)> z+=DNgUsyf2c4m8KMSYHw;l}dCm4*3ptI3huZhM|@kW`jt2a{xI zaWS!X%_M|12IJ)1>gsay=wU{IXl^XiQ6P^-?nc^JJOtiC;&2=%)%x6m7V?=j4Ix2Q zPu7}s&`I2s*po90OSP4YhF9}oasgT0q7pFkPMnY7KrvO?J66aAX z^e;47g}cFy_+E0B7cp>|jb`KQ>IJch5QT%1=5l>j97L-HG1iKBXUTC z2+7#^RGf@fPGLxoBva!Dt=rA*lJVoosd9DrBw&V)Rj|8HTZhLhQ)BAv_;@u=hLVY) z>QrTTdUU9oOiWiN#wW{(MmAEJ93CCQMjsiZfBH0^^qm8t2ea&lsPe8h-7S+3q+87@!Wos5o){Ku2& z$ufwH3{4GL*T8^Wyk`K%rU}Z3aD-;c)#|iGUG_7}4?r1)K1ACQqwx5cMk`(^k5^9% ztmt4fa4>n`WSQqo^q8a8kO(3pZ+NP;8XdwbOmsHRWUPE*v~mI!w=8_d1?GXuWO+Xm zRspvYf+;dTFhr*?3LT3h!HX%H#=10z6?4A5MZu7U)^FS)vS zQDUZ+NI}@Jy~n8(sV!afGEr$O+CsIX?W+&Y52khS=Swq75NXK*2GGiCb3fzijB`;b zzrSs_esz}ex3*2kU2)Xe?S8t42ip$X!(1t^pYGwIw%hF?jr7w!9BjMG9^PEa>!*8o zxa|gecvC5_pYGvpZP(kw>q~k4bPu1?wkQ5UdhaUb`PskfX`7{jxs3-xHN4Xs@rv9; zj6$(9IC!;%<8k$h$)Pi1hd8M%&chF@=guRQ8%ujvlM9WNN0PZUIp32rjm9d9Ky7)M z5+r#zjtXt1!t>gW=K5cnOfjj5^woxR7+7wZiCD4eLVgp4*)%tJ0&UA+e-?Bxg`5A_ zF#t1=p!YZHVyigL7uq1}^E$7NyW>KkEov*}pC9dud*Y%B1(o7ZYNynFT`1A)`PbhV zN_k4R?2QkDe6Cb@VcUbb{u^Or$C{`ahWjEJ60L0KF~|Yxg`L;L&j*8?FnCdP&A>s& z-P$J)LQjth(jAaMTHX3xVe+JLgR`^+8ods3bGDjcxqSp|9ZP$Q=rbrU68-!Nq*ITBS9a3;Kc~0Vq}rvZz_{D~cj$GHH`CvLnsey-t6#e1WJ{cJO6`>T!R zL*}VjCtXcJ+&EoD`nxx!QIQ36Z+E}%p-6xCUb8op=nBdkd9J*G#d0kIsh`70_gzAXg8L<%~l6MMhEtX59aNod7wN0_g2J>7r3 
zH6_~xK04i4Gab5FJ6mUOTdALy^HY(uCghqvG#J0csKt(XI=ZfZ!gdUo{~6G;{lm)@_j+wvJ1_{L39KOPoXk7bOm6nw!KFEKis0?7kp){6)#0%Z1@TS85M%$;wQ`{5CSJ!h3+t@7$f0>3IH{Jt=TrpDSDg@bY7ufoposwE)+)daeby zrs&+xD^uW_q;t&lHjg>e^l;&D`dSn9a0@t1)x+BcviE>{_{IZS8E_A?k{sjE_hLHd zlIV-+bV;Hw`sEl!lIV;5P6Sc{$YMI@R9uP<`UqA#V4BZrf>&Mb%a4lp{OK-S``JlCM7mGX~Xf+cwgMIKG3nKCo?Xv#H|0rqIhl9Zv# zqbW<~v}7+_f+cwg@Cz-##gZ3VfD6+LDN9lx;1^Qi!!~*`(yC*fMcPI$rZcB)^kTP@ zmb?SZ#jBlGKnWlh(F%UpW-ekMeMNM0|5`RNZg-MWEPvp+h1qjk%ukq}t)@ce-#jaO z$$6mUPjZ`*>=r-!jiuS3LPfTHzamAx3o5)K6~EfPUqJxW4O83qD+X?26-nw8f3WRG zb2$_45^F!$d2KunJLZ|v!Vl%5qy0n9wvj1<1;ySeE!73P?wSSfP#iCGv|B zTxa#cWU}skj_}Mj$P2)NK}?J7Dk2X3TiE}Ha@XGxig^{E`~3Jpb{&-1!mD%9)%~ZE zVM{z~nq_51G(Jr{o!Iy(PD>fHoBnQEL(1ZV?MRH<=bg_-C z@zuF2cE@)>twO2&$8v2?=W_iwuiGdyU4*HurOPRX#Cc4mQsKvPoqLdC1^(Jf?XSy4 zKbOl}2b6jJx?D$hJc*2SmR;eW<_f#|hm+e9G?H|-Wx&=~R?uD0kH%MU8|w9i`ofAc zZqdD%9K6i;dEYr#ZrXy}ny2ygpYT{O4P+ZuFoxdq9iV z=XR9h3%&*AH_)PgJ{d#oJh(9bAkv6tr zcX`K_E}bl#Mb?ByEeg-#!XxN_ErUTIsOJo|$WQ^bFc_HUr=VeqyJRa5vf<}x@lzUp z7!17m=I`5J?l|(y2P5{N=WH-i#@;MChrz)7kLL=P@EwPjN|#ok%kovFLp-RHkWa9!6ot1;mjduref%D{ZrV#;L>iE&I7n-XJSc+_sO5T%~!dxt73Cvr^%HZRirAL+;7nZyPZ{bZutp=G*XBT96M{dW?_>iYUTq?XXSGW={ z#%O(Me)ZhebVv!0Eus3^+{FNVXKv>e@r@o}cd76*BI{0;+y(9H5JEA5Dpe z;nfY8pAlYx&3a0OcegT44=s3*O=u4--kl@-7+P#A6@Io=WNf1a4>G3NMvI@#^<}Kw zTPnOKSGX!2TrVwnup@gwi}&PqNrQCKaC@onFI!t|rv(qP7Tam@FV$kNZ_!sOyq9qd zTqOb1Tql>D<1R=z5*t)A8z5PLJYawU)Q(c&ea{Hg4nR>e1GNKC?{l=0(9f0HKah)l zF%|j}t{=#C^x)UhFj09*gyTkIZQuol9Z616y45F5sCFnIkXS|7FfTO z!}{|@g!3=u(7+rdUjA|}+T&l+rHTO(${a;pL9VIB@yod#SA`;wei@_HLjuDOWjJA5 zs2Ct&T2M?OX?!2b?Fcww2T}wl5}GtTioYs-4-J!%#+N zHKmE+%10Aop!#jT02^s|6C+f~M?n=)*t?eO9o&0RiXn~9t}QNJJfy65)jOT1!A%3sEp{f73G3xOsLbs((2)4rK=@o6H%!xr(r#mRg4BUJ^Rkj*LOx)gV;V<8Z5FB(4S0+mAXn>h~lIovUjDPDVB{G{U; z*z1$I=%)UkaU27a5V4*OjH0l2ZGj90%}DCaTLo#*B-y~leG*G5@ zZU8i#)^}(n=1I1i$%|f?+WE!5`y|cSYdg3cTap%|y;N0TWm5!=2)}VnERnF1 zaJohm0r!FQgeXj7)~J@TNiBn~)vhtwE68CzDHQ6|KM9z@R%x`bszjr@(o|e3hViq3 
z;)UT4evldEYSp9_A`X!;i=bE>CVZF@4ljZR9|+e8;Xakkz0~ASWpV;X0>$eT-H>ob zL;f^N%`*lbc@+aBT=EpV!|o?d^V7NAfs3%}DZ)j++-0CpGyfuoQ{rd4jD)PNOsLnq z-kc0L`f%uVFnK`IF@0kr*UW?(lIhZhNvu~Ujn*|8JxXX?nL})wrR^Ho7C}CW0dxv_ zdgN9RwbDHrUAte7~0*AlU8c zgoP9>lMEM#y%QS=lVCdu+kqsuq@K^4pej0vF$dP((%d03wk)1+tGy-vV$jIKa&Iyz z$^pmG!VNSEZ3o>VjA`TKGRkIyShRe1%HEnk=d~g2PAeQ0#KM$aiMp|Y4u&bnqIr<+ z#bjOdaaRbPW-Vx&HKWzC3-)SF)|u7?Z3%8U0U`B4X+tGtM4H_}cvqMhYxtQ6^U34| zesmlv#+Xn#!;w?~IOxiZFr`;w*sYPp+Wf&*^e#dH_+$?3{WQYkAqr?p;zR3Chd-3c zPJ(KZT0!E)$j-mWG+UX%f06485-&#L{Y4H(<^Zm_VyXRWxwgN;HMeV>Yc6bR^2ABc z`IU!xUvqd?Sc$N&iEV}9+Dq-<%C)^A%JpB}ItVv^wy{GgjYJy6w{oOnXe8|#$+yEu zDA7Mf*FQx@^6fAZiZuOp7zsuC|8^KjyGHVz96=RnCX|3gF+kEtzLV>|F}$_PH|-Ba zAblrCV8!8tHTK;c!DRj<^R~INoCx1S=o$(yBmfk!%?PF@Db?AaZvq7R}CM3xM^e zG*YDBswHvysZ*HQmJk`x&SCGOI@Tp^wVl$GW-f+@V~fqiz_Lmuut+spX^`=WAhZ~` z*QTjc*8t?HBFW;8Bc!o5&ZC3-Wdy;X=Z{1LXZGM;peP1;b+joyBS(n6Wpa4kV_q10 z0L#2OB8ZaK0N|@5LNw_Ers&lXF`Cc=6@E0bAWdsfGE^5M@jB~4hW(GW_Mm0)qY-hM zvdzj9+FL!h=WS$zhvP+C%QXGT2y5%5SuiQXFfqGyi-^)n z-D$L228|fz=BSawR7Ne`1xyj66un{CZYg3v8F@p6BAEVUgsi$9S=CW$e^b=<1hVSJ zR$1i~?Oh_11{II#GubkSGR7l3H(# zx}A;BTOhqP+T%nVMIgO3BDd})sMA?$e_Pb{FQCqq>!>3^K-Rq{h>U@^MV-p4DHJ+I zkhcedP@;2+u5*eE1*-s=>`os`V*g~X^tjaWo9rVxy=EvrN;$j7IzC8v<%t}S+|3! 
z69(OgE==np;}k92?3f1h(tnPnNj^*gw5^f!$iG5 ziU#~kO%%ldiG)OoeqDDN=e|GU!UqRQocsPLQCfn?L+R(E=w|;?vW#MYBw6nqpKY+(bU@BcvD<9NHqOm6y4Q-SHb~AHZ1g0Hat$5SP&OwA2pF&+i?9xcVr{VKtV$4gAph=1X+m`Z+>nl;#~P)blaUCK_ET36@s>3 zE84vQLH~Rsf^x3`Cm<;I+LIZAQahi_5R`lE$qd0F2tJu1SOmc*h2RH!Sv7H~{Xa)- z|1qCCGM2pXfBlcRu?yVCNB-lsq%7kVTT1FgYd*TAliF5!u7~SaCz2t90{gp6AGp)fw)DT+d{43WE6`6?pP$%1Zzc^4&IxU;Hbrb zR^Z#gkjjPCxv;#Z+;={I8o{#)t_ zrp&mD7(Wchdl6isw)x)3Jv2LOy_H52dC5tgFk-J$Hc&Z{LKvAf+_^GVG7~1PDrt{tw=k)MKp~)YcPoZWGV@WyU60<7}yf<~*Bx0{)sBbD-pMX~TFiNU1mOSZ*-2)~UG~ znt1kLwKMhlQk6<1}-bP$KCn5 z=Fyl#bjnwDwrSv7^ALtq8!1((E+YpA7tl&A=XO@_(_Ya@xW>vnT={Z)L^xr^ShoHc zhy36N+~2q?+`2nxgmh*)swv*rW^(6k{0YkHIeZd$3!bk(afQWDTx8-R`LGb3OHbE~ zi6EpNd0X7g4%3kMnKjUcMJii825|KyM-7!M@4QL0#K>>Mcd6)>51UGA^(0rcyT%r$ zJHmYx;f~wm5SGZIb0Bew^3eQZ<4kSwP&$M7hUPJrRxY+2&#}RCV8EG4*=&bd#lfX; zzPX2WR#HQ^gLDsb^2JnQcZfxB#?R;>ewstW$@1bFXM6VBP}ViMP(QNjk^Qk{&OI(A=NcD`>`HA{V5;HQMV&=F-6YHfj+z!@9}5rS zeh{E^_>wvoUK1rQ{sdcNAmYQEGr=H2a3`8#oI2>`URql={Ez`0s?=#E2|KA=b}-BJk@ARhjL1dwpQFy5 zE;l`v*7(O!bYuThITF&?OElPfnR0}TUCu0N#e%XMlGsqP{Yj@WsSV=ZE9fH!h^NIG zF#^`-G235s*`>VMtR4BwQoyv-@^RW<@Dp&ZQxc8?XPIe_aB}44Z0&$MiUy#beR>EGAQN*Yp0>uyz>O;XmeQEK3 zno(a`{GUc8XHiiZ>i=m(_V~N=Mt#)yuSC(^{cp;9>Zjonb0>T5HFd}{E8C6nX5d{0 z+pM#Pq9G-NB9A*z=Jq71eAzh}FDQ`OY(_4|5Z0m7oYDw$>(Mluur*{oQz0@CZ)EdS zVC?{k&H-(}*2Dw{pykLY`RRnFW90xbU0<@Un58hovaY@o z^!?bZtFJ_T&M!cb8U9Lii}NW^MBo2PwEw7IeG&TpSEA=UKi+TCPu-`ZsLVl(Dajr$ z;}m|8-ibb}lZS5I$}ppDS_FxwGbCh~JstHO3Y87;ZAU^8B%Y4$7_l^~ILa#w`E>N0 z;rI(q1_1h-QS_qz|1-#dt+?=r)5IC>U+AY4haAPbUJ#Fp zk(6fqohbUh@tMCMIgu`tAi#Y%OzfpAAzM$!24kzq^MI!%DSs#G*cp%DPzJd6)xU`f zy#volfx`^a92lfcycBUM^2`nfkzK$L6l9mlW>6R)V7kodYYz~Ae-rg=i%1$_lUz7KawphnNDSDS3MW*jH`Ri|pKLw+}3C!2ya6L&x%rGv?IMDq(Yqk0MJxMZPi+h3Gh0w&M8p=zN-WnoNNywh{ zhz^bKWF@Ii3={Ij>mc_7Q-M~E8l3fnZ2@U6QJoaEx=Tk=uZag^vpG4!KE zr?+JND7NvIe4le8b5{fXyd}TyfR}om2I|%qgd+XCCI8&JeLti@y)~1NN`s;p`r&93 z#dJIh6TCGWkJ6ytnvJK6e%_jmr;C2xDhcVvLypuF`6wWvy#%?;!PtCx)s{eY2XvnrC$`qKBe;i=^Z$ 
zr$g&uXkZYA*2B==oej-SPUL+=0&E%DyYt*x;Pmz$hW76KEiNL#TZZ=T{OxzfKjm13 zLly7KNBjF<)#9<>mT2eHN=Ua7N{mXkNPA~0!=Y@xIJ1OguD3L4wUg2TZE$ah6cWmS zLXq{dDjn`Nh6_?*xZ4=+`?5_=$3EVd*S9~+`LK=QzE3N#Vspijj}PRz;^J<1p$40^ zov+zUiX7QmKW?W~xFn=vIk>#E zU;=Dy5d=1yOLvp@5(u~*dP0-52tA+7=&6%7Pv%oM(RS$hWd4R5Eu%e-IA!xIn%3n^ zwdD5iXUd(bx@namxi)d-h~G6Cs4PUC0(4x~2ksz{7Qy{jGTe2v=T|ab;L|J=;e~(a z))?n{K9VOl_JcVyCtBz&`uR%+HBA->HkL)OY>712nK%?JnaAPiGGAMv2K1e}9Qesp z-yn<4T13u>8Ss~Jn}yhF9TMr_cQAMc5C*@4!G9zhybdFMB=55};%Enh|45!Rxlg*m zmpGY_j|Tfc`oA!E>?^zFJqSpYyiGH8St^*j)V9G^LRhA^Ge|E_h!TWgFo6&y2=VcZ z5G4rl@q8&*OC<>L@%*ia;@@zD*jXz4mL$S2UycxAzTg4fTECfdpD`1dR_+cEWp+#J z=(VnG@SEAdy7s7~oUjuDFo-~aoewQ|WWb1K!+;#|dxo=lZ)BwN>irz_<%}$uO~-SU7S?o>YqYE-PJ_QDNA{yMv){{;(DCv#*y}fFnC6|0_k%m_6 z!OhcMYX75r+fNm^&KxjOoBk-@d1d@xicDg+95GMj@zi~yC_!w7fMl549-OD#G@#pq z2Sc@554mc-QnNO7Y_;q*p10qGRuj9;(pfl z3ZCb+)tV-2DdTf+m@%|>ZoAAI%xJ&IGT55kmuEVnZd!H0u2B7Wqmle$c?byWI!~p4 z!zWMHec&N^p;7y8S2rVJ@*GXT`PyUl=AXpt(nYiMaX41wu(-F#5YRI>Wb^J8g32OP za)edQ`q4Ot&;+t4(tgbk&Ib~dK* zOSMmq`Dm4GZ;&5r#L>y^Z7z9a^@4?San9P(4@S5G#Z0@7*vOt2-xzbY-kIkG1e}e4 zfVG`~pvaYj7aOt zPC`15GLxc#rYRTZaDi}QUCzFlW_Vs#Bc%7Fc}2*tTNVcQpWjVPMLrB1l72Dawcv6{ z$1bL*5EILgv&oN5Jl8XC$HGlnWp$I9znm=$jepkD(By@jGfqTK; z&@R1+LAqTukkc)+^>Oe_f#&lqO2Oa;YFqh!3zqi|_qcJ0R_<|@boRtay+lu5a9{o6 z6e8mw_oH&nn0~%U&$)nHcdT&?5>Ym##VQ>^?z525lPoqG90$`eUUTaL&PQNQ)s~N| zb=lDJ0<~4BhELF}7o%AXT!EctolO+-spY8;DO~Jd9x-F^xHHAKxicHbg z6PJ44B%KM>>&FVwHm-zFz*%7%eZPzoRyeW{{<@!yZ9zKOj}^Lm+zK_FthI=+v*Pt0 z;_C#lWDu8Xe8#EPoDt*z3}NuP44A}hYY|{p)KyE}5-YDSeE+Um0-%E!XvM)II`GOh zZ$aku1+QFFgjTOF_^X!WrF%mG-TGO$YN=c5@*4_XU8V#c6rH+E5u&`IfVwQLw};~e zg|@%+TCY56Zz^=|ir+0~PLH(SCkoiGZ;EW0*axEmuZjO$NK3qr*Us&le)%E;Ss`Lv zG804UsZP}6oKbMd7L*jJY~c_GSW;?cnu&8kSIMxA2LmWX1@Is`n+J6Yy)ta{VG+r- z=2Az~<%=-&vVm_Agf9vR8#!L{d?{u5sxD6xpqwDBN<8 zd;!R>;POuwqJL4yyV@2W{^TvY?goJvzF}MLUc#}j~8uuyN|Yoi3wgI zue0BaM4z+-V@DyDwwo<;j9Qcs8iT*I5nGa2#m=yUQrUMnZ1B>YxXs~=t*q5L_ZE{S 
z1nu#)164BsMfZ{Z`P}D_%Ip7yzE-wMdeQX1IOfeQes1?QlfVyzZ>70UrAhpe?=)p?ovC=hV8xt|%BQ}HhFU}2C$n~Aq7f-xo)MgEHrbVRchYCL659h+z zA1ZLit`nom{rXUWW0JmzRQ*ulT9<`IWu)qd3j1#NQk6@hex(royHu)bk^D-bW1C$^ z)#EOS`q%92{dTjJZBT(o8b>N!-3?LHbjdE(k`?u@3naVjH(AMzQ22xe>S}X@ln&M-f_Zq+@X45OTheQ^?@^#^0h0`OtZAS7|2N8TmrOQtZ z3bD9LUuLkJTi2TjSt+)ByH?gq=&(HhM3YL5VP`=lgX~ zy?mWrb-CmjJVRDeN%oxXKXMKT@{j|^^Tvi>vr{t~?FNfRc@Ss0gkS9LEgPV^{l_#S zC0M}e^8D!QmPTrcfc1BLNH20S#TKf%HEBwZA+L_JE`IN@x%1N245jAUJeSW}BDP{; zz)BrJ7`J@AL#opP(Q7wJk{&`~9X=KvH$uod%z7Vb!dgPIM-Q6su z1G?vQ!Um4;S>*nvZ-|i!6BMeHoF3_rdC3JDmfL3-^D#JbGm!fAm9q+GLI`>Af`RCg zIxs^OhxI9^c8Q7kEg`2R)d-U1BPPl5HSj0oqjV8yqPr6g>EiSB;&Sm`=#24-)f_&E zTr-~1S-4wV348bixX_uP*)FaOdKYF(4;;7-jh$!LR@_M}+jGcIQ2yh3fMWWEr8Sln zl=HV)pfiLs?2av=VyoLTC9twty?8DlI-h~DAuiRxkPlEybdEcU%6e=;qB!0OHoOk=NCvrwI$%L?QolUtCxJK zt`GI<<6j|--~8FS;>_54JxsG4oFfv8o}tIvRV|__+mz&kH8;W>lyexjw=i)G zxWYFLjqgMl2ZWa_!67V_b?e@z2)G6BScfjQq&LLTQ9gu@rrhp5fzRxweinw(#5~2N zC2hzjYnhuME!U%Ap#68X4!A7*10D@sk`wMl@dP%{-hm}$u^d3P(Ra^ToLf{|FPnlq z13z=xu*8B`XEx>KM&vt+!=i3amRo@xCoQ6=ekM~?l}7!Ug1=0U-O08mF4OaE+0Q>y zxcWdSvY&sZz=arJlzX&Cn!@J_xJUnM<{owX*t6yw4Q^@Yzh}P)(dElz`m`qcm{NK< z*Qkv{T7n~l{0$5>EkeZ4Wki&};d2F_>H}FI;^zw9F-j34ey-phqZA?H=L$GR?=czz z>8S#r__*ENm+mWU&+yYC4mIW10wJKLv$;=Ys42hpsce3gU;9+yD(6M&iIiXaRH5HR z6)1w*Q-zxk*ag`=I{f~n@1xfSfS?M*fGYRTQ}kXN-h%3vg4c#3sD3GUZIpoY=g*SY zrbk|zKM!6TO5j1!d2J}7!2P+rHhb`k_26lHT7I$ZtZ03*V=j5vlrZzK^~l5aba0AM z0#a)cNLG~pXIrWLYlXINdH;`6jJ_uSkNjoZ0-% zC{Gc}ecU+JSFg;=qOJSdVMqa%X&-ge_d$%iK2@VD;Mgo3mk)c z2zvF3UVmHQ&d;ig5w{pf>;407U&;tad z3r9eTP~~sKDx(NGe_QBt7iUm}fPY)ya?GbBCwpZRysC(t{B$NK&18Gl#H2>yjy$nb zJd0A&La8Z@6o_M-fktwPysGFO*bu>};DsbbX!NS07m^f_M6W7(AxRNQ^r|94QqJgJ z7Tv3he}^-=SL^Q8Meo_BL|3gv_{fS{T)i^LUR(VBIimr{xPchPExGyHq8Dqt1)0|t zy)&93<9=(BEb zMNoY_@Zfev^~AHpgWJV}PXr#M1RfL}4^o5&p9nnIS89J-vF%@Z9#n$k+lr(C{Z}0u z?Gq2aw@89{OC&xVr+8Kw(0B4|bD!@9h4TojY>fNY$P_wm(P{5vN^Y0sZ1*Sd^#%Dl z{B;7(8SE5CoHk}i%Rj&g7)6NBuCTQ!V2XjmWf!&O#$sb$Uonx9bFRT5Yj1U7Z-uVd 
zEV+Q7S?3C76m48R0>q6McC01bedrt)>@??b*|MRYvEv}B#*u&b!DkuJF3>ZfTu8ED z1jv5?+pjXF7S?5M9qUqrAKx2R8ATZ3y) z>(eTG|Mzi~0T5Jy7*N$Jdw*DEytV%PDx(Oh?+>f2k5T=?v$V?kw90-VtTIaALD8); zimb9<2&-&Isr?s=ZNKMNnKEgAvDkS<{AsPS9a?1{)hZ)zs3HdyZWr(0c~%#h>*Lv8 zW8P)-|Gy>1yu!Cktm9jXFylwV5~Bzgd^9XEitz16!xE#&68mUaViZ|oAJr23s+5Br z3YGp^G5W1yzEw1PrqSW}L+;S`XjzOmVIWKFyrI2Rq+gsFb?2 zH^gZ8F!Pj`SBw~P{aUf(DvQff)T;UQVquR3ZmmoO9=NZ7*aKGJuNS$4?C}P>@sJTT^g3+;zGt&%T;rqEV2i{`HkY0SH*XEIFx_0SlBtRH_66g0^8hxmQQIH z6u3FyzA=TT1xXmt(tRz(9vCS5D31gIp_<<=M*qI(j%HJafp`!YNP_RTi~gcO-U98n zi+fx!ogxhM+eLpCpCSzO?~Cp3co0Px=-(H2++;F(hqUoed>>^r06`Uq0aa<^p9nIV zx1joopp8=m)lUQ&tz!ni`z*=m9g@+%8)P&k@Sx~qG)2hpyOPmAp2s&(Dz$&Q*!JaO zuK&Ay^gDI&rGZrK*xVrQg1qK1b-C@SGe7-SnLh!hzzJ?O%VkM zr$!z6DGDIfl6L1#cQyUDyPZGV;g^9udenp1x8I2i2IH=`XBe&3h8xQlryBeA?|0rp z^SU}8^)zQth&vE!X7uAOw?(lC4d^@zUJ2gpn>Arg|4EVC9+fwv8dUi2 z#lnqg1qm}AWH9Ba`0rYb5|7rxns78MeE`h=UgQoT1#wHl^0URrT`x%qcodz)p@_u! zY_WT%xsE9U>$AmO&ZSNfNS`edF}E-Al!V;pi_u|-t?gefJg!Z$lm>q#qQD{xm)5<4Wm!A&pj`|gShurLR?Rn8p{(7juZZ&|!PgOT6} zvCS|)Cig)v3{8ake9 zWO%Ara)*zp45X)uTm#`CQTjsBUs_v=6wsg;ASqAs3q_yy#akeKp}6BlPkjY6zEI@S zS_g@ye^&IDc9$%RkYa$O+e!W`!%1HKKP&q4fm8<4pJgmS-7gj+cNnu2$q7#}K#~*w ziy2OG!hf;2Bk+ry@Lv>zI-FpaFBSa_v?b>w&mif(g)e0|nUB0!ayQUY8Ax9$as%yu z%du_j#HCzp`+9q>|6$Gz^OcyUzeck48_+J^C6-CQW#hk0Z70?2grYmH&%H{=*#$C5 zYDOFvMm4iYFv|2p-_MIXv0GcdlMSn%Z8#~clU`qwKj4RVz+5}E0^i=w6}R8$V}l%R z_}dCo)|YHuULbTS244@6H!XtBg%5SKfyi)kxv?~7ZeZP?mfj^{Cs!=&Zb3q1a4SKr zh|WrKjm3;vL1YB!D-4*?m65U{xH{FK>jWR;ytXFB0PPkIQ#Y5;Dj~>vkH6R08fO zayOldH9{1R^JX?~%YApd?be`Pie0b2m%1PNNB?{2OJ(ra>{XJSnYAC#A~~BR^XC@$ zgo+=Dbs8>E!Jpvf)1~n9nW`lk>ar9MDEyP{d+O#N)($T0=bwZOwc8FRNBD2@6017J zkS>skzIkBma@O$7hK2_@a&vWb-)y=~c{-$*Fg4T8T5) z7ti{SjaeAkZcov$TA1G06@^I|3hX0F^_%j?N-P6W%ARdOQewCh>TKdOHd%Trm|Y3z z%==Un(t^n9Lk$5`ny%@N`Z)au;KwlyQzo~ZHl=(Dj;S-~rVQ_qWFLtE&4n}MaVk-R z%Uh&srl}s8=98`2rc?U_$t@bRU%RUG?L`wK2M}AT3_Hv>6f%z(J%e4 ziTX&Qk9F*>woQj`H#aH~OniI3(KNqZ;|y7cYt2AhQz6r_%?)_E*5-8)4*A&NxS~TH 
zcBgEBym1;N0gXa78hzi6#2!oAg%8~BVS)KAM!lX@aah{&W?bS#e|sT3vUbLEm9iq7 z;_B-N{z9HfK8Mx$29JnB>+p!vaHjfpZ-;bJP;9WT(w-CpUaB--iw-R|BB9e2bP#G=%xTH$NS#)0O_665TH zQA!?mM#;V&xZRyY@X9ed2udGHh*SC~qR73y-Ru1n(OBNzzRRV?QbbjIdpoz;>x98h zS&{E(kG|HPH$9aywcpW>A@r&atQN}Ba6a5#IN1O7_M{RtdPxa=YF%>LtL~T7(cGjn z!{T&(8!U`b+lCpWUSNy60~@#qfHEXgp=Q$LqGfba&7`E1M4d8(LVQeeOAI_im_(9j z6S1kEiRG}2Q<-nxW`Q?l!*=mIY!|=lAhC~^=+KCl%~nC0&!AU$W;Boc?qJV6d|12x z9hyJ=4H_TryNfsej|wOTccm4y^^!}M{ZWBCS;JNra^AL3gx7VBd~f=d*QPKT4qeaI%+=kHlT90%c!KiNrlr_Kd=W%rP{#6v#9wm^icu_V1pcE2q`mN=`> zxeDw7_SlEp{g;F60SouT?K_>b*B-FYKirP9H-&-!^TX|XZjJlmP6t$94*zibwFlxq ziu_^+$j7wUe;t3HJ9?-|VQtt+er2%AUSkn*FL==(mc}CW!+6|`h&lmIN|6*tN0RKH%+qXL8(Ren7Orh^`GriyR=v(t7<5n(g~U^tqvvHI zBACr*t1Z?KT1(k|?9j>6jO)GCZVf~#W#5J`T7(U{b!%rE7(#cq&xEl!j9a$omo#Ig z%hqSt>_C{RVX4}2A8yhzo|3JK`>7RMOHS4{oYqHc!Zfz8&BJ*xUCKnhQ|(JcixrRN zv##z-95X3G#t+JO*XCE_;L`Fr+qu&U#%_*WN`!?cVSXJ`kYdRQNMv?rgZ7*3@TuTo zHQ^N#XQi-NLYXHPf3jVCv~e(Up!xm-oS-TM!5_|LF}88LeT_{pG?4|vBy0agsj&+{ zmELYMj|RT-h)j`vc9+HB!(96_5SNNP^6zFg;&`_!Z-w8<

U(2(+1y~*fmeca<-g!3&-fLdoVMpoN@we)g zDxudQSP0SSmPjjRR_E_P4PUp1;VWGdQmC2IX0E|0jC#Uzryh6!BYt6|d?MM4mO4+U zASf+jc)yUeY48?4lp!)g#5Ob@34+LXwm z&>UOg(&%MND>>q-r$hB~VxwTNce+_QkQ_secqCw*iZb=YCqBU54;v?z35}G!%|4+@ z$PM?(#$ZDFmC(EtWMF#Wh!@QG&>%(B@vD0YqFm?chwU$yM5H^h6_5_c%%m5{0nR>= zZAWr}Ki%#l8CHYs=;`)dH>Eq6+~7~Q;|5nGLp3P>WqaX3T0yRG9%L}(4FAhkm~w~z zW&5t1vnB>(-~JRPAimmOIFeRSsyh!dm~xAMwH2lu<6mvxbtr2B%&)fJcDt@ki3+>; z$Y*=oPjuvz5#Y4aGcHERM!4vX$edePhTm2%=)A)EV?`#bWb|)vPfL)Z@v~)}2)BKw z`)lxf%8+R`8+=+ejd|ITG0Ew)7i6gI&IT_FmV0-S(=c&Z31cpb3@7MAqZZe(CRYJM zUFG$y_Rg#0$FS^n$!_~jJ9qCrtg|XZ>Aj_}gl$16TQc>{>mijCPlEQ06g}!;4%0P~ z)@ypEyNV>flGb1g9-yI<0QAhv(REtH9{o;x_Z8tSXnv>tO7|HtDl=!_Y3DOyvPXBx z9{rp4=*K&pJxZC{f72fCivL*--lME>eNQUt54M`4(m_%)(rpbJ=4~COPDGheW~VR^ zUYtR-Z5cp88(ptG4UT$m@Su6koPK8rFH)b9oT&K@v`?*;wN9x&+d1^vz* zpx=Ej=y&b`>fHCF-;Mim1LKD}3ODuNvvJ(wM|Z_EvQ6jnoYQdY@iR%%OVx$+QXpn{ zOE3MQ4lf9-hV{AY`m70f|4;`Xro7;h0?Ai*6t3;(R9NO4&}5h#V|c+xqbO(@b}?%U@{(fooFOkM_UaCA5D9sO@?PEHKc#99K>pPo zL_GgEuo4em*HO5p|C6Mdtw$f*;~67wxZ7P4EceuB+lsuAz_Jd^sPQDpBAt>8%P2)6 zgB<;sp-NbM)ILY8Z!zrS`&{)Wb*4;DI}Tv zx(@#-SW!qp%GY&pJ;49?K!kXjCHbJXT63QoIceG%3ejaj|?qrM~FEvAC04b~ zBpCV~u9od(n~)eOkCs(4XBbV4*qaC~rE`k!s*#+ssuz>x;qkGNNjj%+sxme`Ri1RX zo*bX9PJ-*#uZc^;x;B{)P`b#Obw^gk(8&Ge>QrU2GInC7G6IN>I6rhkm|sbgYI&qG zH8VU^9hn)MK6R{IWej`b-L223szbx~g}3<ZD#r8+s~%D6=1_7ml?5wPplJARj|v}ZUynvTk`p|P3a@sV=hHo;O_A;t0gD&@Xj zbypc4uB7AJZY_pKp6^=p31DS-Nyx>qAVHZ;kM+tZWfA+f?f4Wv9aTs=KuSlm}W-M3%kKQYYQ zOjKs{;DEZ(gVt_tRm*!tqX){zJU#~l9UmC4j`SVU;&`Aubl;3ft4FZICn@lI;KjcU* zxjL)nW}M;s#rTO_>8iMty-z3llN%aN4ZV2gcttD7t>WLwkx6s?vGVAN%JeBXMHGUQ zSO%?Y{pa-o;T^}SLl3e-hbNAQ)<2nNTswz{#)d|QW>{SJ%?u3>k53~N|0)mdwuLvt zGZQDr`u?YGp}y8XyTm}hnTHX(eVy@>m5IJ@<(cg5{z*EnZ|8-PfBFxm%g4&YGbbx! 
zeSfXX*RS)>C&!0jmzm1glBZV6^#PpqvNNJF@3`$!=d`S+BGGx?5UyZeGsi0Ui?T0%oiVb7!8kA z`o7ruV60N@`%;I7>}#Z5eYr!d;-6|Bw_aUe>0lG?KVBId9U24omKpWq_}xyC=mfH2 zXzcW*815IlSjT%pCD+r`>2*x;WS6KIs-$TCQdjBL_?GY(JST=um#ddR_LsXfjiK`7 z@X6`pLsaeiP?yMxI65(NpZ=50!kc1EQe|ueY7Y&KoE#bgHl_P3Bh!e-Z^xyp<6Z9c z2_$XHtG{k}HNi&E^6ER`74mNqz+;tkBl}FZs8y+!jhy0IZIBz5=dbl>!a}8TrE1dg z*Ly^u@H8dRZ}hO+^`ws{Cdc}Ivj@HCn#yE#sI0XoDY1drKf6tYs!R=yo@V_{O4?f= zuk96=-8-ZKty_S9-m3|8FD|jx{-T#H{3=%^BSv7;r+dZj?rn4%doY+f0u(-oti$OS;{2BPh3$NjQ2~y z8iH;}qp&tn11s79YS5;XpVzWxhFAf<0{58UO`o2qv&|kQQFI(Qg z`*fZ?l*w}Sezx}j=U-i;DQK;?X~X*9HQE)ak`D0~uPOD%SMVeiu+{SMauqeF-1np* zhF&esaAH9!#qjiGWrD%>{p22W$j;+z*(O5Xu%~o5J}^CrmN)J+q!i}XJIqh*(VC=z z2^@5Dbea=0rz*qMzBlgCw$G~(1caa2H|;Sx@oa0|=FNMIPLng`Qxl`(m)3hqb((Um$e+Cmk00#51?DkhFz

FU-ZPfZcAdNUXN;*0X0&M#(j8zxYSsO z^=6(SJABC_yFF_!5tp(@&wTKo{DYh1psMc38V3s0;g=Q{&}G-c?-;1=BgvK| zXz?ahTj3r(74LW%H`n3MnORv|tj{QS;qr6;yB_cDzHODJ$7p%!q{Wv(OAqvijW7YP>6ea4xeM9KVq4 zGAMt;fOS=Ou5Uc5wnvY}Jp}SOgXUM_Wnlf4ht=&H*-9(Ny^Ky5@N(zc47a9V4x|5> z4R6A=;#&Em7vl{WI@xd;jJ|2GdXMbga#;lGWiG~QX+0Ti1ka7zHG+kO%VGX4k9l#S zIkB*C^qx4@DRcLclgqXJwr|_gWR-qBk;{`;EzRG3WdHaq8fxhIk)0obJt*; z*MZS|T-+Y7#05@^f?ZG4+h0#({zMBGmbkuzFJ@?qB2Y@Ao%)kIj(ZlQ30n2=A>$v83?bh2McexSDpz*> zIowI71u@co&aVlnG&5w5{#_073RWInAlga-&ia;(vn0ppD2?6co2F3NpnUR9Y(ovy zbR)G3pEYpDnOvTjj69&=tNY-!b)o~;P}!eONI~+ z_`m|!o!@x; z5&EwSKN2Ao^(3E^!x^OPq`?~Hxr9)S{Z0clj=$|vVLEL$JSj0@ySw7J!R+Ze;e3D8 z*MFu3o^`i=ZMTx&R&Yjt%j4Rw%ZN$Wai^^hy3!mGM=cG|t}S|AvP+5SvD+gfUh41N z9)Cu>loJWBjkfpyel8o)7TDM$@w7#FISZ&>EYC}EPtV!LD+;-a_0#24^9fHW^H>&| zM}H_;CcBaRR8|qm%-ZgfV=l^?64;@`q&RLBaE{mM^T6Utu9P8v6zfE}NlQ)Gi)ddu zbP@YnvKS{2;H#@MUfQXLEl5%J_ zf1@>!9khfPGU36BAM_J6=1y0E<)aJr3r(Gfueo_rqE#&nI7OCNE(rB_Wb+_<{q{a% zZbI|}ZMQh)){q8@J7D`lF%om%Xx~XIs=%uoy;P+HfE$sjt|G5)N-JuC+>}-nN8FTF zq;PXuQJvkKRuuhirlQsxVdi~p2OUa6d|%P^#HLru@Am*G@M^yYKnVc*eNTD8K9E)v zN(a)4Lg_$Sk;1K>E_vkyP;w%GlHM`IDUyZQF%P#rKi7Yf9K0+v9S$k^GeD$>5Zh%s z?ILXZgsq)p5nK6tax{X&+@|;fa$9V(ygj<5|8R1OJVQ^xmtfA@lbhq?)+?{>;M?@i zPn@>PK9qIZazi_e>N9=qB3cb;egCA304QKXu1Nllw0}*&9r$fr|70)c@7QxgJZJqw zO3c1a(|9k=b8m{8LBz&XCgRbzo)3DVy9WZ6OWd|j^wrco!1V;Z_&LmN@dGGk8e}{SZ+)Fvd^rOx%dcubM!;0 z19q>=S$CZ@Pc83Hdz&(RIkzKKqo&RNEZ+oR&s^rd?}L_yHB!}Js>g@*Pc50`tG17% z7i*}GbVTHDfLRz3$s76Sr6ba!c^+N--JKp2v&kRfu9|Q-XcY$>#B4(-G{Q%$Hq__Yk z{!Cw(_ViE_XGrs*gULgdGp$>sRq#gDLu{*_k@SQVM$@v{y>tCPCAF5PHlrp~sPk_M9G%b2Fp;RyWrq@vB5k zc<&^{d@PwX$5epo*65&pYSMiNgVWy+LB$P&WqCd%27id1oV29*hY)Z2as=C-)BYA0 zxaIxgEc&JF3(JUcrhz-&?yxSjU`_HT7&$tT_nANG=~MvglhElOf;3g`<;!jpepS)` z6imgOIyaYbm$KtD$6RTl>38P`QvP#C7i1JbP&7F&fJL5S3o=}*Ku-=C?o`O%+jWJJ zkc};W@9t~kQILVkqtW&KLzhFw9B3v^bkn!DgR}|MS628=wx7WEmfpFF0nn~@?qSG} zc3o+`a|uI!^y=&4=hJ&fDL+Q!_}U6nI6W7(@FL z0XpbvtjBdFmK>uicFqpW&I$IHijvp8V=ft&drg}p#|cLnBy%e7M%=u!Vg%c??l(thEXS2Ot8<~1r}YUe3fL^gPXB15qsFP!f*>KxZ!rS 
zbks#hRo_t;9aX!Wtm~qq>UB58Lv$3E@>6VQ&$Xk$KGlnZoO3$nMBbj*IdcGpb+djo z#w}gOP2j*qjV-3jsXYTS>^r(k`TL`rtV(xFQ@$X^bH@jKtu}vuSHG)6 z)BCU0b`W*oQg|`fj0k#9OH;0~=DBOiC%^J9_7v@*>5F}@+e-Ndeba3%O}U4e=dLNY zMdu&%P5Byq{z2cgx0L@!k&`ODEls&)mglZ17boZckr(5=H2r^?I~OQBt18cPzxzSL zMF_ch7X&T|F*i_oLwNeAyhssJ#8d*l$xT%yDX3Hx^+-qnjmS%UV2g_4Xe%n+8D)v0 zV-yvK(6o)VHa)u9<R(~C}l~F>iDNAvC<{}TP2nMx12uD6K@b(}a-G186ZH|N^ zpBQ{QA&sY?cjo02drtT?&KB^&vvRTL#97v=^u%16=gw?K+jj~n5|pGDv-(lJdpSA- zB$tPj{1FLlp7GJN@m_TLF?s}HzF?B5Wq{Y1BJC))5c9g2mCfXdRXQ4e-??^_6rYYg zwF4aa)(4f2VVWhXm}ZPl9a%p#()g=t=i2oh_;q64sV{rB&@p~xt8S?KWa+j|V3s`;$9*{&#GvhFMa7x%7 z2+hN_{nAhyZ8bvC>8*v7R=;{*jNrdMn;t)DcG?Ia)U-BeVGnF^Zwo9~e{5(-m^IC4 zZ;h^na({R;FD&9=I0w0k*l_ZIu+`ZAmNI4Vu>fo6ZQdBIPyHrgfei&(5`WjdCypNpeALF!)x>l5gy1<26@px_Ib`AB5lM2OYnLTJE(tB`uOW9TMFlKs+sYU*_;`ex1Yc^F}n zRCiPVo^cA+HpO!{0uP#3TzH{W7ig1gxloixt2qA4N_8GsYzg_iOK-LMt7)kc;Rg~D zCFl=;#6^Zg#Q3e*w6-%rqWIj<;IjwcjRrKf8%3#N^&8SKlEJqI<2%nJsc)rV<8B*< z66psy|7|a4oS;kZ1RIyIxyt>o#`^oVmgWXx6%l%L5-@r|5IV!}L6cmWc91b!xD zaK1yu6{i7l)_B}4zgc;#y+fADs@9ugHp;@@*EcY*IHFa0^)8$nxIo8AWd+L;m6V_| z!9lvW{N2V7k{A#}@`b4ZDWFFzp|tQ3EZtW$G@V+`-&DZwc6(6=vQtG9)&AESQLR>z zU`G!II43Un&#H>bUL>Z43zW3E`p;QdZ5Qe6y^F`G2qO2~?jdJQfxY>9HD=AntP@UP zC(YAiDR~ETi0JB(T{WuU8Dz-b?yDD7makm3eAT5ZmMy!e&Vu0XI6%io;CFzB{!FvW zF*5AMAhjAX^Em8irMhm->h49+Yg=K|hCxY410O7wQ)fMejQNibYkyW=&@8{w&NoSo zkUlkGkyQo(>JAR!mgYWwh(QRkEfkpy3dL6v~qrL0oHQ6F;OeG z+e4wRev~6QAtFPjpxb^^pK*+7;fNAyi>e$9v<)m^ zG6*3zfoFL2rw~k9Pev5K%U0j-MPs$AlDzf!z|JS5idawDc8@9 zxKrE%`&*L=X-ej>s~H_=J=rs~gDfx_Q*&`QqFhyL0_#UgZ4_9{X<>V&u++B|rcNt8 z<45ZBa{dk-sV9m?j?F~9rYW04+q0I2TXMO#SGqV)yDMAz_F8vLEHB(wcC#cMV#LfYL#Wed(>P^`r|@T2Ua;%a}5k~w%0dQ6kWZ0zZyTBKHw)9RRH z%KkwT-TD!%)RG{#JN_+X4Da4QO2=!C6IoW==b56x9hQ)!L>9J?6NSgZ1@X4-N|(V2 zM_R*xP?`?khQ8VWYtt_=>g2YscCFdaaQHM_Tv%LJR0tJ}LB^WTX+Mg;@GMUm#qb!n z9jyEh|Lo{TGnSzN*$y~tt>YWshNWRCiGI)>XiRf8XMlvG%JOs|NvqMG#sf(<4Iep> z59*FI&!(31cS$7Xx(1CqCKGOBFJ%qg+(Or!fnsNhnlBdXN<5 z&{R*!-QZ03qH3HG+Cgq}s*Q;ziRlVKC&3|?+p 
z+=lDr@ZJ7+poe#JM^@)rWuRhWq99ELDg)Jf3NvPv4j5F^DBV=xK5k$_sOkYlfp^ux zdQNC0z!4~7#?722;Y)Qfnf&Ac*)$+~U*UPDl{Oe;)64k>L~)naA>*2Hb7;l@W;Rn+ zBIwoCaAVrJdG7_%n)&Z4S+2fhB{?%x+?VOh^8xKkr_D2?oPS8X>@u_{OQW1X<+EqU z*ZAT@Gnk06;u)I3L?6-_T51!`Ea!h&;3D=MscXh^U?6b?038btCAeoY9B;(YXENMh z7EYZ~y1|Bo|0koUQl)v)>(Sk_d3s3J7-;D;!Y??F_u-AKpLfI7m_W_y>K+I`{ih*x706koo zIlDB^0GdXe0^mrb(Vsj>&6Mv z+3s{)jdi6Le@j$SK|gF+VvIdSw?HVQ9c^PyhK*vijX6IdvAWvkJcE|PLgNLX(TLDY z3p6iek$DOv3&Q0yV-pFR6vi~e8O*>S5ie&j!>5FogU-g!Ea#sQ4gMQCiqeOv8cP+X z;Kc~K{1~qF(bk79c_hMirc$S_W)P6V5v5=t{kZ#Ui1gYZ)Wd%%zR7>qb4%XCMb;Id z5pQ)nZK$b<+Fsenl?snHnS5_IHKNNJ-cq-t7foUFkp5_|bfb^a_(k1UyNAI9|JkQR zJzlBxQ)7up1Q*d8`61S@`?Zc0-ErH`1VNx6eFQOZWdyMF%tJ{e{y{EsoH`uLaxuD%xMxjHM*bABj4i5U{{9$emXK!;^5irNI zIhPfEPG@tz5!G4c{BaTT43;y7zHZW?RU1ExA^26K%d;5banU8Wu^g_m%lQ+9Y-Zav z457#;MSD2o{bMfUY+zuT(*x5!8yKD_Oq)@9$SJo{&i_i{;C^)#u#8Vr^rYZuvt#{* zni)Zr&N^dTqnZ@e)tzf71KX*J!8O$;N6Sv!FKC_89|9W`oA2+T0yQm?)w~uO#eN*;bxY&i|T+ZBE5e(I1war4Xh-xj4E+BEmoL;`J{uP zEOJ*A(<7D9XQsjhnWLid!YWPB?{g%}55BkPd>H8qr|8@!H9XKqo zYMby9+!VIGp#hOQE{?SWU~ue_t0(*c)kiZks;}ah=2QtGWr2X8UPYNKzvO}o6^rO3 zyu5BBR#^j3M&+W)8ondt;z~o^BAP=;Ok6I79q;_eesA)kVAD{G8{KGU|E_U&)O&4t zYo0{8ZL21fHh&Q_F<2A^i=~odThX^`VtVUC{;$p311a+@>NVRv>i{5bQ+BG4xdoiaS#qu{Ln9l=dpo((% zJYfEw$#<)*xR23%u*}w$>FVd=e8305crMNd{2%CC{JDeQUe150-F<7Jq5=d|zu!^W zsuRAX<+FI8jynz~osB(95Csh-BtuwS&p@powHhu{^Y^`4Pue77;pR#XRg%4BUz)lb z7m$Y-M@_CB_<=BzOgr%ZP$WY{8;o>uIsa!Vz8B+ig6$Z!nU0I^ix3loS+t3~mb7(! z&DpeWs?sJeW)=pDlP+eKf0p`q#Idxbod2YdwY7cH)W>~&wHx4i@Y3DTkkoC>7C5v! 
zkCC%d?IqB~hr=%-sp2)Wh`|wwE|dYTe&FiPe<}aNostEXX%M;1Z6Y~w6@4C~l^mtW z_x)5*+@liSl*1g6<0!xLf64v;W0)4mto3C+cIsd6-(34u@ zU`0hGoYxA@KXzNp%t{yn8b;rf* z9UmZmq=rjVOKUtUeK!pc44D@XA3Sy-u0GY2*PM0gU&@+URg^YMS@TaNZ34qtMmOg| zHaGaAJ1AL**d-EqZD@xvROzZQta?|XM7s9bi;__#v)TK@L9PDW^%z6{2=)!DF`R^p zCA*4DKZ)HoOvva)Qejjk~yiyt zR)CfNT{!EU(h}n?E4fF^vNPK3K%|!k?LaqJ*lDztjK&M`gso(>gNTOGQOEHGT=Qk% z_!-mk8pa!|-DIWzqQzd_x;|>ZY2k+BaNE@I>Q-L8+TGi-K^TeH2($#Jwdi`cObL`^ ze=M#fLLY0mmqY%Dv*3-!nJg`iFB&}&M$t*@PPUeVmk@9P2w;g3ofm+B*Jq%!!_awQ zIln8*s%XF(37zd-TL0?8+kpH)`8E@JU=W_j#htbFQqe`WjMPVk>Te>8IK*MIc{T@Zq z?ZWpDR(-jSi^0nB1k^jyq!Plm8R8qovm8QsloBNe)L z;A!;rvhd(y?+SfgD5_(|SpB3&riB@N?3w4*KwT`+PS6m!g93}_onzxN-uX-Fii_Yn z#VIflwMVB}ggM~+#NBLue0!8Fp&%p>EIk*G@m_p27B#vWx!1UmgpZ5C2P>WuSn0*! z<82ug8E$ucT*75YMi{<=fF|Idwu@GC*jSN*lR5ukZc{MaF;=MxD6fhRr2r7pU%DAf zs+&>du%L&qkUkh*q=8ES5-_4|dI>ieE>K_cDfhPd0s)XT9awv9=5i48J~5j^?uvT>_a_-`{~pMUc_ut}t9tDK5f+is zB4(%#%FEP+wRL&0}KHJWB(6~^vEI6CKMQ3Ogy)R;5-fauK^hFGO zD4Sa;oo55TxSW49L)THzn63yk@I(jn#f-#{qe=H-M*3(*?a|wu;dO9hBdLdnMxtp* zqNxdiYx>7g!fS&^9RRNzvD+`bvb`jis4t~ zwN6sAGkB+a%qB>2N0el1r|2|+BFHw>3TYsUsv8MfMeuVb1*mgvK)poW+P*=hkVH?9 zsTk&Nv%7A*Nti4>5;QTus*%ujq|iXjoq#wZPNMFJ=23q-(Pr2Wn!Jh*Y64qecLe@m zVo)7A>OQWfv!n#$s5uw$cDpAr9ASxbHv$?~A*Z=1p*q>5R!ESdKbT>MOiYYwHP{Eg zQIB2?_8*lVeQ(idZB04Dzq{#mWv+2T4nvq%)2scXkDf7$_`IAL)eBk|*?x2$8N&u290^@xu3<@}3Tb{-KO zTkG_y!+l)(5?$A*zl^FmsL+$eK@WttXq|ZASYkBybSvyFYic*aN#HtuyPF_e4`fUp zeFy7-?2FlgMWv7W{$5eepUg-ve6M^5>-*_khf&e8Npg(11FmlA)xn!!5dt-i7umbJ zRiMsv1jXzT+{egr-!E)diShb|UBO}i*P5U~oL8`zC$qEXhdZwg<@`&sQFZ6#ciYDd z9FK5R1~xDt??h!_0|S03o4>GhixGckIsdY3j6vJCFy!Vy!L{+xeSQ4^&qABlV4%hK%=Q@Fn>CXy6yA#;HoSm~E#x`DA&c7;MSh0<9xvk`BaI+IcBsvEPYiRkM zC~o<(iK9z<)B$hMll>ulY&kB?mOXp$zDsPMC^&%GG+J3hXaP|L?UfCL6cA3(Ub%{B z0wN3v9cZs0NVm3Eu4%7a+g^ETd*x;Am6xZbUdd8fOjLZXWT{`xX3U8Z02|BsGg-Dw z5da+xrh8b-1a^94(8=+E_sP<{5ft(eEZ#9Rf|dziaMn1`w+pt^5V?ie~M&!udpaS638>{T*D3!G^sBDeioek>+*Nb!fPLAaH~fhM2~q4n7I& zs{k9oqM30OV1FH_L+Ne@ds8|8W`@u8fDzUYAA0u>4u9;QKKqgHKlb^5diV`?5;iE9 
ze53wpIun)ZhK*eJ|Gq~fjhX=61fYNtmAp*=`psbl#Oq9-)JNhNUm0l#u>h0Sz zWWA;M+;lP31^56M74j~?|8_R_oYEbR=c~*4cRBfO`x>FARPFI;qf+Hj>z#WPEogtb zKsFr@vh@r16tjf9uysU-muX-L$yWm}MMbL$vclZxG`j?H>V~!2D}8Eg0+a#mw?kB2&c4EDj>&O~VM3NW> zn`J|jCQ8h)DM+ThhL{(qFOsU=@}}^tsCjfVC&NaKv70&nMH-{uUTiJr|0>~bw--K9 zwR0U&(m^F^)nm3Y8ZSfvx|PxXRRZ+CI6v7|&VMH7@m~aXXpq+rszR@OVyH%i9VZX3(`SvGY>y!O0?PDvRtNnCpxmmeLe(Occ|E~Qm@7Ij`{`GBf~X-}3wXblhTv!KrRDrT zqQBs!jQ1betTRe0ZMiQi=eHHnz)54S57%lfv<~?) zhUBfNIKPY`Z!4lWzrzp1%jq9f#E*8TQ;h|`3d-kZn8=p8b#B4N@|q~#W3)1FQ{>O{ z_?fr^Au9Xo1J=Dl{20_;StIQO%385(`KpB2mjg9$#W1s%1N9xnGb>@=Us2BQF5*RJ z-|L8INTE2uX>WO5k2WTRect}osJ%bD$KQJ&v6tAd%RB-?yjXQz&3cv$`==ggD z;N4xMPpzk&zLH!qfp-SrSs3xe7p<>kAbuMMdL;wBNdtK_gJ(g{K6Jh7+&3v*OP(Zh>5$p1W)4E zo(JG&p2%p=5I?+vot(LY8Ty>lle2M>%*MBfYbT z9Q>Rg*6ro|fnru^dstTP+HnHmE33ze^&s@dVzj`d9Gq3!0k|u9jfZ3qOwk>pxVnrN zo9RKwjk3m`8=b)ig`1+4(MXB~0nDF5SaaMk=iD3`X>r?G9N^CmynH*0d!Ts6*`=y6 zw;C|WiZH{2do}2ki6SF@po`VY_m2vIh%rgDARLYQ!OWh1B~r@K2nM{VS;%mdYAppR zddOiZ=JS>a6>vDtp$2K!9{P}QwSzUXim1ZuV2vLrqQZnK20h$`6fr$-0a=^XvUj|; zpMj2hH?xLVhyjkRS)44Z$38dd1;39ann3LVRDg-bXAhu$xQOxjXHK)d+^&fHZxhYJ z2gdhm$^T#Dl6rv{nBpnx1>!?GWp_Dy>?2i2$o-{@MC))7xeI%#y?buy=$r!$&T~W} z5n%se_NqdjDT%0E!^tRY;z2RBL}yArJyuQPmSik=I<29sver5H9M-|g3L?i<(Iifr z$QIJPo;wh`Q)Fg5&vt5Ij=IfPCZR(28{+Kxz!R8=JKG1I9@fs@>3G^%&L1gKHs}`t zPt@VSx@aP-cy!H3Xk@|1=iOQ?i`OubLC1if@VVk8NNl0YFZPNp6u`BBaHJQ@Z<^n2 zIZyct!9{2sIuSjUj-5Melu$5Z4y#1lDt_gx%mO7$WmcfMwM^xfCm87P5OPz~0oVze zKtzlc+6kH-DV{a2bjT0DF7AAE03NUd;Hzt8ZE@ANV{lKP_En5Ren%!hD0AWca+gI(OX64|j+g}^CZ5szm?)eVfqkE4XXfsvk&g#MN9 z|3Eo^N|N=*e&9)WvkQ_nM;^srW!vw|l`wT>alKn(2p{6o_Cbd_Wo%)KJr!bq6W_E% zI-_s*LqqL$=gSU_A@>zE5IYH_YNIF9Ba*L4@x4oXb zRWIe?46<&B4uBdK9|iINsCh~P*%OTRl=Ht4W^~!&z_&<<g>!x)(tO0UHGja?{Nt*>M*2A;aFXpFo01_Z$=mY_u>%>p zrye&y1oA*n+>RlT_cd*Ym0-`a;d1`>Lf+4sN&h*J7fIjfOzix(6Mu1A5*w3igE9UD zP&5pRKte>(Fev(aq3EqnUpJQXZ^-I>9Tgbt6d2K7J;DWz%=^v3R;X0@tEQXGK1#~i z!~UUYPnx{e1vUS_0Hb@Sm8PZER4m&cd7xeH1{!Kk^#au*9&v%+c5aOxGiP}{xW+UJ 
z*$@uB%iekwlfAVbWAO5Q;wFe$&)jub{a#Pqx)Fqdl4#uD2*SQ0<32FHwmN4np140%(=vT<%>#f16cg62`EGbTq&Qz3lqW}pQQF9vw$Ztz?bHja% z3!Z4W#~~v9wesFEM&id&a~fl$?@DuOHzi}7Oe>9a7OVBF@aKvu8)qzj9>*GItnX>8 zpik~C=l@v5&x<~}6(ZkC`hDf-;RlW${`k?u?>Kt+gGUd)$Nqif=;6DM9)8rm@$X_38W3?)>OylbD*@K%z1_=7{;`Pn zw$HG#ublr_JoQ?z5;W_r<0ChS6nw++;~sZYp@3+LW7GI0Uf5eHxEh2UyST~w0D^fU z+u8>ZKNj11%$1s(%K1-|F?rpsaura4j168mhs&P6o1#0Qx@JRXpn(a` zn)#0a(AQSK15wrcP~RuUEOkYC4s%6sO?QN*+O?sZOK*p*9&vpWYXkatv3?V4`$>_D z^#MchesY9#$X*SCCr};|uiVyDq(ErAg3EK74-jovgEnB1?IAB4Afjx9os$E3T|ZwO*w} zbN}K-MOfo;W|Ge1d^zQvd0L%21I!p9s+l+`i%<4-3!o4z>57+J`}OdS2;N3h+~aUt z0V}MF_b0pzAVRUr>O^Yo38OcIQE(Q8!_8pyml6){^}dBm6znzgdLIud6Y=0%7?l@e zaomqXN?TH4{OY!?1n>EBrJ}H6 z|Da?{vJ)vE7J?E(Dla=k2vKie51pp4*sTVv`&{d)Gc(xs2S4?;jg1N-w2ixnm{{9? zEGFOii2ZGY1FCV=csHzIbqG1?$Re8-M43kH)KL$Zjq5N9+`2{7DQ}SKK5I?Kixibx zq|W1S1?%8-RyZBEg7tqdww+aa(%I{4xNgefL7&2BK+K8R(b%#oA_2S$jm>(2K;~ix z(7N`Z&V&q5*94MGCsoF4s43NH8_hZ6rud<{q~WHZDR#mx`;Jw8jGxtEqSzk!Ckl#< zhbIy2Ygi=9h`jAJEb_J--sTaFujO(oC*hQCNF(EvOAdWoyhM(!gF|IIN;CFa#&d%^ zIPSi)-)<}8R#`tp*+y|(8?U4J^jGOaTIag21u`a&!tS*|c1M2d)Y6}4Rz2V#kMGX2 z3)=2Atvrx@n5Lt5$TQ#*$+zIUE9hVJ*^Rf|(v`&^n@3`R$^`G~RkQOni1w*9&IC16 zA+Xfk>l@_wMbTI7+=3+F9Q4;iT2gLR;7MTGDoO#5ISP=mHCq$LCY@!vqZd(>A3dyi zIq6i!&Yo5}iKxkglzA40+S6w+cz3>ZrOXQ5Q zSlzLxvW2TU*}D;a#$A=mFQLV`G^3XIG(US$BH(UnxSJNkM$3*mM9_o$GQLiz3W)9> zV>WX~S+p?gr1s}%zY0Bry$}iq0?dgL^3mc2e0VBT2di1p*FqIlczQ@NRFPZ*)vbVp zveyY^pk!u1+3QHQ<})`i&p66%o#?uqj@PLiWKW;@E?a4u?zc^6xk10H zMJFJ92cG7I21jCG0-MacCf=kO&{aqn4idll4Y08fZAN3%`_j0zXzlFas?XX3LvetTWdxxf;*jfKPthP|@Xy{(0L zZe0C{;ZER%^4;;|yj~zN=NSPcRVL;$T#?o#IytxV{Ovg^-@8oZYvvg1sJAjCDLVn% zSQj}?Q}BDOvL5%45I!NtNjl9jM+bLNwA1Y@$V6}^aJ!ZQsPofjmh$!-9q*m=1!O6A z@ce$3+IFXBYBZMEL@25BWql?5S*6W#IM-qClPMlfGA>d(wN5ONDMF&hBVAKjO+q(` zi6A??#-|f<$|aZd@DFq?3_Mj*b5nOnPzORQqLW;w8b1J=4%zX6t!~?_-3jj86i^v| z9LPI?67}xD((1Y<@Vf?M`WDJdJy}6nBz+?$j2$6%lud0xZAcq;hc=Flp@F(_^E*U? 
z_vb|5e9KwUojiXa&sMcP-Jtaa@rOZ&gVNH(f0F?0JXQcsNPv~9ek^UTG1J`wrY4kg zCLlG5^NTUWX^?F?hJi+g1|{G2k|Jmolf*)LNH39fHmD>lFH~T9L;ZtFtbkcImv~$x z1E(UGJATYL37)dd@t5MaOU2WUaqe^lMWCC&f`t4#g?x~^Bp{#K-TJ)zLY(3{G!6z2&E9LpCbZF5jgQJ0QjQ#vy2U}xY<+B*L& zcSLN^fR&NaUJ)iW&Y?uUSVe5JU+qkHdobsy_u;>#)hL#lz)(>#sfRPGetACIOLtD9iC8wKEZLcg_wruhI`TTGG{Pqg}EVj@3 z0Iv1fzFM|S{=(zUvuf2UO`sj{3W*KV*civWrM!=(-y+*DNu>J*Z4Stf;!w5f`HVPR zY^UKK9zP^2;Vk(-nuVbWR-BqN6Nr0#kA~y-_0J6VkTiQgTyNy@p*)-ChpQV^k81Th zseMSaz%fM>ojJZyL-I~rI9zWOgoknh?TISUfqfItKbn)8bu$qDKPhXc9cZ$`()!;d zU;$=c0G3+p`b@6YYYf&m^Z1dRThbw66SM0%B4D_S{v&25J2Kxa_yAyffbY$M?~$CS zuQxa{-@@}x%5FHI{ibD2-vJ7zqhlR6NT&|spy>r@I4(|q4`*yVs|lBH5v;%x7529X z)=x@dKSka3PWnu4-I~>7iyHBdiK>3b279N_i)jjjekPwZpF#O>k_~j4e_;B@@+out z1C25t%js~nf$6Bz{`s7Ug3D1+)C5OGFWrG>-G3FXDfu=U8V>U%X(Tmj=vQOoM~Nd% zsCwXO_R~+aaZmCm%>Ma&(scj8?4QqPomm=Vc4~${p5wWrD7t2c6Tq&qJd(e`x|R!~ zrk?2HmIi9oJVAWITNomHj`^*E31Ff6{UV2SY9pjmffN`f8*r-6{dj(|wxu{}Dv&;& z)A5oX$Jr!`b3Y+sLhN;0msPg}Q4*|0tgP}SQW;WzbYtn(;+Yz;uu){YE1Up>3`0Rc z@+0XT&*HRo?O5J-CXv^I?tp4fyGW95p;*-n~BXK{^m=ndRbALMmbZO9>;)(F`&AY)D^ zITa3nGN0s-O#`wg1=&s@qiFb3)U2YF%+}yYIhO1$b#ywcYVAWjpNN=t^Cs0s+!_o$|tGWZE?~J#`{u!`V8yU zNj>qWkv87{_XDp)=!HZK*xo zF}`zYeCEHO_kENp8>RfBQr6!)yHF`qi>sI3bN{@v@A=g1GZ5W!6Sr79_QK)&=bi9Y z{{Kb(zlB%J)T)qFpPezp$*UJI#0-X*+QoPArsCKa?UnMarIX=0?mL`a?>%{`WPq+yp}jJN?54w(_Nw;Xbvv|u*O^*m=s#t}G!cH< z#U=LQ>Z?G=3WdL>2n(+||I*k26Fij1UH-N+q`l+^e+)ZAm!*A4a+KO|QQ0AQ()*RjItL9$%5J0&One(tw`=1YOwjyf~}Iy7W~L$VH5` z@;4C34sE3qpe|FNi&C;WrCj-)lj$OvpK!v`LTPfTK+DGyPAKay8gms+n4vz9MYaC@6=@k3;Zx80S#?y;em!^~^+jqZskQRJXiwn~iKeaEa-RX;8+80}IVGi|=hRIIL zW<+SzsdySP*XKulda54vHP68)$aONa%G6!c4;DlbDb48TeZlPlCOBy4ZnnU;Paos+>HKW52wja-)n@U0Y|QWpXwIK6EZQ@mIe+B^Mojz?Xxg*I lH`_te$B3Ku;wu9*ftD60XaX%Qu5L-cFx)KeJ`h!w{{xi0d#V5c literal 0 HcmV?d00001 diff --git a/connect/src/test/resources/schema_1/protobuf/repository/src/main/schema/Brand/brand.proto b/connect/src/test/resources/schema_1/protobuf/repository/src/main/schema/Brand/brand.proto new file mode 100644 index 
0000000..811cb87 --- /dev/null +++ b/connect/src/test/resources/schema_1/protobuf/repository/src/main/schema/Brand/brand.proto @@ -0,0 +1,48 @@ +syntax = "proto3"; +package com.schemata; + +import "schemata/protobuf/schemata.proto"; + +message Brand { + option(org.schemata.schema.message_core).description = "This is the description of the Brand table"; + option(org.schemata.schema.owner) = "Platform"; + option(org.schemata.schema.domain) = "Core"; + option(org.schemata.schema.schema_type) = ENTITY; + option(org.schemata.schema.status) = "Active"; + option(org.schemata.schema.subscribers) = { + name: "Sales" + }; + + int64 id = 1 + [(org.schemata.schema.field_core).description = "Unique identifier for Brand", + (org.schemata.schema.is_pii) = false,(org.schemata.schema.is_classified) = false,(org.schemata.schema.depricated) = false]; + + string name = 2 + [(org.schemata.schema.field_core).description = "Name of the Brand", + (org.schemata.schema.is_pii) = false,(org.schemata.schema.is_classified) = false,(org.schemata.schema.depricated) = false]; + + bool is_active = 3 + [(org.schemata.schema.field_core).description = "define the active status of the Brand. 
`true` == active; `false` = inactive`", + (org.schemata.schema.is_pii) = false,(org.schemata.schema.is_classified) = false,(org.schemata.schema.depricated) = false]; + +} + +message BrandEvent { + option(org.schemata.schema.message_core).description = "This is the description of the brand activity table"; + option(org.schemata.schema.owner) = "Platform"; + option(org.schemata.schema.domain) = "Core"; + option(org.schemata.schema.schema_type) = EVENT; + option(org.schemata.schema.status) = "Active"; + Brand previous_brand_state = 1 + [(org.schemata.schema.field_core).description = "Previous version of the Brand entity before the mutation", + (org.schemata.schema.is_pii) = false,(org.schemata.schema.is_classified) = false,(org.schemata.schema.depricated) = false]; + + Brand current_brand_state = 2 + [(org.schemata.schema.field_core).description = "Current version of the Brand entity before the mutation", + (org.schemata.schema.is_pii) = false,(org.schemata.schema.is_classified) = false,(org.schemata.schema.depricated) = false]; + + org.schemata.schema.ActivityType activity_type = 3 + [(org.schemata.schema.field_core).description = "Lifecycle event type for the Brand table", + (org.schemata.schema.is_pii) = false,(org.schemata.schema.is_classified) = true,(org.schemata.schema.depricated) = true]; + +} \ No newline at end of file diff --git a/connect/src/test/resources/schema_1/protobuf/repository/src/main/schema/Product/product.desc b/connect/src/test/resources/schema_1/protobuf/repository/src/main/schema/Product/product.desc new file mode 100644 index 0000000000000000000000000000000000000000..908dcfd53ba0a1251aba86339e91e38e9b1193dc GIT binary patch literal 95310 zcmd?SdwgA2dFQRO&pEnm$+CRxB#z=FJIckE*mB|!0uGSM(y@IKS<)Oy3C;{C$I`K^ zMAi{HQXDhPTUtt?w8NAFfpoY{DFcOeXj;m2U>JIx&p>a}4#PV$r8B&gP%drCr2~{w z-uL@^)>?ZXU0kQ{ka_=j^?~@Tz1Moyv!3;=XFa#IcKoUz$;ZijqcOi&KfJusSZ$nH zJ9~Jp-ke=oSYBO&g+fpemEY;2nVs2ko)qahsT5SqI?nV;?>d$s_vSRZ$Fs-beJ68AM5Yb&$$nc2o%eP&_lY-4wq0l0n> 
zfJv)3OuY)#s=cmZD3`8`+nX1cR%?&#?lBJWrGbCb9&g`(`IanpcU(NHQNm#2XW1HMQ`S>9}{LuJrP&fo%&=+_-Lx(+4vqxg=h?sy(6B7}Bi& z<#F$N@|1SP#pY^lWz_^-vFZw?zBrFCGbvZB>SyT(vH|C9z_)BdcRl2diw5DqGlQVzShaSgYvL}ocf8RKkgjT{9*8eW{c)!)lev1>+R}0f#+s|2 zt*tGt&OBOMT&p9V)wst$ykE~s*T>y%RV}awK4#LhSal1nq9^M9;bvngu%>md2iE;v z8FM+oeeKr6+sISeDoste!%ux}arX|e>#X*=$F4zbap1#ITo5rz+vD!3(-Y;Hk@4wc zqvgI_sW*=8!ST`Yp{c$oEmy{-p8K4>eA-~zJt(wRxZ_A)k+ss}090OF9-&G5`X|)s zC~b?otj@9V@zK7{6z1eqwK8_1uPcRlqB=f3(HEx>PL(Gohfb9Hy3-oRPEVC5`+8Ck zLW?f8rCpTArcd?tmUhJ3tjEdYLW>&PH#`EV`2QNT)g{I#MzS6nMl-hW+L(KY|T3g z-moPJY!Wt-ap1kV_{yzS$}MqtQQUsMzIv_^)T+&U*C~6wf!U@i}P$)3(Jf385xGn-Ek{sJ5=M8uON(?sD;;Bv-$c`eWkWqpQ*oW zjd9P^mgZ*8)tcva?-GDx(e7OJYE?Yp>y@qA(9+yVDwU4LR~i&(2mxni&(&uinOR#s z`@G%PwDxZpLKo@`tI9MrN|SL<1Iy(6!pp(Av0{5@?~%=-sfB`rqNEy}!X7zVoSZ0^ zN2=Wc%JIfZ35J?)qy+9Zs&&mb0(l|U?Cgw<8QWCzg%Q`>-Peljj@elo?82;HaZT34 z6&bNw8`|a@IL1b~u_-++U)a)!0 z-0rJdtII(5FG_>3|LjbCsdfgAsjcv%x%+ynTv%OM!-|-lE!)c>dsjLT?`WKPc-C>` z3}DVKJhuBL8gx~+>xJ>-ggq|p2X^yZZDrY%pC;U1-+i;I>Yhsnvd3Bx%?k@>R|CYo zR;}Bfd1(9MK2gG!La4USs`c``wO#BEH|o@M&s+38i+&$M{Q*eD>FE>W@@vnttKm5ay^BzXQ?siD`(r! 
zkgr-=&%P4)X~S0bMY}#Otz~}ODj~O(Y zCcH5+e`aNCn336g=P&Mz2yFdy4h7l{(|X@OX8E1;Gk%bex%}l!sXp19!&ljtE*TDH&+wiryr#rKGph zr;e4YebM@bQ|K!WG+Bjml7u;a*4BGV*8{(ni@RI&YMGt2#l^;jncCt465FvxY>$TY zQ?wIq98B-_>$PYEZ(s&S3kepz|A~+ab~gpDE^oM6B?VHitBn9tYqy*)$Hib%1HUt#Q0Qs zZ20ud^w@o4;}48g`&z(TwP$hJPsDv2kt$skZ-o{sduP0TY@9ujjj??EczJkg()m5o zLE!gjU1dL=k9Th2DnpFBn20l)4{eg-TeKOJ&M<+0(T7mbg`m>X>cZIt^m%84J2Pf` z_S|f~(!sd2+-NSWE<7rid~iO=tSJyQ@oy?it7+|}`h0CeZCNn+YF~Jh)<%DsYY>pB zdla;Hw^&3E}e@F6kNqlTm)?2 zrny<)WN_5iucj^ge?J#@hR1BTh2`4nIRjTb7WL(-R@QTL$t5H$u`BmnW$Cz!_1c^n zJ&p6{nc=3nKv9SN^I`v3v9Q&Z+QQYqkvPQ`I;X{oW=O0LbUys6#bP}->40-*nV9A|GXNjCN= z7br4o)b=S1clSv4oZK&G>hlXr-f!#*?B3>v7jc{${@6duVX-yNZ=Bv^eVOiPF}~)> z+=DNgUsyf2c4m8KMSYHw;l}dCm4*3ptI3huZhM|@kW`jt2a{xI zaWS!X%_M|12IJ)1>gsay=wU{IXl^XiQ6P^-?nc^JJOtiC;&2=%)%x6m7V?=j4Ix2Q zPu7}s&`I2s*po90OSP4YhF9}oasgT0q7pFkPMnY7KrvO?J66aAX z^e;47g}cFy_+E0B7cp>|jb`KQ>IJch5QT%1=5l>j97L-HG1iKBXUTC z2+7#^RGf@fPGLxoBva!Dt=rA*lJVoosd9DrBw&V)Rj|8HTZhLhQ)BAv_;@u=hLVY) z>QrTTdUU9oOiWiN#wW{(MmAEJ93CCQMjsiZfBH0^^qm8t2ea&lsPe8h-7S+3q+87@!Wos5o){Ku2& z$ufwH3{4GL*T8^Wyk`K%rU}Z3aD-;c)#|iGUG_7}4?r1)K1ACQqwx5cMk`(^k5^9% ztmt4fa4>n`WSQqo^q8a8kO(3pZ+NP;8XdwbOmsHRWUPE*v~mI!w=8_d1?GXuWO+Xm zRspvYf+;dTFhr*?3LT3h!HX%H#=10z6?4A5MZu7U)^FS)vS zQDUZ+NI}@Jy~n8(sV!afGEr$O+CsIX?W+&Y52khS=Swq75NXK*2GGiCb3fzijB`;b zzrSs_esz}ex3*2kU2)Xe?S8t42ip$X!(1t^pYGwIw%hF?jr7w!9BjMG9^PEa>!*8o zxa|gecvC5_pYGvpZP(kw>q~k4bPu1?wkQ5UdhaUb`PskfX`7{jxs3-xHN4Xs@rv9; zj6$(9IC!;%<8k$h$)Pi1hd8M%&chF@=guRQ8%ujvlM9WNN0PZUIp32rjm9d9Ky7)M z5+r#zjtXt1!t>gW=K5cnOfjj5^woxR7+7wZiCD4eLVgp4*)%tJ0&UA+e-?Bxg`5A_ zF#t1=p!YZHVyigL7uq1}^E$7NyW>KkEov*}pC9dud*Y%B1(o7ZYNynFT`1A)`PbhV zN_k4R?2QkDe6Cb@VcUbb{u^Or$C{`ahWjEJ60L0KF~|Yxg`L;L&j*8?FnCdP&A>s& z-P$J)LQjth(jAaMTHX3xVe+JLgR`^+8ods3bGDjcxqSp|9ZP$Q=rbrU68-!Nq*ITBS9a3;Kc~0Vq}rvZz_{D~cj$GHH`CvLnsey-t6#e1WJ{cJO6`>T!R zL*}VjCtXcJ+&EoD`nxx!QIQ36Z+E}%p-6xCUb8op=nBdkd9J*G#d0kIsh`70_gzAXg8L<%~l6MMhEtX59aNod7wN0_g2J>7r3 
zH6_~xK04i4Gab5FJ6mUOTdALy^HY(uCghqvG#J0csKt(XI=ZfZ!gdUo{~6G;{lm)@_j+wvJ1_{L39KOPoXk7bOm6nw!KFEKis0?7kp){6)#0%Z1@TS85M%$;wQ`{5CSJ!h3+t@7$f0>3IH{Jt=TrpDSDg@bY7ufoposwE)+)daeby zrs&+xD^uW_q;t&lHjg>e^l;&D`dSn9a0@t1)x+BcviE>{_{IZS8E_A?k{sjE_hLHd zlIV-+bV;Hw`sEl!lIV;5P6Sc{$YMI@R9uP<`UqA#V4BZrf>&Mb%a4lp{OK-S``JlCM7mGX~Xf+cwgMIKG3nKCo?Xv#H|0rqIhl9Zv# zqbW<~v}7+_f+cwg@Cz-##gZ3VfD6+LDN9lx;1^Qi!!~*`(yC*fMcPI$rZcB)^kTP@ zmb?SZ#jBlGKnWlh(F%UpW-ekMeMNM0|5`RNZg-MWEPvp+h1qjk%ukq}t)@ce-#jaO z$$6mUPjZ`*>=r-!jiuS3LPfTHzamAx3o5)K6~EfPUqJxW4O83qD+X?26-nw8f3WRG zb2$_45^F!$d2KunJLZ|v!Vl%5qy0n9wvj1<1;ySeE!73P?wSSfP#iCGv|B zTxa#cWU}skj_}Mj$P2)NK}?J7Dk2X3TiE}Ha@XGxig^{E`~3Jpb{&-1!mD%9)%~ZE zVM{z~nq_51G(Jr{o!Iy(PD>fHoBnQEL(1ZV?MRH<=bg_-C z@zuF2cE@)>twO2&$8v2?=W_iwuiGdyU4*HurOPRX#Cc4mQsKvPoqLdC1^(Jf?XSy4 zKbOl}2b6jJx?D$hJc*2SmR;eW<_f#|hm+e9G?H|-Wx&=~R?uD0kH%MU8|w9i`ofAc zZqdD%9K6i;dEYr#ZrXy}ny2ygpYT{O4P+ZuFoxdq9iV z=XR9h3%&*AH_)PgJ{d#oJh(9bAkv6tr zcX`K_E}bl#Mb?ByEeg-#!XxN_ErUTIsOJo|$WQ^bFc_HUr=VeqyJRa5vf<}x@lzUp z7!17m=I`5J?l|(y2P5{N=WH-i#@;MChrz)7kLL=P@EwPjN|#ok%kovFLp-RHkWa9!6ot1;mjduref%D{ZrV#;L>iE&I7n-XJSc+_sO5T%~!dxt73Cvr^%HZRirAL+;7nZyPZ{bZutp=G*XBT96M{dW?_>iYUTq?XXSGW={ z#%O(Me)ZhebVv!0Eus3^+{FNVXKv>e@r@o}cd76*BI{0;+y(9H5JEA5Dpe z;nfY8pAlYx&3a0OcegT44=s3*O=u4--kl@-7+P#A6@Io=WNf1a4>G3NMvI@#^<}Kw zTPnOKSGX!2TrVwnup@gwi}&PqNrQCKaC@onFI!t|rv(qP7Tam@FV$kNZ_!sOyq9qd zTqOb1Tql>D<1R=z5*t)A8z5PLJYawU)Q(c&ea{Hg4nR>e1GNKC?{l=0(9f0HKah)l zF%|j}t{=#C^x)UhFj09*gyTkIZQuol9Z616y45F5sCFnIkXS|7FfTO z!}{|@g!3=u(7+rdUjA|}+T&l+rHTO(${a;pL9VIB@yod#SA`;wei@_HLjuDOWjJA5 zs2Ct&T2M?OX?!2b?Fcww2T}wl5}GtTioYs-4-J!%#+N zHKmE+%10Aop!#jT02^s|6C+f~M?n=)*t?eO9o&0RiXn~9t}QNJJfy65)jOT1!A%3sEp{f73G3xOsLbs((2)4rK=@o6H%!xr(r#mRg4BUJ^Rkj*LOx)gV;V<8Z5FB(4S0+mAXn>h~lIovUjDPDVB{G{U; z*z1$I=%)UkaU27a5V4*OjH0l2ZGj90%}DCaTLo#*B-y~leG*G5@ zZU8i#)^}(n=1I1i$%|f?+WE!5`y|cSYdg3cTap%|y;N0TWm5!=2)}VnERnF1 zaJohm0r!FQgeXj7)~J@TNiBn~)vhtwE68CzDHQ6|KM9z@R%x`bszjr@(o|e3hViq3 
z;)UT4evldEYSp9_A`X!;i=bE>CVZF@4ljZR9|+e8;Xakkz0~ASWpV;X0>$eT-H>ob zL;f^N%`*lbc@+aBT=EpV!|o?d^V7NAfs3%}DZ)j++-0CpGyfuoQ{rd4jD)PNOsLnq z-kc0L`f%uVFnK`IF@0kr*UW?(lIhZhNvu~Ujn*|8JxXX?nL})wrR^Ho7C}CW0dxv_ zdgN9RwbDHrUAte7~0*AlU8c zgoP9>lMEM#y%QS=lVCdu+kqsuq@K^4pej0vF$dP((%d03wk)1+tGy-vV$jIKa&Iyz z$^pmG!VNSEZ3o>VjA`TKGRkIyShRe1%HEnk=d~g2PAeQ0#KM$aiMp|Y4u&bnqIr<+ z#bjOdaaRbPW-Vx&HKWzC3-)SF)|u7?Z3%8U0U`B4X+tGtM4H_}cvqMhYxtQ6^U34| zesmlv#+Xn#!;w?~IOxiZFr`;w*sYPp+Wf&*^e#dH_+$?3{WQYkAqr?p;zR3Chd-3c zPJ(KZT0!E)$j-mWG+UX%f06485-&#L{Y4H(<^Zm_VyXRWxwgN;HMeV>Yc6bR^2ABc z`IU!xUvqd?Sc$N&iEV}9+Dq-<%C)^A%JpB}ItVv^wy{GgjYJy6w{oOnXe8|#$+yEu zDA7Mf*FQx@^6fAZiZuOp7zsuC|8^KjyGHVz96=RnCX|3gF+kEtzLV>|F}$_PH|-Ba zAblrCV8!8tHTK;c!DRj<^R~INoCx1S=o$(yBmfk!%?PF@Db?AaZvq7R}CM3xM^e zG*YDBswHvysZ*HQmJk`x&SCGOI@Tp^wVl$GW-f+@V~fqiz_Lmuut+spX^`=WAhZ~` z*QTjc*8t?HBFW;8Bc!o5&ZC3-Wdy;X=Z{1LXZGM;peP1;b+joyBS(n6Wpa4kV_q10 z0L#2OB8ZaK0N|@5LNw_Ers&lXF`Cc=6@E0bAWdsfGE^5M@jB~4hW(GW_Mm0)qY-hM zvdzj9+FL!h=WS$zhvP+C%QXGT2y5%5SuiQXFfqGyi-^)n z-D$L228|fz=BSawR7Ne`1xyj66un{CZYg3v8F@p6BAEVUgsi$9S=CW$e^b=<1hVSJ zR$1i~?Oh_11{II#GubkSGR7l3H(# zx}A;BTOhqP+T%nVMIgO3BDd})sMA?$e_Pb{FQCqq>!>3^K-Rq{h>U@^MV-p4DHJ+I zkhcedP@;2+u5*eE1*-s=>`os`V*g~X^tjaWo9rVxy=EvrN;$j7IzC8v<%t}S+|3! 
z69(OgE==np;}k92?3f1h(tnPnNj^*gw5^f!$iG5 ziU#~kO%%ldiG)OoeqDDN=e|GU!UqRQocsPLQCfn?L+R(E=w|;?vW#MYBw6nqpKY+(bU@BcvD<9NHqOm6y4Q-SHb~AHZ1g0Hat$5SP&OwA2pF&+i?9xcVr{VKtV$4gAph=1X+m`Z+>nl;#~P)blaUCK_ET36@s>3 zE84vQLH~Rsf^x3`Cm<;I+LIZAQahi_5R`lE$qd0F2tJu1SOmc*h2RH!Sv7H~{Xa)- z|1qCCGM2pXfBlcRu?yVCNB-lsq%7kVTT1FgYd*TAliF5!u7~SaCz2t90{gp6AGp)fw)DT+d{43WE6`6?pP$%1Zzc^4&IxU;Hbrb zR^Z#gkjjPCxv;#Z+;={I8o{#)t_ zrp&mD7(Wchdl6isw)x)3Jv2LOy_H52dC5tgFk-J$Hc&Z{LKvAf+_^GVG7~1PDrt{tw=k)MKp~)YcPoZWGV@WyU60<7}yf<~*Bx0{)sBbD-pMX~TFiNU1mOSZ*-2)~UG~ znt1kLwKMhlQk6<1}-bP$KCn5 z=Fyl#bjnwDwrSv7^ALtq8!1((E+YpA7tl&A=XO@_(_Ya@xW>vnT={Z)L^xr^ShoHc zhy36N+~2q?+`2nxgmh*)swv*rW^(6k{0YkHIeZd$3!bk(afQWDTx8-R`LGb3OHbE~ zi6EpNd0X7g4%3kMnKjUcMJii825|KyM-7!M@4QL0#K>>Mcd6)>51UGA^(0rcyT%r$ zJHmYx;f~wm5SGZIb0Bew^3eQZ<4kSwP&$M7hUPJrRxY+2&#}RCV8EG4*=&bd#lfX; zzPX2WR#HQ^gLDsb^2JnQcZfxB#?R;>ewstW$@1bFXM6VBP}ViMP(QNjk^Qk{&OI(A=NcD`>`HA{V5;HQMV&=F-6YHfj+z!@9}5rS zeh{E^_>wvoUK1rQ{sdcNAmYQEGr=H2a3`8#oI2>`URql={Ez`0s?=#E2|KA=b}-BJk@ARhjL1dwpQFy5 zE;l`v*7(O!bYuThITF&?OElPfnR0}TUCu0N#e%XMlGsqP{Yj@WsSV=ZE9fH!h^NIG zF#^`-G235s*`>VMtR4BwQoyv-@^RW<@Dp&ZQxc8?XPIe_aB}44Z0&$MiUy#beR>EGAQN*Yp0>uyz>O;XmeQEK3 zno(a`{GUc8XHiiZ>i=m(_V~N=Mt#)yuSC(^{cp;9>Zjonb0>T5HFd}{E8C6nX5d{0 z+pM#Pq9G-NB9A*z=Jq71eAzh}FDQ`OY(_4|5Z0m7oYDw$>(Mluur*{oQz0@CZ)EdS zVC?{k&H-(}*2Dw{pykLY`RRnFW90xbU0<@Un58hovaY@o z^!?bZtFJ_T&M!cb8U9Lii}NW^MBo2PwEw7IeG&TpSEA=UKi+TCPu-`ZsLVl(Dajr$ z;}m|8-ibb}lZS5I$}ppDS_FxwGbCh~JstHO3Y87;ZAU^8B%Y4$7_l^~ILa#w`E>N0 z;rI(q1_1h-QS_qz|1-#dt+?=r)5IC>U+AY4haAPbUJ#Fp zk(6fqohbUh@tMCMIgu`tAi#Y%OzfpAAzM$!24kzq^MI!%DSs#G*cp%DPzJd6)xU`f zy#volfx`^a92lfcycBUM^2`nfkzK$L6l9mlW>6R)V7kodYYz~Ae-rg=i%1$_lUz7KawphnNDSDS3MW*jH`Ri|pKLw+}3C!2ya6L&x%rGv?IMDq(Yqk0MJxMZPi+h3Gh0w&M8p=zN-WnoNNywh{ zhz^bKWF@Ii3={Ij>mc_7Q-M~E8l3fnZ2@U6QJoaEx=Tk=uZag^vpG4!KE zr?+JND7NvIe4le8b5{fXyd}TyfR}om2I|%qgd+XCCI8&JeLti@y)~1NN`s;p`r&93 z#dJIh6TCGWkJ6ytnvJK6e%_jmr;C2xDhcVvLypuF`6wWvy#%?;!PtCx)s{eY2XvnrC$`qKBe;i=^Z$ 
zr$g&uXkZYA*2B==oej-SPUL+=0&E%DyYt*x;Pmz$hW76KEiNL#TZZ=T{OxzfKjm13 zLly7KNBjF<)#9<>mT2eHN=Ua7N{mXkNPA~0!=Y@xIJ1OguD3L4wUg2TZE$ah6cWmS zLXq{dDjn`Nh6_?*xZ4=+`?5_=$3EVd*S9~+`LK=QzE3N#Vspijj}PRz;^J<1p$40^ zov+zUiX7QmKW?W~xFn=vIk>#E zU;=Dy5d=1yOLvp@5(u~*dP0-52tA+7=&6%7Pv%oM(RS$hWd4R5Eu%e-IA!xIn%3n^ zwdD5iXUd(bx@namxi)d-h~G6Cs4PUC0(4x~2ksz{7Qy{jGTe2v=T|ab;L|J=;e~(a z))?n{K9VOl_JcVyCtBz&`uR%+HBA->HkL)OY>712nK%?JnaAPiGGAMv2K1e}9Qesp z-yn<4T13u>8Ss~Jn}yhF9TMr_cQAMc5C*@4!G9zhybdFMB=55};%Enh|45!Rxlg*m zmpGY_j|Tfc`oA!E>?^zFJqSpYyiGH8St^*j)V9G^LRhA^Ge|E_h!TWgFo6&y2=VcZ z5G4rl@q8&*OC<>L@%*ia;@@zD*jXz4mL$S2UycxAzTg4fTECfdpD`1dR_+cEWp+#J z=(VnG@SEAdy7s7~oUjuDFo-~aoewQ|WWb1K!+;#|dxo=lZ)BwN>irz_<%}$uO~-SU7S?o>YqYE-PJ_QDNA{yMv){{;(DCv#*y}fFnC6|0_k%m_6 z!OhcMYX75r+fNm^&KxjOoBk-@d1d@xicDg+95GMj@zi~yC_!w7fMl549-OD#G@#pq z2Sc@554mc-QnNO7Y_;q*p10qGRuj9;(pfl z3ZCb+)tV-2DdTf+m@%|>ZoAAI%xJ&IGT55kmuEVnZd!H0u2B7Wqmle$c?byWI!~p4 z!zWMHec&N^p;7y8S2rVJ@*GXT`PyUl=AXpt(nYiMaX41wu(-F#5YRI>Wb^J8g32OP za)edQ`q4Ot&;+t4(tgbk&Ib~dK* zOSMmq`Dm4GZ;&5r#L>y^Z7z9a^@4?San9P(4@S5G#Z0@7*vOt2-xzbY-kIkG1e}e4 zfVG`~pvaYj7aOt zPC`15GLxc#rYRTZaDi}QUCzFlW_Vs#Bc%7Fc}2*tTNVcQpWjVPMLrB1l72Dawcv6{ z$1bL*5EILgv&oN5Jl8XC$HGlnWp$I9znm=$jepkD(By@jGfqTK; z&@R1+LAqTukkc)+^>Oe_f#&lqO2Oa;YFqh!3zqi|_qcJ0R_<|@boRtay+lu5a9{o6 z6e8mw_oH&nn0~%U&$)nHcdT&?5>Ym##VQ>^?z525lPoqG90$`eUUTaL&PQNQ)s~N| zb=lDJ0<~4BhELF}7o%AXT!EctolO+-spY8;DO~Jd9x-F^xHHAKxicHbg z6PJ44B%KM>>&FVwHm-zFz*%7%eZPzoRyeW{{<@!yZ9zKOj}^Lm+zK_FthI=+v*Pt0 z;_C#lWDu8Xe8#EPoDt*z3}NuP44A}hYY|{p)KyE}5-YDSeE+Um0-%E!XvM)II`GOh zZ$aku1+QFFgjTOF_^X!WrF%mG-TGO$YN=c5@*4_XU8V#c6rH+E5u&`IfVwQLw};~e zg|@%+TCY56Zz^=|ir+0~PLH(SCkoiGZ;EW0*axEmuZjO$NK3qr*Us&le)%E;Ss`Lv zG804UsZP}6oKbMd7L*jJY~c_GSW;?cnu&8kSIMxA2LmWX1@Is`n+J6Yy)ta{VG+r- z=2Az~<%=-&vVm_Agf9vR8#!L{d?{u5sxD6xpqwDBN<8 zd;!R>;POuwqJL4yyV@2W{^TvY?goJvzF}MLUc#}j~8uuyN|Yoi3wgI zue0BaM4z+-V@DyDwwo<;j9Qcs8iT*I5nGa2#m=yUQrUMnZ1B>YxXs~=t*q5L_ZE{S 
z1nu#)164BsMfZ{Z`P}D_%Ip7yzE-wMdeQX1IOfeQes1?QlfVyzZ>70UrAhpe?=)p?ovC=hV8xt|%BQ}HhFU}2C$n~Aq7f-xo)MgEHrbVRchYCL659h+z zA1ZLit`nom{rXUWW0JmzRQ*ulT9<`IWu)qd3j1#NQk6@hex(royHu)bk^D-bW1C$^ z)#EOS`q%92{dTjJZBT(o8b>N!-3?LHbjdE(k`?u@3naVjH(AMzQ22xe>S}X@ln&M-f_Zq+@X45OTheQ^?@^#^0h0`OtZAS7|2N8TmrOQtZ z3bD9LUuLkJTi2TjSt+)ByH?gq=&(HhM3YL5VP`=lgX~ zy?mWrb-CmjJVRDeN%oxXKXMKT@{j|^^Tvi>vr{t~?FNfRc@Ss0gkS9LEgPV^{l_#S zC0M}e^8D!QmPTrcfc1BLNH20S#TKf%HEBwZA+L_JE`IN@x%1N245jAUJeSW}BDP{; zz)BrJ7`J@AL#opP(Q7wJk{&`~9X=KvH$uod%z7Vb!dgPIM-Q6su z1G?vQ!Um4;S>*nvZ-|i!6BMeHoF3_rdC3JDmfL3-^D#JbGm!fAm9q+GLI`>Af`RCg zIxs^OhxI9^c8Q7kEg`2R)d-U1BPPl5HSj0oqjV8yqPr6g>EiSB;&Sm`=#24-)f_&E zTr-~1S-4wV348bixX_uP*)FaOdKYF(4;;7-jh$!LR@_M}+jGcIQ2yh3fMWWEr8Sln zl=HV)pfiLs?2av=VyoLTC9twty?8DlI-h~DAuiRxkPlEybdEcU%6e=;qB!0OHoOk=NCvrwI$%L?QolUtCxJK zt`GI<<6j|--~8FS;>_54JxsG4oFfv8o}tIvRV|__+mz&kH8;W>lyexjw=i)G zxWYFLjqgMl2ZWa_!67V_b?e@z2)G6BScfjQq&LLTQ9gu@rrhp5fzRxweinw(#5~2N zC2hzjYnhuME!U%Ap#68X4!A7*10D@sk`wMl@dP%{-hm}$u^d3P(Ra^ToLf{|FPnlq z13z=xu*8B`XEx>KM&vt+!=i3amRo@xCoQ6=ekM~?l}7!Ug1=0U-O08mF4OaE+0Q>y zxcWdSvY&sZz=arJlzX&Cn!@J_xJUnM<{owX*t6yw4Q^@Yzh}P)(dElz`m`qcm{NK< z*Qkv{T7n~l{0$5>EkeZ4Wki&};d2F_>H}FI;^zw9F-j34ey-phqZA?H=L$GR?=czz z>8S#r__*ENm+mWU&+yYC4mIW10wJKLv$;=Ys42hpsce3gU;9+yD(6M&iIiXaRH5HR z6)1w*Q-zxk*ag`=I{f~n@1xfSfS?M*fGYRTQ}kXN-h%3vg4c#3sD3GUZIpoY=g*SY zrbk|zKM!6TO5j1!d2J}7!2P+rHhb`k_26lHT7I$ZtZ03*V=j5vlrZzK^~l5aba0AM z0#a)cNLG~pXIrWLYlXINdH;`6jJ_uSkNjoZ0-% zC{Gc}ecU+JSFg;=qOJSdVMqa%X&-ge_d$%iK2@VD;Mgo3mk)c z2zvF3UVmHQ&d;ig5w{pf>;407U&;tad z3r9eTP~~sKDx(NGe_QBt7iUm}fPY)ya?GbBCwpZRysC(t{B$NK&18Gl#H2>yjy$nb zJd0A&La8Z@6o_M-fktwPysGFO*bu>};DsbbX!NS07m^f_M6W7(AxRNQ^r|94QqJgJ z7Tv3he}^-=SL^Q8Meo_BL|3gv_{fS{T)i^LUR(VBIimr{xPchPExGyHq8Dqt1)0|t zy)&93<9=(BEb zMNoY_@Zfev^~AHpgWJV}PXr#M1RfL}4^o5&p9nnIS89J-vF%@Z9#n$k+lr(C{Z}0u z?Gq2aw@89{OC&xVr+8Kw(0B4|bD!@9h4TojY>fNY$P_wm(P{5vN^Y0sZ1*Sd^#%Dl z{B;7(8SE5CoHk}i%Rj&g7)6NBuCTQ!V2XjmWf!&O#$sb$Uonx9bFRT5Yj1U7Z-uVd 
zEV+Q7S?3C76m48R0>q6McC01bedrt)>@??b*|MRYvEv}B#*u&b!DkuJF3>ZfTu8ED z1jv5?+pjXF7S?5M9qUqrAKx2R8ATZ3y) z>(eTG|Mzi~0T5Jy7*N$Jdw*DEytV%PDx(Oh?+>f2k5T=?v$V?kw90-VtTIaALD8); zimb9<2&-&Isr?s=ZNKMNnKEgAvDkS<{AsPS9a?1{)hZ)zs3HdyZWr(0c~%#h>*Lv8 zW8P)-|Gy>1yu!Cktm9jXFylwV5~Bzgd^9XEitz16!xE#&68mUaViZ|oAJr23s+5Br z3YGp^G5W1yzEw1PrqSW}L+;S`XjzOmVIWKFyrI2Rq+gsFb?2 zH^gZ8F!Pj`SBw~P{aUf(DvQff)T;UQVquR3ZmmoO9=NZ7*aKGJuNS$4?C}P>@sJTT^g3+;zGt&%T;rqEV2i{`HkY0SH*XEIFx_0SlBtRH_66g0^8hxmQQIH z6u3FyzA=TT1xXmt(tRz(9vCS5D31gIp_<<=M*qI(j%HJafp`!YNP_RTi~gcO-U98n zi+fx!ogxhM+eLpCpCSzO?~Cp3co0Px=-(H2++;F(hqUoed>>^r06`Uq0aa<^p9nIV zx1joopp8=m)lUQ&tz!ni`z*=m9g@+%8)P&k@Sx~qG)2hpyOPmAp2s&(Dz$&Q*!JaO zuK&Ay^gDI&rGZrK*xVrQg1qK1b-C@SGe7-SnLh!hzzJ?O%VkM zr$!z6DGDIfl6L1#cQyUDyPZGV;g^9udenp1x8I2i2IH=`XBe&3h8xQlryBeA?|0rp z^SU}8^)zQth&vE!X7uAOw?(lC4d^@zUJ2gpn>Arg|4EVC9+fwv8dUi2 z#lnqg1qm}AWH9Ba`0rYb5|7rxns78MeE`h=UgQoT1#wHl^0URrT`x%qcodz)p@_u! zY_WT%xsE9U>$AmO&ZSNfNS`edF}E-Al!V;pi_u|-t?gefJg!Z$lm>q#qQD{xm)5<4Wm!A&pj`|gShurLR?Rn8p{(7juZZ&|!PgOT6} zvCS|)Cig)v3{8ake9 zWO%Ara)*zp45X)uTm#`CQTjsBUs_v=6wsg;ASqAs3q_yy#akeKp}6BlPkjY6zEI@S zS_g@ye^&IDc9$%RkYa$O+e!W`!%1HKKP&q4fm8<4pJgmS-7gj+cNnu2$q7#}K#~*w ziy2OG!hf;2Bk+ry@Lv>zI-FpaFBSa_v?b>w&mif(g)e0|nUB0!ayQUY8Ax9$as%yu z%du_j#HCzp`+9q>|6$Gz^OcyUzeck48_+J^C6-CQW#hk0Z70?2grYmH&%H{=*#$C5 zYDOFvMm4iYFv|2p-_MIXv0GcdlMSn%Z8#~clU`qwKj4RVz+5}E0^i=w6}R8$V}l%R z_}dCo)|YHuULbTS244@6H!XtBg%5SKfyi)kxv?~7ZeZP?mfj^{Cs!=&Zb3q1a4SKr zh|WrKjm3;vL1YB!D-4*?m65U{xH{FK>jWR;ytXFB0PPkIQ#Y5;Dj~>vkH6R08fO zayOldH9{1R^JX?~%YApd?be`Pie0b2m%1PNNB?{2OJ(ra>{XJSnYAC#A~~BR^XC@$ zgo+=Dbs8>E!Jpvf)1~n9nW`lk>ar9MDEyP{d+O#N)($T0=bwZOwc8FRNBD2@6017J zkS>skzIkBma@O$7hK2_@a&vWb-)y=~c{-$*Fg4T8T5) z7ti{SjaeAkZcov$TA1G06@^I|3hX0F^_%j?N-P6W%ARdOQewCh>TKdOHd%Trm|Y3z z%==Un(t^n9Lk$5`ny%@N`Z)au;KwlyQzo~ZHl=(Dj;S-~rVQ_qWFLtE&4n}MaVk-R z%Uh&srl}s8=98`2rc?U_$t@bRU%RUG?L`wK2M}AT3_Hv>6f%z(J%e4 ziTX&Qk9F*>woQj`H#aH~OniI3(KNqZ;|y7cYt2AhQz6r_%?)_E*5-8)4*A&NxS~TH 
zcBgEBym1;N0gXa78hzi6#2!oAg%8~BVS)KAM!lX@aah{&W?bS#e|sT3vUbLEm9iq7 z;_B-N{z9HfK8Mx$29JnB>+p!vaHjfpZ-;bJP;9WT(w-CpUaB--iw-R|BB9e2bP#G=%xTH$NS#)0O_665TH zQA!?mM#;V&xZRyY@X9ed2udGHh*SC~qR73y-Ru1n(OBNzzRRV?QbbjIdpoz;>x98h zS&{E(kG|HPH$9aywcpW>A@r&atQN}Ba6a5#IN1O7_M{RtdPxa=YF%>LtL~T7(cGjn z!{T&(8!U`b+lCpWUSNy60~@#qfHEXgp=Q$LqGfba&7`E1M4d8(LVQeeOAI_im_(9j z6S1kEiRG}2Q<-nxW`Q?l!*=mIY!|=lAhC~^=+KCl%~nC0&!AU$W;Boc?qJV6d|12x z9hyJ=4H_TryNfsej|wOTccm4y^^!}M{ZWBCS;JNra^AL3gx7VBd~f=d*QPKT4qeaI%+=kHlT90%c!KiNrlr_Kd=W%rP{#6v#9wm^icu_V1pcE2q`mN=`> zxeDw7_SlEp{g;F60SouT?K_>b*B-FYKirP9H-&-!^TX|XZjJlmP6t$94*zibwFlxq ziu_^+$j7wUe;t3HJ9?-|VQtt+er2%AUSkn*FL==(mc}CW!+6|`h&lmIN|6*tN0RKH%+qXL8(Ren7Orh^`GriyR=v(t7<5n(g~U^tqvvHI zBACr*t1Z?KT1(k|?9j>6jO)GCZVf~#W#5J`T7(U{b!%rE7(#cq&xEl!j9a$omo#Ig z%hqSt>_C{RVX4}2A8yhzo|3JK`>7RMOHS4{oYqHc!Zfz8&BJ*xUCKnhQ|(JcixrRN zv##z-95X3G#t+JO*XCE_;L`Fr+qu&U#%_*WN`!?cVSXJ`kYdRQNMv?rgZ7*3@TuTo zHQ^N#XQi-NLYXHPf3jVCv~e(Up!xm-oS-TM!5_|LF}88LeT_{pG?4|vBy0agsj&+{ zmELYMj|RT-h)j`vc9+HB!(96_5SNNP^6zFg;&`_!Z-w8<

U(2(+1y~*fmeca<-g!3&-fLdoVMpoN@we)g zDxudQSP0SSmPjjRR_E_P4PUp1;VWGdQmC2IX0E|0jC#Uzryh6!BYt6|d?MM4mO4+U zASf+jc)yUeY48?4lp!)g#5Ob@34+LXwm z&>UOg(&%MND>>q-r$hB~VxwTNce+_QkQ_secqCw*iZb=YCqBU54;v?z35}G!%|4+@ z$PM?(#$ZDFmC(EtWMF#Wh!@QG&>%(B@vD0YqFm?chwU$yM5H^h6_5_c%%m5{0nR>= zZAWr}Ki%#l8CHYs=;`)dH>Eq6+~7~Q;|5nGLp3P>WqaX3T0yRG9%L}(4FAhkm~w~z zW&5t1vnB>(-~JRPAimmOIFeRSsyh!dm~xAMwH2lu<6mvxbtr2B%&)fJcDt@ki3+>; z$Y*=oPjuvz5#Y4aGcHERM!4vX$edePhTm2%=)A)EV?`#bWb|)vPfL)Z@v~)}2)BKw z`)lxf%8+R`8+=+ejd|ITG0Ew)7i6gI&IT_FmV0-S(=c&Z31cpb3@7MAqZZe(CRYJM zUFG$y_Rg#0$FS^n$!_~jJ9qCrtg|XZ>Aj_}gl$16TQc>{>mijCPlEQ06g}!;4%0P~ z)@ypEyNV>flGb1g9-yI<0QAhv(REtH9{o;x_Z8tSXnv>tO7|HtDl=!_Y3DOyvPXBx z9{rp4=*K&pJxZC{f72fCivL*--lME>eNQUt54M`4(m_%)(rpbJ=4~COPDGheW~VR^ zUYtR-Z5cp88(ptG4UT$m@Su6koPK8rFH)b9oT&K@v`?*;wN9x&+d1^vz* zpx=Ej=y&b`>fHCF-;Mim1LKD}3ODuNvvJ(wM|Z_EvQ6jnoYQdY@iR%%OVx$+QXpn{ zOE3MQ4lf9-hV{AY`m70f|4;`Xro7;h0?Ai*6t3;(R9NO4&}5h#V|c+xqbO(@b}?%U@{(fooFOkM_UaCA5D9sO@?PEHKc#99K>pPo zL_GgEuo4em*HO5p|C6Mdtw$f*;~67wxZ7P4EceuB+lsuAz_Jd^sPQDpBAt>8%P2)6 zgB<;sp-NbM)ILY8Z!zrS`&{)Wb*4;DI}Tv zx(@#-SW!qp%GY&pJ;49?K!kXjCHbJXT63QoIceG%3ejaj|?qrM~FEvAC04b~ zBpCV~u9od(n~)eOkCs(4XBbV4*qaC~rE`k!s*#+ssuz>x;qkGNNjj%+sxme`Ri1RX zo*bX9PJ-*#uZc^;x;B{)P`b#Obw^gk(8&Ge>QrU2GInC7G6IN>I6rhkm|sbgYI&qG zH8VU^9hn)MK6R{IWej`b-L223szbx~g}3<ZD#r8+s~%D6=1_7ml?5wPplJARj|v}ZUynvTk`p|P3a@sV=hHo;O_A;t0gD&@Xj zbypc4uB7AJZY_pKp6^=p31DS-Nyx>qAVHZ;kM+tZWfA+f?f4Wv9aTs=KuSlm}W-M3%kKQYYQ zOjKs{;DEZ(gVt_tRm*!tqX){zJU#~l9UmC4j`SVU;&`Aubl;3ft4FZICn@lI;KjcU* zxjL)nW}M;s#rTO_>8iMty-z3llN%aN4ZV2gcttD7t>WLwkx6s?vGVAN%JeBXMHGUQ zSO%?Y{pa-o;T^}SLl3e-hbNAQ)<2nNTswz{#)d|QW>{SJ%?u3>k53~N|0)mdwuLvt zGZQDr`u?YGp}y8XyTm}hnTHX(eVy@>m5IJ@<(cg5{z*EnZ|8-PfBFxm%g4&YGbbx! 
zeSfXX*RS)>C&!0jmzm1glBZV6^#PpqvNNJF@3`$!=d`S+BGGx?5UyZeGsi0Ui?T0%oiVb7!8kA z`o7ruV60N@`%;I7>}#Z5eYr!d;-6|Bw_aUe>0lG?KVBId9U24omKpWq_}xyC=mfH2 zXzcW*815IlSjT%pCD+r`>2*x;WS6KIs-$TCQdjBL_?GY(JST=um#ddR_LsXfjiK`7 z@X6`pLsaeiP?yMxI65(NpZ=50!kc1EQe|ueY7Y&KoE#bgHl_P3Bh!e-Z^xyp<6Z9c z2_$XHtG{k}HNi&E^6ER`74mNqz+;tkBl}FZs8y+!jhy0IZIBz5=dbl>!a}8TrE1dg z*Ly^u@H8dRZ}hO+^`ws{Cdc}Ivj@HCn#yE#sI0XoDY1drKf6tYs!R=yo@V_{O4?f= zuk96=-8-ZKty_S9-m3|8FD|jx{-T#H{3=%^BSv7;r+dZj?rn4%doY+f0u(-oti$OS;{2BPh3$NjQ2~y z8iH;}qp&tn11s79YS5;XpVzWxhFAf<0{58UO`o2qv&|kQQFI(Qg z`*fZ?l*w}Sezx}j=U-i;DQK;?X~X*9HQE)ak`D0~uPOD%SMVeiu+{SMauqeF-1np* zhF&esaAH9!#qjiGWrD%>{p22W$j;+z*(O5Xu%~o5J}^CrmN)J+q!i}XJIqh*(VC=z z2^@5Dbea=0rz*qMzBlgCw$G~(1caa2H|;Sx@oa0|=FNMIPLng`Qxl`(m)3hqb((Um$e+Cmk00#51?DkhFz

FU-ZPfZcAdNUXN;*0X0&M#(j8zxYSsO z^=6(SJABC_yFF_!5tp(@&wTKo{DYh1psMc38V3s0;g=Q{&}G-c?-;1=BgvK| zXz?ahTj3r(74LW%H`n3MnORv|tj{QS;qr6;yB_cDzHODJ$7p%!q{Wv(OAqvijW7YP>6ea4xeM9KVq4 zGAMt;fOS=Ou5Uc5wnvY}Jp}SOgXUM_Wnlf4ht=&H*-9(Ny^Ky5@N(zc47a9V4x|5> z4R6A=;#&Em7vl{WI@xd;jJ|2GdXMbga#;lGWiG~QX+0Ti1ka7zHG+kO%VGX4k9l#S zIkB*C^qx4@DRcLclgqXJwr|_gWR-qBk;{`;EzRG3WdHaq8fxhIk)0obJt*; z*MZS|T-+Y7#05@^f?ZG4+h0#({zMBGmbkuzFJ@?qB2Y@Ao%)kIj(ZlQ30n2=A>$v83?bh2McexSDpz*> zIowI71u@co&aVlnG&5w5{#_073RWInAlga-&ia;(vn0ppD2?6co2F3NpnUR9Y(ovy zbR)G3pEYpDnOvTjj69&=tNY-!b)o~;P}!eONI~+ z_`m|!o!@x; z5&EwSKN2Ao^(3E^!x^OPq`?~Hxr9)S{Z0clj=$|vVLEL$JSj0@ySw7J!R+Ze;e3D8 z*MFu3o^`i=ZMTx&R&Yjt%j4Rw%ZN$Wai^^hy3!mGM=cG|t}S|AvP+5SvD+gfUh41N z9)Cu>loJWBjkfpyel8o)7TDM$@w7#FISZ&>EYC}EPtV!LD+;-a_0#24^9fHW^H>&| zM}H_;CcBaRR8|qm%-ZgfV=l^?64;@`q&RLBaE{mM^T6Utu9P8v6zfE}NlQ)Gi)ddu zbP@YnvKS{2;H#@MUfQXLEl5%J_ zf1@>!9khfPGU36BAM_J6=1y0E<)aJr3r(Gfueo_rqE#&nI7OCNE(rB_Wb+_<{q{a% zZbI|}ZMQh)){q8@J7D`lF%om%Xx~XIs=%uoy;P+HfE$sjt|G5)N-JuC+>}-nN8FTF zq;PXuQJvkKRuuhirlQsxVdi~p2OUa6d|%P^#HLru@Am*G@M^yYKnVc*eNTD8K9E)v zN(a)4Lg_$Sk;1K>E_vkyP;w%GlHM`IDUyZQF%P#rKi7Yf9K0+v9S$k^GeD$>5Zh%s z?ILXZgsq)p5nK6tax{X&+@|;fa$9V(ygj<5|8R1OJVQ^xmtfA@lbhq?)+?{>;M?@i zPn@>PK9qIZazi_e>N9=qB3cb;egCA304QKXu1Nllw0}*&9r$fr|70)c@7QxgJZJqw zO3c1a(|9k=b8m{8LBz&XCgRbzo)3DVy9WZ6OWd|j^wrco!1V;Z_&LmN@dGGk8e}{SZ+)Fvd^rOx%dcubM!;0 z19q>=S$CZ@Pc83Hdz&(RIkzKKqo&RNEZ+oR&s^rd?}L_yHB!}Js>g@*Pc50`tG17% z7i*}GbVTHDfLRz3$s76Sr6ba!c^+N--JKp2v&kRfu9|Q-XcY$>#B4(-G{Q%$Hq__Yk z{!Cw(_ViE_XGrs*gULgdGp$>sRq#gDLu{*_k@SQVM$@v{y>tCPCAF5PHlrp~sPk_M9G%b2Fp;RyWrq@vB5k zc<&^{d@PwX$5epo*65&pYSMiNgVWy+LB$P&WqCd%27id1oV29*hY)Z2as=C-)BYA0 zxaIxgEc&JF3(JUcrhz-&?yxSjU`_HT7&$tT_nANG=~MvglhElOf;3g`<;!jpepS)` z6imgOIyaYbm$KtD$6RTl>38P`QvP#C7i1JbP&7F&fJL5S3o=}*Ku-=C?o`O%+jWJJ zkc};W@9t~kQILVkqtW&KLzhFw9B3v^bkn!DgR}|MS628=wx7WEmfpFF0nn~@?qSG} zc3o+`a|uI!^y=&4=hJ&fDL+Q!_}U6nI6W7(@FL z0XpbvtjBdFmK>uicFqpW&I$IHijvp8V=ft&drg}p#|cLnBy%e7M%=u!Vg%c??l(thEXS2Ot8<~1r}YUe3fL^gPXB15qsFP!f*>KxZ!rS 
zbks#hRo_t;9aX!Wtm~qq>UB58Lv$3E@>6VQ&$Xk$KGlnZoO3$nMBbj*IdcGpb+djo z#w}gOP2j*qjV-3jsXYTS>^r(k`TL`rtV(xFQ@$X^bH@jKtu}vuSHG)6 z)BCU0b`W*oQg|`fj0k#9OH;0~=DBOiC%^J9_7v@*>5F}@+e-Ndeba3%O}U4e=dLNY zMdu&%P5Byq{z2cgx0L@!k&`ODEls&)mglZ17boZckr(5=H2r^?I}<25uj1aTzy55= zE!&bl&1e~It&wE4tQqZ=CD|;a%^GBlghsZ309I>eB#k|qk!O)C*#=?2HfAwlGch4y zb)pwIkQg2UL*Q8sAr2uS4^DD8;Y82yNXT+X$VtLU-b>!^Uw8SsTQjmTBroTHJYRp^ z_tvdjx9VSY>(+hsjM-3WH>TY_scO(Lm8!p;+RA`XyOgDPFmWaiuLuQo&>KgdX!xKv zj?RBN$Zn2|BTqDbkd(%$;LbF=tY^~a2)4iv-etv}%VyhErI)2bo`;j>mhTo~CMbtq z%bI9%?Jx5C6vvLj6=JR~W_cgy#L68t z-|yT$!V#a2W7X3v`NkLJj^kXGsA8HSJ9T*H&~WXyDxKSRc5qfs2W!Ut@~voH6rsL% ztFl93iU!Nr4<{sLxhLl6Ft>HB!z)N-gpMYqmSut~z!uO$bp-d{VkVeI=|vhmr3zlL zJgqGCl#=EGZBv?$}U$G zPjlmNc?e3_9Ei=wtEYreYkf6R(Yad-E3NzL-C#uj9Z6GoQtwqZeSw(y}ML?Kp{C_(#W%MxsyU^RBF&dxln?wcH z6ljSQ3cMXl433H6k9E{}pxqAS z^IPs#%YPb&DieM>GEs*9G)!D!OvH@em&|OL4-=*5h6bNK^GhJ0(W3yRvej>h&BzAd z=bi5~k)^(mf{jP58A_y|Vg0wfigrAfJ`6WlNt%tA z4vI)9N)&DvA&SrtAJtV(CKWIzNCg5Se!7>&IV6|RN}jp;^Q0H+E{g2dQ_%;#Z!Aok zQEE4Gr)KE`1tRH->iN$x2~#7c|3{_e+~9RPJ>Q=a9%<}>0=L|sbA!Kg31gB$GsKrHiZz)fE1D&m@XiN^uQ%R3y*N=-9=;5RayGB0&%xD z6}2EcRWwo8|Jp@VTXjgVqX!3^73bq;6=h|wkkTRq%CWip>kO=VgyZbJ?PF8~QF!j? 
zkW)=wy}9wK&8llN7aXrn>bu8NayOVGM0I%Uwn%8fhC1B57KDyZWjFj7{IfsZ-d#(zR_@&EivYVPIig0T3z~ zgA;46Py15%hCAgcrx+dMdnF zqw?CgAURf}ISmVv(=R3I0IXD2{o~InF zUDGHCtp8Z~p+*Xf5XXkw47vIlsaLGG?Lx60*DZk~ln1dn7SpVkz;h`;7x*M5>+e!U zjB}PtYIqv8behOQRiuYW!4A!Uh3}UgI$<2lvh)K5;$ufIwnBT{O5DLccE`{3{3ic_7dr!)-@pgtNH&@*H?PdSjsj#4rA7zg$*IF4=(J@(&27xlD`aq$1cImWH zHIvd!1@_|xCZsA)DDt{1C+m{b$be%|q>Sq|P2`vAV$%5)p0b%x_A7-euP*H}%9^tD z!;-iyIc01!?(>&1z?prNmGHQ_;&)6t_np`v)XaZ3%5!x?)*)w_3iHxLKOfe-bXq^n zS^6>QvKv5Ac8qcY$Y<}yM|`29W;&v+u!frH=wn(#tF5D1S^Db*HeweGu9?a~frJr2 zbSOL&>7GS%{35hIi{^g4aMiTZoz~p!Ed33c$k))E08&+^R!)ZO*f15YL(>(`Y=sBh zeivAiML?^@7=fvUQ*k6!s12kG66* zc#Ml*bo*-eaX7($PU@f@KdJUpV~I=zXQFrWMZ94b*SgqrhiRV=gJ3~?gh8LzqWVn- z;ud7-7qzNBQsBPMGL>)#ylZPS5>}7)F+!9s_Y;Hii)&GqK2=C&wcJV*%6w9`hf};i&tWWr2D+KdkVd>YUbz|mxf`U_Qag36t_V%w&g6qT)=+b9Fusfxi>)g}jJNA2fPr|^SUgHrST zJyf9P%42*g0~O{F7_tljMyq;iSs?82n*Twplgz4697I1wCi2G6972j<;M}Lg5uv@% z3WBj!zry+A_&z4o24~2|e#&LON|2I|;>!B?U4q=)mtiA}3dXJs8-GW}Zlg`*;w=4L zUba*u4Wy3XNF6&ah#s>`dhBz3M9?p)mBqBm4=?wVvzS(Yw{XSO(pqb^HA|mCleR3O zRaT?C5-`3mRbnMD7ge_4nX1a3~HTrwsMk)-69nE?03g4iHloh>$J z#myaG;)Jm|t+D^5^iLOd$*DS||350sUs(E~Q%}pX^jlg5-!UIkw{ojl&k`$SX1*Q~ znI6<{U%S!_EGF$8WzjQy%sIJ;FF#jCK`xv79$B}-kytcOkvSl2%vLB%S>qZedu?$Wy)*#5s;Ka8i$eU~? 
zuF29rDUd&KZP*97t?v-KlrA4L1|2*iBHb}xAlJ|?KjnPC;o)m&_fHBpA~mpVb3MZ!UV%vPPeoiVEA&n{f=PxN8Q9$ zxc5P}|2;CyhX`2U)z&* zH`wTK|B*2lsCTvH?eZkbZCf>=IQc8+iN=C4SV2!0WDIjCVO^|*{2ypt{Hi0rElYo> z*?niBtO5j7zduwypcTHV;aNIR%bkFe*2b|_n1Y%Tl0Gf2XQ0{-tVYXJ-S1tso;XRS z!Y!2St0cQC`(oGKgn(R%aX@ly&<}+{WZIzrhY}ew+EAqJS^75uzMBX+p>|B#4C4~} zBE`gLmTY3LWo;c^b81>URdJHr>4k5mFYOUq{rKA!BW-ig*j zmmWoi1h>_j*U&C_jGYyZCa)n^F~%PixOe-1 zx-qWK(jUtPJ*_bgmQ_^3>H2YWQy56}oPyv&yQO4X&Q!aFX?-rEI5A2ox;w!_+t9!x zjc?_;y2NRXI}GN;*Z}z>RYIB?TJ2ityZiXSkVWx`!Q%%K>Qha5^_ffmYR1f{g0)%A zn13v5;}y;tZgVaq3w%7fgOY`qT{4lk`|B`<%3W2O)!mhVNQXXq0T~reHoN=bkXHBJ z;TU88F#ZjrF`oF1CEJROUkSHun33thl{n3QXPl=s&8o$sOET56;f zk`;n_WC;ZSjJPQZtZx`OfDyD7ZqAIYuBBCeXt%?#aMsf5zZ8~LO8@9)e;wCD^S0#& zGE|#A)T(KIkL}d}X@frVK#g(sQ@PjaQk-U-K`AFh*hVX<&8Z}z*^Y47xVwgi@j370 z=kf?a@xor=B!kbL(60@6<-MUDO|Ee5!^^Z-BI>%!aeL1>BG z2vCBPwGeu@lM;Yr_gY#>f<9WaFNgh+W+56)Fj**$4{9S3CeaafM_->W1rhF<4%^){ntYk}Y_|%ynx*KGA#xL$i_bKyiRx8yU}A zlUZ|pXz_Y>qLO63g%&OTqM^wBY*>Yn|JTzTzp(m>u;+T3dnB1Nuk<6wo=t4`BpCY( zfjy|6k)!N3Qla}0kw!OO2JS=bUH)Ddit3m%map`nv~YtjUi0i4=oU-R2?&urD7Xmj z937MM&R>En&IIQYr^G;Dk5;n;bHM2n7iM$Iw@0TXl!OF>g>#7*pCDFaS)*IAd$j|J z{MZCP81Xc(N;koecO_J0c$4GDW;R0-((si8G=cuKS+ttNrizrD%=-6z8^CaZv2sO3 zxvJ<$3It*Oh0PdJ?nZHj1suXa`cQa51~x+^WCY)IGeo{OXXi9mL? @Er!>@6_i{mJy1rIn^R zZeVjG!F^l6M8$>B5a*0d-9S@(J0RX0XzBw=(`0*hmM~wA3J}RGQ$`1 zGYG$hY~1lgZ_M|1?0LCY1^=a&g&$gE%#ARHp$BDiBaHbKDVwt{6!#+bPZF;Edn_^K z8Ts6;>a_!8SOiH6oS|ZjjbD;q18_KjXh~g`Cg@j(Y}0e-45@|VeM4k9YIjL`LuN>7 zgIwtTkt(?HHV)7M5_8HryG)xX;mV-43tytc!Eik+2hxsxaBe(R!nJ1&ps8LvNr#ci z!8ylpAT%u57r{w*Wrb_evoC^^A5NBBQ@Y2b^~G8G@dT&;n56Y6oAEM0=g;;Mcx-@? 
z6a{lIGD1V3%Fl)#UkqvV9s&|ChP00-+~T^~NP9_^p2d8&taPNYp=hVztosJc&}e#J zLc{#F#p}|S(D2!0LAi9THT=>n{X_!RQPP;U2-NUI3-qP5#Fv9h_flH=L_+P+`<=q; zV8=#Qj|h!S(~wM49Rs)Zj{(A~gXce4#ddGs=2#fSOLd%oW-fL>B18m3+5w55k|F)B zTPj<#^a%mOZzyUVt!Af>PWPBk5c7^G$<{{EX%a=SZNLg45M|X3dsN}$oDl$ZZuhL0 zsoUN+s3RmH(qjT4(irp*kJI9BvOX)eRSoBfsu}+!!L4L zsyV=QUkyk+tSTs{YY!axtKO5gFcz$_tfgAO$BZy>_8L~IGh6~T_IjLzNav}Rx9tJM zfD-e}feS#F=!%OH7D%mz7@oKg^UsL(qk}6uZecIp1jSVdcc?Ay561c0#(Xg&!OPyp ze0?sNHdED=6{OjorC-o|ecf4H&ugs6`Ljag%dsr+ykm8&y#M@J3B`~$P%4xu^y`Dq zymQH4ET1?F4XkB8tF|8`Nv^ZozGJJ#K9GkP{3OUk1x1g{_6&?!-DkI+P$;;*u!1=m zfj7CgzMZi%x-jH=s1wes}}dT@+G;hQhyn!IjGc=c8>$mEgB~gIEEP9J?#p+D{HEEqe;*@ zZg)3QwiC+eJj4!mLfKc6Wh+XbaPxapmOh}FXO$VxC+sE$1G#ny8p3%KgLyhxw8Zau?aI=x z$w$?mm)qOEsNt}L0~pvvgZw6dfn7BCwPeZi(!D19omu+(@-YT&-h7k$yaZRrMo#wi zLp%em8;`k|KV<+(>)UU~$C>MdHo9Nvjdmxr{eH4|S;%dCS(bi7SXjA@VYrRrX>>Ca zWF$Hb;>XbNJxSc!H4|r-_UHz@L3h|6!N-?l(`?PNXWqTp=81v>xJ@JFZKM{ERnS)6 zMM?qb1a0M;$tEDfkko;;GKTa(TlvUYm@=dQRBM%!A#(%2L&AiA4H$*m^X$(5rXzZL&Km<^nz231ARwOOLf67 zKO-(0z{(A$g2YRAnV+<{7BY*_j$OwQ?Pf&KT=A2)n-P68S-HCOKDX>{X6H;G{u)BhkaK70{i@Iq$)NenK zIKmNzaEwPc_(ZaAhHMB6ZpO`!{VjqHrH35Zd$ROT6JoBXO|X9W_%Hq7+^7EX3%~LG zC%^QUpL~n0gk4G|->rY@)ds zu489MDz_7TiFxg)_8zUW)8MU!>Dd^}s>?t?<@qlS9a07|010DVCmhx$=|44TqTEb* zs9z>r$BQIcy?w`q>~1NZ>o%skARht)An$_wKT8%YF1^|Dd2g2fIV->AWG(fSsvTc8 z$`xKU?zsok0{YVivSB>L)@``QoF(jqjUxoS41q-^?}bX}2+{1lQ2FP{+y$kNIwZX> zOP>{$?{QA2XL8VwLdxFgcQj=4Q;60B1J86)Jzau8-2ttdA(%RqNeQ2MoX&{)b6fcA znfK)T<~WZvcvj#QV4%BFSg#%xdc*1?v!4D=_pJl0>g|1MP7B*XXXw^~a&?hJjewWV zKnt@;Dd^c98H+sJ2M^&=;Nd=a_^f#NgyZ4M+0qjaKZZAAbD>00$*Pj9a5p;5FF_o; zd_1pfh%8r4+Sr}t-F6y#j6bU+UGXtBi6bm-z{8D53uYCsuDQXEW{HO(!{MXgDhzei5hXt0b1h+!GIp1wZD@A{qIhn+>)jLpy2VZd39)%=L=P- z*F7OsBTCx7eciqrn>-#HR;|~N5)XEkh|G({9-(uT=&*42A#PVX%yCer-SrY+x{D>( zoR<)G?Nk+!!F=2E7KR1kAqnmlhV>80jOMWIc`G+1CFCTr?P*$mWVp)R>W=RMtvY9s z>-s#LDhSZnU2 zy47mV9zT}pa64OhbUpl*Fd2X(7w%gbGoxDIk;AQw`Ja>W($ecqxNqYgB>{)KOt_Ie zRSg?aey>SpKH9j=vv#A`t@edlG}NhNc?3Daa|;?UoyVlhg7`X38}-?3kOu_;A>0Oe 
z|0;yw*4`_!^xqZ#p}+SqzbCWjmDbyEUzw%%7eU}0 zW6qmv^cEVMd?iits{ou|Nt5>%0nYDri{Vw=A5K-tDxaS(76 zG$Lg~a&Z?iW8a*QWOs+;0w?K_yw}hwKMbkkuc6gPidSc)Z#Xmi+AMu{5xM-bgDNh? znY@!Ko6YAR(|IRFcimp0-)S7;y276R^G^^lx1_YEBvi zdeRUYF=|(5!K3-L_nvs0FEt+Yg)=N`H?_x^G40S7LICw^GW;s8>7cokz|Fd@@zIgf zxXK|e=pK_s&kW(!2(u>esUo{y3%g-xShcT(-R~~4YF*^y_AGrLl57EGYaL=5#4@&Y zJ1z0$U1V*0}{V;@}Q4ILM#l zb@@RC_gHb>qEf|_Ta_!^QBzp?FtpHmqeiRJ4qpv`r%F2?ECP6Ziqj#sFp8iBQk=-l zAg}bZ>aC~qM>p3X10+FF~G7lkCS2b*mE5&cp;u>B((=pAtpGVJ&^kGBF^V8 zI+^WdcSYiVzho9YFm^&i{{Nbn)C7a15L~BZ4y%;93OET&sopzzDPOY=> zS*(NQb!3jKqDkmBiY=mf2YVp4r%2C4o^92l9JQOTlY~m$ugSCPgHLcG%xoWg`lM#| z0mrAqS^DWBWrP0N^NBhfco%ho6_&0!3AHMCeBG_VGI%u;6m&@V@y}&fg2V>8c9W~v zLIGS$2*-Lc{JQ>~FXxVY`Q#!HhgL*Sx#RHRDkT)mnZqm5v`Sw&Rc4722AP#;ZdazV z%i|q%bO>it;{rGgn_xu96*>%?K3!bcS~}|%z!CO*v;ZEn1>i>4sM^A)=l!}T1Af)jkpqYMhNgA^ZSP@j_&kC@r%&(bH0$$F~cc7*Bm8Nh5= zAyBF`(awZhk(>xbfC?^oC!YSw4T=Xp&LDIKzg?q`eC~%NdR^%)VLv22SzNWwySsN} z>2C|3$!m!1GpA8F8szT|TH^-+e%wK8zn#O6J2|E#%wQWg4n%ewRv~bTSA^vmSal~Y z@#SEl@1&)tWTF4X&Hq4_epQzBuib*@xSMT|Y&-u1@hY2sH(VK02NriaZw%u@UfN0I zP^*k7Z1AUi?r+qac972G+r^=gcH8r1hDNbv6r9*(+ReA9=*w~NsRQh0ThD)i8M`D& zT_Qu7@f@uV(=i0|{!r6lCD_yCc$WULnD-C$qJIhI1<^M- zlRAHX<}XZ3RAUNl(8hlO79EF0Fd?w$I4t^OvFM#nzV6D>Z_Dd_0~Hu-6`0WO9cF_@ z?)^URE0inzRn<;rA0=h%W&cp{C(T}KgPQ+u!0A40rD>@(70UJp9cY`oUWV$AdckT5 zkF>z^?OUyfle7HXhsM+jry(5m&i3Y2OiomLOu;MmNthsi4>%TX(@QSQ4E3 zyI|P2<=p#hf?;-Pw8nilL74dl8yF_OQX^{}T#w!FRcw`_0C;yCT9WDY7SwMj#GQl@ zotjf_rQErwd=-oJ+uMpa8as`7ZdV=C8dhF(TD)H)hqB&Odleql+c0BibU_&nGxk4~ zGV%$lBW&>qJ1SvS=cNVta1LBHL^PURKFsfwF-iudj$=zeDR^%+UcCc;UrVRn23I_?J}@?Xr$oU`IC0!@Clv_@t~fr8 z+r;y8D}bv;IAa$k`6NWpPf%MYA>yy4ww`pL=I$*0QIYNRPk7DVGc>BGyH&1&Dwwg$ z70zL^r|<3%4ybI~)#+tm!nJ1cBM5Y3%lAN3_1@R_2{}t$L7shIAzITNsHt@A>Soj1 z(Uxai-_6*dK5VSt&DefaWMh567<`H|LRw_6g~1bmho~!WsVh<-Hr~YMIhPNRZC6D( zh$TcUP^;t#_?-!kOz+NVIR!0r7Z%egX!)@glP}+PI!pf%$gA>gkTcPMHO^oR1qsiOd$W?+!M#H01o4D26^*IZlrO=tJ+$lP6{Q$!^>t z$R|rW;HA)hp5772+ik!-GDz9e?#t4DD{?met4ehpuU$S<9Ghqy 
z`{Rhx{#Y2la?1gd_uOy=Q5dm%&|yqYCsIDlCnfr$yvz_OM7@1I+%$#9ZZ%+C%r%G3 z^kCEP+p59Umf?s^?xh2EG#|k)a&cmZc5QXpCx8M z&WZWa__8V@0lmvj&AI}C^u-K-x{d*7{0UHp1fosnsEn(jrlU@`a5-m~6t}20*X$J3 zlT6-P!tG~iMq`B68J(SUT zFm|tpvNxw!%_#k9V$}oA@cN-N+0gQcp>i+wzMGETA$I~kihKjVyFC8lp53t9EvzgB zS>F;3lqdMDerkT68qqv8=9vI9l>$r6y}m(~Uod@@&iz;d)Bd7T8Ot)dk4 zSfBtKTeUIaY;vFM7{cQqw;7)csuH66N9oPNQI;*tIH~}#=#Dm*Cc0cdmK~1if|KWtL z+sW|`m4oc*v)E;$Omq8f-CC}}chwLCMDL)}T%p0?keI+E^IJ1-LI&I_Bo2FvU;hiZ zaS}A6Hu8IEoVRG<05*M@Af2}5tPFDHg2%L8W9Vuw2(zdvhZd9oT5k1is%D{^)>a6S zuO(|gR$tGD?-<7iPw9A%4j3q88(Z*|x-_Yg=l6@}@N}Bz`TgSgJ5nmb-(^zzMqdA- z^z5PMkM*1uScSH+lvvSlLY}(2*1{q;4t|7mC-_47?yz#+D3a)No+n9_iFrmVa&?JT z&YO7uo)pOUeuI4V8e<3bR)%C{Cr}$Zg2t&!ey^5y5*`x6C)79*(_FN42p0uE-PVF! z1g8Yvq@h6SlK9L}-jjmy-p9Rw4CT$df0UuNJm5}h)Rx%9D5>;iJ`(<{aLuw`*M9D! zEACe^AyQklPFR9cL`26EU4yJJt7T_g6te{gQeZyvq z9Ybs>8*BkK#EE;eCXR{W0(H~oZN;@Y0 zTn4c7Vgop#0G2EIvd~`bNq0+_n$XUffmA0>UyNc-y=~Jn^fEFuDEoGTBZ5{jNh+j= z<0bOW26YI_6)G^?kUyxz3Y=wf$;ZVquqwQ}U7FOy#J8!40{lbjqXE~F%iQ#g75SWk6&wH6x`YF;Y^Ok>zAXcZbiYU=;uzJQ3z`c z&le?aeNawOn~rc(7#VeN*y|-to%=3(L`=|tm66e0ktQ|9qQt&fMQrn5ZB2K(Y`Tes zBdDo|#2rQ-u*yT?&WD6m{>4fATX_FbS%JU0SlS!obzZ0{no=iqp;(50!;aoz)3~0b$^gchPHD3 zI-G7swXM8n&6@TlOZeZCC2eK?X}4!Rz-v9*M{Cw7UN~Mq8#iuL2buv_A+e?#YvZC{ z>D))%cd_l8qv*axlLPaEIaIBBo-v2*wi+Jh_2cpq7ApQxZw$?_64c}}fiTw(t2w@( zKhxafLiTQPy_MHz)1=icu5O?n)#?vZ`;csbi;5^Z<@i=L$!}Wx;(DtnJe!hePgaQ* z?Av(%iIk&R_dwx)N?AM2K%EyB$Nx4F3o)&pSZcBBnX_82Gg{xy>rbcbk`9TRxLudX zfMGBC=Xg5VG4t)B4+5GzeQy_ipH9j8dW&P`J9z&Y`3`mn^ zrfD?#`E>FU8s*_~Yv^kCK=)6k)8@JdF3Nl|<%X+WbVr@`FQsG@+z3R`B{(X2=>m9m z`>)b9W#2~pg~NQvF_J1Z^ef@wN0}pasCwYB_vx2eyO+B+^!}xEa+7ivx0Z{dx;xROEsfNQMS^%DTIe%-F8Z#L2~eT>{t8Fw zj9N-(Kq)j#G2&F8`^)JSnwH|^8BqG=lp8O(<2ad2aqg!iOqjiHyJgjRf`A0OB354c zDnW*LKf0-OyW*J|v8YjOy91mMgAD^9VEM81j?dDxIXYJKolX?B;C4V=PrE{vZuoX= zp}Yk0O8B?4>wx>UHePXboGx_{Dtrj5fsd2RjuDsi>U1EXpzjgDtz~uOFrFY zz&aDi6Cbi#c4e>&J~(z|ud4R#XJA%=j92##9A8Zu(8y|?7|`60P)O6? 
z;^}B>s->-o#YnGi1*q8gSJUQ-ajgm3zM8IFQ)+=W3XOjaCDC#5eJ!2dY1xo<(oZq)l@VU5@0B4y`YJdPZpmHH!v& zS5r}tlN0uFb|oxZHDdQMj#gXz&5Wbf)%Nz#*x}XTo%??4_W@JZO7@ac(%(C$P%c!8 zTUS5)XzQYfKRah0rdwg+25-lHaPHC8N$=$UU*Z4z`DsS23R(3z2~AwFbs0@G)5MG} zJ_|b)7yrhjWVje#+K^{C*$5zuDbNV;ai`7lk0G8RvHftj$$iVu{u{ zSbouqdVA2!j^-)-BOSKxw;X)<@dc~s>)N2>B&Y7q0T(l@?TVcN z!cFgbe8CiI_lyE|k3IJIf>f6df9UZAh5z>Wg3Dd2-}yfuuCgC)y%~nw2$7qB{`Nyq%dk$h*D=DE z2t#c_8LOLt=puNz-*tn}Lig6VIf>aj3#FO9>QNn0n$QV|+6_@RK-Bfy#)c{AF#0pm zG{bjZLlQgaSJ~XLH+j1jze&f{>lUw~%vsJMC)BHq@qAP2X6ucR)&^;noBP96rHZ<` z{EE00XnnaU_53Ly(1rb1hF;ayme(L4*VESe=Ma!Xno1{t+DyHaq$IhdyzIMIaEoL* zY0~OKX-cWUm5-AqW%`SYxeAk-bq{2b+YO78uC*H$x#{|{N%Pzdi<7v?>#`|!`(!$a zyUs384wt5vuHf(g^{wJJAK3)f7Bi_v(9krN{8< zie$Zi^?kk4V|Z0c*7#TdTd(wpu2Ns=)a0sEaFK`K|3gYO+w+xnV>1JomdtpaE^PA` zVNYYF_$LN5jndQpiAJY!XFDk-`sS1@ou16NuauRh+T-7UB(*V4FYYZ(D^0P_{`n(Z z!8fD0Jbv(F`=HVtKlq7#aNu70(EZUU*_lamphhc4$m$brUlQcbsSBsN% zCQgOPCTV&MuUfsN6(*ac=~2_i^`Ke0Bw6Md#uHZwF7d+TE)rcbWxlb`T_n0>ksbBa z?_k){WSN^jJ%;I9S{yKb=`)+YWu9O9z@~4R=a)XP>09QfuOMMso~#Je6rg5#q(&TE zJ|$d$Dq&i_(8|X_jfAO^tO(TP!c-~V>8bI;v?5aDg=s~k#tYMmNDacYGHI8f>mpTM zv$zr;(?3DY$|d5Wy@Q&S>o=G%@lB{{OWNjG^CbF$HkYyd}z7-X{aD2!Tep6=zg<80QvIezO zXk`wqG}WS(n%@>8;&H%*wRI74>7k8o+SvPC$R)Jk6ZK@am%7) zaX<>fEsKbMJ8LXqS~N9SYF&W7XxTO1HVe0ulf?lkmjf2vZG;5^h-%sS$2jGIg=1O}J&r(q*0+;g%W{l?&6-;-II-3sa7wJZ{NRlozHP zMIlTzC`!0xc^z(9UWZ$j*Ws2L6qVzaO7V72jmIrHit@6QqbM&+ps0vj{;9dtywAlO z6-ZIs@rA*cmWv}cEe(jM^?X{I>^sXkBEY$9>sk!JVl?)G@d1wi$wm0)hB|z+oJMBT z$jn-NGu}@fyy-1X30Bk`@2Tr1vY%^cxJ<+IqXiPG3ZV(lv^wHq%h@YRmzm={C!ILZ z&`$dcXn(GA*q^I$*I@SgT6A`q#gz`x(0&@~dMqlU(v`kZhut2%bjYc>9=Q3RZjG>LBHR`T zC$OT3#|sHNz2}l)rwIt#!!*7K#%`Vful}4PmRmra_3txHne@X3>|$?@Cd zDbvgb3cpQBjyB*oeIg5~w^w+R>w{8y46jPw0f>^XwFENQ9*cUc{{%j&>gRtN58CQTQEyP3t8d-UY(aZ@9>Yia~{O|{@| zRx&40BXBpX4&2SE19!9Pz}@U*PM{{Y$Fqw!d1|~po)f9@_IOUD#@pjLks9oAOEN!D zBYWK9(E-2@d1#qB)6jtnO0~?g{Ai0pW$Cd09NIUe{{Cb5jQ6a?EUQ<~i0tg{u#T@AojpV&8fdu=`N)njBQJT(Ho>&kYA zi4WMVPc{T<@`bj(c+6%`F+KC?HbiRtLfa6j@e6H3q=tpIG1(NT(L&o;2Yffy0pE>v 
k!1wxOQ=leaXxA5C>#6YzZBwMiFSJdO8o$ss<-qs<0cg{e!H22t>guZM z>ZR49h+BVOF-aXUt}=c3o$WV;QMAX(ySfX*pK;hlcZI-pgHI zZ;x+c#|OJ6S6<9A z&al!fo|*n9!dk?{ zlp))EnenpXOQPd0jCM44_KwbIxum%x+Bvh?={~A$Bsw+W5D34_(sLB05D34_a!WJ_ z!Y{K()H);yr`qQ0&Yws96o1{PkO~T~`>dsc!t2cLp(vz+!s{#?wK4=}X|}ls%21tY z5E7u~B@L?D!_o_NFDUI{MOw%-P})O6cBNUVq*%)~zvw#gxNYXhFMJvnxcP#mXDb>O zXnetPGz|+hzF_5*R=LH^fF1mbrF`QIJFzVO6-zAxN#V5)@bV(vn($f;2*UckJYr2HPJT}vA3X-@tIz^}aam0<>(~toT-N@7B z1_TOd+{jDol!go%!W;Tnt%3mLy5)D{=Zw6?9}lZGZ{f;O1i6Xvw{Ybs0Altt9gkuL4csNn)j@>Dj{>K9bC&(e$QEPx9Aw*^Bdt8W6qnW=Ec|Y)Ue<@m2|L9 z>i`g@11P5h2+r4O9RS31>$DC4V!Cx&2dTu%?VR@(i~|BCfSMBRtlN2dRP@HtEwIa~ zH8x1y&U@7Fwvmd1h+pl}aI;2}#&qvhYsdAX1V5O@KodEub6*{4LaZRU2V zMh2zLJX%JSCV~{eKXP8BdJT{Qs7cZ8`y)>;(!C({N8Vko4~B}d6_wUl!4!sWabBEw z!Z}-f0<`_Ma20;i#P-|5b2I_kep@&QP>e?}!s!-XHAsU9+go_wqZD~i+Um@nbi&lF zKKTGO`s4%PZ>vu}0PXQ$E{8l2rB@0Gi*}*q>%3r&3CO3p~-r%Wu);F|P z7A^bR&Z*~o8-H6*i#AOAw$?B}O#3#srP~IGY2W5>+dIi^TcrPga$Z;b0Qb_5SZgaK zAJL}PSVspo+=cCJvqeLL$j71=wzM`!W9`k+OWR{}=QdA|w#G^Z`v*aocADA(&}MQfTbp4W0o ztQ9#I^${a&t<$6B_4RFYnx|b9yCgP^7$}dn&FySKRu+xTnI4Uc(fy+}m(FZ%2M@8R z;x*QKX>2|wX^+k6XpMEs`82PR$==b%d7aVuZS$hDWAmez#>BisudFhcfT`WTHVzBK6wM=_xt22pzY^5^@;#(sr|ffe+`2Cem-D~ zm4(1=af7nQ{e0kY*2mJcu;sq(dUV=McHX!C47BII^=F`v_bt!S%Aq~?Ew7akR73!P zy+^7E5d;Aa9$}qA0+`4+4srgA6OWyodx(eftTDugDH4nyowsAhm;VU=DFV7^g{FVx z8JX4z3Jp^qc8+TtSALkAnI0Mg+F_oP>!AVu*}1=QT>NKl=6Go2<$mUQ`NHQ}v@8n^ zK3p*xfan1Aq6Ofn78)v51}-LEXcS4u0uVAUG^+KYkt0}Wc#B4kV4;CU`_PhF1+ArB z3!A31kxTs|ADYIUOdazO;xvHzIc?yrO@=4IyVwIts zA#gxqmEl=d2qab+o@Ir=2Wd&~P;o4k)XAteSzqJvO4Zd zl!l2``x2#LqSd}cIK8YfxaPvbL;&?fbc$KyPeiAfHAaqJUOL6BF{)((jy1$QYm8pf zQn~@gL~D$rhFJ%tM^CrS^^Vo@?{n7s6QpB;_5K9um|(q;Q>=87jtSNqrSjBd@pMeE z-iY?oy`aC|sOxLR#nNQh=0@l2i>^L*qfb5qOa&7Zb zXU&`^M?dOQ$OVN*eG0jt@Tj4ZeT(OU!lMQbyU$Aswr%b-cti1H&h2xCJA2x090^cc zoDEVteNr|^?KEudL2QuPY1GwQPfJpHwzD$ODC4zCQ9mVHfG+ zN}-Q@+uZG}zj(o@-9CYQ_@UiKx=t(dL14EL8KA5#9|U$AgN9m5nPr%?L0&SDVqWe{ zSTo95-+o;DB_mVjZB~F16o9f?0uYY%C8MCoN~e+7;jb7xOYZkHq&u$|>6vnpG#Z!) 
zVxRys5IEB9;NOgt-oSv$HHCj6kk=*2Wxh{nVum@h&4+DA%_4XYRodH^{Q|`aK6kRLyAfS zF-CwB1gG%DH<`}P)hCTk=cWKWeo=}+pR!T(g3e8*vQdEG_a+lIdJfGE(N>tw*0obk zO%H(O6{fOJKo|y4+9yCvzruull9^@TRBonxsmzQ#=Snj*-`Yr?D1)-|HC_7_RI&IP ztwc0}sWqlLN(020HD<0XvH%2AYfK!*$jSg>mDZR&WC8{dtF*@K*&7)b=u-l8o5_D~ z8WP(iTDO^@3@d^R2i?TLTGPxfc5Yr+8pkhLWf47)Vy&5(C2}`vz|h-Gl?Dk|Hl&J# zM>Jr>?X)PA?NI~Z4W?P(ArJ{P$P`H+2n?i1Z;?8J8HBK@tKZdA*~m@4K4@2L(nbXw zLNar-K4@2LB7NLOJ2!(SeW>gCYu57kLp}*QPCsO-><2g?@sMdt{{j#s9x_oKk}4&` zf5` zmc)W_DGZ&6(JVJ8kf6(Vd|gud{*IZGuOz1p`i@zs^0T(}9TSDZPsqb$ z(6RkPllLorlSRi-F&am<3tO6T5}V#Ux4qf9{pJ_Xr|kwDJv*iLANsS?cKgs&X$t6J z_76?ve*j|k4^8EN0AltJP57T<#N?o~*Lm)ik<%!X-s?|6N9w))6qHHt^#!C%daqe1 z9a09TOnR?bEIl?rOtIIj>SdAN$RL~fRQe74#w?%uvjjz-@*C*IET3w>0T8o%s{ID# zPM-_E(JzDi#^>5^0KzbU(r*C7f__eZqXh0I1MYFZbLsEGstgIW}T5apm2h0?Tdg(%LG zPGH=(T9Qm52|y_cKrr^L_JCP-@CP&H#()g8l$!luqCRl}7|SBY4x3Ka%Gk)mnk6)1 zp2K<#0b85%%P zxHg~+4Ir%G+JG`NfUtsV128m-I9XVR#m?uqmenr~s1TJUmLU;@v;rhuXVJF2&Us?> z$)oTSs=m(WiY)57fU+9&f~)HSD%t=9SJwqpw4sFd2IqmbXP%lxMsq_z1sgyZ1yHIQ zAf~$^pn^@d9lR-!a+^{$rLZ>zQj4tbNKx6OYsZ=8{&0??Swb7;akQcUVu+&^1rRJb zT2TO@C`T&_AQa`0qDby+l4cd;o;ZA!Po65GtF**GAsj6+K#*UhC8qp)jgUB-BwnK> z282-nrNjUs@ft00jvZVZNV#80OgZ=30J69@sdAD-5^w7Ia?RW9oK2c1G-9GnT4aD2 zvq_5#5Ik)PsIwA4h`cGF&Po6w@}>aJN+fa)iM*MX+G$%G9KP9~g2dje#ReMTX|Vxf zip^SVIyKw}v8Rr^PfMI5BnBuY1_+7o(-P;}!3P2VCOL{F+fQCn3fnIBz}w}K0rp4OBuzJ0lv$*Vf~lK zJsAiUieQgsn)#`KSt5eHfH4ai3jGF1@l*g;0F{V-gaL%7ogEu`mOmXZ^E@<)eoqIw z6*9vW@4X9Bslh|0O375sC6KO2x`BEZ;rXpn&iAQt4$0d=JS z5L5p-P$H8ifY8971FC8T5E^(k5R~UxfY89Rf!xY;vFN!p@ourd@e{M`_Gh8}y<0Ck zdNIpxz3c!n%Wl2wbkuv1tbJfES^JB6?EzsFK)Ln+G2M%__BR-C^0pn^6G-{2>mPS4 zV|jZ5xH^1+GAEnv*4_#5e{@~>hiqnCJnu5+raPZQ!b5Y0Eu@4`KA64i1cm%#du8<2=4{V3W*TMB(x4_@rjaW zevibX+dLvlYw=zH2W_tbMBfXb{!D3&O?LHufXnhfAkYIS*A*Z%{(c}mPh=Iw`HeB$Ox87o~`8*^xft0VEZuh*I^A$PwYjUxed1M&tx_-H5QEJn=pyCmYn0H-J z83aJ^xGtD4Gkbs_ye^1qC^{YG(WbdQ$iE88(-B}`-yXykxi*A70+{A_IB51R{@VHY z!95{dOdQ|VZguVM+Q{Q*msTzCAkV`=bptILK7U}!QSzE%W~>AE4zZ@ru6+-WWHZt0GeMTTBasQR z8XVF#Qq?*2)Pr&0Q_ap 
z?ByZQE;`7)SaC5FTW4MC)))*}iU2ZCmQk0_l{(BKoLEps4|C~cYp z!TfrE2xuM%_U%Wt7H;O_IY*G*6BYl1CEUPv&UCizd^_j6U}|^kGT2)_S<|7Qv*F44 z(++8iMI$WsP%ynfxG;d2>rk+pykZ21nGOYU#Yh&LPZs-QklzrJ77G~IKL)LQ>t+U( z0j4>A4my`Tb<2#OwM?QJGX1P=Lo{II&)POb17`YJ+lFX>ZTzflLpHz^ekR+Ppe6&2 ztDUx|Cyu!~r1VF4WQSLW)Iy5}!VBd`g#JVW=DIqBcMF#)LLj`@F`t<~c5xyhvY5po zwX0|xS)de!7jby3>$33;y%c?olZSNdRZ4#x!GA|rtdob54%xKuRlKy%6?s26}dsMtnxpimU2116E5$qS0# zW#gvfmc8Y|7Uzdg-!GZTbBGT0Lz0=O+x53hKp15Zco$Yndbgu`h9c`TWL`OL(@z2ms>}JI@*+NWk3YJDa{Zx^zB_PR54oo7hqP7-)>X>tK0mG8-uCSV5t6xj?}s*9K?9_g98DFrKDR z@_@2|Wn@_=3KU#pZFGkHa8h`qKT$qVHqu16eFQTFwt1Ix#}6lm@A4Tg0Lop&a36uv zjj96ahx&y#tCx}()(t3|S!T91OQ00eq5qsiv%~lK`YQy=eWbsO1xgWK4>@}eb%eJt zF3vZgQUsJOtXmP@yAm(maVP0q@?&LqtItb!pll^xP8TS}w)v3rryr+;AMzKW7$^_X zB8(O&Jy4W(djB*gvfZDk2T-=tL<{8-l%NREb@NZ0MIQ5IDgn}CB-2)D6s5NLg!91R zuJ9ASP^G|mf`qCNbA?d?aIXHjD*TksP8cXp5j)cbN(2{+&aR&?jO=1?H_~h(K-tCe z3aq;%J5k&GOV_wxYFPL${z658^cPyFL4r;hPTRNM%!Y(_Gk6S1rwk~&Sz5Z)PoR|J zXnpHmHX!^WgX5AYaAp0@d>dzQh5h2Lac z6aYY|Cve_m`Q5BD1x__i@$Lf#J3I1@Z|T**d50{0qnxb9Hvi$)nC#B*Km0YV0n$Hc zO-~YpYEe#i_nGXJ$cMh7Yk~72DO&CGUO2P5Qv&SP$Xn zL_YD^t^?91#P)iL)Eh60+}%O;`|v)0y52zAN7EfISgJ?*?_L*TCr0-BR#p$3{bXg& zNJ0%LY`b5E*qx&Y7Id8ilMImnndbjo8P#vrLs37-}u_@1EgL}cZ0x6S?j$Y5XI5Y$4C?H+WE32(7lGGsET+ZFqne2h^a_)JM zLBLwhyLDHtYA{^3dtnyq7g@o%ID&xEVBoCaMLn!9C8Z(o+3v$x?5oO^9C1-w8IP1RS{gSS~vh+2Bh#0!SN3%6p{0 z7zr1?{dJqwvG7LD#n}ohj|A36UeVLKLQHowO1o}pKD#Qs3CW2P^Jw5~;wZlj6*$M> zA&L8^d^S9MFGr#xRdEb(?&YOXtCzqz);8~Va|>AS$o<^&V8;UIetJB9jbwWiytF&D zfGvt_^%Xw~NLxwq?@G2u<7RRDF9mEGi){04W;C$2k0}_i#_Rf6t0dc}kdIyzW~(E6d|NsN zNPBp?jug#K)QltWOB5uh8ZsTraxsSFrx!&wY(g0?y~8ahWenM)}0;Tgj$`zw}LDGH|{m z6OgX#bbJ-#{-KifjePAZ{B+=aO$vWl3U`KW{@XoR$^I1iH}}%NGl2AOo?D>Z^c36$ zy3bd!H^M(~S>lDnQ-JgXk5@Uihz&T8dA|*Qur zYQuA`X94MIqp*iHKoC0HHWwN0ebsDWbdlj%)7d~;WWbtkm3*H=4{Q*-k;3gU%6PWYJH?ONj=viTSiTHWISz%<^*2{9?&&L-U+rO!0 zr?I+QeaX)U)~zJD+}G3Sr6L|zm0;@e&6l+_xfj&39u?CtJ=(QrorXcH4XpCxQWF>8 zF3r8Kjy(}x3%-@Fy8uXQjUExJM(8w#r-SbGy;-ja9wX`y8UxOH17@cpb|c)Z`?ubR 
z*mzQ?9cv?yHX5)Ob$V+e@B45)yD_rK@FGzYkTx0ERx2b+)5-nD8kiHo+ds|sbRgYJ zd@q$MX-1B`J+BW-W05UBsb*kpAyP{u)`hnD;P&BtSt^S>XvoiFU~Crx>p>%2Zrvj% zoB>zsHuhupM)1&2PdEce+i1dvB+^WHTDNz9_Hg(Sf4-SOdW7cd5Q=I+Rdv+>HZQ!x z;NrOfCTju84x^#J+Ey3Y=98{7fb|VO>96EPz!k-lZ>JKHv2aF0I<-uDF`XEtzNARKwDpM#GDUH2$9 zF!Hj$U*-VkW!f)mr5@XC^VRL2!SArDSA9xtz%8GU2-w7{)cvn3T*A?Is!G?(D^!hLW!yEpu);d!-7fb^*WuhuLGU5XDE-77}08Ido1 z1zZZ8FGvB8Nvx@dl`_vG7z*{1)=$r>Ao|P&53;DOEw=k-;iXg z8gRL7e&@Cv%^r<>XUK~p5V{;l-x)o7S@dBm;44sR-ah&mHh@(e@-=w{unv(XrBk{R zeiuewT;m=%mKC~Nj%6iLH0TP)mB2o1;62eBlIB%-nr?32cPyL2q6~ZXhGX3HZq~btYN-s~=b5Ubi z2&cBp-2TCFEXvB4nfO!-mkGdIh|>}}`V1iJqGHU|KwNHauNcd6S=n-&&=um*qAaj^icTM z623#?+|9&*MHsNkoq8hMT(%116xt#}TV-NdRv7|qG0@g*uO7!%vobsq%u{%%6rtTj z2*letl4l9<)^30QBsP^r*P8hFQ_8)B@Yb4mf89@@Ed|*aY+>1hCQ<^4wVbdXG!YrEm&9%Y+QaTS6WI;D z9`+hfuxiF*8&_?}tNKxW(CFBGC~5 zi0&}6yGsBU3p>n`GL)Ej2sN>t?t)3|_~BFu^G4I#m!0@XTnwORJ3Gx#z63COr&(BH zbz*c53MD8AxW7zdZ9}O_<`JM*B7!Y)0Wo|Nn|(+b3JdUU?aEGk8D2Jav#t$*Wm);PpY98GV{pVi#k82zjn zlB4OJ`Li_oLX1ZJ?K!vY3^qMZRX1<$JZSDYS|Q?=UYS2<7Su=pqn|VJ^rRT0QJQ-m z3sjs%Zw*fyH%>k^v;<;)BE5WY15Bw)dUXCgJAX(n!eI^BjTtg`SUY%W5a zZXaXumgz-sWi`g=K5oktR&l|f-13xL@b5|dn}l8)CsX=ru87mU(&(yXZh677ZAH1K zqD|ky2x3VC#Sr@{D-9P$xJ|~lcSQ!>7LC5ZeQOFky(kGewWVW}z!`Rd`@;@a?cQ`I zOLseGv%(s-`L5ES?0oB|Kj4ZeFc}}_b(H@fyd)O0j>T7fownnCbrB19%cDQtpk<9U z2!zq6xEG$qj?GA%(VaVs6|h^}$IoJ=;a?qeGQ=DXF$cR3+>AK1e{_+$X%0{?DpJr9=J!iA@N;QT-n`}~s&;;)6PM<%wdDwZ&?kXLO zDF(Rg2~;%poy~gZ`_i8DZkh$lO-bqF&OL|a#|!AP2lumvkGct_Xn>VuP(vm2esfHM)ZbwKV;iM{QmHDbP=e#QpHF8)b&s5 zB2ae4Z6l4-mjTrG5%m$MhgqCn@l)4c>LL&u0-%i#5i(73jsS3jsbG4-A+D0MdF}$b zDj^8>E7@3J1wo>0R7n+NVE$~p6-PmYP;qmd!*j2~ONw*>eszZ_q|7lU;0`!dw0(C6 zu5@yZYOmpI(NHtlYrqX9FiyV`lMp-DTY0=dvx7@!BM%NwGQi^CCofH0>kul0%)>p4 zWQQI( zbR||861QbwhimVQH!@mLK8E2=g{!ZpKgT3ei8-B+&6jY)GOkd{z9jj!NUQfGj0Ay=U+3J{pC5P!8pTxmjO6?mvaDnlPs zR>`a63KFPSu+nH18e?%ZiY2U4DkChhgjGsq)F76yN-ZHHi>l_enjO3mG^#yzNQc$p zPje6hWKh+3axB>)gR0@RnjJEzxHRw@W0vFxj|c@jwH`alqG~;MlttBg?7*Vxcs-dr 
zO^L^ZMjg@{1%aKq3euzK13PsC1}b*QpyIuG{S{)rVWg*?-o{kLjxwlvj~!)D^)-_` z456L+$(MTpYg?|Bh35jzGuqnc|4Oim6-QOriuG}aO<_gFpWx%gx=akqszGnkqR&UEs!VfqeTLS$$$? zdQyd^y8jlM3KK(1uN#z+g9|Cqy4W zNx>(BXKHVsk{Z{}BEyijP60}6&P=r%s6qIOOwYY4UyhywM1T>&(2G7` zCp#(lWG4h4oLdnLuT;U%k3KoZiDK)4nq()}=;<}cU|6jgR=y&4@)cl!f+2iGLNKJ@ zla~~H^8DZvhqK5_iasJ39vOY|jcbpHK7KS*&ccs|D*8|~R1a0Ur_}kur#l{`>tIkE z=j}&CpJKG=?jFg}hd|v`k{o>q)Lo^?(T70Y6;2L5VIEJ2J`rA~-91I0i2rB?OBhMl z-WIR!j0hh6`q3xK%e1@qqfgXmQ_fQvRGH^ql|hxkpyHJAc+scKbN8^QgkVU~r@~{0 zqEAISy`|OBrvlGm{pgbr41I&DH0CIFltIZ@7^m4)^znnCvM4_o!lDv_Aw{3+r07$f z6n(0bqEA9F^n*{0F-@>@9z~=hf}t`fKNu>5LNN5A&$k6u0sd&m^m$F4^w-_!|6d70 zbYiN=EEWeVb*^&DX0mR@<ItacJ-daKQLyxngL0G|PYjj;aja`}?`dUIk^``Rth*FPoxTvZw>;{cdJ(F#bi6wz zAym@wuEffk9c!P}+)2;rI-6s&Evti$d*XNbJ*JhRG$cc*D^7hy1<9dQp^QG&ZJ2dr z{1fNCmEznN{f%Mcp}&#fb*Mb~ojQw^|F0*%B{HI>9gVRK7~9Jo^)xGWFKuO+`UEJ? z)5+m=8q_ir=toJloIjg2B!=0W@j{wTfWv+z%+g^Hd)Q3rAIA4h$n>RZvMd#i_k48GjIR@Gv-LWOx$vxC2gh2x+;#gmDKQJ0YH;0d%5_OwGE|3-LJ zvtxLNiij~@v13TL9u@4=)3M%=c~sO>k0IHKdhD3CSRn2!Y-ixPkW->w!nT;ts`9NtOfM6f}J$5J!>2J|r zkke^Me|+HMry&FQK+TT7dj=S@#R5{&A;SJZj~%sp1}b*qgd)s!pt2973br|j57F$< z?irMnh73wdLk1!qcGT_};<3|HZJ!~YderV2l8}bH*2lfEgUw|N z+@CvGP4Bw@_BUudQ8(=9)7gSI@13#PbKOy!SY6ti_4J-ToO1p4<(=%NV8&>Q2=oVL zD^o_hS@*NBJN61TIOkvw5gY;wQm$vLcyEvG%dcQ-Lfdl|vGKt|vMRET1*y5#I;N|0 z*SPbRvGonOi3%wn0vz)ihKM3v3Vl6t4a4i@y>bkCuX9J=!2Wd-ZgTV(EHa)B!wbkU zD0KkL$@TKEzv6qGCP))&nK9?L60qFjBU3d>!V2Bg$5~nN%Z7!jfFzJm6$p$LRe|V5 zvImKbf9j`v?1D0drfW@X6*gb6pZBDU6@dGoIo z9@Jn_cqkKtlj|7ys%Ea$D$5v2<%nvx<|dZ$-)a&~RFgP6!>^Q=FJHBSMI$FuhAcS{ z7aB)o$nNngSVLV)LK;o?8Al++8gzwa^il3zD_HIENh|Ls&@-%HTYJllmN_CB&+1>2 zoPufrCkPyz2y%YMs=6i7QseCz?lHe(4Xoas^E<3Z5=xr4(TDw~&@0_ihehhn{O?ke zKx$Pg+&K;_JT_?tPh)no6`Cv0K;oV!z5j1y#{=Cb9ab|oiHaU+|Nqg7<(q;#;8xbh zI_dRpGI5^u-xB9YV&Y8cG=gC;r4|8}(-+gJDd{562gog^2E=zU_*fbjFll%LiGW1_ zy3*k16oe!^4f!oTpg|wHEL^6eD^8FA>5oX!INbgk7Cz}v>3U{Lji0V(ruwzAtpBK$ zQH$`^NY}Gfa-h<60(Do8mv`eD2UjNq>%QFNbe%xmWqZTKH4cHgD~}I_|EB!^Q@Y_H zPXv 
zl{?pEb>c73P;CuY6FjU7V1C|GdSpE=>=nRN-V*i-V5%M$_6lGKT2Wpu^8~CI3SiNs zq(7RL>759mkf@i}Qt`fwm+QRN&s)lltjCqBL;>Vw8ddHko+@oA_tILVEtR~dW{2u= zl{}t!8&v5RG7lTmYyCzDsSoOC&Il_M#TM0xK~0! zuI@kTanwS~PPajRN-HiOe?6rYmyeXv>QN2Ji7N7>3Cj~!(zgFJR%D?|7&%?_owLwH=59?3Bc zNy>4DB;~k6`7q6nXDh}~RciApZNofvl&K8!*ioi3OqmLpIXY!Bi;JbC$&XIO=aw)A zJohk;QKrJZi$iMBy~nCW=knr^K;1P;tvQz$hXm@b(cUO=O-&%W;$ptzykWw@5lE)O z>8s{3eC$~A&jdkbW7G-)0@GvEnHmrla174W3KJjfjpbuR?HzsIk!h@PoT3cdIDwB- z)dQT*LHY!bG=+r|Jkn$vC#d5(Fkz}E@^Nx82-L^16OA*(frHw_`DdJ33~CU^v2kiK zs6iZm#_8$-eHLk+%*RXP00eeU#v4K@4K2^f;YtmGos-ocg#{+qIfakc?D)2EimI&; zieP8F$Bwd%@nA<3sR>2!GhW%M_@fmQ_(aVPRSzbp&rZ}xgVR*?fKUWG69qdcQd5gi&qPl>s(LU{*(y#ZXQa%MUX3O{Beg=9l>t+o zqD zp3BdZ8UqB4ovZ8#5STt!*%KgW>|A9}aez?SRDPaR8G+iMrovdMB0_CKW#_3Cqz0j} z^OVM@K`h{TY5@(Hj5&?RG&^M8)70@EePCyrCB77ngF4yww0!x82mpbd3wTVkV5dphHz2Sxoi}TC zeEXiR?3+-8cA7nQlzlgQ>?reY_Sk_%&EPFyCl0GMINA3MW#53n&J1PWfWXcSW#53n k&P?8-+41drrm}BB5$v>h>?reY@z_!3-GXXENIeq!zZi#LC;$Ke delta 36601 zcmdVD33yf2)i<8K&$&ZR=97_2l6#X7h5%s{WD*IWGKdl;nG83{1)`bE08*`25l}%y zyvR{Ru-H1#RbuyGtHnc+mHYHvjg~VFgq3h zP0hB-Z!!~q%KgodYAdU|bxo|JX=PKav93A#jdev?5jHS5-nTN^+!?FQ^lE4K+?}^A z$gW77UQ$&(bX8m1s^(brs#FLuj#b0jJKDP1>bqAC>sr?y>m1e?>ul&~YVT_6 zXs=Ivt7L%nhiAAIuzdqkZs~cx>m4**iVkM!#WYZ|9b2 zn%r-jzht~$XnoH$lMQx`m0|g))D*>*27LskmGMHCuon^#RRx07mM!{r_MoHzwXYr+@q(EZA4j_tQxKsYeP3^Z`ox05Q^k z>X8Bj`G4w>0tESg>XG_6DnDVoT1*2FC;`-zXr!O8v=v4?x(+&Ax6AiJn>ZRFrr`8HjF7CQ5wT$a4!^qO_(P zImBZ>?Iu4>cS|BT`+F0&aF;^{IJAYQ$qfh;(AdID1}hF3)Pz?Lvzi3~%xhcX{n@RP zws`~~*5)>@js!n9L0}tKM*=_)*v27NBOq1*+n?Ymw04?v$oXfcY0wsW&67lP1u1=mfss(nA-2DJS z>Si92vL4_ND~I!ef)9W|3ZN!MV&(8Oxo1%eN)9iUG6)cq9NvF`)emzH+Wy;k${n1A z3L?|Q8C@Uij6@T^oRb^BjR#A`$b-a#-I@o0&>cY89YBnHx8?yrbhlgc03f>Ct$7fn zvE0u25HWB-paf7;qOEm1Pm74!II0D9*#M0VQn&M}!D>+8!Cub$t4iVlKuwBxu$QOx z(Y2tomxszU2ugc-ze;O3c#vxQ@8l^DfCuHhc<}Oh;U#zSV2S81m3Z)N%>zK_4xsD~ zAb9X?%>#hw?%SFN0MXsIi3bizdMYvDp2WQKZ;jvMQK0p|$D=?~zlYmWSkM9#?%~ip zBuS~X{P%KRE@lG=V*#kg!a?a?j}rBEFSkoHGAP~4BV|NsE=U3VF6Wi1)&MDhniOrn 
z@A9-FT??QYk1Zex+$Cds`72EGVo~;Sc_PdXR z0BLj7LZ9BpE5~RM3i>`i{4_-#lnx}8*PgxXfJfd(g&uhy#&^IY?*sV*9(fh%czdsW~VA=lP@swwOFt0k0{W~7awLYSyvS`&`PMopiAMuxUuP8&WFKY$^M6WM% zTOQj0(d)|`$Mz9Awk=}+E1VAwy}~0?+giIi+L}8fU8`e})@Tb3@Rj24wH?v+_E<+` zW^_$7($F03?2N36wZ=N4U9rZ<%C?S3*Lbw37DiQMWfM#-D>8p|v`gWxZE9|gM4Q(} z*L6l3&_}eXRgjep62mVn%djGAV}yjw-MTJu;f0m)-qdESj9i4y>sq>-yPDdYV|CO^ zXQZh!(%94)t#6Js4k41Q=%!iFZUfXI`v>02j)0W0 zaXumB7!gb@)|xy8;{MD;#`1@JqWi>uEgviYy6M6lOVCR9NE_)bEnSv4N8jSv@*D+- z9^c~qokq4i?E#{royNc_#mS(cHF%;1(a}z0%mmdDd=*o zIW)Hz1EejEHAFwR7*$eI3IL;{Ta44jTOZ29J zzybc1BxT#?V~M$KPfvQxqhN!=V;%(?6dp5deS+Aa@R%`psP%-TkZYSyCYH4SY|4`! zge&pHaFAP3D)S6gv>uHG{mrR=iP|qe02huN$G;M0+ zfzmIG@F*pId7$(QW6U_~8fFi&}{*pd4Efrtuyz zJW$u(;TIeQd#*KkX=uAYvS@Won0)w8&nK5=8xv3=EGFs?Cjm>ozlx zZiQimp_&G8yJ=>H5|6}7;`mDjg+vWzal4t3DeOxsK-0aZGSvhu3xdKZ69s6om)3$T zQYrxaEz`_*5r_l|q!}j=1P08wmx&$zbV9hNXX%y8*raD1}Lo{z5D&Ep3>66E|JG?j%09FRC@+VbcF2$mi+5zLY-r9FAjERlvbYQfTj z=D-n(rOfs}Wb&W5ET!Q*WCk)2h@@FFV(G)CX@?S78!OB4mo#og4aWAc3FCHiskG_rwzn2ZMavBaGl2M>5G8I5N87@@r=)|w{qstLF9smz|E8>9YsWJ)J_ zlOu`9kGTkIAQP(SH~JI2BP#2>(_O~BAs0wv1m)AvAd(G zb(Ixqk9Kt8kLm~d&n5hF$o8Xt*F7k*j< z^qLx@T}{BpzhW}Dk!WtDc5=}uPJ!vWpf66aI)Ku}0raYwou{z{vkJ8U7+CRF%|2DS z7AyX$i2&~XbjqhwH02GGj}6_yBHn$6xDELiERBBwPpc&hJ6E@LH#b^9059MTcXxsr zU|vT|PHk;d*XoG8b!m{32H)Z8uWqVu>O$yN1ON~nKpdbY+S&jg8w=Li*6MKs^0$n} ztl2?+-y0q`=$L%NR8fD-58QaegwZ2eNl}+KOcixOEx7T9iLl5Uk{clPZsLd6zkVV4 zS?_v%Q0V1duMhIG-Zhn-oG$#VcTJ`D0HTj~O{Mn$qK|h?=)K8;;n=N55+}A!s-x?i zBVHHeXdUsoAV=$nx2WW39We`~l1b-&baBKCNlO7Bx;SE1R#~JA(rHs4C0#HDf1;D4 zUMB(cepG7%)S{E4S{ndFCr7n5Alv<8XoE!~(tX(WA8VBW2(180l>iWX^A#n2(? 
zP5vTD|CyO`wNF}6WWj$%I)o;YLC4Jrq15Vb^iMdU zr-5=z6INmWjO!^_zpqwy3O!zx`!r8X}9$(6%YQp5` z-Q$DD@Dc^_vT4FSJ^h_Ouq8cuI4DFPJ$l9f(WXbw7$Anz<5LbgK+L$uryO*Em~oE} z4m!;^n`ZnF&3Fp_#HsL**9pz}Aw6f*5~iLrKy>nuo-;YCkBF7drj>p~PaF_h0hAL5 zh>1UqNAtu!~vqsQ+nb6 zF{Y>V!~tUBPw9yR#KfPXiN8;xltY%{(?0&p&UoBFgbP$-Nz6jRU7_{?Hq#0eO^ zprFtzz$~8e;UcOZtQx`q!fz5E-QK_aH$F4hMI)=`H@<>GYpg;8{A^<4o^kPKeP*VM zM%(OJAKVWzYH~o!|D2CM?~@@)z*rR&NWBIStMZ&rU2FkF*U$Nir7;W;9C*&BBBlVr zf#-dG8A|~O4m|J6>6a$fK8HF!EY==>qL;&7FSN@K>$OKMdO57u9w2%-tk<5R3%{qe zADu&M|9idmfY1t{Tzi1%?)S9zTMV4=w(Wn#m-3IEf;%>|+*f?K!TvQ_#5P^=zTxA4 z@9Ftg7BgDf+7dtCHz1Dxmn*{u7@+%x&mx9FTgtoMH+^Qg;HZEx85GEiSW$p6zUj-5 zOCt(E;!R&(iPr>Edeet1Ax#qqZ~4p$i4aF8v<@ioh|0lB>6UTwX!}0zH6oT>*mQZ~M}6g+&4owBGjRNpsE47oz>P4+i^g5?-4Y z=%KmCFSd1e+E~A9DSbl(dFyzo5@hDMm zM|`%lmVgXOM|=n%V_I>H1>grhbrA~)^8%~JhaR(m{E?5#n4QhZa094Gk>U1{Hz+dP zKJw-0m7xInM!s|^=!{>VgT>4x!ML|)%-tJ;4fpyLWfY>{d;Llv0D|zn{ygci0R-WD z{kROJLnD_q&3-@s%rCuOz`)+`#}>IRfIR}3`gp=`4hemnC^_(AMx?Ewp}PatbL6bH zj-F!&?&NXQO4%0JnAH=0l@1{aK=28Ft~>)p0S54dA7>y5yC}d}^Mt>6u$7Hm99aMv zd%|B*Wj)W;AVB1oG>D(_$P{^1w;H!`u}h;3T|LDQPGYN3>kWeB`IpIqAc_8E@*qg2 zf0;Z8I`Dp(JO~!VAbv@M_)fYS1PYGz{PbXNW=Jeel#m*f7tO3@@PXb2($2f0K+<5Rym(j`jU1_d5b+Blyk6Q!;5sXuS1 z*90^_^$#CGK|XHg+5W2oDYpbz=r1hdWkhtXPQ3o3KV)AW2==iqh1BJdOl=Azp8N5- zx=jHkw`- zz%szp$L2ud-p6iSwK<^XB#JSo%>gB#q5v&72b6${0`#;wpafJDKsGi9lz_?th{EOo zWaB*58E9-x-0_pS)3zq}MRK?`pca}Mk{oUg&^gzLo|}BwAj)m(5KS{`VOEhcP|p|96-r-uV3>>kZi0 z$VW^`d9SOL$cRUkkCHzdZs(_YfR{R+VZLPM%$ZI&c_ zwy`Slvv;m9Ud1U4OeB23SY_v069oysZCsRSe)o)$i+F}Y@B`r@#G9U$Bm%b4l6dOf z-xRmFBm%%_0SR1jgJ94$+7l1FH@LW6+#FCJIDd?G7vNy{%)^Iy?C5uc!7^x$1<*`uW?)RhpWmDYxp*md+)I!rWSvl4a(F;;Y z*Cj?A={G>4;u6AG$Abzr4XEpIm2pV+l5QK9C*nu$SGWk*8kf6d(}8K*;Yal#>LwDMA}C;hu1UA5ZfCBGO;h#vus=3H37!e z*~Y|AK6*O5k%3_nCkr?mSx&C?oa_{rW7qcVKl%a-U(3*!M9K!zwG5p>xM^g#|Ju^C z`=9+-=@y33OQakiZDAPx8i9k0ts4@5{`k_;8z6lO#Rkd^EWgnDhCsp1)%HZ!CzqFQ zXFOG*mnUH8!0F>KUMwWH4KqB7?Qc#9AM z%6DiHY6MDO_@p;|HZ6RQ*H2$y+(Z3bDOaG_How=?@EK>}?|D-y2GaLvN)JexD6!4^ 
z6B|D7sl4BtQwebHr#V%Ku1an52aM;1@*`86Z=KKb6746fQj}=%C_$=p!j{DoxhNlB<0Nm0&|LH6_TE08O-YSln`g&}bIf?!f%GQpQ>^4@1X7ZmIceX8M(ub@tWc`n%G}<=*!5k}t{Wbg#kJM;j{ewtN5u{Fo6$+$GRuehKxNuF--)TTP z#`+BqA%DPQZ1Z#GG-t8{rJs93Fa}tkvw}YA+!~AX+Zmt5MugXME=)YM91EQFyr{4B zo*dNaIKZ9FS?ox^jU0+n?vvAjw2|BS;>!ax8ixZMnX92S&bQ8CVdt@IYzGwREi(@2 z*Yfg8>orMpJdSc_at?dFbPIRY$#@`b;n}&?U@^}LILn=>HX9ngfxFvb0&s5Nd4<-; zlF&q)=FT>o9Sd*g?x8smNZUDPd9OS%CgD6kbWbiD%u12yENoW{c@nU8@QVJ{Wum(? zkVxYk$z$Ivy#>~an&uh6xrHOhF;3u2M&hq?eLkB|iezDNfeAX3fwP;JM64=-b0#8? z&foIckT7z0^~O3AID08me7zj@6dc*k@B+3Wyw79&6d>&*#{WqUyT&%}I`l#Tt7GB2 zxGU>5z`Bb=X0}O%Iu)+rp}9qDI}7jUuCkm8to=MImn=^@q^8;C_Z=2ue=7aHhcpdH z-zOwF^6AK8aXt;PA*IMi)#uN2;2hwHTu7CA7NU*LS$)~%;U1N(3*u)1r-xS#vc4xf zJKHu7Ie+cT?khc{lEZ;?HjobSaD_U3&%qU;^ZjC+zCZMqI_%Y5!V1NpN^B6Cy)JDfl+x)3>TM2tQ{8Mk;W&-J_Jim|DRI_aJXU?Wl))W4j z$}tC}SwQ+3A3V(3DTh6q&gcum?3VCTo-EA<(o?)3WL+zf&b7^7JNSfXbNJWZ%AE_O zU-P13eI(972+(;rf+O)+Pfq3l=~-S^XEJuJv$-|lptd|IDxZq~3ZT`u5upb*) z`X`U$wZQokaaj{5p4){Q@Ap&U5nhF})D+G-pqLcDVE{E(2+p@j@WI#UuSF;|$;; z+^ad;2e7Nc?|O@}2srQ3qR5$DfFKliMhwI`_JOxiE&$R8ys)n|N)TF%&<%I48puXR zkWZ?mX)%zFk~G~c$GrsCT--T4knIeA%w2811Xv&Q;)oj4QaJJ4**u6{89wgq@ufgI z&f&63=V2M{r?@kyidB^&ms1}E%Ybx(kffcmoDSeis-R!i8?H6H960NZOxya6T-^)d zV;}lO6+54mB4<lslQU)rj-Bfack|W(=Q;x`_aiyG74U1FtA?=0 zO1BzZ1S7%W6+qf*^bK1B1)rj}d84y@D60zJXt-h-1cXe``8Z%r-+&Oure`YaE0 zn{m&w^ECEpDe?d{yPJXY2(eq5vn{x9aYAEop#I3iX#viU2uHf^t+x3S=i)JJboeKR zy9HW-^AiJGV7Fvr8^U~t9vj0-S@;Q$QX8UrbYpTfWPhSdS2U(>KQNEUR`nL29% z+f@3jx44}^dX^SfZh|g4Qs0{Z4fedZwq3w^p4L`+)7@m#j+@A;!!LR`-N1Q~aFmd& z!3Ph{ritvkr7s)q$prV^eA&Rs6ccN*);9m_oHvQB3jf(-z*^w^nHcaxIqFNvTs=IA z{kZfML&nsxnwJ9U6$6T=SrA%B*6M*XSZnxi-ptkk=WjH#hb5tJ;F83dG?_gbe#4L< zbrAXnklrx*S6L58c3)TnehKQ|8jb2`gmWuMa-B@wS8#LwxFdQKv)2jW$S2A$33vB*`3PAEJC zFi>vdleS9)y{mz?!P$Kd>nz(~;@FVXt|qJvCMIzY6KETNc8xQ92HRhD4SYa_wt>*D zF|h>yWA}BYZvXCi8v)BZ@t%n|*#g?(iRuf-lO1w>k zx79>O^az1=4bX0MhM&tumEDMljl#Nyux_Npk>hf3*8(l!Y&e&FR+cbb)*-@zViT-8 zCOf?jXgd$>o5L<=k)7ULuOqyjG}ob`)6GD;)j8vQI4ieOK876HX2QCaQbOd=wg79l 
zv-NxoZMO+)SYjbygJKhA4zvJG^m?G}b^dxjyFRiPPPan4p3wH1uo3+e%n>nox4rNn$qu?QW;KmTjrJ8zB~jMiC(MZWAk?X5muQ|2;G1Q3N-~ z;WGqe-rwiETFb^xLQKv4o*Ag1%v9W>pd0}#34rK*W>y~wAkccBSzLzr5Dy@m{(#9# zLKB_)7O+C6bsnplfPkAvhq5Vw;TKIQJNSSZ$ddr*954%utuD|(+WZepUg^9rkF||M z6wV_+2{@<3ur~V4i@D#)>n(!Mb2F}Z|-d8l8agC*dN?gl3CG(G{m9p&CxDO zbbIu!lI)6TbF8DQ(=99iHWF3^8>X*8C-x1_Gic00&faY-KOib<&U9{A%+7S?HnV)T z#d&`*E9qT7*{N8h)W8} z`!!O78FXP`BJ;yYNTn&kgDP|*c@I36PfB>r#b?4Gn-Sc z2UYzqRS`(n)Z(HnkLNF>U_3#%?#RPwrXVmQ4>3vwpX7{7LiChWf(ZE#yU`6b0Om8HMtGf$Gp#VXC-6AqagH;!Z>M zMT=R)N2=BP6eMs1~$C0ddPj5NL;R%Y*FW?T6QLvoj_exu9lt2WhYQqRR}ZM4+>)03Di~nC^i?z+OohKw6+B6x=JMxOiBcS zx~jjD2qqAkriNgOTD$0sshg8T zvhu3=u$ig>5|NB*s9o8d*1tN7p5-87wEop{)C7U_Fg`<(_LhH`agM82HXvsR`NgqS zZ5`|6sbq@-N}fwyQ5;Z-GoJ7Zg%kR{VFjluWP5y)&Z%;~zk>Cj>*^fD6q0mKF;q#> zW*qW!l0I(Mbd)Cg))#1^tC=%4$`(!e4>i#v49aB$`W@;Vxc~vxXQC|g%&)GN`a&%g zLM>_TBx$DcU+u6j|1Z^nVT2ME>Hov_iwx+4A zyHi(7MTA!Z3Y2=P(_^Y5^($3-+Q^fM);ML`SeYPNRGX)o_NIB6RBof3tqp8c&PmD* zhCt4ZY>@M8lm(phM%I7X*U)qQzgW*@>Hcf}PgUJ;Pt^^_#{VK!mn{wisp=MZ+DfUq z1#a!XRdo}+#!A&qbQ}NQr0V49^YvAoJbg}5b!VKU>dutskydqM#E}E&YpS}_ls58C zx*}tAZ&i2TKUH=3RO3`t*LJF^yX3wvRdxAdVWq0O@Dx>dpv z25DrXtW@2(s=?P)b+Z-eWK}oIRdx3u0t08NvAdz`OElf;#N;P@r0OD0V-w3B5Rx93 z#4dMyR^WWu-iBYS94@0~kaI&5t6b(PzcZ|KQhs#}tLZhSM&)?*g%&G>GRjR>f{I*i z8a0?I@$EV*sL%HzKU}g?PxgM!kc(KqzO^o%JD)Hp^ntC5c3Ht86igfYe0-OQ$Zp=|Dkmg2RFxo%cr?qu3q{z^mFAG>*q|(7^R-Sl_ovWiC^G} zl2Z!n(@ZH6{wZ>ud@1?L6%Ba)Dg7z^(_qRlt+eS+rL@yhDy6eQ26$R9%hjwIe7>i5 zh0=~w5gGZU(=%NMRr{w@qHDA8CWY(V3Z+dYx;DGFe@dXP%IWQ&5~!=}cy9;QRt_qS zCRceQ%0DGgH_KDri1JSfls+Sz^8#K-4utkk3wrsd1-<;!f{e_#M#Vu|$QLPP=N-|7 z#-!d#yKpjjzsf)D!$YSj0;IJ2^zu*pAm$~g%3Igd8yiszp$}TY`0)r^SEe-PsJvBlM zGW@mhMqLh(?1d4|P?aDZcD-tR%L!L?ydrJ5gN_*El&OLA&2nCGKXV3O!p6p756DrN zE^39*stgBFCVb?Ic6i~+M;?$eQHm{TyaD(UkCvP4hJk$fNp=I3=-NR_@|25Apk>8n z%c|Z!GJ(3;;NCtmfx2o4`N*D&OrWkBs)SOx$OP)DY9*AyMUIn4ndBo6=OeUsC&OVl zGXIFn(r_4V(fe=Oa2PJWsfevf7=<;#U}BM%<1Z414oVgx6i0v+}gIbby!zZ3m(*p zwzRj`JHO~+gHwM#jB=MtQ*Ju+X&1Y}pFTqzzQ}~_momfIc^O-f^XC%bH~Ft&DK{|| 
zdbZ@y@ypn)0q4f`%s*^KE2y=0RfG7zK`ZFnZ2Cu)A_TlPGD$;((-^~7ZEK*!`8%QHkTyeU{HuP1+c9xyvF4sMk6gFvTk2bAp z)dBfPYDn9c&iL!u0J0%-LjNsdCKB`1m`P-jtZ|LUuM|0vZT@ZKq{i+{V&Rj_k)N_OW=N zvZ(&uICnW~Ze)?%TC&__cj7WeTkg)G8`+TD#w5F*E@O197X}*SWNv4Jij(a6dduI| z(X^_mRb0+wj;s*oYxY#t%Xy-%rqcQ5c2;TCDqMT1^K61uBlh!of|X^|rn*PNcyC(# zE3p4&XVXnA{I92Fudo8jOc$O`+Q^d(=z-3F1RL15R%6-!|IDdBmg+0a$+>L@8#c1$ z%fnMjT9p0!rY-H?#sA9wE%fZ)LE=y71V5}|ZS}fWH9^-DPEB#+SM(=r3i?tsJta%n z6!e{_k?uo?9?1;|%h2Tteb*585rn!7X$27kvpWNL5))OKxG#s=LKSJ$OsNG40%9h< zx=>bkPO{aV6ZAr5xw!an4QF9>Qwf<3DFN6Cd8b(2R3fYC*5u?+8I|bT0%gRgTOk5< zRiStQ*S!@YP*)XsM)xeb10Yz}^%40X?yV4kx+)|RiriZv0;N$2KEqsH>^!`a4bC71 z4<-sr^aAI|P8b0v#l1?B<6f{v#|Wc9#=XM5Oz3d0D<01nr3in7pBndycyTXbjLNWA zoGxx)aFqAD;wkTS#Z%tvil>5qLENjtI5qB7krekDkaBvm`Rawe;sg1~aW5(%wvu%_ z$TdX8%^rces!FY)y5b>FSA7lh^{ig@D?O-2w?E?2Z>U;c8b`HSUUkJoptJS+nRpVNw z$q}uuW0}sDhUqMDlH=Al@R}_cA{D}E-&T? z)K!zzs&RQSN1(1cL$`_t#LXPRx^A+_0r!KtxR@hQSDmRgC6}v4pst#tR*lPxIRbHT z;oCMGaZ~x(Vru~a|EDVc17ddEn5uk8fH(rCDjyOMmUe2}`%G7q!KZ1s zxFboxWhp36b1BmSIn5`2(TS1^aT|ClFW1w5P>BWjNAU)*iC7LkM&*BZLF&mC(AOHg24& zM8S{(M4+lO<|qL&qyQ19tIl&<3Hc#VSDmlq$B^q*Nnpm#YT~EK>Q6KB*ga>p68=IK?WKkmBuujjjeBGVVRVwAERs!X7tl)xn z`K*{-W2JR^&0#khu!bYTwoPn1TvShUzfhj~*FRPX z3p*@mmV2~nlQ73Gvx41fNTSs%s+Dvd&eOxKb+Kwj$gpYB?YKza3C$B}JYCUw-mLTu z=Zk)6LY5?oUpgZ`K}SFXDHE(Vd2MN>WUx>Jo&}51Ti~7DEnsQHqiG#& zYnmGI5}SB0n{EyCW8rYFMaxKkQ6&~Y*GV?Qg-7zeaGK$h3geP&A4>bNg?Iyi;gRMV zHMDWkT%(3IP8uR?wBN{UMmms=>rpr{-EBs@B0W_n)6lL+huyghb7l5$Gdsm*q4ULL zHGd-rT#&rbyW-**G^Chy#_*OL9+gb|<&FF}VEh?r#}foicx$eMWlF*y{vX$_r=6!oRHv@8_!6@Q~`aY3vN zKvDHVs;-q-40~CROPD*Vm;mgJqxE$YMNdP=kkJFK%jWEXR50V%XJ|e+}`qQ!T3EyJmab<-0T9LCe6%wy zYtDk%v+EX4nZ00oc1lg$%0*)7$+M|RlheUPR#)5kY{{^%7(NB3e3-fpp6)84Q@b?f z!b6{Rv912}v&6YAf~RLu@U%2tdi7A5{ZocGk6#9FqlaE`g8wlx1>Nb8PG3nb>k@Q9OCwFMOvDWXiINQc6PV7w{_5u zE;k0W09Hj-1*?_j(hW@ zEMNLf2|RCEe-U08r8h5Qk;WK)g95*EA%5NhKU2}W<4ClFP^3*kFN-2@jI$m*pqDVk z`Aa2zvxVeMO*1lJo<`f zj_3hVDTrGBz@VswW$o;uS;}o%jdwO=Y!3hXu{P21anF~#J)ioDfopET>+_TgdBI_)mTg-*3r>MD*azCo??(*J2Ai{J77`} 
zd7|ZH2n4YMC-d1xC{GZiQ^*}V8VLI!NE&u>FdgspD$`+#SrQ6GYLhoA9omu+x|vwo zDR|6R5D{v6N+vz=EKClv(x>2|8hpD*_gQdE&EimTZ<^{EB|UoClFX;6NMctIwZtz~ zCltis9fhryf;+}B29Ox(s-u3RLMgij})JLVWH;b%r_09?scx)zYrRG={ULKBNhtZGl&_l$KfAU z(SodO`ix$Ll$NGk$Um^>RG4M_Wl+ z)yv(W44uTwLw#M#^sr@BXt&O@O-UF0!Xuesr_4X%+5x$kZJyeAc)}ry75vTG_Y~mu%dm;;(GuYFo=S zwUSKSIfA4bd|{_O*|N}?f|bc89qLMSt;%bYaK^{1G_OUc@TsqQYq&5wrFXx;YuH(v zjQejr>!gO-v%XGw)}`>Q%i<~>$(PdqU*{mX9(WKQ_$q0!Wv1k^&{84AN&S40*Vh=s z`{nZ2s*+}kogarm6YYTV!*6yqH%ZkP)fAMVlh(O7@1ofkmxfC+xvte;g@2%gL!+oyu%HN!^-29p z%q+rNeF_3sittq5B;kq|r{FWQk;)a1%ixVVEFvA<^h0S-542!k=7pEw8(86OgTKW| zUU&(5r7fQluV+7PK~9(v7f9BU~H$b>I%6@(AC}XBg7! zOOPV8%tOoUQy*&}Kt8GCoHTvy>D*Tg~dJkXpaQe90V;OxJE4WRP>;DbkdniZt` z^>4riE+P**wPqkF^+OM^ontFu-O^zIj0mChnn11F(jH_DHYj9DZgy%-At>a6Le>;S z#jr_8hya)@=Um zCgUo@`aPB(42vK$dR5HMbN@^_bV6CAeVe=EEv@*>j@TQjgW97XTdX6wvgd}*TZQOa zSDo~EgxS|wjbG1*Rl|N&KS)v6hF@Zc;_KMXYKc}yG+SC_;H9&!tF5j+hJU-(#$v6Z z0xPwpN!8h@IVto{JZ0pWq9S^!Y@{n5uRk-CYx!H+I=hU=6=iSNlP+9JLC3qrk zERP<$pIfND+;yGA$n<&h=~O*`-uzIWoHk}{g-8nHLV!afddh{)djr-@^|_Er6`a^c{TAo~u7^sv{9pUobHlUg{%EpDMzynT$O! 
z0efKV`Ra412!0KtlZ@`94dGNZv%Vv1b2LqADLS| z{fwC^zy1BvZt`lg6+y3;e|bhum)Og=BSBO8?73tv|LSSn<;M&w zhk6wsU*oG}$>rIWIMzhOGi{`t>_kGVDZymLfl3fOnJ%<}VUJUa-1J(}%)a1@GkQiN zq(&c4)I>m}7#u571*g(+bA4JUMZSPliglfuJW~38x~2@2N%JXR!1#g13a}Zp*xM5SzzsHVC@KZ*o`>$Q2&K&j?nMS%1}=i)M`9a z^B)Hp+SE$YHu4bGBA(K=md+UQt%IUsII=KmrQSv)ZSidc`0y3 ze&>(Kic`sP@#N&VJ(Tgd6#Xm`KXL@OKY94j;v`(IWJcCB#nwue%J624o*!^$3v>io zMT`ELv!_aA9C?pQB#nQckXMCI@{=W!c8wjh%!MLVXMzJvX%%s27g0AB7vI70pj zBh{ZG^Wk7r2T}$pU(E8p2cbV@W?Y&oL=2t&R7r3dDLEoVK}0w*eDs)x>5B_@DO0_y z3jmgJrp%C1NpP8X*O)NH)g&I-Ad8&DZWH4V`kgEb+TfzIA^n?s5ap!KE0@jrS`66A3Fo10en9{UpHO<9*$pT`x@?aOc zUv?0W7x()1WpTXM;|xpi8Ib&5!^_xMVvO(o?aPvh^FW>YLN50*zI>GGi=wT|dU?Ie zTxXfQ-evArB$UFjoR6TcC@as);aF}A2m6UMw|#Yd#f`}ytk$`-Nu=uBi*O3F*5M*t zW|Y(03@bR^wU+bnvN%>4r>S}H;79bv#EP-CR#sfRI>Cb?;* zWokV5a-p6#;K>KcSBlowyNQGpt*v(x2`TrZ-c2N=FBcj(o`8_!@sd9pjEnIdOgs&} zi9+$|LPN`b>Vi0cr}bleOqC?`~Ea zXDZ5gS!XqG8X+iS|Kk@bjMXk_isG(L6_0&m5ER8-jhLpKHGWsjXhOI`PQ~Zuu{0U- zqcg}ub4pz9XF4C65qqSKKzW5NYCn4qTu_O~o1p4ewJFu-I|S;MZE8=d^hW}9RlCZA zQZHN(sH-l)MSRlNI|S;g4z*n=`;qPz2-bC-YQL)VM*?X%C@Nxh^EEQxO(s5ctMvzj zxpoIaf=hrvvKz0(6VCvFXKUPND7_5Cyu_B~5I+G#~>@ zr~~Os@yni)G;O1G>T5OBxy!N6psXqyG!h#x5&mu2A%97&9J(Ph0p z-?{t?Ubwj2I7dXT#NEafDisOUt|NrzRbiA8@F@If)XV6MjQ_8Bt%kY48CEh-xA3)K0nWlhsC7wK!KfJ+3W*xqq0jw+W z(Ai+slAh7QTk?Quc_<=p#__(H{QX;ixEKd0o&=X+b$|i!4n2WrdmU51Itviz%5^Mz zn)sHEK*dND=Fp)*^uatlj5bG{j3P&qK=If-Rf^g;m`MjGV9?*paEvGr^f$ADa}|A8 zx=>h1pMw#7h!;Kwx=^)%co8TMVpM|u7M3f2jRPRaZ(#`WN%C||Z()_ok!yg@+EDF| ze+%BjQ1sCRZ?QF~CJ-_LK;DQHIE zwK$1{DA2Vx(_`KwX)=S;6tJ$`sU=P3aGCZ{-O93S#7ZFz6F;4&B@Gbh1C){m zP}C|x0|@fFw4lWSgZ?fpX#{HeyGYW=96&Or?ZeL(AoM`13lJ&{u>7bDAUe2>!5~nz z=-@U6ivS@LLJm^n+)ap(K-37<9o&XD!0IuspeQAy0Emrv0-3}ciJEvUkRg#&Ql;NP z0jT>VdD)Gj>so;f!&V_JIx&p>a}4#PV$r8B&gP%drCr2~{w z-uL@^)>?ZXU0kQ{ka_=j^?~@Tz1Moyv!3;=XFa#IcKoUz$;ZijqcOi&KfJusSZ$nH zJ9~Jp-ke=oSYBO&g+fpemEY;2nVs2ko)qahsT5SqI?nV;?>d$s_vSRZ$Fs-beJ68AM5Yb&$$nc2o%eP&_lY-4wq0l0n> 
zfJv)3OuY)#s=cmZD3`8`+nX1cR%?&#?lBJWrGbCb9&g`(`IanpcU(NHQNm#2XW1HMQ`S>9}{LuJrP&fo%&=+_-Lx(+4vqxg=h?sy(6B7}Bi& z<#F$N@|1SP#pY^lWz_^-vFZw?zBrFCGbvZB>SyT(vH|C9z_)BdcRl2diw5DqGlQVzShaSgYvL}ocf8RKkgjT{9*8eW{c)!)lev1>+R}0f#+s|2 zt*tGt&OBOMT&p9V)wst$ykE~s*T>y%RV}awK4#LhSal1nq9^M9;bvngu%>md2iE;v z8FM+oeeKr6+sISeDoste!%ux}arX|e>#X*=$F4zbap1#ITo5rz+vD!3(-Y;Hk@4wc zqvgI_sW*=8!ST`Yp{c$oEmy{-p8K4>eA-~zJt(wRxZ_A)k+ss}090OF9-&G5`X|)s zC~b?otj@9V@zK7{6z1eqwK8_1uPcRlqB=f3(HEx>PL(Gohfb9Hy3-oRPEVC5`+8Ck zLW?f8rCpTArcd?tmUhJ3tjEdYLW>&PH#`EV`2QNT)g{I#MzS6nMl-hW+L(KY|T3g z-moPJY!Wt-ap1kV_{yzS$}MqtQQUsMzIv_^)T+&U*C~6wf!U@i}P$)3(Jf385xGn-Ek{sJ5=M8uON(?sD;;Bv-$c`eWkWqpQ*oW zjd9P^mgZ*8)tcva?-GDx(e7OJYE?Yp>y@qA(9+yVDwU4LR~i&(2mxni&(&uinOR#s z`@G%PwDxZpLKo@`tI9MrN|SL<1Iy(6!pp(Av0{5@?~%=-sfB`rqNEy}!X7zVoSZ0^ zN2=Wc%JIfZ35J?)qy+9Zs&&mb0(l|U?Cgw<8QWCzg%Q`>-Peljj@elo?82;HaZT34 z6&bNw8`|a@IL1b~u_-++U)a)!0 z-0rJdtII(5FG_>3|LjbCsdfgAsjcv%x%+ynTv%OM!-|-lE!)c>dsjLT?`WKPc-C>` z3}DVKJhuBL8gx~+>xJ>-ggq|p2X^yZZDrY%pC;U1-+i;I>Yhsnvd3Bx%?k@>R|CYo zR;}Bfd1(9MK2gG!La4USs`c``wO#BEH|o@M&s+38i+&$M{Q*eD>FE>W@@vnttKm5ay^BzXQ?siD`(r! 
zkgr-=&%P4)X~S0bMY}#Otz~}ODj~O(Y zCcH5+e`aNCn336g=P&Mz2yFdy4h7l{(|X@OX8E1;Gk%bex%}l!sXp19!&ljtE*TDH&+wiryr#rKGph zr;e4YebM@bQ|K!WG+Bjml7u;a*4BGV*8{(ni@RI&YMGt2#l^;jncCt465FvxY>$TY zQ?wIq98B-_>$PYEZ(s&S3kepz|A~+ab~gpDE^oM6B?VHitBn9tYqy*)$Hib%1HUt#Q0Qs zZ20ud^w@o4;}48g`&z(TwP$hJPsDv2kt$skZ-o{sduP0TY@9ujjj??EczJkg()m5o zLE!gjU1dL=k9Th2DnpFBn20l)4{eg-TeKOJ&M<+0(T7mbg`m>X>cZIt^m%84J2Pf` z_S|f~(!sd2+-NSWE<7rid~iO=tSJyQ@oy?it7+|}`h0CeZCNn+YF~Jh)<%DsYY>pB zdla;Hw^&3E}e@F6kNqlTm)?2 zrny<)WN_5iucj^ge?J#@hR1BTh2`4nIRjTb7WL(-R@QTL$t5H$u`BmnW$Cz!_1c^n zJ&p6{nc=3nKv9SN^I`v3v9Q&Z+QQYqkvPQ`I;X{oW=O0LbUys6#bP}->40-*nV9A|GXNjCN= z7br4o)b=S1clSv4oZK&G>hlXr-f!#*?B3>v7jc{${@6duVX-yNZ=Bv^eVOiPF}~)> z+=DNgUsyf2c4m8KMSYHw;l}dCm4*3ptI3huZhM|@kW`jt2a{xI zaWS!X%_M|12IJ)1>gsay=wU{IXl^XiQ6P^-?nc^JJOtiC;&2=%)%x6m7V?=j4Ix2Q zPu7}s&`I2s*po90OSP4YhF9}oasgT0q7pFkPMnY7KrvO?J66aAX z^e;47g}cFy_+E0B7cp>|jb`KQ>IJch5QT%1=5l>j97L-HG1iKBXUTC z2+7#^RGf@fPGLxoBva!Dt=rA*lJVoosd9DrBw&V)Rj|8HTZhLhQ)BAv_;@u=hLVY) z>QrTTdUU9oOiWiN#wW{(MmAEJ93CCQMjsiZfBH0^^qm8t2ea&lsPe8h-7S+3q+87@!Wos5o){Ku2& z$ufwH3{4GL*T8^Wyk`K%rU}Z3aD-;c)#|iGUG_7}4?r1)K1ACQqwx5cMk`(^k5^9% ztmt4fa4>n`WSQqo^q8a8kO(3pZ+NP;8XdwbOmsHRWUPE*v~mI!w=8_d1?GXuWO+Xm zRspvYf+;dTFhr*?3LT3h!HX%H#=10z6?4A5MZu7U)^FS)vS zQDUZ+NI}@Jy~n8(sV!afGEr$O+CsIX?W+&Y52khS=Swq75NXK*2GGiCb3fzijB`;b zzrSs_esz}ex3*2kU2)Xe?S8t42ip$X!(1t^pYGwIw%hF?jr7w!9BjMG9^PEa>!*8o zxa|gecvC5_pYGvpZP(kw>q~k4bPu1?wkQ5UdhaUb`PskfX`7{jxs3-xHN4Xs@rv9; zj6$(9IC!;%<8k$h$)Pi1hd8M%&chF@=guRQ8%ujvlM9WNN0PZUIp32rjm9d9Ky7)M z5+r#zjtXt1!t>gW=K5cnOfjj5^woxR7+7wZiCD4eLVgp4*)%tJ0&UA+e-?Bxg`5A_ zF#t1=p!YZHVyigL7uq1}^E$7NyW>KkEov*}pC9dud*Y%B1(o7ZYNynFT`1A)`PbhV zN_k4R?2QkDe6Cb@VcUbb{u^Or$C{`ahWjEJ60L0KF~|Yxg`L;L&j*8?FnCdP&A>s& z-P$J)LQjth(jAaMTHX3xVe+JLgR`^+8ods3bGDjcxqSp|9ZP$Q=rbrU68-!Nq*ITBS9a3;Kc~0Vq}rvZz_{D~cj$GHH`CvLnsey-t6#e1WJ{cJO6`>T!R zL*}VjCtXcJ+&EoD`nxx!QIQ36Z+E}%p-6xCUb8op=nBdkd9J*G#d0kIsh`70_gzAXg8L<%~l6MMhEtX59aNod7wN0_g2J>7r3 
zH6_~xK04i4Gab5FJ6mUOTdALy^HY(uCghqvG#J0csKt(XI=ZfZ!gdUo{~6G;{lm)@_j+wvJ1_{L39KOPoXk7bOm6nw!KFEKis0?7kp){6)#0%Z1@TS85M%$;wQ`{5CSJ!h3+t@7$f0>3IH{Jt=TrpDSDg@bY7ufoposwE)+)daeby zrs&+xD^uW_q;t&lHjg>e^l;&D`dSn9a0@t1)x+BcviE>{_{IZS8E_A?k{sjE_hLHd zlIV-+bV;Hw`sEl!lIV;5P6Sc{$YMI@R9uP<`UqA#V4BZrf>&Mb%a4lp{OK-S``JlCM7mGX~Xf+cwgMIKG3nKCo?Xv#H|0rqIhl9Zv# zqbW<~v}7+_f+cwg@Cz-##gZ3VfD6+LDN9lx;1^Qi!!~*`(yC*fMcPI$rZcB)^kTP@ zmb?SZ#jBlGKnWlh(F%UpW-ekMeMNM0|5`RNZg-MWEPvp+h1qjk%ukq}t)@ce-#jaO z$$6mUPjZ`*>=r-!jiuS3LPfTHzamAx3o5)K6~EfPUqJxW4O83qD+X?26-nw8f3WRG zb2$_45^F!$d2KunJLZ|v!Vl%5qy0n9wvj1<1;ySeE!73P?wSSfP#iCGv|B zTxa#cWU}skj_}Mj$P2)NK}?J7Dk2X3TiE}Ha@XGxig^{E`~3Jpb{&-1!mD%9)%~ZE zVM{z~nq_51G(Jr{o!Iy(PD>fHoBnQEL(1ZV?MRH<=bg_-C z@zuF2cE@)>twO2&$8v2?=W_iwuiGdyU4*HurOPRX#Cc4mQsKvPoqLdC1^(Jf?XSy4 zKbOl}2b6jJx?D$hJc*2SmR;eW<_f#|hm+e9G?H|-Wx&=~R?uD0kH%MU8|w9i`ofAc zZqdD%9K6i;dEYr#ZrXy}ny2ygpYT{O4P+ZuFoxdq9iV z=XR9h3%&*AH_)PgJ{d#oJh(9bAkv6tr zcX`K_E}bl#Mb?ByEeg-#!XxN_ErUTIsOJo|$WQ^bFc_HUr=VeqyJRa5vf<}x@lzUp z7!17m=I`5J?l|(y2P5{N=WH-i#@;MChrz)7kLL=P@EwPjN|#ok%kovFLp-RHkWa9!6ot1;mjduref%D{ZrV#;L>iE&I7n-XJSc+_sO5T%~!dxt73Cvr^%HZRirAL+;7nZyPZ{bZutp=G*XBT96M{dW?_>iYUTq?XXSGW={ z#%O(Me)ZhebVv!0Eus3^+{FNVXKv>e@r@o}cd76*BI{0;+y(9H5JEA5Dpe z;nfY8pAlYx&3a0OcegT44=s3*O=u4--kl@-7+P#A6@Io=WNf1a4>G3NMvI@#^<}Kw zTPnOKSGX!2TrVwnup@gwi}&PqNrQCKaC@onFI!t|rv(qP7Tam@FV$kNZ_!sOyq9qd zTqOb1Tql>D<1R=z5*t)A8z5PLJYawU)Q(c&ea{Hg4nR>e1GNKC?{l=0(9f0HKah)l zF%|j}t{=#C^x)UhFj09*gyTkIZQuol9Z616y45F5sCFnIkXS|7FfTO z!}{|@g!3=u(7+rdUjA|}+T&l+rHTO(${a;pL9VIB@yod#SA`;wei@_HLjuDOWjJA5 zs2Ct&T2M?OX?!2b?Fcww2T}wl5}GtTioYs-4-J!%#+N zHKmE+%10Aop!#jT02^s|6C+f~M?n=)*t?eO9o&0RiXn~9t}QNJJfy65)jOT1!A%3sEp{f73G3xOsLbs((2)4rK=@o6H%!xr(r#mRg4BUJ^Rkj*LOx)gV;V<8Z5FB(4S0+mAXn>h~lIovUjDPDVB{G{U; z*z1$I=%)UkaU27a5V4*OjH0l2ZGj90%}DCaTLo#*B-y~leG*G5@ zZU8i#)^}(n=1I1i$%|f?+WE!5`y|cSYdg3cTap%|y;N0TWm5!=2)}VnERnF1 zaJohm0r!FQgeXj7)~J@TNiBn~)vhtwE68CzDHQ6|KM9z@R%x`bszjr@(o|e3hViq3 
z;)UT4evldEYSp9_A`X!;i=bE>CVZF@4ljZR9|+e8;Xakkz0~ASWpV;X0>$eT-H>ob zL;f^N%`*lbc@+aBT=EpV!|o?d^V7NAfs3%}DZ)j++-0CpGyfuoQ{rd4jD)PNOsLnq z-kc0L`f%uVFnK`IF@0kr*UW?(lIhZhNvu~Ujn*|8JxXX?nL})wrR^Ho7C}CW0dxv_ zdgN9RwbDHrUAte7~0*AlU8c zgoP9>lMEM#y%QS=lVCdu+kqsuq@K^4pej0vF$dP((%d03wk)1+tGy-vV$jIKa&Iyz z$^pmG!VNSEZ3o>VjA`TKGRkIyShRe1%HEnk=d~g2PAeQ0#KM$aiMp|Y4u&bnqIr<+ z#bjOdaaRbPW-Vx&HKWzC3-)SF)|u7?Z3%8U0U`B4X+tGtM4H_}cvqMhYxtQ6^U34| zesmlv#+Xn#!;w?~IOxiZFr`;w*sYPp+Wf&*^e#dH_+$?3{WQYkAqr?p;zR3Chd-3c zPJ(KZT0!E)$j-mWG+UX%f06485-&#L{Y4H(<^Zm_VyXRWxwgN;HMeV>Yc6bR^2ABc z`IU!xUvqd?Sc$N&iEV}9+Dq-<%C)^A%JpB}ItVv^wy{GgjYJy6w{oOnXe8|#$+yEu zDA7Mf*FQx@^6fAZiZuOp7zsuC|8^KjyGHVz96=RnCX|3gF+kEtzLV>|F}$_PH|-Ba zAblrCV8!8tHTK;c!DRj<^R~INoCx1S=o$(yBmfk!%?PF@Db?AaZvq7R}CM3xM^e zG*YDBswHvysZ*HQmJk`x&SCGOI@Tp^wVl$GW-f+@V~fqiz_Lmuut+spX^`=WAhZ~` z*QTjc*8t?HBFW;8Bc!o5&ZC3-Wdy;X=Z{1LXZGM;peP1;b+joyBS(n6Wpa4kV_q10 z0L#2OB8ZaK0N|@5LNw_Ers&lXF`Cc=6@E0bAWdsfGE^5M@jB~4hW(GW_Mm0)qY-hM zvdzj9+FL!h=WS$zhvP+C%QXGT2y5%5SuiQXFfqGyi-^)n z-D$L228|fz=BSawR7Ne`1xyj66un{CZYg3v8F@p6BAEVUgsi$9S=CW$e^b=<1hVSJ zR$1i~?Oh_11{II#GubkSGR7l3H(# zx}A;BTOhqP+T%nVMIgO3BDd})sMA?$e_Pb{FQCqq>!>3^K-Rq{h>U@^MV-p4DHJ+I zkhcedP@;2+u5*eE1*-s=>`os`V*g~X^tjaWo9rVxy=EvrN;$j7IzC8v<%t}S+|3! 
z69(OgE==np;}k92?3f1h(tnPnNj^*gw5^f!$iG5 ziU#~kO%%ldiG)OoeqDDN=e|GU!UqRQocsPLQCfn?L+R(E=w|;?vW#MYBw6nqpKY+(bU@BcvD<9NHqOm6y4Q-SHb~AHZ1g0Hat$5SP&OwA2pF&+i?9xcVr{VKtV$4gAph=1X+m`Z+>nl;#~P)blaUCK_ET36@s>3 zE84vQLH~Rsf^x3`Cm<;I+LIZAQahi_5R`lE$qd0F2tJu1SOmc*h2RH!Sv7H~{Xa)- z|1qCCGM2pXfBlcRu?yVCNB-lsq%7kVTT1FgYd*TAliF5!u7~SaCz2t90{gp6AGp)fw)DT+d{43WE6`6?pP$%1Zzc^4&IxU;Hbrb zR^Z#gkjjPCxv;#Z+;={I8o{#)t_ zrp&mD7(Wchdl6isw)x)3Jv2LOy_H52dC5tgFk-J$Hc&Z{LKvAf+_^GVG7~1PDrt{tw=k)MKp~)YcPoZWGV@WyU60<7}yf<~*Bx0{)sBbD-pMX~TFiNU1mOSZ*-2)~UG~ znt1kLwKMhlQk6<1}-bP$KCn5 z=Fyl#bjnwDwrSv7^ALtq8!1((E+YpA7tl&A=XO@_(_Ya@xW>vnT={Z)L^xr^ShoHc zhy36N+~2q?+`2nxgmh*)swv*rW^(6k{0YkHIeZd$3!bk(afQWDTx8-R`LGb3OHbE~ zi6EpNd0X7g4%3kMnKjUcMJii825|KyM-7!M@4QL0#K>>Mcd6)>51UGA^(0rcyT%r$ zJHmYx;f~wm5SGZIb0Bew^3eQZ<4kSwP&$M7hUPJrRxY+2&#}RCV8EG4*=&bd#lfX; zzPX2WR#HQ^gLDsb^2JnQcZfxB#?R;>ewstW$@1bFXM6VBP}ViMP(QNjk^Qk{&OI(A=NcD`>`HA{V5;HQMV&=F-6YHfj+z!@9}5rS zeh{E^_>wvoUK1rQ{sdcNAmYQEGr=H2a3`8#oI2>`URql={Ez`0s?=#E2|KA=b}-BJk@ARhjL1dwpQFy5 zE;l`v*7(O!bYuThITF&?OElPfnR0}TUCu0N#e%XMlGsqP{Yj@WsSV=ZE9fH!h^NIG zF#^`-G235s*`>VMtR4BwQoyv-@^RW<@Dp&ZQxc8?XPIe_aB}44Z0&$MiUy#beR>EGAQN*Yp0>uyz>O;XmeQEK3 zno(a`{GUc8XHiiZ>i=m(_V~N=Mt#)yuSC(^{cp;9>Zjonb0>T5HFd}{E8C6nX5d{0 z+pM#Pq9G-NB9A*z=Jq71eAzh}FDQ`OY(_4|5Z0m7oYDw$>(Mluur*{oQz0@CZ)EdS zVC?{k&H-(}*2Dw{pykLY`RRnFW90xbU0<@Un58hovaY@o z^!?bZtFJ_T&M!cb8U9Lii}NW^MBo2PwEw7IeG&TpSEA=UKi+TCPu-`ZsLVl(Dajr$ z;}m|8-ibb}lZS5I$}ppDS_FxwGbCh~JstHO3Y87;ZAU^8B%Y4$7_l^~ILa#w`E>N0 z;rI(q1_1h-QS_qz|1-#dt+?=r)5IC>U+AY4haAPbUJ#Fp zk(6fqohbUh@tMCMIgu`tAi#Y%OzfpAAzM$!24kzq^MI!%DSs#G*cp%DPzJd6)xU`f zy#volfx`^a92lfcycBUM^2`nfkzK$L6l9mlW>6R)V7kodYYz~Ae-rg=i%1$_lUz7KawphnNDSDS3MW*jH`Ri|pKLw+}3C!2ya6L&x%rGv?IMDq(Yqk0MJxMZPi+h3Gh0w&M8p=zN-WnoNNywh{ zhz^bKWF@Ii3={Ij>mc_7Q-M~E8l3fnZ2@U6QJoaEx=Tk=uZag^vpG4!KE zr?+JND7NvIe4le8b5{fXyd}TyfR}om2I|%qgd+XCCI8&JeLti@y)~1NN`s;p`r&93 z#dJIh6TCGWkJ6ytnvJK6e%_jmr;C2xDhcVvLypuF`6wWvy#%?;!PtCx)s{eY2XvnrC$`qKBe;i=^Z$ 
zr$g&uXkZYA*2B==oej-SPUL+=0&E%DyYt*x;Pmz$hW76KEiNL#TZZ=T{OxzfKjm13 zLly7KNBjF<)#9<>mT2eHN=Ua7N{mXkNPA~0!=Y@xIJ1OguD3L4wUg2TZE$ah6cWmS zLXq{dDjn`Nh6_?*xZ4=+`?5_=$3EVd*S9~+`LK=QzE3N#Vspijj}PRz;^J<1p$40^ zov+zUiX7QmKW?W~xFn=vIk>#E zU;=Dy5d=1yOLvp@5(u~*dP0-52tA+7=&6%7Pv%oM(RS$hWd4R5Eu%e-IA!xIn%3n^ zwdD5iXUd(bx@namxi)d-h~G6Cs4PUC0(4x~2ksz{7Qy{jGTe2v=T|ab;L|J=;e~(a z))?n{K9VOl_JcVyCtBz&`uR%+HBA->HkL)OY>712nK%?JnaAPiGGAMv2K1e}9Qesp z-yn<4T13u>8Ss~Jn}yhF9TMr_cQAMc5C*@4!G9zhybdFMB=55};%Enh|45!Rxlg*m zmpGY_j|Tfc`oA!E>?^zFJqSpYyiGH8St^*j)V9G^LRhA^Ge|E_h!TWgFo6&y2=VcZ z5G4rl@q8&*OC<>L@%*ia;@@zD*jXz4mL$S2UycxAzTg4fTECfdpD`1dR_+cEWp+#J z=(VnG@SEAdy7s7~oUjuDFo-~aoewQ|WWb1K!+;#|dxo=lZ)BwN>irz_<%}$uO~-SU7S?o>YqYE-PJ_QDNA{yMv){{;(DCv#*y}fFnC6|0_k%m_6 z!OhcMYX75r+fNm^&KxjOoBk-@d1d@xicDg+95GMj@zi~yC_!w7fMl549-OD#G@#pq z2Sc@554mc-QnNO7Y_;q*p10qGRuj9;(pfl z3ZCb+)tV-2DdTf+m@%|>ZoAAI%xJ&IGT55kmuEVnZd!H0u2B7Wqmle$c?byWI!~p4 z!zWMHec&N^p;7y8S2rVJ@*GXT`PyUl=AXpt(nYiMaX41wu(-F#5YRI>Wb^J8g32OP za)edQ`q4Ot&;+t4(tgbk&Ib~dK* zOSMmq`Dm4GZ;&5r#L>y^Z7z9a^@4?San9P(4@S5G#Z0@7*vOt2-xzbY-kIkG1e}e4 zfVG`~pvaYj7aOt zPC`15GLxc#rYRTZaDi}QUCzFlW_Vs#Bc%7Fc}2*tTNVcQpWjVPMLrB1l72Dawcv6{ z$1bL*5EILgv&oN5Jl8XC$HGlnWp$I9znm=$jepkD(By@jGfqTK; z&@R1+LAqTukkc)+^>Oe_f#&lqO2Oa;YFqh!3zqi|_qcJ0R_<|@boRtay+lu5a9{o6 z6e8mw_oH&nn0~%U&$)nHcdT&?5>Ym##VQ>^?z525lPoqG90$`eUUTaL&PQNQ)s~N| zb=lDJ0<~4BhELF}7o%AXT!EctolO+-spY8;DO~Jd9x-F^xHHAKxicHbg z6PJ44B%KM>>&FVwHm-zFz*%7%eZPzoRyeW{{<@!yZ9zKOj}^Lm+zK_FthI=+v*Pt0 z;_C#lWDu8Xe8#EPoDt*z3}NuP44A}hYY|{p)KyE}5-YDSeE+Um0-%E!XvM)II`GOh zZ$aku1+QFFgjTOF_^X!WrF%mG-TGO$YN=c5@*4_XU8V#c6rH+E5u&`IfVwQLw};~e zg|@%+TCY56Zz^=|ir+0~PLH(SCkoiGZ;EW0*axEmuZjO$NK3qr*Us&le)%E;Ss`Lv zG804UsZP}6oKbMd7L*jJY~c_GSW;?cnu&8kSIMxA2LmWX1@Is`n+J6Yy)ta{VG+r- z=2Az~<%=-&vVm_Agf9vR8#!L{d?{u5sxD6xpqwDBN<8 zd;!R>;POuwqJL4yyV@2W{^TvY?goJvzF}MLUc#}j~8uuyN|Yoi3wgI zue0BaM4z+-V@DyDwwo<;j9Qcs8iT*I5nGa2#m=yUQrUMnZ1B>YxXs~=t*q5L_ZE{S 
z1nu#)164BsMfZ{Z`P}D_%Ip7yzE-wMdeQX1IOfeQes1?QlfVyzZ>70UrAhpe?=)p?ovC=hV8xt|%BQ}HhFU}2C$n~Aq7f-xo)MgEHrbVRchYCL659h+z zA1ZLit`nom{rXUWW0JmzRQ*ulT9<`IWu)qd3j1#NQk6@hex(royHu)bk^D-bW1C$^ z)#EOS`q%92{dTjJZBT(o8b>N!-3?LHbjdE(k`?u@3naVjH(AMzQ22xe>S}X@ln&M-f_Zq+@X45OTheQ^?@^#^0h0`OtZAS7|2N8TmrOQtZ z3bD9LUuLkJTi2TjSt+)ByH?gq=&(HhM3YL5VP`=lgX~ zy?mWrb-CmjJVRDeN%oxXKXMKT@{j|^^Tvi>vr{t~?FNfRc@Ss0gkS9LEgPV^{l_#S zC0M}e^8D!QmPTrcfc1BLNH20S#TKf%HEBwZA+L_JE`IN@x%1N245jAUJeSW}BDP{; zz)BrJ7`J@AL#opP(Q7wJk{&`~9X=KvH$uod%z7Vb!dgPIM-Q6su z1G?vQ!Um4;S>*nvZ-|i!6BMeHoF3_rdC3JDmfL3-^D#JbGm!fAm9q+GLI`>Af`RCg zIxs^OhxI9^c8Q7kEg`2R)d-U1BPPl5HSj0oqjV8yqPr6g>EiSB;&Sm`=#24-)f_&E zTr-~1S-4wV348bixX_uP*)FaOdKYF(4;;7-jh$!LR@_M}+jGcIQ2yh3fMWWEr8Sln zl=HV)pfiLs?2av=VyoLTC9twty?8DlI-h~DAuiRxkPlEybdEcU%6e=;qB!0OHoOk=NCvrwI$%L?QolUtCxJK zt`GI<<6j|--~8FS;>_54JxsG4oFfv8o}tIvRV|__+mz&kH8;W>lyexjw=i)G zxWYFLjqgMl2ZWa_!67V_b?e@z2)G6BScfjQq&LLTQ9gu@rrhp5fzRxweinw(#5~2N zC2hzjYnhuME!U%Ap#68X4!A7*10D@sk`wMl@dP%{-hm}$u^d3P(Ra^ToLf{|FPnlq z13z=xu*8B`XEx>KM&vt+!=i3amRo@xCoQ6=ekM~?l}7!Ug1=0U-O08mF4OaE+0Q>y zxcWdSvY&sZz=arJlzX&Cn!@J_xJUnM<{owX*t6yw4Q^@Yzh}P)(dElz`m`qcm{NK< z*Qkv{T7n~l{0$5>EkeZ4Wki&};d2F_>H}FI;^zw9F-j34ey-phqZA?H=L$GR?=czz z>8S#r__*ENm+mWU&+yYC4mIW10wJKLv$;=Ys42hpsce3gU;9+yD(6M&iIiXaRH5HR z6)1w*Q-zxk*ag`=I{f~n@1xfSfS?M*fGYRTQ}kXN-h%3vg4c#3sD3GUZIpoY=g*SY zrbk|zKM!6TO5j1!d2J}7!2P+rHhb`k_26lHT7I$ZtZ03*V=j5vlrZzK^~l5aba0AM z0#a)cNLG~pXIrWLYlXINdH;`6jJ_uSkNjoZ0-% zC{Gc}ecU+JSFg;=qOJSdVMqa%X&-ge_d$%iK2@VD;Mgo3mk)c z2zvF3UVmHQ&d;ig5w{pf>;407U&;tad z3r9eTP~~sKDx(NGe_QBt7iUm}fPY)ya?GbBCwpZRysC(t{B$NK&18Gl#H2>yjy$nb zJd0A&La8Z@6o_M-fktwPysGFO*bu>};DsbbX!NS07m^f_M6W7(AxRNQ^r|94QqJgJ z7Tv3he}^-=SL^Q8Meo_BL|3gv_{fS{T)i^LUR(VBIimr{xPchPExGyHq8Dqt1)0|t zy)&93<9=(BEb zMNoY_@Zfev^~AHpgWJV}PXr#M1RfL}4^o5&p9nnIS89J-vF%@Z9#n$k+lr(C{Z}0u z?Gq2aw@89{OC&xVr+8Kw(0B4|bD!@9h4TojY>fNY$P_wm(P{5vN^Y0sZ1*Sd^#%Dl z{B;7(8SE5CoHk}i%Rj&g7)6NBuCTQ!V2XjmWf!&O#$sb$Uonx9bFRT5Yj1U7Z-uVd 
zEV+Q7S?3C76m48R0>q6McC01bedrt)>@??b*|MRYvEv}B#*u&b!DkuJF3>ZfTu8ED z1jv5?+pjXF7S?5M9qUqrAKx2R8ATZ3y) z>(eTG|Mzi~0T5Jy7*N$Jdw*DEytV%PDx(Oh?+>f2k5T=?v$V?kw90-VtTIaALD8); zimb9<2&-&Isr?s=ZNKMNnKEgAvDkS<{AsPS9a?1{)hZ)zs3HdyZWr(0c~%#h>*Lv8 zW8P)-|Gy>1yu!Cktm9jXFylwV5~Bzgd^9XEitz16!xE#&68mUaViZ|oAJr23s+5Br z3YGp^G5W1yzEw1PrqSW}L+;S`XjzOmVIWKFyrI2Rq+gsFb?2 zH^gZ8F!Pj`SBw~P{aUf(DvQff)T;UQVquR3ZmmoO9=NZ7*aKGJuNS$4?C}P>@sJTT^g3+;zGt&%T;rqEV2i{`HkY0SH*XEIFx_0SlBtRH_66g0^8hxmQQIH z6u3FyzA=TT1xXmt(tRz(9vCS5D31gIp_<<=M*qI(j%HJafp`!YNP_RTi~gcO-U98n zi+fx!ogxhM+eLpCpCSzO?~Cp3co0Px=-(H2++;F(hqUoed>>^r06`Uq0aa<^p9nIV zx1joopp8=m)lUQ&tz!ni`z*=m9g@+%8)P&k@Sx~qG)2hpyOPmAp2s&(Dz$&Q*!JaO zuK&Ay^gDI&rGZrK*xVrQg1qK1b-C@SGe7-SnLh!hzzJ?O%VkM zr$!z6DGDIfl6L1#cQyUDyPZGV;g^9udenp1x8I2i2IH=`XBe&3h8xQlryBeA?|0rp z^SU}8^)zQth&vE!X7uAOw?(lC4d^@zUJ2gpn>Arg|4EVC9+fwv8dUi2 z#lnqg1qm}AWH9Ba`0rYb5|7rxns78MeE`h=UgQoT1#wHl^0URrT`x%qcodz)p@_u! zY_WT%xsE9U>$AmO&ZSNfNS`edF}E-Al!V;pi_u|-t?gefJg!Z$lm>q#qQD{xm)5<4Wm!A&pj`|gShurLR?Rn8p{(7juZZ&|!PgOT6} zvCS|)Cig)v3{8ake9 zWO%Ara)*zp45X)uTm#`CQTjsBUs_v=6wsg;ASqAs3q_yy#akeKp}6BlPkjY6zEI@S zS_g@ye^&IDc9$%RkYa$O+e!W`!%1HKKP&q4fm8<4pJgmS-7gj+cNnu2$q7#}K#~*w ziy2OG!hf;2Bk+ry@Lv>zI-FpaFBSa_v?b>w&mif(g)e0|nUB0!ayQUY8Ax9$as%yu z%du_j#HCzp`+9q>|6$Gz^OcyUzeck48_+J^C6-CQW#hk0Z70?2grYmH&%H{=*#$C5 zYDOFvMm4iYFv|2p-_MIXv0GcdlMSn%Z8#~clU`qwKj4RVz+5}E0^i=w6}R8$V}l%R z_}dCo)|YHuULbTS244@6H!XtBg%5SKfyi)kxv?~7ZeZP?mfj^{Cs!=&Zb3q1a4SKr zh|WrKjm3;vL1YB!D-4*?m65U{xH{FK>jWR;ytXFB0PPkIQ#Y5;Dj~>vkH6R08fO zayOldH9{1R^JX?~%YApd?be`Pie0b2m%1PNNB?{2OJ(ra>{XJSnYAC#A~~BR^XC@$ zgo+=Dbs8>E!Jpvf)1~n9nW`lk>ar9MDEyP{d+O#N)($T0=bwZOwc8FRNBD2@6017J zkS>skzIkBma@O$7hK2_@a&vWb-)y=~c{-$*Fg4T8T5) z7ti{SjaeAkZcov$TA1G06@^I|3hX0F^_%j?N-P6W%ARdOQewCh>TKdOHd%Trm|Y3z z%==Un(t^n9Lk$5`ny%@N`Z)au;KwlyQzo~ZHl=(Dj;S-~rVQ_qWFLtE&4n}MaVk-R z%Uh&srl}s8=98`2rc?U_$t@bRU%RUG?L`wK2M}AT3_Hv>6f%z(J%e4 ziTX&Qk9F*>woQj`H#aH~OniI3(KNqZ;|y7cYt2AhQz6r_%?)_E*5-8)4*A&NxS~TH 
zcBgEBym1;N0gXa78hzi6#2!oAg%8~BVS)KAM!lX@aah{&W?bS#e|sT3vUbLEm9iq7 z;_B-N{z9HfK8Mx$29JnB>+p!vaHjfpZ-;bJP;9WT(w-CpUaB--iw-R|BB9e2bP#G=%xTH$NS#)0O_665TH zQA!?mM#;V&xZRyY@X9ed2udGHh*SC~qR73y-Ru1n(OBNzzRRV?QbbjIdpoz;>x98h zS&{E(kG|HPH$9aywcpW>A@r&atQN}Ba6a5#IN1O7_M{RtdPxa=YF%>LtL~T7(cGjn z!{T&(8!U`b+lCpWUSNy60~@#qfHEXgp=Q$LqGfba&7`E1M4d8(LVQeeOAI_im_(9j z6S1kEiRG}2Q<-nxW`Q?l!*=mIY!|=lAhC~^=+KCl%~nC0&!AU$W;Boc?qJV6d|12x z9hyJ=4H_TryNfsej|wOTccm4y^^!}M{ZWBCS;JNra^AL3gx7VBd~f=d*QPKT4qeaI%+=kHlT90%c!KiNrlr_Kd=W%rP{#6v#9wm^icu_V1pcE2q`mN=`> zxeDw7_SlEp{g;F60SouT?K_>b*B-FYKirP9H-&-!^TX|XZjJlmP6t$94*zibwFlxq ziu_^+$j7wUe;t3HJ9?-|VQtt+er2%AUSkn*FL==(mc}CW!+6|`h&lmIN|6*tN0RKH%+qXL8(Ren7Orh^`GriyR=v(t7<5n(g~U^tqvvHI zBACr*t1Z?KT1(k|?9j>6jO)GCZVf~#W#5J`T7(U{b!%rE7(#cq&xEl!j9a$omo#Ig z%hqSt>_C{RVX4}2A8yhzo|3JK`>7RMOHS4{oYqHc!Zfz8&BJ*xUCKnhQ|(JcixrRN zv##z-95X3G#t+JO*XCE_;L`Fr+qu&U#%_*WN`!?cVSXJ`kYdRQNMv?rgZ7*3@TuTo zHQ^N#XQi-NLYXHPf3jVCv~e(Up!xm-oS-TM!5_|LF}88LeT_{pG?4|vBy0agsj&+{ zmELYMj|RT-h)j`vc9+HB!(96_5SNNP^6zFg;&`_!Z-w8<

U(2(+1y~*fmeca<-g!3&-fLdoVMpoN@we)g zDxudQSP0SSmPjjRR_E_P4PUp1;VWGdQmC2IX0E|0jC#Uzryh6!BYt6|d?MM4mO4+U zASf+jc)yUeY48?4lp!)g#5Ob@34+LXwm z&>UOg(&%MND>>q-r$hB~VxwTNce+_QkQ_secqCw*iZb=YCqBU54;v?z35}G!%|4+@ z$PM?(#$ZDFmC(EtWMF#Wh!@QG&>%(B@vD0YqFm?chwU$yM5H^h6_5_c%%m5{0nR>= zZAWr}Ki%#l8CHYs=;`)dH>Eq6+~7~Q;|5nGLp3P>WqaX3T0yRG9%L}(4FAhkm~w~z zW&5t1vnB>(-~JRPAimmOIFeRSsyh!dm~xAMwH2lu<6mvxbtr2B%&)fJcDt@ki3+>; z$Y*=oPjuvz5#Y4aGcHERM!4vX$edePhTm2%=)A)EV?`#bWb|)vPfL)Z@v~)}2)BKw z`)lxf%8+R`8+=+ejd|ITG0Ew)7i6gI&IT_FmV0-S(=c&Z31cpb3@7MAqZZe(CRYJM zUFG$y_Rg#0$FS^n$!_~jJ9qCrtg|XZ>Aj_}gl$16TQc>{>mijCPlEQ06g}!;4%0P~ z)@ypEyNV>flGb1g9-yI<0QAhv(REtH9{o;x_Z8tSXnv>tO7|HtDl=!_Y3DOyvPXBx z9{rp4=*K&pJxZC{f72fCivL*--lME>eNQUt54M`4(m_%)(rpbJ=4~COPDGheW~VR^ zUYtR-Z5cp88(ptG4UT$m@Su6koPK8rFH)b9oT&K@v`?*;wN9x&+d1^vz* zpx=Ej=y&b`>fHCF-;Mim1LKD}3ODuNvvJ(wM|Z_EvQ6jnoYQdY@iR%%OVx$+QXpn{ zOE3MQ4lf9-hV{AY`m70f|4;`Xro7;h0?Ai*6t3;(R9NO4&}5h#V|c+xqbO(@b}?%U@{(fooFOkM_UaCA5D9sO@?PEHKc#99K>pPo zL_GgEuo4em*HO5p|C6Mdtw$f*;~67wxZ7P4EceuB+lsuAz_Jd^sPQDpBAt>8%P2)6 zgB<;sp-NbM)ILY8Z!zrS`&{)Wb*4;DI}Tv zx(@#-SW!qp%GY&pJ;49?K!kXjCHbJXT63QoIceG%3ejaj|?qrM~FEvAC04b~ zBpCV~u9od(n~)eOkCs(4XBbV4*qaC~rE`k!s*#+ssuz>x;qkGNNjj%+sxme`Ri1RX zo*bX9PJ-*#uZc^;x;B{)P`b#Obw^gk(8&Ge>QrU2GInC7G6IN>I6rhkm|sbgYI&qG zH8VU^9hn)MK6R{IWej`b-L223szbx~g}3<ZD#r8+s~%D6=1_7ml?5wPplJARj|v}ZUynvTk`p|P3a@sV=hHo;O_A;t0gD&@Xj zbypc4uB7AJZY_pKp6^=p31DS-Nyx>qAVHZ;kM+tZWfA+f?f4Wv9aTs=KuSlm}W-M3%kKQYYQ zOjKs{;DEZ(gVt_tRm*!tqX){zJU#~l9UmC4j`SVU;&`Aubl;3ft4FZICn@lI;KjcU* zxjL)nW}M;s#rTO_>8iMty-z3llN%aN4ZV2gcttD7t>WLwkx6s?vGVAN%JeBXMHGUQ zSO%?Y{pa-o;T^}SLl3e-hbNAQ)<2nNTswz{#)d|QW>{SJ%?u3>k53~N|0)mdwuLvt zGZQDr`u?YGp}y8XyTm}hnTHX(eVy@>m5IJ@<(cg5{z*EnZ|8-PfBFxm%g4&YGbbx! 
zeSfXX*RS)>C&!0jmzm1glBZV6^#PpqvNNJF@3`$!=d`S+BGGx?5UyZeGsi0Ui?T0%oiVb7!8kA z`o7ruV60N@`%;I7>}#Z5eYr!d;-6|Bw_aUe>0lG?KVBId9U24omKpWq_}xyC=mfH2 zXzcW*815IlSjT%pCD+r`>2*x;WS6KIs-$TCQdjBL_?GY(JST=um#ddR_LsXfjiK`7 z@X6`pLsaeiP?yMxI65(NpZ=50!kc1EQe|ueY7Y&KoE#bgHl_P3Bh!e-Z^xyp<6Z9c z2_$XHtG{k}HNi&E^6ER`74mNqz+;tkBl}FZs8y+!jhy0IZIBz5=dbl>!a}8TrE1dg z*Ly^u@H8dRZ}hO+^`ws{Cdc}Ivj@HCn#yE#sI0XoDY1drKf6tYs!R=yo@V_{O4?f= zuk96=-8-ZKty_S9-m3|8FD|jx{-T#H{3=%^BSv7;r+dZj?rn4%doY+f0u(-oti$OS;{2BPh3$NjQ2~y z8iH;}qp&tn11s79YS5;XpVzWxhFAf<0{58UO`o2qv&|kQQFI(Qg z`*fZ?l*w}Sezx}j=U-i;DQK;?X~X*9HQE)ak`D0~uPOD%SMVeiu+{SMauqeF-1np* zhF&esaAH9!#qjiGWrD%>{p22W$j;+z*(O5Xu%~o5J}^CrmN)J+q!i}XJIqh*(VC=z z2^@5Dbea=0rz*qMzBlgCw$G~(1caa2H|;Sx@oa0|=FNMIPLng`Qxl`(m)3hqb((Um$e+Cmk00#51?DkhFz

FU-ZPfZcAdNUXN;*0X0&M#(j8zxYSsO z^=6(SJABC_yFF_!5tp(@&wTKo{DYh1psMc38V3s0;g=Q{&}G-c?-;1=BgvK| zXz?ahTj3r(74LW%H`n3MnORv|tj{QS;qr6;yB_cDzHODJ$7p%!q{Wv(OAqvijW7YP>6ea4xeM9KVq4 zGAMt;fOS=Ou5Uc5wnvY}Jp}SOgXUM_Wnlf4ht=&H*-9(Ny^Ky5@N(zc47a9V4x|5> z4R6A=;#&Em7vl{WI@xd;jJ|2GdXMbga#;lGWiG~QX+0Ti1ka7zHG+kO%VGX4k9l#S zIkB*C^qx4@DRcLclgqXJwr|_gWR-qBk;{`;EzRG3WdHaq8fxhIk)0obJt*; z*MZS|T-+Y7#05@^f?ZG4+h0#({zMBGmbkuzFJ@?qB2Y@Ao%)kIj(ZlQ30n2=A>$v83?bh2McexSDpz*> zIowI71u@co&aVlnG&5w5{#_073RWInAlga-&ia;(vn0ppD2?6co2F3NpnUR9Y(ovy zbR)G3pEYpDnOvTjj69&=tNY-!b)o~;P}!eONI~+ z_`m|!o!@x; z5&EwSKN2Ao^(3E^!x^OPq`?~Hxr9)S{Z0clj=$|vVLEL$JSj0@ySw7J!R+Ze;e3D8 z*MFu3o^`i=ZMTx&R&Yjt%j4Rw%ZN$Wai^^hy3!mGM=cG|t}S|AvP+5SvD+gfUh41N z9)Cu>loJWBjkfpyel8o)7TDM$@w7#FISZ&>EYC}EPtV!LD+;-a_0#24^9fHW^H>&| zM}H_;CcBaRR8|qm%-ZgfV=l^?64;@`q&RLBaE{mM^T6Utu9P8v6zfE}NlQ)Gi)ddu zbP@YnvKS{2;H#@MUfQXLEl5%J_ zf1@>!9khfPGU36BAM_J6=1y0E<)aJr3r(Gfueo_rqE#&nI7OCNE(rB_Wb+_<{q{a% zZbI|}ZMQh)){q8@J7D`lF%om%Xx~XIs=%uoy;P+HfE$sjt|G5)N-JuC+>}-nN8FTF zq;PXuQJvkKRuuhirlQsxVdi~p2OUa6d|%P^#HLru@Am*G@M^yYKnVc*eNTD8K9E)v zN(a)4Lg_$Sk;1K>E_vkyP;w%GlHM`IDUyZQF%P#rKi7Yf9K0+v9S$k^GeD$>5Zh%s z?ILXZgsq)p5nK6tax{X&+@|;fa$9V(ygj<5|8R1OJVQ^xmtfA@lbhq?)+?{>;M?@i zPn@>PK9qIZazi_e>N9=qB3cb;egCA304QKXu1Nllw0}*&9r$fr|70)c@7QxgJZJqw zO3c1a(|9k=b8m{8LBz&XCgRbzo)3DVy9WZ6OWd|j^wrco!1V;Z_&LmN@dGGk8e}{SZ+)Fvd^rOx%dcubM!;0 z19q>=S$CZ@Pc83Hdz&(RIkzKKqo&RNEZ+oR&s^rd?}L_yHB!}Js>g@*Pc50`tG17% z7i*}GbVTHDfLRz3$s76Sr6ba!c^+N--JKp2v&kRfu9|Q-XcY$>#B4(-G{Q%$Hq__Yk z{!Cw(_ViE_XGrs*gULgdGp$>sRq#gDLu{*_k@SQVM$@v{y>tCPCAF5PHlrp~sPk_M9G%b2Fp;RyWrq@vB5k zc<&^{d@PwX$5epo*65&pYSMiNgVWy+LB$P&WqCd%27id1oV29*hY)Z2as=C-)BYA0 zxaIxgEc&JF3(JUcrhz-&?yxSjU`_HT7&$tT_nANG=~MvglhElOf;3g`<;!jpepS)` z6imgOIyaYbm$KtD$6RTl>38P`QvP#C7i1JbP&7F&fJL5S3o=}*Ku-=C?o`O%+jWJJ zkc};W@9t~kQILVkqtW&KLzhFw9B3v^bkn!DgR}|MS628=wx7WEmfpFF0nn~@?qSG} zc3o+`a|uI!^y=&4=hJ&fDL+Q!_}U6nI6W7(@FL z0XpbvtjBdFmK>uicFqpW&I$IHijvp8V=ft&drg}p#|cLnBy%e7M%=u!Vg%c??l(thEXS2Ot8<~1r}YUe3fL^gPXB15qsFP!f*>KxZ!rS 
zbks#hRo_t;9aX!Wtm~qq>UB58Lv$3E@>6VQ&$Xk$KGlnZoO3$nMBbj*IdcGpb+djo z#w}gOP2j*qjV-3jsXYTS>^r(k`TL`rtV(xFQ@$X^bH@jKtu}vuSHG)6 z)BCU0b`W*oQg|`fj0k#9OH;0~=DBOiC%^J9_7v@*>5F}@+e-Ndeba3%O}U4e=dLNY zMdu&%P5Byq{z2cgx0L@!k&`ODEls&)mglZ17boZckr(5=H2r^?I}<27t191fzxzSL z4G?lOQV_UF2&q8kK!o5_sU)O`sl-$ToXJg9B`K&>6*VLzfJS5z2W(MMY;8rQJ5N~- z=rf9nkI=MD+ji4WSG!!g%tN2;(7o)|<rSbfYM6`FUq@|alu&ERQrwxj$ipgvLG28}kxvY~GYChwpLTMa zBjLy=2H#0Y;}rDHynJfUDWApJ0zPLnoINCoy~0dP9a5tlJsI$KdN^x zM`wWK@{p20BB9MQKAJY(i%vgAk08t!OcJ#W@ETL39mN)6UKg{njXbeRTix$F){m0n z)3&dAh$G+ppwc!>vqTlsjM1qh8;3^ff7RNteq$TCa!RZ@>5s30>%s{2yK5933R1LK z*1kIUm0A``T|sR@d#H|L{+rJP(I~#ifCp8`!2?+O5etOUVgsg}0H&g)-3ju4;b7@H4Ggcj_Nm=9Y8vQ~9NRTd>aQ;j*C~>C9 zQ(3)Hp`r3BYAI0l_f`+qHcp3NIQSa!OagZpU&B3)El$-~HlyX%01V?|!GoSejaG(0 z2@6UduuM7wiCK5`(`jXyHCz^(#?u+Rfp8M_Lk4O!0@OA!X3=xyW(3n}Y6yX0H2M#E`jiO>UZ zU?(35+5>b+`l_Pv{UMOJ*pP@AzcriQavn$&pBox{=J0#afX4QsC{?U}LmEah_|{;2=b0q+trTqB zYr{|?{V?ahomN_E;7%#$cNXxZ zU(kqu(j<(H82=v=m-EG|J3YUj96vJHodvqwAN9qbK?_wa{s@b&=UfsK=6S*kkmw`u zGa-ZX9V)Ik4T!VG<8JxQ%46*vvQ$>J-W0P@7WRR@fq|tFt(voP|~Qkj~z_bexJHa?kA@a@G{so3B@6R(;GG z;RJTlI6anhz)M(l@IWRyaUMfok1Ezq8ASu(Hsc8tiG9_#w+9Ds%I=ghA6XRLs{N4hr<#uDD zR&KY4LSOwTM{+_$hD<@XgQh;?7}LTLCDayGIT&aW3fqNnppXJh+D*B#b38^aHE`J- z0HtLRaJXWmmPsg4NGM1{WR1^Qqcb(`0IJA6P*eGMcDeMF!#uT|zr6tCf7D>sMw4wR zH*S0XNY6MrOuKZLnySbqE4<$*Eh+?%j9xEoQfsx^W2Bc(P9Eat4RJyTig+Y8gC zm!9?`b#^&_r;gO)MI*;XqHb@<=Fs-6r{R`d?n^42oTt5&?S1>LJ0_MF?khXmTZTwi zlzA;P7R0Pydd1TAEsK$MH&NMwa~KqBu@3wwd#JeDU!`OYUW6XgWHB3i`luGE)#J1} zA(^s&kVLnB1S_>9$nB1Q3mLxv4Yf-%Tg^EvHD@fV)uDWe!3 zcJ?P)xaWYh4G z1NortNb_u3Ie)jL!+jZT9qA}vTZy6D>sUQ^I_L&(&yfyRu4pC%&VQ=>P)7v@h@;_J z(_F)jG&0s}H^Eqk)g^G0@*uXyLYmDIaV`evj!%5Dp_eLTTtm62hEHP_PLoimiu52U z%AskX@ZFL_`wfHTa{k@|{;|C$N}(-&5;xGtZrIEfXA zX>l8_m&11lh?O{a8If%~|D38AV56b0T@ z1M69#kpM@ah#5C>nuIUa#bol+17y>I?EQr^&MIv($YzxD4~pW})*$1Waa(A{0A{vP zRwC%tt>MPBW83~qr8V>4HL_fN$x3o&sJJgPnCFAqmkyg}W;y?`c-fU`QI+;717^QF)+7<&s>w%SEUcP@ zLA95-kJwn3<3Afvm@SIF*?{s@E66PB#-PBnMdafHM 
zNN2m#aW&SJUi>XlNd^6|Wr;EN6x{-$kao0%IT<#J)fVRbl*H;foAX>+3JZ;wf<_}k zGcC}(kVWPxkSqw7&x}nZY*HB04CgWfgG9WX%M70pUJg4OKd+pBPBi#$=qO4brfMuz zn1UB0=<;K@)<>Hky5x}v+nq|Cx|%^i3P+TJf%N0EtI_tmapFu{Kg zC{d4Bs{PbhA`-zx^k#mDHSB(^6GeC2_VYjxC`cbc(C2ihKIux_+;aW}ovH^5^y{op z33nivwzebj^ynTVdg*F*B2e$^@9hnCt645+HwlsLFwT?H)T(-b`^INI!3 zf1zqdP^F{B*xFT-qIF$IJ7r)yR57@!+T>{2iTeeuQ~E<-gJSdjJyf8kHCOK zTbyY&_A1SM6(A)W#moAHTY{RnuYg7n6@^^|H2$T8-5T4G(qrH2E1Z6bt;}OoUOX*q&OAo_tHSA1N-J&D`Q`j6IBClQM&&fhDnZ8&rAn*> z2CHr*8Mh{r=QAMhoEiq4&wx)AaPw1#1d3Zw&i}UHbx3G=LfJ^pRFqX=w*t>u(G5FI2Ba z;JUD!|2+@s)z}Tq$^J4ID;l^Nl`_fLFi4V;Vaov^Fn>y{1RBNxR zW>N*oISln%<+ZJCfGy#;j{5-3->`~=iG^T-wMIf-2qwNRLcZK~;-Yf?j|JifE{^vg z*Y@q=meS)flhYw+M1(um4#-7}%d6z~n?8IIvcU+uk2f*OiBUexO1FDZ^W>jCrG0mwGLdpUGLA{DHS$V~!mnjy} zNqA+=Mr>sbKpB;bR@U$xDHm58>K4-+LSo|iQrPj%j~w(SFA6pdwYbrZcK7cYcSpU~ zmbd0fl-ssyLTU3CGZTYFVX&B)j!PH>_t3@V{CkqpxQD_eS^L1yIM-R*I&_<2j;#Oz zbnAC798AB8069d^sbV#nsudS&9wVrGy+f+Kt6ubmOV_Rmq1}%;?I@PNDZzX(FauST z!xsbd_aukk?>l@+Isbtu_TDC#wL@|`L`PX2Z7r*`*Abc@Bs330OPs11n_^LbMa>m{*rS3Bkk_n3KbO~p!)r; zO1DnZ!aXkaoe$;BXOx54_YCUO_jD?#kIaEpZmVIgJ zE?ht!VjMNOCBP4akz|$t|Bpm6M6|(3mzMK?mg0LkE+^QIQJd+w_`V1+F_=Z0$ZJVk z*Vmj)>!vDg@=|7Dpg8GLX8C8SkH;KK%gXsr3t3Ccr%io4&{w?yt_LsO3k^x#)@*@8 zyYm=1E7e{CU3@tFB9bazGm97;k?2Ah;OYmi?);bXKinxDKP}LA`@j1dmzVRONd`Tk zH4au(RKj`vNqAFSNV1$F;R3sbWLwPCQghRWSVm!Dj8b$rpbNFbfk&I4%5?R>X{|dh zX8-sA@gr4Snp#@@S?RlJcwor9c=+J419A1Kro85?QU7w*%&MZaSHpjG;ytpH}AnickG1u*}w!a3&!Rc|FV(hAvPsd_{SMEeZC zDGaP{1Udi_j21g*rcqZiDlb}h7!J-#M*VeRVQc9ZzWb|a56#n-A4pJL5mc?Z;kVdb z4JvKqXF*Y8o&8kqbzX|mj4>$Y1PR+}MYT03323$y4qJEU&pI2ID++{WQh*@@Siyesc@}M2)Itx3Ewwlp+A)c_+jCL5&P&)27zLaae z3>-ggT3+3FW7V6i^k1~tTe~+#?Kdsla2#%%>R#>U)vfN{mJPy4#73YcIITt3yJbqC zB>Q7=B@y~q-Mt+0N1O$3G|pscaePtli7<*zT6eOw9K3{pOF;ljjOe@+1iT>woo$BB z%gXs(S=QRJ8WLN%D$v-vzJWdR(VzSvHCwcoF#%&(vmy5DGA6hyyKqtI$n)KtXvt^5H`sUJY?q68j(-5A5qY(VLd<`Rj)Oy=S|tHbAoH}dE7*0 z*?Hz#H2;g1BJ;E96ej#XkKuU3>MMet=P}&9*_?ArKXvq6&UH_QusNlzmSBF%fSaLo*G!` 
z<>2G(85J3BcYIvIWk^OCzJh=z;GedOR&&@`k%E&s|6y)ZFx)X#X%$dj6&*?eAf&%^ zGnP~{qsU=F4`CsFFuX_uR{$hnMBDTVfP7~*e_`qS4&+*{8{jD|-|$V_%5COI|4`hb zjzJ=_hIBF);781YY_g z20oI_t&}ddfnQwCKbE2EC}>Pq1R8jv1Nve{;>XdXdod$@ETi`5ZO-u8xUrGc!$Tv{ zG$hf~fWY?tag^}t;IW6R$nN#+j>1S@s^k27W3deo0U`?0HbDHi1nGDDsH`jJk4Q26 zn!MIYYIX$gbdT8tDej1pY)dFQO`r&}4YfiVh@$F7f>sgyoJj%dSRYU?QMbNtP$?wQ z(_<=zx!dfn8*dUOOOFIi46v#vbR8))5OXIWj);?}Iih*gpH8$H_Jby`qJx^i7T6tu zKbRO)hmN|BtLZE$!8mHpMZDeaNeo9=;@pjZx>d+&Zc3<5HmMa7r05T3*dY@Wqgn^{ z!Ee-~*Ma>@yC9wVf5k?+UYu_KkPV@G5h zj~+Y1odKorG(Z0#iUScT_7%oRd(dzm;{`(qBoQ-A&yy*K4~%dmCBMjMsn!%+|7q03 zGat=Eedw7PVBb@UbIooIM7WnhcYGi@hGF5b-?K^0qsG{b1U!cJ_-MiB@(y`}Nsu>U33C zmXl_EIsd%&>#Htt1Fn%G$BsxNUxZ{y`Ht1`%JySNL=;21Kxw5+VK^Ur7mQ2(;`pRp zXhJRfS+)BhQF4Ra4ij57_JNv+4xN ztBY%2&)Pq)wL4qfKxY<-g7b9k80Jqq;1ee-ZeS#S9QBqBjP%8fCcPffv9X+gIm<34 zqNBS;uR7c(r7zKSmHNx5nu7{GSsL^}c#GDF2aY91b5FOz-m<286PyIDINrEcgAwW|bJPZP-;T25_wj8pL@Oi+LiuU_rR^+EmWJA{$kAUVgWI z!ocweM`d6W1M*H(1~xI^SF#0*O1BvCca-zLk&Q8E`xb`W7AUwnK6;?9AK+PN!+Olb z{4E_wI^W?sKJ8owurdAlL1=dX+uvmK7RK1dtIPS_{S(-P3LLP#pyM{*4GT{r(8VCCJz?K@EUtvew8^Fnp zSwX_3H}ex0*FokGTH1A5(PmZz%&lSbHnXCqvrCqj-sQ*cCET1z3AxLh(qSQF&W-P` zp8Py5Q(m2(ZXzQ^v$kF3d?LK7D+;EBdi}i^qwCa{rC?*|IzP1`o$kU{6;$o zn-omGS^qSgiAw9H&0P2Yu16z{ngHDbpnwvUye$Cw&1}wj@n)y9oPS&HI&OBvay#Ic zh}X7i@7^jm4MA!cpH0rJh6n^$Uii__E=3@tAR(+9h{MJt{HHEVl+Kj9`X$1ZUL?xu z?b|nGy`}iva52>h_y8Cc@=n12b~blj=?=&9)^h$`PJYXQdgv)tdve;SwDPF=&OM42 zv_D-Sn~n$B`h|OnSwddeI-6G)h~x03TV5tmq3o4!JpSTM3k#G zZTyDHX3NGN=g%rZSA0!P;-nTg>BG&M7SJ+?&*rP+Wv#AWa_QC^ZXvvLQ6RRH-Wg1% zq4A{SlHCl_#w*<%0J$ZB6aWpp&E4Bz0a5r&DEEVdF|ItS=`}UrrQj5-MV!7Aoc=&K zU13stdpZBH)PVVhQP|sFQv(G31otc38IM<@7OF!Ie%a@Su+My~U zgSBhV9V`pLV-Va9mi6;&+RS+Ec@3SCGGdar_B1X(I#Q*#y5n6+tMV*ryuRj6l?vTq zKkiSSEp9kH-BFBQ=XeVtIb1tfGK)T2Bz^yo0&S9qye1IL#;`01L+ep=1g<7J5s zzuBsJ*Ws^8=5P&b{$*CVu=F}3?rZ6zB*o!IBW@^9Rl`OV-)odv zTWz}LS-B?gR{KG{Hq@bDc?dbF=jJqEJNJo~Me=n{oA}wafCmIoL%0_3{#_b^pS_or z^Z%6phXOBc9#$%6QoR-2Ot~*(JYI?Zf|oJge`d4KEv>fYzPy~@Rzw3Qjkz{lv$fDX 
zWGRxqxS3#1mh%zKVhPZ5-%T4D@CVM@<0+VuZw$cv3UCHepl0h&3 zG+u0`2O&4g8hdth1|Jk|ie^S5DHa4Ue+prZal@Q*b7-W+?PPI)KPT|=oh`VAN5B86MoLL8nX<8Sw*MtW&;!Q~*SbNumYeXw(m8_Vp`~QjSJ2;7!d!hNDz# zDM-;n4ofkgw?wFb!*LEZNW1pXhlHzLtdUhj6=oM}{9q9kCR8!#;Vz_z>3Ivt+NPGh z)VAe~nA(1!7=|r>qxnhO)|9=iqZ#2P>93f-@Zv+Dy-U?T2pA9#9LJA0?&X?HpQM3J&V zzYKVy4hPmn17XFZYeqsn3qCpT)?!(_hKUS12KKZHNxWM|>sN zwR?!g@3RZ5V;{1Rq#jDK^y=^0b!L!3`qgi^5GHlXZ|I|5vn1rFrRxj=azb2~Z_|E~ zltvmck_ac-mU~$gU`Hz6%c4FjDjqel(_hXXEoQ5!hT9gmH$(t)Wd%X0(!^3W+=}ER zMFdsBgLhK)S52S<<;Mj=N3`3u`h@3xKw{QQf+Xw*q(_U(Rt0nS`f~mysb{hp()jFY zB912dyPnZ_A*vtOGuoGG>c$zL?QXbAAYli3msA2I@AP<0=CnbQcjh&Wk$Iu?O1w|5HrUciKDEm~uKKH_pECj{+3v9^8IqE`J--k; zkfD2Oaq~kU5A?+C7y@~Jr|qy3?0Gg^&i_Hk`*|bjKMV3A=^LGioj*767q=y`F}XGv z$nZtlrmCfx%9J5$)CyE@))lZwt0UrIo*`y2Mf6ABy&*$!lFu^Z!dRx_4V?T53(jvi*?<+U0Jbp~h4%P%YvS7kI97YxI~o%j>~4 zrcua-aOhq3)~cB7ul5*&m+uodLCkvQuEXm0dg|7VAPkg5TN<75 z5KRzwzDWkgjc=`^wKm#g_XieRr6>T_9Z5?Py+MM83x#x(FsiIM#a7CkOUzfk*l@iq zf1{z(kmtARh}L-WlF<_E8X1(0qB^kf#NI|&JFAP#XoR(YQ_Ls?td4TUBi&H}tIC&_ z%7<~_(GW>!b_O@UGh>(xOdZ1(hf=iO>a2QK{7%M_;sohTMR_+0kbn_2w^4xnwlp_4 z+{d`!iH3U|BGO+o?;T?#ejGKYF-H2XG^Zt|WQ>z(rIF5OwVoCJY*A(7jK$C6SmTWK zJ&hIg$^GU0pNjZ-(I>kh@@~@aE60yMaQx^ej~{*K@uMF)e)PTe?0-b%sMAmrG^O+Ek+ z%oEwx0f6|4*w&-2)ZA3ge_G@^{bPZ%_Y956>u!~+fC^-6^1?Y>_VnEp-2tubn>qpw zOnBDJe*}QOw%Q$ts@{kCJ~3vgGtzUID|&0XA~da?o4UC4cGzkW*Eg{?ppO^pH?g*# z7P(j-Fa#eYM@Wb4H6VBb~;bn!491eTxaFop7MhsyacP1*naQm@$2E1B}5ViCz*+k4csTPB;qer-`b4{&DAlgsNLPFN}UMk5DNqJ1wmUs z#KL}2yy)W6CtdE{T+V+bHvXXNRcU3lFyjC@`co&heOOhx5-}11yPCRb2ZUR3Wj(6) zDkYlx7dI-x8c#BlbROr+DeuhF>f9M%#t2c(#7S9vvaeeJg;SriU8gVA3}IJnpQ7A{e+*UamEGNeqzgKuF} zUW}3Vw=n9z7tfwi`iAe*t>yebisZ&WsZiIGjmvlPW8;nEejHNTo(ki)?&v0X&zCC| zg%$e;C1aAENcpf3lo(QZ*&#xRdi#3lG=;@(HDKN6T2q~w!L~p6sdsFyR}i6X+(pF1 z+WuoP`OZh|?-(3VjjQ^*VFjy0$WccY*)$=_G-9WYTEJ{vhf!enc2TFiL8|+#HJvO{ zRBn+PkG~bHgV)*NbleKo|D)J)e(4Ekudn60DTfDr2A=^jCuT=u%c_V3@Gdeo>jeUt ziyc7g+J`z5GC*AuNHU#N8Ly$HRHq#@=Zu@;hw6&Dn}UYe3A^k&QS~u?R)dLRd*q)e 
zC^jCRM6j=Akt`$fw%4-A+j4lDM>M{U%c-1%Q@SCIj8iT-^lkAHIl2xGmF+0a*lQWj zb?)G}`_6v5tc+V_;}B&V#c^%Cw#L)nN*~f1*L@w3F?keruLH6>@-wHE{w%ZV0f%{f zcb;9^a<6ISf$YOHZM{RD0iQ^|3E!PT|Dw-syzQ2*EC$&)5(88wcvr8Qou@&xPt9>A zsF@0ZrRHAWAjdC?zSfTINCM75e=VdXjY z!-|)aPG#)uS*6p6nmkOIXJKeb`V0o|&X=#2S)-Dyujl!jaxNwgJAZE`;Yu4xcuEIEzCNp{W;pNLeF3?gu;OUbE1TNw0Hp@p32n0YF6~MP(>A<9#RZdB-cQ7 zDdXlaA%ni)bjueq$IPJgD>@B^bzM zn??wgy0obY=eG&x;B;!h`EA1aTXQPH-)L0&1|I*3`0TFduIo7$SO&MTkXXU6UzWPJ zwJ^_(s~<7k3A|9gJD!|32qfk_H-Mzd#C(P;(z-+^=XRdIBS+H zCtw>JBFAY6ey>(G;vN#hC*(Lur#a#1;4X@Gx}61?2+jm<*HQp=LHf*6-jSo@y^Fqp zEaeWK-_KH8?(|HJ`VyN6C6&Ict%N^YX|o*8b=do4iieYoi_}i76H8=@km$)s*Hl)M z&<$cD$PTaZ>4cne$t6Af16>OPPnFc%(iIZafzXQRB-g3N55T5Fc5+}_cWl${1b1!< zsEj`jZ~n8^^}bK;5|c z9iqYeb0ToQ<*evVoWooZ>n( zck%drIf^{7hfVrIlV+NRyh}s!&Y5AzyENqca+LUgcb;-L&p#l2hC7JnLAOE5h=`Gz z2Hz13o^aOUD(KlB)R~$#-gq31b;}DzML*9xqY&0OmM=`&=AfLSE*-(9bY$4U?!cF{ zb^crKh}fV3DeEwcTJM7nR(=79Vt4ppn3 z&xpgNb{g*C@k6o_&X@nAQ5c$F#i>a%fwxVljF zs8+w5+J{67oKQs3nd6%@B=59@!}TUXcqAv#o~RNX*f;b1V>zi=Hv{4Sld^W&fd(rq zt^dse7GUNFV5!Bf&*W;o)?j@Lk3W%fOFATMVs<@C1Ppi4f57ZyN9J1u9{|h<@V!Ow zeIh67>y3`gxAOecvKtO*ziC<1cZkC2=vXHW(y4(sXnFw}j*HXZ!5JIRYQp7P1uL*b zh5fC9_0v+=r>MK$X`jojTeEs>Q6v6QQPpqRU~dY&n5Hu5=kiGl7?cmE*+6Ic2c~~C zpL&jepi$ z3`0Rc@+0XT&*HQ-?O5J-CXv^I?tp4fyI7KLjhgB4Szz|df5bIWky-7&}8-aY$r{mv$%RX^agIJ5Ap`9Hsp{^uZL_p zkTIu|oC=3Okxz2SrUTg%f^0XCQ8fHXYF5!oraL%NP9(dv?s;5W**i8iJi2!I@*dR= zT()Ouh#Z(@XvWKX2Zomu1~j@{83Wq;Q3`2>E}jnCjC$B+a2T1@Pk@SzKbg;LHLT45 zwkPvTR+L(RjY8vJfl0KSg%`ZCt)s(t*TWtn|fJ_CF$8?|PPW7U?>mT|1Q+@9_l z-@QCO^WV?^VOE;l9)>8dD`ow?a|)G0YjNGGd+wh<|DMmxIUmt2H*u4-6W=*{|NK+l z#{a*}|F`pYnRZSR>~k_kIelF#qs(EH8J&C=FDvTaEu?qtt@7bQ>2#QnyAbCT8DsLg z9>(ZmjIG|B#u!)dF-no3bX>R~h~qA!ohAHdXXOPi2+qhWD?4{%hv_R0uD`sq0rbiJ zqix0k)?RzhLvvR$-O|YIM6up|z`ch}vvEy;@9FIi&7DjkpD{GweGfb|H&+MZ4?Hxt z@LwL9dzueC{|kBP%n0HJ727LQ%5gTVX}{{zHTcx zc@aQfi4kD0qrZ$Ozr7h3p{cz!w7G+V=4g$~=tyzVOWmP&9&fSD|#K?I!n@*NHJ0$(*%n-ZDy{)zIcNzs&Z5 zUy2p2dl3LG2f#~S(jVKH$|LKs7wMWXPUoB4fUz^;#2RkxPN-%r!>xI)R1@ERR`SVe z0^8}8Qa$% 
zeNT_{F+MsyTOA&KUyt-LJ}PA^!lVDGNBVe%wQUB*DcPB?RoAxA4SNc`Im0JloIX@#i^VU+iG{rvt+k?3+isoAt!!;ETIJQcy~^P^A}r=D*8K11%%Ld&&Dq(sTLLtN z$q|~fi<`n+g~*fFr5@C8aGvX!#n(2rHhb9A>>65JU zBhaWJ;&eJcIy4#3%*bZmTB7Bq9!6;L8O1jg!t9xYGczG4n{43BOvuR`oS7geH1s@2 zbf+B^D$bc~y>fGu>0(PZV#HKP#H4TRmpnZ;5L3a4Ngv~*^8zsyCK)m5V|+9>TV)he zm}JDHkMYs@ftU)DjF|LM#6*wKa(+QJ&k@Ea`UzfuF)4(3H_;2G%#WWy*aZtOGKBFA zgjKS6PE7h3x35whGJNSf+rD`LU;4uEH80>xU)c7|3&d1lhw=s4!U#J=Vn1>6;v7iiQ}k!lsKTWxLCx?r^`T0sG&MT>&J@0sO!?zP(A``7!2A97~SoO9;P znX}BybM1H_@Wm^BpS9vf!*VJgV?}$q*4ga!_BHk>mXer}G}=r|%CL$*GNMPbUTgm{ zdtD;m$f64iyAM5UMq}H|dG$3@n`iZy(cDxwdq$5rEzNDs_4B67Kc-gCXqa6+qq)bl zhSsSqjdR+XTLyW5Mqe-NWCa+#H6@x)}?s7$!5{ZC~_(%6yq zr~CfH%Gkp#(=@r?HeY7EH2<#lj-dvNgvVVx;^Oja8)mmP&#o944%gPTHdOcP8*Z3A zwRu{@P|HfSd^E}w!(7luU~;DJWdzzG4H^U%msmG4!7^Z*uQ6VfKR-NbdbqWrttvdD zaehN&R3Ms zd0bD{#~COA)RY(|dt6Vp4BY}-%@!a?9jDoLgQR}j{|!sHCHlj8T_fMH#4duqpCtWO zO9}||0ZRG+A?deTQh*@;t(FuZ$bYLP^>cKdWV}X90}v(C z`D$+DZ|C7L*sKlV=4jUoN~7nFJ{7rRgoT%xq5t2oT#LE24zn(q2&-USa2Ue8?(8PL#ro-8*YP(WinFRWA=GH3{|>0vbq z0+`p$w_Z4V$j#n(ShaaGSB}EZO^m;pD@Oqk<8S7$s$Q_FfbGAHCv1&AbYaiPZ9GwK z(E#ak6Ib>R2s{AF1q2B4n>2ZVAis$#`v(Z}n`nZx*#oq?n>m+L0R&0_H6>ElX09Ah z1lMQ-PbK_Ph}>0q1I0U)pgDA@so~cxjo{6FNw;{rB*MUqc7QaXL73acJT_Jh4!)OClZI zt91Yf>;Ou30HK3>wGIG+-Mv}|0Kx8F(t$&Uo z@bt!}hHza=LwG?;-JCfM)549jBjLJmYvb%0O$}DKd0tyXiyRzYfDBFyrIcoNcy`^a zhMI7A+{{L7<2iGh8Y#_b4%asW>k>-sdRgJ}hPm?^=hrnM=b}Cmq`7HYxVXByd3M9( zYwG6LO(p?~!_9Nr8j+QS>t;_2N5tsf;qnDD8(JVkU0BIl*R-H+A(*t(&2DX~Ym@x6 ztWwCTaQ(ct@WSSK;aPPH!wc#JUn|^j9pszZ*w(Z#JauMsb8CaA6ETkp#4NH5sL-@W zkP=-O3-GbG0JK>?=C*VNz{Uc6%qzNEcN<~>uv-puJ}Cb-Bl^tbF40S`YB!+4XwudF z#lIu3zHK7pT3RGJpH z+==!lC(mS8o$xr&o;%@jppbWhXK3Zno;$%SWCRrv0ASS+#SlRdVE@6^1vCK@8OKS^ ze~3ov&d)r_16kGxlEV}U#&^;8>dqikqn} z8UxxXo{{OI0X`l5pZZae)7;E((a6i4=2_Xo=UKEY%M3n93F^bt0qRBb!BH(URH_VI zFkWWlNyh>Zb6#e2(~CxqV42}A8aaYx1{UojOKKIgR<|#kI*kok?Q!sf|7t@e5*GJ^ z!)inMXMo_a+6Wa{Z$Ld*DUjUUUeqw2M>cyTsJ7T_C}#*9kl1XvmK6Yr&4z1P0SLL- z6EZ*)Nk|F+sIP3(F}Pl}L?{EGRt8ldj^Yy`WH54!4C&(mf{|mCcUPKB6tpS_YY>ba 
zqu(IK2nEU(PwP~m0Mv|AQ=pHcnp%kV26>BuXK_O20M&GSv6Ta84Pw9;a<8I^miDQJ*|jXniSi-J9@=6 zH(YtQM?MAQ@Ak;2fc)Kt3WF9;0r|U)61_etAb+>fRfa+Ig8bb^uQRM$rTD3~`9SoH zYi}F;fJZ+S^dInyGZpk7Fl_C!QbGR#T8B|Wz%<)@DC%oEf5<}~fiw_!$Rm&j0uLFs zOfW2-1_BQmW!!n=^(MkaD!qxNbE6+O01&dBPY*Frjgj;e>8ZS-0w+9cm8N3r%EPCG%yy#Kmn#8a3tIQKN|_RnoN+P z9rI@+v6J<-4--ljNzlReX|3<_D-Y@^p%r2t)cXJ+1|2jqWPY#47|8D_=mW$)IB2NQ z1Q7e+pn=eIm~@^@Y1N_V*7;vvdB~$rN%SF)KIH|6jEo+NAms&zjNVd%AOQM@jJ_jv zFX$gKhK{g~Ncx!TvFP6y9J}(EN1uYwF^@jUa7??2WRV*jGYaK`0R;VHMyRKvPch+` z(Y=RthhNAKVOF>ATUcxiT8* z1>nkyqZsrl8$~bZY%rCL0))I9OxWl|;*F`@Vn+AeKK`O)A4I;zRQ3r7!vISA1PJ!G zn6OVWvlLY2X2MZrX5=|XD#w`cYG5KfnQrM@?J$7l0t~sEN}dsZuihN6kW+#G)4}ebg-PrBupn z|6?Zqt*cTJ=P@&oijzH>H6xXFz|8Wa2UnIA<1d*wiXO<;VIpy?P^cLCWVC8kW$}}- zXp~<*8H)yZUv%NB%5M8&(P*ao2<<54R4Fux119RU4^!G-Syktc$jMMejApq(fdpM1 z@N`M(`vEf}Tg{v{=m9fV>&1W~4w#+0>s~10fQf^`e~^bsp|brWllRJhhlNMcVKhp% z>5UDj#HKaOX=#XV-}vg)wB3NCxl?NYk;k33+efBKQ$P>gKQfj70SN9NnackF1ow|j z_@AMIIVc^BzIgMH$&^VS^jJ_yeb8e;ne;(#f|N-gG;^gxO5v1AA2jo&#|8)%2hFnX z7Ws`7vYF4N-@xDC^0~*wFZz_-rD-R`RHDyBeWOn3nWT!D0F*NU2*FNh z513~A|7|9$_sKv@soB3xoKH-FU}+@SDKnaOTiuXTS|l`r&ndlz05Rm0nIX>u0Yaox zW-w$OBr{B-y}Tm&@NJ)6vBIYe4Q-&X!l&X8K*I4ULjwp3D}2h(0Ky7Z_>`dmgcYpt z!O$q;q+uCWM!(opRK3!tLR6YqhFB1@6(H$4jke{Q=x?{2KMa3D)oVPd$fDNxl+~aY zQmyf+Xaf*Zt?{X7LkaD==oV z>;_+Ap7jkWDxGv4jb7{gdty|Jgf{SrYDEFWkf>G^K!_C8iUJ5lMYW;;LQzpt6wN!G zX0sXdzI4!Lk35}-Zq_pf3gPG(0|fcadd8H0Zxu66rx|b6GX{iF0OgDUV#Ztbj5BQi z?Y@NnQ8T8T`*t6)xOeE}B!gyrfBVs`@3D#ZYnjjpM)&I}1H_p7^^^fZru%)WRsx7A z-|thk5{2yyo683V+O_v#q~#EkdSjE|8KWl~1*jF0b+uG?{R_%psht_b#MCeFX} znFS))3m9C`kn1&I7Qgf13ZN9xk1&AnY;@lpU5lUfnOQCxMZaf#opP=I3JvgcQQw_s zMxOJTX)YRVv*&!7wus7^pyhww$6xTtVz1P~f{ z-ltBj073&V`24b-1qcni;L9vc7K@%qj9(J_8-Ie!OCA^6-!JK9M=!X%q?a8axV)s7 zol3n|$=ds7lC{68*B%f?0hDVG5bR#1wO?nz$=kO7EnmW+_7Cn_%d+0`;p*@e%A9Pv zTRY(6AG9yNC!HB{=Uo@waBsH={w-E#kiY=l13rrshF~Ty+1~e=#X_S3#$?bSZ@@$Y zWPIP3DwjqyfW-U0>_Tq{ru4oKhw+*w5dP*fOC&-BOlTd@;t?g!{5P5&-R2QdT8qE= zP-wdiAo@2S&Yvl*vB|DJ^l^Fo4+!)C%5?<@jeqD%&Jr04K+yWomo4)bJ4cxIhdv~B 
zx0CVOv>=~EU%YR7dTY|A^^o$ zT!BdqVd!T*bwz873J0L3L@C#2o=}u>eddd(a}8jF)MuWuf&332m**xnr_>6dCPk^$ zKRlr*wfcuIQ?CrAR{tPdkV0W$KKH5HEnB8f9;H5X8u_^=mB^obw!Gc~Hb{N$!}Zp? z3^t#I#KxEKb+psYcQU>r=e|4>i<-R4&^q7i(z`ISKcgpAw#*)p>S z2*TU^xQ3$YD2q1Dc0d2hFRLTKz~1i17FiL%9sx`|9`~D7`Cmsr{q&n|UULVMzfA6tan31I*}>NmT)2((Mk;7ytK z$@AzD>7+k;$1@8jpVStMMp*1g ze{zm+VF1DFq`#BAVgv|IC;hl$B#X@^i~Y{e*9D}-0tWVXekXLjdm<7AQg>yfSJ&w{YajSVCkmD+6j*(Kxc0l>zmx zLo|TU$^abvdwTw8SQ~xzx%UUJjip3(vNo0y*~!{~ddxwT$fvFi;27b9hq&eR9M6i{ zzu%w1cyW<4cst8WG|H34n7Ey6Pk!N}%(Mn8xpiJWJt?biXladJ`et#`D}O8vv-0HL zVW;j2mgAh-&N^pbXl2fwSJ%|ows3OGyrzc9^>`dc$W^ZjE+nK^R0}y z5v_BZXh6ju)PS^@0b}Pj{RjTk=$^Mq!>|1W_V9+Lf&DFC!>qbSK{EA^3Q06Y5C8Qc zHYV!4-Oo6Fm)ZWuw?oVt*w6AewoabY*chGib~yi=yG%?lag49A^^(TMfq2?Rb$;X2 zy0+-mZx?5MtGeCRhH23w?^IY{eb4B9T|&Yu?M3fiWU;H-=X^53S2zq8NS1Ak<0bk1 z!ZEt63fE1W=JdLYDyUDvwn)Y`q-p!6CUoG00pvl(Hm3S=#c{UJB>K~% zUwr;$!E~NMl&A^9Psib2Z_5M(^qJ9r9qL;!llv3`Qhj4)Vv;pWAON@~I`VLH;WeD@ zOo;+=ZQ~joN3B5m1`wjmioSk$Wx*^?Z;}YZ0LCm-^0Ng3rZp$p^2MCOIoz)hOd!lj zvaGd|1g6>+{o#xC1#LWCVfcX2W@lN01qqncg6REUo>jO2h9!CUfv^DQY_CcZ0ozy< zu3y9Z-|t3zsplgG(oBxL91lqE8%MMaU&|b-03Yaac~BKwiPpGp%ZYi>tv^ zEZT6aw7aB5w;HP$Y({~=Ud3=b@*7DSSA93J=xASC~8YE!_D1# z*1qbiwX8zYqeG_kj3+DdFm!-v8CF!1$3@-7X#eAzLL0r=!Jy5JEW;L;SfGikxLc!# zkG~qal|fmOY&vjmWtmyl>yj@n+cveo_0PYs&?b*r29P!pv%63Lfe`M~wzR)^qLc-< zcr(ZZ(iWP*e1U@-vtLE6Z?6mf3YMps*+BUf%gMDC2^8FfZI9mZ?c(5e#*-9E7Erdc zlr-x+fr7iPJEH^tJvw-&$0!>pcM>C9Q9_s;+q@@w*T2sX-s1_J1C)D6;2r{{6J3KQ zpX?QUKs~3#uuedEfTgBe*9w$ey6BvEa#rvmPk*^Ud5H8kSD@t4;laU^t-(ha7ex@L zs+x&I(H{Xp9{@PoFe4zZA7NJ(4 zbipBawCej2p~pN%U4Zf!F9XL{o;X3EJWb+E6DT1%I@o`DdgxgOcO%Uv1e9l4R*rR_ z6eoeOPtYl{&EId@$OZ&|&)_j6og$$8o+Tw)y#z`z-ePY$ z$od5F>f5JKih=SngDXZ}2Njp#KGNyO*rMPc7#Fwf)CinE(A#7jbOJ23%|AJ>F;*Y? 
z6XW9inHqufCsq)&K9|ZV!{ch_V$Kc+@gQ68@iHL2K_U+jgu2@1U!4gC8yNg6(cP?g)MA ziCqb#Pf6?@5~&JxpYxKR-5LDcV^;;F&xzeRLZoUOk~?bx?7Ywyo|RPt=L@p3=Om#T zQ~}P>0DC@klp&hPg{=Y7QHI0+zeuDWc-`&%C6T=u!i#XN?H)k-nzXGZ*Apk%&hL}h zrr-(2#hnw>-4jSBSmy#|MZJ)tJButfAcSYuqId>QFW{VHW#!htB)8r;wRT>&*f*h5 z>XA2)dIRYc3l>>5Vs3rl`nmI2GV2ju#$CDl0BIS|&9^?3l={NwbEjVl`+I1GdeI3| zeSx)tgVZoV>I_`90x6Z%hF5bgDr+E}0i@Nuw41d{lIn-UWA1E8WxIpxxa&ds0c#!a z)LFTz{&3mO^fcBhw2^aBr+`v_;B4f1U96*$(g65u=kYZ5Rq3r9aZze~0FZ9wc8>Uv z0fWw@_r$zw{(R@}Ls-Z;l#cCy2EAp@1o|dkTxNYDX%2+3F7svjv;3O=U$F8x-$jNP`q_@ zev{1x1@S&s+|z>2P~hz3g<-3^z&RT)QJu^jRuy`fyB_Rp;5DW}}Ru!*TQ2u_Tw>!a^O~HTL1a z>fq^FR6dcD8eyAHI@O)o`@ttYq!B=Rl8~h2BXK_9l;^Xm;66{`BZ0Gz;q;r9^pNC46^F7ZtUv%E- z0`L2xwM;_KC!1{==+NGsj3TNB#i(+;L z3x4WhT?(vE39F~j<`}#paXu_zy@Q85jgJA&A=0?a7sleN8>eR}8z21AGl8+d`I1aP zy0URNA#v_0Wj#a3JcW+~&M{K><8pGB;d0SAUdnzGI?mnn?=m1A=b1U$O^>(DZ=9FQ z*qgy`xI72OjK>4%8y+sDM{58t$1#YruPeJD_?@>XmjmZJS`<063AXuzGpQTv9s0rB zCli441JCVZ^$~Qgu+8O$^GG+=H@w_%jp+&?EjM6Ho21wiap+(;tekDGScykV%Gpf> z)=HxwtVFsJw|NH7%byz_79L$U$C*2bb#mS+XH&YNS?4TQV$2!?$Aq$Sn1lqH#&p4$ z^PM~Uuwd~d%CWrxlQ3XC4UqZERpgqtRUqmL z88zIfay77`24*g|^<-Sd(F3dku)eV2y2h!_lnU0RWHQ*JUFR8-F=(@aMSe@upS{&t0dNR+^uyC>d6vWXtyChp@FSU2i9&QSZwW-3}?X8 zI`zHSL!ow$;S3Zx5JlNs!n+c>2;@2t^)rgz6&3)Ls;9i4^CkbHI z2$a1>O>ecUuCdMEI?+C?XYjY)N?rq;-+C)~Ez)4;?|l$jp7qw`THriOYeHZCfo>D- zw4AEG?EBygp1zxa^aANyX1=pcRHwc_ zdntrwJ!lRF{h0>cECE#!``Ka;&5dfSt-1xRm` zls8J1w30hsFp#YZ9`F{s6-WnYvE?plBX8Vg5Q5(a-deW-=L1^nDxunWlnyrxVl|-YjL>0E0Skb0m=v&A zN_`z=x@-vB7d&dn+a#>+bwE05AZ9fQLJKL;JurmL4t?#-Y$0&IrkSb3fJL_XFQ@S= z_GIW^hP+Gyp+!LYm(jJmMIXNcz8;?$cGM1KeOSr2o+hsc*0-cd>68}3?ZU|O%bjCq zvs`D_*{mRh2A#ky2KILb-Wt>w+2t&J+Th}YF>qP}tkXt*(AqEOxfCwg zbn0s1A(or&UR(;S2b;qE>2jM!KbY;JN`bL zg<0_$6JK-Tz5#eRgrSZeeaN`zDCPzrt}{DIMzTy+v<~&TLcD+`K-|!A z**R=1E8BoW4TZOy@HUt@Oq(m{tpM6Bs0{P#DgW*6d~+@vQG|vr3LghMgn;Qhz{G$X zG2k|5(s}H`qT4V|q1{Mmx0zU#&4xf*2{fmp+bFh$6*(pnTsfbWgy)zDfOuX<^H~MF zUv_*rnoVM1JPWK*csL!Rdk1^#5II=2PyHSWO@_O?(TT)LbjNN@Al@3b0)OYT&o4swLrVix#nV&KKIem 
z$tkQQtovx~q-g7awbOaxVu-fWL~bpy))CfD6G;;S1nVDFJbG$51XjSBpQyY z&}1A zkZj?Ffd2_I;T4>Wo{1x8oN4WK(#Np=1L!2{2{TYaPmBZUxaiqyBFzy1i0(DhJ4*m} z4|~mmA{;RBKnCU4OUJNt2GJ3hJDT2!?8A5DVgS8!*=Gi_C4kZU%-jO24Wn@k_B4(I zoFB%p<}>N|%OyZB4$ze-#?Z5or_Cf1-U!gc@~6#k8C6So>`39`{;}-D06TU#Jt%)x z52xqk&zea&Vz`ar&zgCKIGVvHHk8!AIgZs2pi?uKxp+*jr58`h&43&&9+K1O=@^|& zQK|VdHZ4L2YVK%yN&W(@5J^k#$X_sX$|V5aFPM0pl8@0ib$byDl%Gb=5ic1vO1?(4 z1R|Zjx%6@{7hpHWwjIvxi!|aatZ+S-DZ$ z!)YAPN~ZkADNeW<|2>2M#-P_mT}mI;<#Avo)t0SwigVUJmX~=E+QRCVMG%V{D2CWe ztR&n4;ldf8=j9o60aQE1d3QV;7mh<7TibT1Hd*r>Qqi(!C)5ssDqw!U5!Afy#SIA z2JHdPSHEC+&Z-G4oDwUBQ+F-PVK+NZO<;wcev-oZ&WQ;4!uh z-vHV-GUtO0?98OWU|0o)aKSfj;hctnS7Gk`f!;^X z{qPm6D%+d&==YN>SaL!_4`=ReEIX1zw@kRwHH?Jr;`Ro2x4wif^7aNdyM`}O+-;e7 zgJt;nbi5Qu!ry;?!+3oau0p7V?s=#;zb2`slN*5;t;kG3m z3#}l`C>>{}3NpYy9nZ^gQbee@Y|h}BOYrz2S%6>NWr|s57#HK(I8n6W2GTY%jc#tk z*P@|(tlNM~NMMXx3??KF_F5Lt(c<7<+Q@go z7KbHjak7n(iWy53;^esEutXtFjw=pJ6yoH#;=n27^3GbE2(Gt{T(|`(4_%Pu2E^SN z#G$*cT-=>Wap*;Q9`C%w!fhUPYH`dwV;NKOa4An`R~|0q>FmnGr97P>58V}F{$^o9 z5AU+DFwqvvk8oxLd9yG#=9(L|ka19NNSPax=0>3I3M+GC(%cBtT}8aNFexUjEkdyF zD^}*lq`47DODJB`m+&&FGeEeWD^V5&2r4TPKlC9kHKDQ+yzL>Cp${r6kIU96T#Dy1C-eg=;tQYaS#38B{mC zQkLS7L6!3gEe;t}L>hRxajldG?+%4H6|OkSqAFZ*ltopz;=rORc{Q0kvBdjBqY~+k zf*?+13F%SvL7d7yeU&(5P?0KLeZAOk80o5~%9y0YQ3h4*ilYpwx_pd_A+%FHc99#v z9?P`S@FJjLMsv%;p9o%c`C%2t>UuZ>$Fscrf8cw^;c9?g8xzZVJA)^)uJKWZVi~S! 
zV`5pkvuH9aD2Gpm+gA}!1}_ zXFd_TLNT$c_$kv$)e_5)wZqA%V?~`i#fL17Q+u&RF$HI9Bl?YKn796!SY$&#GfX(=#eG zRsOfolp7nGGQ{4Mp{emdLQ|vL`+pLe+~|YYr2Q*JpJcT;5n!+zl4GI|pt#_Z!c(=k zPe_btXOUt^Tc-deHfO5Z4b&ifMXKvwm9I#3-93B-g5eUe0mZ8*1VcCaK%Dfr;FBH` zd{D6}pM1)t7%!>)rteuTIDDEj21 zMR#|Jk3IzIu7ddJL!j;|jE_D9>aJjX@CourO!NuyBJJ)e`h>h^Gg!h^rK*?49W|J%AgPo-RN^7$I8L)d`z1+wT*uNjsE?KAVd{ZNou|* zv{aqt6wPFv@{6A{t$-SDU)2zg) zD#nQ#D!KR1#fqNJ(V47ta2ySHtUbZ<)l<=zM=$Y`d;esGQ|*jyWJMGay&6!CvLmq( z(I;xTdgrD_Ha>o4UNBsMdZs7pnJQV&47~;?vQ*0rjfsa;%N1B@v+7!|ZD_*}yoB2t z>SkG1E7f-5@8X@Nm7=sNNxaTR{Z}h^H!-c?<|a!HLED|W;U@@UFOLmIzEO@hFZ=9t211+Bsz~av97TJ zbt9fnQ>{1fCju&!eb}vLLKjg1;%%5O0T2F=lcNF*By{z{Y#=dRcqt0AiG1dd!Ys9r z52M~BwBssbcXSAeNJBd!7zup5JIo*f&5 zZN%URc_kTy?YJOp#|7amp6vx;9fPxsOI3_hE;IYbF*y6jF*rLm26y6lOCvEsxKmsZ z?i3en0ezOx*~)X;l?O+F zhE3$-(*>z3ssQux85DVND*0@)3op>(crLSxafuQ~dCUS=9OW?!bU-00B4ICZ)r0e= zAP;MC@N-!x{B*reCK|*EgcJwwqm7`e9r`jj#KT$~!z~{|#)V28Lpt!V5T}|7ctd7P zVOKqd6esM8W7=jhFJ0<%n#1}>N_DwwYU*Z*ftL~TG{BVPOWoO;l6t92-IUZzL0u&r zn#AStlTetduyhjvfDxP+r$T*A>WF5&3U2WWA;-P7N= zT8X1}&j44Pu4?-XaMh!B&w!YO<1aXIZDDiRQs=!ER$f*4-+qs_4JU-HJ=z-ap1rMZ z)*L5zAFE7yrDWr9B#QY<|ekI26s>afY*5uNEKjFI0+*o$McCGqJ)?PTYjy=-jv zICgHfWM5(>(&{e4uh8XDc9m4y*O~tmE5AG*U*p(kSO&gp$ny)-5(t-k&!WmN zA7Vt4X$avDoP+Em=79awQ%oMPH~uIo%e8Wb&ueXKo`pM$vGeB4X>Mr?k3()&yuoKB z(F(|94A0E7^kKwg;y4-R+14O#6q0+}@i`g|t3BU2{WKdoCT>=lngn6UvyAo3ven6Y zEM2;T_-VSMowd)fvx?&AW+)0C*`GO@AM`tx{9n#J>={nMEv$xBJ7>SZisE@BxqI^G zbelKDJH@Ab_h(hR$+R*_RB_FLTLG!!OD%IA8vlz*E1bsttS~<97h~+JtbnRy#O+OT z@1IG3tDJrNSvWUNd3vPXLZ?HWQiHLJ>0~~~vY?N$=UCDIT5<75Pxn2?CQSQZYG$L~ z%8Av?uo*L28fL)I?EgFMbp2`VTxt2HHqV>gCR~M4JJR`PKRdG`4p$=Bms{3?#@W-F z7mUL`7e$z_w!bs^c~;pa4m}3TUcsC#&$9t+p!53ktR@(Tm*!EsY<6QCj5PBF*GS`a zUH1Z8)%Aa;YbW*hY-IB2n}YJYtM%-^l^`Ln1gSuN!s%=DKti&}lYA6a0zSH^#l;kT z;**F+W{CF$pes?H9}$Ea1|RY$M0DXR7KKP@LK0pPPzeA`T$>w7$Rj0502X$W?4tY^ zAGBzQ`f@svOjH)7CG_;lqO`;mkxNnGll~tUW7HyIB6>62vM3^M#t7DZnJRZvH)90q zE<3&~B2af_#g|0{>ar-CcZw^E1gy)V2ySrlW$FTb;q)yJL6&kSsmr{$vM4XEEb7ej 
zb*AklcAbrjR4S|zyL^?pfSye3^4(OJjwbQVxsmT?+6e0fJgCK?o3VnpvZx@gEGlpl zf4avgP-0?x}mRC z%S$Dm5U17^N4bVtR~)#85&Rr24%K`k;%dGTaW&tFxSDSyKSzthBd&Tz{`iXX99JCW z8qRUWQ8nK=t~hWF=kd{696B*M&;6{HT*G+7YZ$GJAG*0HVWu>Ga+4R~rv}Iv@ruj1SQ$T8H~4sNK~1{* z5@oAgUf~m{yDoLR#ETIEb=MeW{5&8Yj1Z{1#=4`#gAoGB`03a4#_{o{6`>a+TpGV< zAmiusdj%8tM5#|e(B}kYZGbSP2?3FD5GM3FAx&1VfS}GR_(Z8r0=4m9p*%me33X1C z>WmNxX>AjgKB++%|3tOCbOdKk;#X^N$oMCz0}u2;oJp2+wSXYbB;{%WL7c1j)mj|S z_^&c95UvfcP*q^OS}iXP62^bED~>Y$t6g!hz*Bg=7Ke;~3ci-2NQiHvAf;-Y$PP^YYoaD;m5U2&B0*Sq2<k{)3rEc{L_^2 zY8)W}&e7;mEAgHuW**_pms!iEHAgHuW**_qt zbRM5CHA>qs)=Q?Ho zfFRCw%KiaCoP~Um7RR&yh06X7*Zvo&6{bN#oJFoU%KjI*;=ukF^QBrG$_p2BnKc{2 z{uitBX+RKXu}V_`L7XLgsTL>3{+AdHVm}g&P|s3V9A*DYU2&BCFNHYt^D72;`0+@5 zIa?~W8DOMBXpo0x0HM8Q(1h;Ac3H+y!lYwQV*c0#emost&g8*1@b#jZH!$OJoIocM z4O?I zN?3?SiJ~8`#aFqKxtXH5k{J^fSD7l}7X)0%WUh!7S2CF_qQ#XA|63-UC$41?EgJpS zJzC_?IU3@(?$a@wKjGZqkUKCV=md`jA|Rrj2NE OCCieDBbvPP+y4iQE<*JH delta 36214 zcmdsgd3;sH*?(rvxl2y=la-t0-Xw%A5LQ7pkpL=-s9}?xo8$t~Y-Ryb>#Yi?AOc?C zC?Z&FacQe)id5UG)%LZvTB}xHm)b?GuUa=$TE&Im_nGCK8`ReJxBT(*<45z%JoC)V zGtbPk%yZ_x{Tu(`+X9C5^woD8>uvUG&sFwxR%T}VW|%==wjclXX9w`#V0J40o0@G^ zTxTX;&i(D+8Y`>2bxo|JX=PKaad~s}qIE@C5jG$=&bKn!+!?FO^lE4I+?uyF=={8k z6((kv=G6>Y)z-GEIaaf(tvTAds-~@D)zJ2iwyw7N?v+Ek*0skvhc?DK8#l5E8?QgyG1h)dVuYbz*Jx`ZD97vyPrCL0gW*gNhQ+sZ%dN-53)3bHd8&>vqZkeXZ z{kHiS<9$Py_gp#2U}snvmXAtJQEX|@M_^ix1{r~NSc3+E6_wU{CP)Ts^NGXH-;c6Jj3i_ zMIi_Z&#^x6LRS;~irL1&c5-)F%R zF+?9vvCSil%kcxk5CO^|0tERZda?jP{s^-RL~j5={s`+6Qj}2nfu5|7Gf)DkDKSj; z13lR)R10i1TYw<-0nN5QM(Vfye`hJ%67QYSFaCEHEEe?rG}3?QkpcpJfRa8yjPxIR zqyRzwA9|z!LH-|lq<)UdPZ+Ne(*OiY05v5V=_f3$Qq;y#EwK9z)Yu^P2^%~_4T{_5 zr;JNvKj$C?P?I95{*`zma15ui$(mH$r=Pqz)S` z7HLZq%<7vMGvo9472GPeN-RI&5hvDjv$P^TGQG8-xx2GzO{~VUvaEot#UN0f?iGMH z>v@Ll6<{Fid2W$cfT67C#U*YJK-j>|Vz-Ax&rEv?N<5+rL^mW8B|dH7xdkp!TGI_2 z;&BJzxY_^!L4F6#kal~3mUk!TaxQ>C381D#{MyOY$rDGlz|N7oA0SBW zWpI3FPR00^W2YEmRt4o{PN7PX+{@DeG5071#&{rX#dG3TJ|zmcch%vq=) zGF6<>^|8)KH1XK%-1v<=SSCguBp&S1JOG650LtzFV&r=?4*;UOJ(>pq(cK=+gCLFN zCe8FWp@C< 
zgYRk{07Q4+)jR-*?!HSra7fZqi3ztQ=A3;){5FpQt^aKv1)BP8+?K+E7NBq&hvp$k zN~Pt$o%0GY8$cKfKs^=?O1FEIsJGj>U8<2m>2@9|Craml6u|FuUZrXckOHVl(f0d3 zPb<>3AoYFTN3IW=inf(~t!ZLVXnJSjr{|o$=uVFSZNEFY(m$zU``yX2H38awcXALQ zZH`*#(>r<9XbnO^-^quaq{xHP!Nk(K)0Z6d$or_!Bk#lb4tnH$Ab-#!?*sXR9(f^5i{pY6986=fPa-BU&npR{h1qDU1Fde^K{}GW7bQW-vhX`XaaG zu?-NtzQ}QGAE9I0BKE(+d3ER&9+}eC+SSq4+!^Uw9gDO^TX2A{6o0Snh_<)KIwCWo zYod{c=4fYUWL2y+))DQBHAYspbws+xp+$`_sv;|!U}{;BxvQgH3U_T&b8{rxyf(V7 zGtz)QqD`%WtZa}NwyZqEimZ(h5;k}1y2P?&Rq@``rmu{ght8L`bT@Z3wKvC>Q!ky7 zrp`!XQ)jfkIo3FsNVcMzW1Xvcq=>FTBz7c9%Mv}s~%I0b<#wdT05H>W1@9u zQ@j-xV?#ulRrw#H7x8?Q+GPwQf~nFtu1~@)U^sGZGm~@AZl96aTezjQs1mWjhw3mF|%?(py@(EOCy$#k1u(3J^WM z#rw%K7$BtUEk1CZ8a#(|y~Rh3wFZje0Qa55yUVJVyyKBKK>i(%Jn6P~c=ixQfHv1V zeAoyLg8Vytg7%I_%8)-0pl zca#s5%2enAfP*Kh4xtVKj+tQ1q6wHVK0e|6i^RC-%$!enAlI5o<1mG-aV)VbdRoOX zoTNgxi&F6a7*EfzPFHB?`m@9r(diYRaWlh3V?g_iXXm(RfIm+()lZLq&dqEWjn2Ez zd2XIK{VZCR<5*iYP#>laP%oMfN7iw!tY+Y%J^0c8Mnjnt7WbovjfOg@0iuVEM!4MiJ=lYl0?C~{e`uV~ z<2yYPj_fsRH;+wzQ@^0Wttj&>OX1}RPk1+D4{8bn9CjM3v&NAOh~!{s{pDgbpysp!b@ zCX|YP9B)G8t2l9QMycq?@n)2YjvQ}BaAdAGxE_UtjsWV8$f3F3>xdkh>y2!^yyVba zZ}gY8IMxvTTyG4Ll2QN|9bIpnG|u`^9`0$jxi|64)khcY^*TsH2YbB^($K+PBRiyc zl7KvUnOg*lR?F>srv?Yg7-j;$msiZF7Gj@4U+v@At^3gZzGvd^*VQH*Vk=Z1dK{`{(VLaH~f@1N3k8gfj#5Z#8Uv zW@Ui>t+Wo)#Q-yH^Nz$r=g*vYhese21n%$%WP-pQhAmAEi)Vts9Y$4uOSt70&w>u^ zIioq3O*rV0$O4Ij9*HcFIB2-~F$*LP8s(MNrD6=(w)vxk)AH@wA9)0_LEuLofou@? zkvE2H5cm;|;Q`5p9NWCl;8mf!63g2oiFK_vP6VjsB?qML^GM}@)P07s|1F*aQui5f zfPXDX*|zyW;+(d}CqCd&utDJgkAe*f4;Z#SL2OWXz^EQ#JuE5Y+U6sPMeVL;>hS6(1;m#NN;@9+)P>Nx`rZ)sYGz##A}AqQ2?a29(^(!-ZZj@DuQG-ylIS(90UQ-f72K>P1l0{o5rN6 z*1M8E=K5Y@&)WCn?|Jk|8@=bzr!l;z&xy?NhyV0^a$&YH9u-n9(>R$jSD8wm0R+*j%xvj=0Tfh9 zO5t%D7_4n>N=&_^awtkSneGg!O)`jq2r#VLY);92pY=h~exu;X2X=00;rvZ5B%-5g-I;x7lwntU}NyBke|$?>7yJZ4#{;%|N;p zh82ct8o*7anH5Uh7cY(DFBudPHJHUsW=5v4FR1`c_nFF66R<1@3ZqOEpv68~3$jS5 z0PuHAGv7rZ5-5;noIns5Fyme%cKFi?;kKT|-(JEd-sbT`IKo<%fI~=Tw&sU$goz)! 
z=}<|hP7n2bblDyrKje`h=l_tYEHvPN#39p`M;}12^pJ^QmSidI$wOwTG_+9*mL4(( z3|A~=w*Ov}Kj^ZQhI6kO$Uq>HX3dDDKQ>J}l*rmpRe`^xaVu&twjY}?ZU-t(m$+U*A36=xslq*MWZ+crtgBjIKk=wN*4#vt7dkd#um&f)B<2&#a}i14AQk& z@mEa*aPOj1KAoZ|Zqh2Kd-mu+Fwtj~kG`B{XKu z4)Xin@VG(8-9mQmv_BB$j^G$RCaQ@@Uz}EmEHr0KHfEz-UEm}-Zi23CJBaPw;o9x-!gGIUFRI} zx*$jEh}Q)uCT)tK)ha05Of@dK%;+T%NdW*Miv@G>yxBO0xk%D}Ztu05Ph|eM+-s+Wrl` zluISB+ePX!y6Cjl^s51IRFLY+2K=`13-*t zhfi4!81^S^<CrO=h~f13l!FcsGw$&z2OS`0+~b3T zPBYG?8Q)7Yo{T?nD%|UJLUX=X&l$CZspkw3o!qPEOwQ_kVx_ZbrSH=d2ZUAt<-`GE z;`ix^=h*(A`chs{6DPOyr@mmm^&Cw+hbI1L&o#TBXUiVdV?rT1dQ?vwAlf{tCk_x} zdQ?vwASV8(o;W~E{85_t`y@&^WGO!G;{Jp2(=1na36(4S}e?u0rO;@~c`1s#? zdcKpzjFz^x#4q>vkK_Lp%J2aO=)U2zh+)u{@~-zypIIR|Dqu_o1@aR`ZDCw zhysv!)0bE3HNlkL^x;ZK(*(j>KC@CH#L)?@14=xibTYq1)1ymeB1&uVmJdN5w*W-n z^5GhUyiS|s>TMsFmr#H}51?FEfZ+JszO-CnkpKj(w|#liT(k3qXus`)!M=xt*QNz| zKXLq)z47-w5;UOqeahqq3gP%v*a{#9^S-aDpBfAnJd!~(*+0BTa?7JuMVt{rMY>H}Yn48{QzZE+DRIfSMk z`c$TX%`r~`H6^m>KJ*4f7Tt%id1YJy*dXf^^UZtod=>-u2B?S4fWh3NNozY++5Abh(&PkL+sLHKq* zE<@?i$fZqlho67$mtHSmVBg`#7P&frJp!2ec-U_a4t<;`J@{Nkq^+T$y93s9@OW;J|6-gj;6x855H?I34 zbo!LuznJiPF228o#g8Qqk2c9MzdBTC!n93}`Sbd^ETNro%#V#ihYDc;{?u;{auH~+ zpun3l?UqlIY0`fA)Sp)+n#2VSL1e%{mndj_<~K*UL}}Zgz#~c<=d)y@v~@o7=MC|i zfaYiZVZ$lN$IU$3e?=hW`Tz_4iAB7Oh_2O%*MIs__7#C(AL~L$T^`BQ#z5k!pRHTI zF`(oYg^=5gfwX*akO4$r8v_OMx*H&R+8DrfH_2@t$?cT^?jTo>95KMazA|9tSz8%c z2AKNT6iD3uz_wMJ0%}g87<1YbPy#9n&~j5i38*MQPn!ZtKt%y$V^cs0s4RdeYzja& z&QhI$#+Jm*51uo1OLAW%hg$+_p{XIs;g&#NSny92psy_fs(geGvwiH*0bV~zvLu-{oL*BoJ7}ukLE9p?)l+=hnZR1&&o}(|ISS; zdBL{cfU%9l#FUi#doFxow#DZ5Jo@UnK9~)3j{4@O+=1AhWf?xmkul#_C_>LZ6H8?* zOdNmfosxw-8)v7Fia5#;?HXa3fPlU@5qx`8$ztwP2(Xci#lcjoMIZoJnmG6Ew$i1X zE;@+Olm;FMBC7zQv_zy)d*2++h0i4E_xmo{*}LNI~Q zkZM_*Bnh8wtV-DLUQ@D)Q#_bR_<*s>&b1~85`Nn_FLD37r<9(@GZca!2myefYAaHxa$VNpl!4#j=xu3(k`wLs1F=OMmwy$RRST^HaZihzrVb{ z8~~!%MyK0bDo{Jo+qJSc%Qn^|e)s;yei9e=ea0G|u1017cTHA~HCT{E^60um{77F# z7S|8PIv!N0X+T|vON_&kY`Se+l6d0CT?!Y0TH_LzY&vi+0om(djAQT_wh`|+@WCdg 
zkl~RVah|3KX8<{#nQbK`VO*(Qo>=(N&hX_7Q^LjuflTa(%UQN9ZkYh%(riN_oE@~JE%w8Dq>Xm_|$&hGP0F)b9MwWHHKqnyvO;o3ybz6dBcvw|X|w9;7mz&TA^|5@L%{oYvm0A)XoWv)O8k<0qxXY2%JjWe8@SMvx~|eVq9E}Qz393qTFIc zhyeDr%}1Q~Ijavp!nlapQ6X?1VI^hOo02_Ma8jKM4EDFON4-H;0qId1^vQxyKimAZ zbCJo$mHnD=;a-7IKj8eD91WtdrsLsg?He{lX5VBabGgV)_)ApL>5J53B}h;9DK zxg*HV2>;2GvmwCw6Uo^ll28qjp`4sl_RH`q4BAAlS`Cn1VF;K$Baw#M=3kwosqESC zUp;OQ1=3%M+iG&d;2Jt_T5M<8n~aM~7_fU7klti{N|YQ8M=FxDCykvPeuqh~131Hh z^A4*TU>%jcjerN{e3-`m5q@7KX#;5lkltry}o$~OPboO3hSU&4R)NR0y4--*;@LFyz}pFqlFwUJ|t3zr1_odl#~tZ#o2>IXd9 zHosub{h91w*%zJ=j0V;hte}rNtj6H$6|bnWUXwJ( z;UIUmtURzX`TX{>p6lOV+GD6B>Xxr=Cko-ND>wom!LBVID2?$#2O@UPDSL=8Ck#vhmo_Z zH`b}Z*+&`Tm&;*K#yRa=TENza_j`<=45aXP> zoEgPza2fJY_4zXmI0rc*7E+;}hA5-6qnKS1?onB~AbuKfdU(}9>j$#4({1yxGqi-= zS$0?@g#+nyARXr6N_G04fvZ91(-NG%Kk=624B-5PSN5lb5j2^OdqHPbDZ4cM0Oukg zK!w10fR|TklVgT${@i)9lsz8)xwmdJfb?^o-$!ewnYQ^0=l5l-C;SVQTMkMyf%FSr zJ=EGIhdqnV=mTMPefUvNmSzFzQC<+Tu98S++U9SZr4hC%{2OoO&IHnLcu|Qy5@#dy z=lIKUBtGfM$!s7!$xF+v4U*DXh}t>7EN54SpXToQa~6=E=6y=Vy*kEtHbQcT&#z!r zEc~p8bvCe`C9EDPDd*s@J$zRsJBgLO;9;EutQQDtnBZoeZNBXADmJ3*Wsl=^zc|dxd=j7{SdOqT4&fERivt@5_89>8~=L6|29_dS&W&ju9KF#@4e|A~f zyWXNK0M5I#C~{`!BKYK7I)IG`f8gztbAj^#FD$l33OWlBvEj~>1K6kt(nz&1EdPMxa_ckeC-&eaAM?x%8gE8xyLPYz}elx;D%h(UtE zD}c1cC=Oc#1fQa|xy||h5H={h&2XhM3Y={QBuOdZdYqijM?;{5cfd_kXJtK*b{G%= z<&ZVdf%<$6yC#ezN3C2MfONfqow8jHsgX|9D~7T}c#k*kMj-8>abGDp5QE`+_@rSh zm4$EiNX3A4Gm+XTu~ypVEr*v5XF(Re#gJdMV0tTob&F9}Vcjk}T!quqSwDh(KYYOJ za21daP=`N~NUL#pI(AUml)*GBa8DmmXF-uzpEc+a%Tn$lE*I zJeJukjATE(4K4uIuMBJhrJXwHP`zm!dpZ0YZ&)2b`V9?hz2reB9jZ5sXB*3&^p>|1 zNKex8%5Bg^XX?ZWP+?DdtJ?*fr)hPiGu=%_?UfT)P53zvryDrW5sng*HTcrO`TYd; z{jwJg_h?!Jq!$ewO);@3Yi;w7&P@~9s_-8@2CN0nABh1!k)ytl?A6Ls*w4ydF=Q|u z%XuM?UNN9~ngyYCWUhX33TqAj)tlKm;QWr>;fF^xr7#is^7q(XQVc;AMfgNmUeNu(&| za2XK4Fb+>UjpeZNFX)hxh?f!K7j#NJB@r(N;$?@|p3dg5$YqF4C_IEOP;TN=whIKk zD}c7%dFu?;S-#%Hxgn`tL0IceOyV{s(AER(O6St)?2hs);R7nP^@Mh%i51vo2(%4A z+jMyP40b&$-(quEX!o zW|y$YE^n?^6W%VG>k!fDCZOHmTyr*@l^ZAzLk?{dVckHfAaZD%fwjkZ>1+&bj|pp7 
zVj*0EViRT#)BsKN8ldfShMvQ&iR^>Ztz(7A`~mKQL47M=WzJK0iRl{hiM6 zIc&_yh^U!AFawp8k&4?Blp}m40T8{@%<3Zn1X%AhOUe-x;sIpQA9QY>!%iEIAe+~k zawiYs6K&Cea)A$;fjkMI^+B_+#Ogw8B+1`n@+zl%E^8Z$$eTxi(rr)`6m2M#@*Xpl z25$r?P5vGeM-2QMjz}l`bsz61Gm@_Q^>M53L^*RYDBivPlHsiiu5 zjK!N8i?Hbj_-gw*XPnC_i*9o&SRG33A(lzsk`-~(rq=dxHlNG-1nWeZz1O)Q%5rf$ zIKMfU%^kbRsTgqGp`yMrB`wksTZ!8z`n;?SiGr;|>XES5u?`u5(XQ^!x?of1;&uYEL+IZCT==VS_vBAx%i5zp{=DQ*4h<`HZ~%sExM9E;_5=W+p5)FKr7mP zs3@W@Q($4u`ZiN4qGm?fwI$*_F}eSVvc}#FkW~kdxV*B&?0a0Fis`H11>{Mr8Gs|b2ol_RE(q7e*oC_AQN%?g- zxbN!#3DIZ{W*4(BbiUKT#pDG)|*- z!6W-k!Jd3$JTAI|W=b4|NB84`C4b^vw4uJRtqXb4NJRk`Wk#XAZlF5!RhTMnPzXX_ zg}Bp@ePK0?B0fU(#oWG%jNzCRQRcQU#EXB(R0CX{8S#+3(xF<=4h6(56G5OI!Yvab zP6Xkeub3mkASc3I;>AX@l`AGE%4l`X;@0nzn1~GcFbN{Dmqs2A+VXFROoT6{Rgas@ zl^H4$sHiSi%g*Go6R4{y)Uq?V>;&qnN?}&}K~pR{fx4=%2-Z>68?>|pQdJx(%cxQ! z!GuT%P&ezRB!WqaAdpr`b^)03{KCFxLU%r3+l=(ez`uipF-##J=r7`H13-nN zme;d#RthatkgC{JJH@%7o=xdfm*3tIThr9m-KpD1W#c%k0lK?)n_6dF1FJ2n%Tvm! zcaw{yej4e#)WAmOoS>he&IdKJ`Tvei!WH*d>7*>(Ywdramf|GHU!#_Wd1`4GHt<)e zrEIaWrCOTjX%?lH=DD?BtCr$8|49i=@ER+%G{J5Be^V{R>45q-)RH_Y{(ZG1Pm11Z z=@h7?IP{XMmQIythgM6Y#qj{>-&9K{Db3-XN=3$~UTW!}tCr%R{qLwHd|&Zpwbb@y zwRFLqU#FJx#k!Izi11}roJ^TywY1Ew{(qvD;-oJBx75;7b%;x~Bux{gmeMG5QYa!) zOCohqKte4o5sZujHAyWkMg!7#QY|e}&81q>q=i~aqwGGRh)69VK`&V?Empe26M(@6 zX-}do)zX;?pRa&k`ghdQEJZq5EzNY*(guVO;S)7>H*}q-l`2P{ps+&8E>p!-J}Ypx zY-z)tbseRF&L5gsm8YO3STagF4NN3^-D0BYL0KeVvQWV||P3 zTr7Kz<*O&(tjHAT$!!8|ZJ6obG z;>UK*7A93cb2MZ zYHOWynpv&%&$dectT)1B=dETo`71rNi=+k|;au6mMx5xO#WB~h;ND5nF!-#6)%-i! zu7$+eFMTw)YjLEZ!OJQVt{c2KUrN4m(gL36Mt@4rEtoP?t4R7& zsmQdHDrtt3X_^+ya`jjSpX+Hkp(3e-tY6ZBnefxZX(${!;kr?guFk@<4X)cKR3w$? 
z+U(w*8-cnir?=-upsuoedu}$5d#+nUfSm`CU}4q^~qW-`JqgGlsh!*KLfB}j+eQR8z*sLOGRwBc%uh%r_vb|f}d z@XEWGv+e>mCJyZ@M`60Cl}4*l)1pjxkCp9kP?h)CKLuNamYDQQe|)Sa{)S)<;7f(% zd0LuEbnQSTdCGYt(6ZuEsSfJxJrby!RrmHD3Di}CdwY)r>Z&2Voks$7RgDr#;XESJ zL9o<#kji0vxYnhlriURjj<_t<^e~H_NYiS1nE2d5tLfozlZlbZT6(yvOQEGl@=>I- zg(e2&k&-ezZ4o{gY0=ALq7utFQl#Vy2+KK&FH)pEAsl6peW}#+Xg+3^YE3dYx|j1f zx|j1fx|j1fhDT;9(w>?gW6W^X^rOR_bGleN-@u%QyV!t{BmU*vpXax>t!*9J)zpGl zprS49?e$JUH>*zlp!_zNeHw2tNUe5eOr?-MyYgY}34;Z+DJ}9PtR4JmFYaAEc0{#INq9LTs3`e_+*ice9E(vnw zcaXw--|pmYX8$t@Ntn6~wiS{vvA<;-O8wu?vUA);xvh#^`j+-+)2h}l8BZ+3X}N~g zREJJ5qALDdc>1!{mp%PRnXGZ+%wOT}7i3BtWkmJz`Um-CopbXR*8e|^0Q|Uj1Ry>| zO)}C&4|;ubgU1U6$U#YqM^d@&Kxyik(T?+DUHI;O_kmLX=^brryH?*U1HS1pP`yf& zoq;%YrjAp~qKaRL5P>ACLIlhz)PaEIdRdl3T)Sc-gzwsxvXP7)RA_dwMXY#SawI`w zsYn8|3RJw{M9LKYKasMCF_b4$PL4Mu6Hbmd^e>S$l07HH9^w>w7#LR=I&A*0ILTG` zycajWf|yW&yBRjpNJ)I;ynVF|3!Kf{SU7ZWd5RnZvMg;y5 z*eAzA5Qr#nT+<(3Z z9QOUcT7;dZRjj$EEaI;K};?GETrqFX|28usni}>N>s7tA!=i1Pp(zQuVaif3qCu~IeI5s^c zOW26?X{?d%!({~0I!H$%f+s)d0D!6tY1L8{yr>MINkmj-B2xe+5vs_}%;a%#eM2BH zGx2?i@}hE*y{Md^7ii4I6_4u(2``FD$P6SeD(_48UsR%N^S=~l%#Wi;R~IOwRNa3O zsH+Oao4@Y;7lFE}sJ9nIpswm85@GcH7d(^!ZdFL6Qu)oe@}dZqP6GH0_aiybSmHVf z#N3izUQ|ghFRD~sNr3UL;a_S{pfM@VSehJXjKd*G4m5^)c~RkBUXee^%nxb;{Kze*Zo%||B66krSauJV`WmH@dOXX3p5Tups{zH zkxFQhSOBiVK(*|y3*-4BgVeIC`!51zGrA|D+)DTU>l=G8^y(b7q8m|T2#-sVpcrJ0 z5(#zxMPRZAlN@LqrW{h`!So6=4(k=kGniX#6@4hFm4O|2oly`EsCfj4>0;=lB}V9G7y2^i|0n9~bN^ z76lK)7~|DSbN8Z?N@&Tbc7j?mE-yL>M5e2(o2XWr%bQLDb+c34X5yxkKwULSB-#5x zUEFjMsH;v@d!NgdCQw&RRtwE>(HY0w3C015j|Fj(PT{8uuMYrtHbt!lAWU_NiaY>9 zvneX_00`T0O3`V;=_5#AeNX8#O%X=pskjLxXB3x@AUstOjsv7hmonXRPIW2MMxRNtqngX~=$55C~7h^XCcz;c4Raa~cEPa!xBb*NT&SO`sn0 zv{FqO_^0s()tZ7Ory*%g(jvunT8WH$LwpDmUlgl4P(E!?y<&zJD>Z-HcRziLV)KzEm zIDMtzHw`I3XDb0R(AhakfD9=>1nOpWN`MS`Q%)esk3sS?SIG|muxzecW1qFQ|^X-ujj_t5W6!bw#}&O6lyb zr{74akKt(on8sagG8HUQ`*0;2lbG~KQ6k;2PR_G@-JQ`@D%-100u{5Z;JkMEkegj= zr6rMqb#1^cj8IrUvGv)as*1BMYbpXVUyaRrVfV?Xx2#4NIq}dl6{&yu!yw^khKXW@ 
zN2x9eaoiFs*saDR8oi<#(Va?ufgWm|i&Q&Yeu3SuHZ(^hOLax(cvMi-zLqA| zy>Lo=ybhlSQpQ_t@`l+;$zY)dJi8X7x7ItmTfoeS%Z*4>KXzA-N9j7+)-*N3;}`Fd zCqKrHIFM)==_jfLyS4uzxrS(yIQwZ^Sa<}B_K&b+kpL-V7XWOQG}jy=#5C6&qWzPG z@T_cxs&r}Bf(FH+)7@sYInq;gjuLH-bQrlyFjr>BP51&VbhenR=4b>lHj)l{w_Q9l zhs3_l7~bl`qn(lFrp_*!hszJ}2K7|cjwc_Q@U~wE&5TSNbes*lnMMfZPq*CkOo@nW zA_po52SZ$e*cv%$1-cULatf1gE^}Z|ER%{iKc$d`>cn_GvH6cj96w>mwB7TOw?spR z8ncZ8HDmx7a)CO_0Aa)hYQ%uhtU!$z5LjG*5!X_NtQkTYU1&^Xa--y5(x4%nalKhJ z56_gWKTEu$L2vWnPAGtJ6w4TNfV`Pdpo;h#pfMD~>k)rr1jQMJ@^AEC9EymhD26F7 zipjmvP3icgD}`~?mez@G6vW?XTT)Qso-)TPtP&ZUO*&(`I&i(ZJyI0E9&aqn5s*FJ z^2^s15l^ABq7uL3B!13GoI>~wry7Y^bDDUAaT^2^Kwan(qA*3 z?`&ha@!2R9C}*V9r4ow7ue8!zyIXLCsumR8VL(Zfr7jY-F?PN}GJupyNU~HS!4Io% zYtuc`6>pHbqB0vQai(%JT{x!t7PPJ-^v0vh=!WvX;rMgVBGc9ZOyjioB&!0SN z-n8tL+PIaARNk{s9D8k=TzLdxy4ucWi-vx~Sij_p4)pIXrDMA+W!d4G-E510^l9SY z7J>fLRK%dl3nS2@+pkjjjRD~`G@ z{J;gB@?dm#2O@QNMl?$AiFU^Z(@$e`f~j=o;~11OkKe^u-PEue@0_;NMs8dOP0-XJ zo-L(cE^Cx+J8N*4fN)(LKiNS)an_A9wsWxCyA^3^Ldr8e?%CPh-rm+hF+%8=D1PKa zyx{4MPQ33~Lk|zp%Repnp^d?KKh=uRVS2ig^f;3%ZmD+qK)a2zvyI$MO*1lyta$?rRe2W zDTrGBz@Vt*s$sc%Ya)}$6hYPtQs*&uQA+HKUD%FQ1yh6yhvIHUpg1mmE~XPtz^;jQ zG~xOSf;qV!VlSh}_-Pki0jX+)9CzWlUQ8m^(a}b_{hu$MVvt@tF_a{OU}7*MPS0wX zEQlRAiO(`Zd4eDvPVU&zKv)JrTJ1@}bUc)-Y>3HbX($w_OWvq-a?2^}W@2e4;}vB= zM5yP4Y0WIHcp4{g^xZ^3BzVO1{>*3HtG25Jzl(g4t&i8dckPAJ6=2?XH3U< zC1DIdy2)yA>~PpKA|in>c(xf0F*bU` z`kx$Dz55sE)e53bx5+rKM4)<(FVhO?K=r&h&k1Ejs9o}!QK%X-p67+JAw=`CvgsWI z5pr6bvW$h+L{_-kTTNL1`cbr_ts7g2P7cLip`6@3Mb@O4O8jdN-rl4ITC}4hx=v0- zK0k$v>2VMnn7MdXikF0jkl4uebo=|71Cd66-LjGzL+Fd4%!ri;3`OQi+`QK_t4iAh#&Qt>yoa=&DZB^kN11xY>g z0(+8Wp)&<5n~r?!g1&BPT@uE)n3d+?cM322vbTl_8=f?~H>XI(PpSGd}knjVLC zwXasLb{Sml@;H5r1OfM@^i!tU*JQN0E_o0x`6_9tWv1k^&|)FIN&S44+t(PwgYoj` z!IEZ%9Uq4w6YYS~!;hpjH%XNk)fAL~(+13h`*%779NUpTC9*@~M7Lx}(^l$DRMJc) zN%oKOpQW@DAsmF+an~WIUb1`n@!83Kd=4H%pp78?_#C$xxoJ76+FT(&J_oP5N%tQ| zh0TYlW)#7)d0!2onO#*aU2S>OVdKXxXG^=LAf*or)kYS!b>lj^6=t2_-fU%@oH${! 
z&Rm{F1R(ajVjqpL0Fex0pK$n#@Pj&X3+Zbki@>K`qdNeVx}`ijR-UD<-eIB+2V8#pl7I z9G#bD_K|<1>(Y`6_ndlfq-7y`(vf@|(HEJOGrvjLF7y>hNuno-hD1)z(6S$&Xc-je^pkg9Tj-o} zEh`MxQPH?R`RH@h5fi?7`cEP9opX`zOp{JZc8-6uWasN%f0oa%@f@-nW}8dDm@(PT zb_?c-sN3W(Bh`Khsdf{{)xj*znHqOPs}V)fP6{$9nga6##9I5yh_#!3x6!!Fu%5@# zQ3w#Z>8oOPp8IFg`4`F}9oXC*Z)wGshQ!m*a;P8r-O1%76ZScwvsU3pCc2hOFGLt( zoi+F&kys5ZO7+_u%iHjy6j6LW+*u>hmJ`jERvFXiT;A2Tygr71yVk~Ht)T)dwWUeb z*{L}x^iMox#HpepdZBEjD;}ypHI!@lTiQCin%ALlrTFLFN%VGG?p!4~BE%~X1-q|X zs6K6W-L}ZIIdkc}JA2ODP@bGNW^IK?(BeV@L&JN`y)sn(Po{$@AR{nTYNZO(ngBa{ zM5YBcq9ghrI;waNzoA9jTIMX_8~@ zfAU;$%&>B30OEsjWLwFJ&$7&J6pCY^c|LR(^6PlUHGDtb!M0 z|DT**$eUi&@-@I8=n`kPQ9ILFwu6lwiZeS?XWL<;p}tO{*|^zW3lF?)E$spEHHxpc z+IeUPtIn)Tscsd5=r~DJz+Mgoxl3-l~QU;Py8b_4S z@}h|Jr;H+|c~M0AQzav1q~wT8T;bzo_~>N^m5h{$C#GPLD4#AaU5Np#330ou zzS<{HSA}|KEfI*wpiH7F=EMoAP)<7g$mY{ zmAs#>#OJYu)9?hKA4PZH6vCaML5~P>Aq)DQdfC}4M8VwiP!OQ*gD>Hk%oWXBm>Q2^`|5cE-WZT7mVwR{w70y%msO9PKnD6O$S6XqA#>ZC|RpT zZD{YS7%I`Vt!huIlrjQ!Rh!zADy57-RTVedRi=n~Q-MI;>;hat%EU2|X+)r|>QLKN z<&+Vqt2)(oRVifz(sGb{Xm;~8G9gGhb=`XX0b;J*fskMmATX^P?~@bL0D)<1+=mam zeYM7Y_yAnHkgt8!F$Bj9ze5^<=?oHbLxx^lf&Nw2b)0@jsyh^tJsq_iWvOb%FA z;`J^1=?$H@W(qj2Yw_xq{LKe0QjhRG3}9V}x3;QPQ+l`skC_7|p`w_ii5IryFRuc^ zjUhnsD7B0$0t|?E&}YdN>K}D(zyu`^fxh_A_@fkO|0NdMcx%I6c*A~Rz#j2YQsl9%TyDH7lHC1Mur;b zZ)Umjhadog{$_??hon!BwrplqOBH?hm_}hg%C02(Xo1(j8dM9&7=iM))KE$3AlER6 znIs4?0|+@&An0GiAZYXe46t4HT*H*48Mc2bOChU_Rtn_Itt=?s+Cp*}eQU(yLekvK zHd!Q)QGXrkrKHK^HYzBQq)?mCk|xvJB&?g_8F?vbMnFiKfOX|gdQO@oO=h^60@jtg zw4jL`H$0yw3Un>ro|lp))7_h!NdpA>0Hve>6tzmy0D}H*EolHjf47!20yX{J zBxz(0AY0A$;njTn#4{}}Kxi<)arvnjV03UJgGHdK(ZP)j76DvTk?KaRb2q{x?Vb7t zq`Z0zD=3i2CIDU|9^obNLn0fVt7AyClSJtkH~{J{NnYZ6Fm_!nuwmFLY)onoD_p3$ z^u%uuLvpC34-J14<11AQ`h{MA@^^*|@jlT_O#SQ^K#;$Q;f7a|r{Ax - - 4.0.0 - - org.schemata - schemata - 1.0 - - - 17 - true - 3.1.1 - 3.21.1 - 3.8.0 - 3.11.4 - 3.11.4 - 3.17.3 - 5.8.2 - 3.12.0 - 1.5.1 - 4.6.3 - 2.13.3 - 4.4 - 1.2.11 - 1.7.36 - 1.11.0 - 2.9.0 - - - - - com.google.protobuf - protobuf-java - 
${protobuf.version} - - - com.fasterxml.jackson.core - jackson-databind - ${jackson.version} - - - org.junit.jupiter - junit-jupiter-api - ${junit.version} - test - - - org.junit.jupiter - junit-jupiter-engine - ${junit.version} - test - - - org.apache.commons - commons-lang3 - ${commons-lang.version} - - - org.jgrapht - jgrapht-core - ${jgrapht.version} - - - org.apache.commons - commons-collections4 - ${commons-collections4.version} - - - info.picocli - picocli - ${picocli.version} - - - com.github.os72 - protoc-jar - ${protoc-jar.version} - - - ch.qos.logback - logback-classic - ${logback.version} - - - ch.qos.logback - logback-core - ${logback.version} - - - - org.slf4j - slf4j-api - ${slf4j.version} - - - - org.apache.avro - avro - ${avro.version} - - - - com.google.code.gson - gson - ${gson.version} - - - - com.github.jsqlparser - jsqlparser - 4.4 - - - - - - - - - org.apache.maven.plugins - maven-compiler-plugin - ${maven-compiler-plugin.version} - - ${maven.compiler.release} - - - info.picocli - picocli-codegen - ${picocli.version} - - - - -Aproject=${project.groupId}/${project.artifactId} - - - - - com.github.os72 - protoc-jar-maven-plugin - ${protoc-jar-maven-plugin.version} - - - generate-proto-java-source - generate-sources - - run - - - ${protoc.version} - true - - src/opencontract/v1/org/schemata/protobuf - - - - - - generate-proto-test-descriptors - generate-test-sources - - run - - - ${protoc.version} - true - - src/main/resources/schema - - descriptor - src/test/resources/descriptors - - - - - - org.apache.maven.plugins - maven-shade-plugin - ${maven-shade-plugin.version} - - - package - - shade - - - ${shadeSources} - ${project.build.directory}/dependency-reduced-pom.xml - - - - - - true - - - META-INF/LICENSE - target/classes/META-INF/LICENSE - - - org.schemata.SchemataMain - - - false - - - *:* - - META-INF/*.SF - META-INF/*.DSA - META-INF/*.RSA - - - - ${project.artifactId}-${project.version} - - - - - - - org.apache.maven.plugins - 
maven-surefire-plugin - 2.22.1 - - - - - + + 4.0.0 + + org.springframework.boot + spring-boot-starter-parent + 3.0.0 + + + com.opsbeach + opsbeach + pom + 1.0-SNAPSHOT + opsbeach + http://maven.apache.org + + shared-lib + user + connect + + + + 17 + 17 + 5.8.2 + 17 + 3.8.0 + 9.23 + 3.0.0 + 3.0.0 + 2.11.0 + 2.3.1 + 1.15 + 1.1.5.RELEASE + 1.18.38 + 3.0.0 + 2.0.1.Final + 5.6.2 + 3.0.0 + 42.4.3 + + 8.5.13 + 2.10.14 + 1.9.2 + + 1.70 + + + + + 0.8.8 + jacoco + reuseReports + ${project.basedir}/target/site/jacoco/jacoco.xml + + ${project.basedir}/target/jacoco.exec + git + java + 2.6 + 8.39 + 3.1.2 + + 3.21.9 + 3.11.4 + 3.11.4 + 3.17.3 + 1.11.1 + + 1.5.1 + 4.4 + 3.2.37 + 2.5.0 + 26.1.5 + 2.15.1 + 2.6.3 + 5.7.0 + 1.10.0 + + + + + + + com.opsbeach + shared-lib + ${project.version} + + + + org.springframework.boot + spring-boot-starter-actuator + ${org.springframework.boot} + + + + org.springframework.boot + spring-boot-starter-security + ${org.springframework.boot} + + + org.springframework.boot + spring-boot-starter-web + ${org.springframework.boot} + + + org.springframework.boot + spring-boot-starter-test + ${org.springframework.boot} + test + + + org.springframework.boot + spring-boot-starter-mail + ${org.springframework.boot} + + + org.springframework.boot + spring-boot-starter-data-jpa + ${org.springframework.boot} + + + org.springframework.boot + spring-boot-starter-json + ${org.springframework.boot} + + + org.springframework.boot + spring-boot-starter-web-services + ${org.springframework.boot} + + + org.springframework.boot + spring-boot-starter-data-rest + ${org.springframework.boot} + + + org.springframework.security + spring-security-test + ${spring-security-test} + test + + + + org.junit.jupiter + junit-jupiter-api + ${junit.jupiter} + test + + + + + + javax.validation + validation-api + ${javax.validation} + + + + org.postgresql + postgresql + ${org.postgresql} + + + + org.springframework.boot + spring-boot-starter-data-neo4j + ${org.springframework.boot} + + + 
+ org.neo4j + neo4j-ogm-core + ${org.neo4j.version} + + + + org.flywaydb + flyway-core + ${flyway-core} + + + + + com.nimbusds + nimbus-jose-jwt + ${nimbus-jose-jwt.version} + + + + io.springfox + springfox-swagger2 + ${springfox-swagger2.version} + + + io.springfox + springfox-swagger-ui + ${springfox-swagger-ui.version} + + + io.springfox + springfox-data-rest + ${springfox-data-rest} + + + + commons-io + commons-io + ${commons-io} + + + javax.xml.bind + jaxb-api + ${javax.xml.bind} + + + + commons-codec + commons-codec + ${commons-codec} + + + org.springframework.mobile + spring-mobile-device + ${spring-mobile-device} + + + org.projectlombok + lombok + ${org.projectlombok} + + + + joda-time + joda-time + ${joda-time} + + + + org.bouncycastle + bcprov-jdk15on + ${bcprov-jdk15on} + + + + io.micrometer + micrometer-registry-jmx + ${micrometer.jmx} + + + + com.google.protobuf + protobuf-java + ${protobuf.version} + + + + com.github.os72 + protoc-jar + ${protoc-jar.version} + + + + org.apache.avro + avro-compiler + ${avro.version} + + + + org.jgrapht + jgrapht-core + ${jgrapht-core.version} + + + + org.apache.commons + commons-collections4 + ${commons-collections.version} + + + + com.google.cloud + google-cloud-tasks + ${google-cloud-tasks.version} + + + + com.google.cloud + libraries-bom + ${libraries-bom.version} + pom + + + + com.google.cloud + google-cloud-storage + ${google-cloud-storage.version} + + + + net.kaczmarzyk + specification-arg-resolver + ${net.kaczmarzyk.version} + + + + org.junit.jupiter + junit-jupiter-engine + ${org.junit.jupiter.version} + + + + org.apache.commons + commons-csv + ${commons-csv.version} + + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + 3.10.1 + + 17 + 17 + + + + + org.apache.maven.plugins + maven-surefire-plugin + 3.0.0-M7 + + + + diff --git a/schemata-api-ingress.yaml b/schemata-api-ingress.yaml new file mode 100644 index 0000000..e89ecf1 --- /dev/null +++ b/schemata-api-ingress.yaml @@ -0,0 +1,44 @@ +# Ingress 
to ensure we access through https + +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + name: schemata-api-ingress + namespace: schematalabs + annotations: + kubernetes.io/ingress.global-static-ip-name: api-schemata-static-ip + networking.gke.io/managed-certificates: schemata-api-cert + networking.gke.io/v1beta1.FrontendConfig: frontend-redirect-config + kubernetes.io/ingress.allow-http: "false" + kubernetes.io/ingress.class: "gce" +spec: + rules: + - http: + paths: + - path: /user + pathType: Prefix + backend: + service: + name: user-app-service + port: + number: 7080 + - path: /connect + pathType: Prefix + backend: + service: + name: connect-app-service + port: + number: 7081 + - path: /virima + pathType: Prefix + backend: + service: + name: virima-app-service + port: + number: 7084 + # - path: /analytics + # backend: + # serviceName: analytics-app + # servicePort: 7081 + +# keep adding other service paths \ No newline at end of file diff --git a/schemata-web-ingress.yaml b/schemata-web-ingress.yaml new file mode 100644 index 0000000..9770a07 --- /dev/null +++ b/schemata-web-ingress.yaml @@ -0,0 +1,19 @@ +# Ingress to ensure we access through https + +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + name: schemata-ui-ingress + namespace: schematalabs + annotations: + kubernetes.io/ingress.global-static-ip-name: ui-schemata-static-ip + networking.gke.io/managed-certificates: google-schemata-ui-cert + kubernetes.io/ingress.class: "gce" + networking.gke.io/v1beta1.FrontendConfig: frontend-redirect-config + kubernetes.io/ingress.allow-http: "false" +spec: + defaultBackend: + service: + name: opsweb-service + port: + number: 3000 \ No newline at end of file diff --git a/score.sh b/score.sh deleted file mode 100755 index 172db86..0000000 --- a/score.sh +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/bash -java -jar target/schemata-1.0.jar score -s=src/test/resources/descriptors/entities.desc -p=PROTOBUF $1 diff --git a/shared-lib/pom.xml 
b/shared-lib/pom.xml new file mode 100644 index 0000000..9507e8f --- /dev/null +++ b/shared-lib/pom.xml @@ -0,0 +1,123 @@ + + + + opsbeach + com.opsbeach + 1.0-SNAPSHOT + + 4.0.0 + shared-lib + shared-lib + + + org.springframework.boot + spring-boot-starter-security + + + org.springframework.boot + spring-boot-starter-web + + + org.springframework.boot + spring-boot-starter-test + + + org.springframework.boot + spring-boot-starter-mail + + + + org.springframework.security + spring-security-test + + + + com.nimbusds + nimbus-jose-jwt + + + io.springfox + springfox-swagger2 + + + io.springfox + springfox-swagger-ui + + + commons-io + commons-io + + + org.apache.avro + avro-compiler + + + org.apache.commons + commons-collections4 + + + javax.xml.bind + jaxb-api + + + + commons-codec + commons-codec + + + org.springframework.mobile + spring-mobile-device + + + org.projectlombok + lombok + + + + joda-time + joda-time + + + + io.micrometer + micrometer-registry-jmx + + + + org.junit.jupiter + junit-jupiter-api + + + + com.google.cloud + google-cloud-tasks + + + + com.google.cloud + google-cloud-storage + + + + org.apache.commons + commons-csv + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + + + org.apache.maven.plugins + maven-surefire-plugin + + + + + diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/SharedLibApplication.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/SharedLibApplication.java new file mode 100644 index 0000000..62adefb --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/SharedLibApplication.java @@ -0,0 +1,14 @@ +package com.opsbeach.sharedlib; + +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.scheduling.annotation.EnableAsync; + +@SpringBootApplication +@EnableAsync +public class SharedLibApplication { + + public static void main(String[] args) { + new 
SpringApplication(SharedLibApplication.class).run(); + } +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/controller/CacheController.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/controller/CacheController.java new file mode 100644 index 0000000..ecab8f1 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/controller/CacheController.java @@ -0,0 +1,30 @@ +package com.opsbeach.sharedlib.controller; + +import com.opsbeach.sharedlib.response.SuccessResponse; +//import com.opsbeach.sharedlib.service.CacheService; +import org.springframework.http.HttpStatus; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; + +@RestController +@RequestMapping("v1/cache") +public class CacheController { + + /*private final CacheService cacheService; + + public CacheController(CacheService cacheService) { + this.cacheService = cacheService; + } + + @GetMapping("/{hashKey}/{key}") + public SuccessResponse getByKey(@PathVariable("hashKey") String hashKey, @PathVariable("key") String key) { + return new SuccessResponse<>(cacheService.get(hashKey, key), HttpStatus.OK); + } + + @GetMapping("/{hashKey}") + public SuccessResponse getAll(@PathVariable("hashKey") String hashKey) { + return new SuccessResponse<>(cacheService.getAll(hashKey), HttpStatus.OK); + }*/ +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/AuthenticationResponseDto.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/AuthenticationResponseDto.java new file mode 100644 index 0000000..a433993 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/AuthenticationResponseDto.java @@ -0,0 +1,39 @@ +package com.opsbeach.sharedlib.dto; + +import com.fasterxml.jackson.annotation.JsonInclude; +import 
lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; + +/** + *

+ * Authentication Response Dto + *

> + */ +@Getter +@Setter +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class AuthenticationResponseDto { + private Boolean isOnboarded; + @JsonInclude(JsonInclude.Include.NON_NULL) + private Token token; + + @Getter + @Setter + @Builder + @NoArgsConstructor + @AllArgsConstructor + public static class Token { + @JsonInclude(JsonInclude.Include.NON_NULL) + private String accessToken; + @JsonInclude(JsonInclude.Include.NON_NULL) + private String refreshToken; + private String tokenType; + private Integer expiresIn; + private Boolean isChangePasswordRequired; + } +} diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/ClientDto.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/ClientDto.java new file mode 100644 index 0000000..715cc3f --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/ClientDto.java @@ -0,0 +1,24 @@ +package com.opsbeach.sharedlib.dto; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; + +/** + *

+ * Holds details of Tenant + *

+ */ +@Getter +@Setter +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class ClientDto { + private Long id; + private String name; + private String description; + private boolean isOnboarded; +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/ExceptionDetailDto.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/ExceptionDetailDto.java new file mode 100644 index 0000000..98af83b --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/ExceptionDetailDto.java @@ -0,0 +1,26 @@ +package com.opsbeach.sharedlib.dto; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; + +/** + *

+ * Exception Details Dto used to send as a general details. + *

+ */ +@Getter +@Setter +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class ExceptionDetailDto { + private long dateTime; + private String status; + private String message; + private String exception; + private String messageCode; + private Integer responseCode; +} diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/GenericResponseDto.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/GenericResponseDto.java new file mode 100644 index 0000000..81472d9 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/GenericResponseDto.java @@ -0,0 +1,21 @@ +package com.opsbeach.sharedlib.dto; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; + +/** + *

+ * Generic message response. + *

+ */ +@Getter +@Setter +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class GenericResponseDto { + private String status; +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/JweDto.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/JweDto.java new file mode 100644 index 0000000..044867a --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/JweDto.java @@ -0,0 +1,41 @@ +package com.opsbeach.sharedlib.dto; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.nimbusds.jose.shaded.json.JSONObject; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; + +import java.util.Map; + +/** + * + */ +@Getter +@Setter +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class JweDto { + private long userId; + private String client; + private String username; + private Boolean isRefresh; + + public static JSONObject asJsonObject(JweDto jweDto) throws JsonProcessingException { + var mapper = new ObjectMapper(); + var jweJsonString = mapper.writeValueAsString(jweDto); + var jweJsonObject = new JSONObject(); + jweJsonObject.put("jweDto", jweJsonString); + return jweJsonObject; + } + + public static JweDto fromJsonObject(Map jsonObject) throws JsonProcessingException { + var mapper = new ObjectMapper(); + var jweJsonString = String.valueOf(jsonObject.get("jweDto")); + return mapper.readValue(jweJsonString, JweDto.class); + } +} diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/JwtDto.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/JwtDto.java new file mode 100644 index 0000000..e10b9bd --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/JwtDto.java @@ -0,0 +1,37 @@ +package com.opsbeach.sharedlib.dto; + +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.datatype.jsr310.deser.LocalDateTimeDeserializer; +import com.fasterxml.jackson.datatype.jsr310.ser.LocalDateTimeSerializer; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; + +import java.io.Serializable; +import java.time.LocalDateTime; + +/** + *

+ * Jwt token. + *

+ */ +@Getter +@Setter +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class JwtDto implements Serializable { + private Long id; + private Long updatedBy; + private String publicKey; + private String privateKey; + private String accessToken; + private String refreshToken; + private Boolean isDeleted; + @JsonSerialize(using = LocalDateTimeSerializer.class) + @JsonDeserialize(using = LocalDateTimeDeserializer.class) + private LocalDateTime expireAt; +} diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/KeyStoreDto.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/KeyStoreDto.java new file mode 100644 index 0000000..f3286fa --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/KeyStoreDto.java @@ -0,0 +1,25 @@ +package com.opsbeach.sharedlib.dto; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; + +import java.security.PrivateKey; +import java.security.interfaces.RSAPublicKey; + +/** + *

+ * Keystore dto. + *

+ */ +@Getter +@Setter +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class KeyStoreDto { + private PrivateKey privateKey; + private RSAPublicKey publicKey; +} diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/LoginDto.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/LoginDto.java new file mode 100644 index 0000000..5f1c9a5 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/LoginDto.java @@ -0,0 +1,26 @@ +package com.opsbeach.sharedlib.dto; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Getter; +import lombok.NoArgsConstructor; + +/** + * + */ +@Getter +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class LoginDto { + private String username; + private String password; + + @Getter + @Builder + @NoArgsConstructor + @AllArgsConstructor + public static class SendOTP { + private String username; + } +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/PermissionDto.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/PermissionDto.java new file mode 100644 index 0000000..6e612de --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/PermissionDto.java @@ -0,0 +1,25 @@ +package com.opsbeach.sharedlib.dto; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; +import org.springframework.security.core.GrantedAuthority; + +/** + *

+ * Permission Dto used in api response. + *

+ */ +@Getter +@Setter +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class PermissionDto implements GrantedAuthority { + private Long id; + private String operation; + private String authority; + private Boolean isDeleted; +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/RefreshTokenDto.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/RefreshTokenDto.java new file mode 100644 index 0000000..8b7dcde --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/RefreshTokenDto.java @@ -0,0 +1,16 @@ +package com.opsbeach.sharedlib.dto; + +import lombok.Getter; +import lombok.Setter; + +/** + *

+ * Refresh token response dto. + *

+ */ +@Getter +@Setter +public class RefreshTokenDto { + private String accessToken; + private String refreshToken; +} diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/RegisterClientDto.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/RegisterClientDto.java new file mode 100644 index 0000000..1a461ac --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/RegisterClientDto.java @@ -0,0 +1,20 @@ +package com.opsbeach.sharedlib.dto; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; + +/** + * + */ +@Getter +@Setter +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class RegisterClientDto { + private String name; + private String description; +} diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/RegistrationDto.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/RegistrationDto.java new file mode 100644 index 0000000..06f6a5a --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/RegistrationDto.java @@ -0,0 +1,31 @@ +package com.opsbeach.sharedlib.dto; + +import com.fasterxml.jackson.annotation.JsonProperty; +import com.opsbeach.sharedlib.utils.OnboardStatus; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; + +@Getter +@Setter +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class RegistrationDto { + private String username; + private String password; + @JsonProperty("first_name") + private String firstName; + @JsonProperty("last_name") + private String lastName; + private String Gender; + private String mobile; + @JsonProperty("onboard_status") + private OnboardStatus onboardStatus; + private long clientId; + private String oldPassword; + @JsonProperty("company_name") + private String companyName; +} diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/RoleDto.java 
b/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/RoleDto.java new file mode 100644 index 0000000..a761eb4 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/RoleDto.java @@ -0,0 +1,28 @@ +package com.opsbeach.sharedlib.dto; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; + +import java.io.Serializable; +import java.util.List; + +/** + *

+ * Role details with associated permissions. + *

+ */ +@Getter +@Setter +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class RoleDto implements Serializable { + private long id; + private String name; + private Boolean isDeleted; + private String description; + private List permissions; +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/SessionDto.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/SessionDto.java new file mode 100644 index 0000000..0063065 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/SessionDto.java @@ -0,0 +1,28 @@ +package com.opsbeach.sharedlib.dto; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; + +/** + *

+ * Session Audit information. + *

+ */ +@Getter +@Setter +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class SessionDto { + private Long id; + private String uri; + private String type; + private Long userId; + private String action; + private String module; + private String ipAddress; + private Boolean successLogin; +} diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/UserDto.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/UserDto.java new file mode 100644 index 0000000..024e28c --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/dto/UserDto.java @@ -0,0 +1,71 @@ +package com.opsbeach.sharedlib.dto; + +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.datatype.jsr310.deser.LocalDateTimeDeserializer; +import com.fasterxml.jackson.datatype.jsr310.ser.LocalDateTimeSerializer; +import com.opsbeach.sharedlib.utils.Constants; +import lombok.Getter; +import lombok.Setter; +import org.springframework.security.core.GrantedAuthority; +import org.springframework.security.core.userdetails.UserDetails; + +import java.io.Serializable; +import java.time.LocalDateTime; +import java.util.Collection; +import java.util.List; + +/** + * + */ +@Getter +@Setter +public class UserDto implements UserDetails, Serializable { + private long id; + private String userType; + private String email; + private String username; + private String mobile; + private Collection authorities; + private Boolean isDeleted; + private transient List roles; + private long clientId; + private String onboardStatus; + private String timeZone; + private String accessToken; + + @Override + public Collection getAuthorities() { + return authorities; + } + + @Override + public String getPassword() { + return Constants.EMPTY; + } + + @Override + public String getUsername() { + return this.getMobile(); + } + + @Override + public boolean isAccountNonExpired() { + return !getIsDeleted(); + } 
+ + @Override + public boolean isAccountNonLocked() { + return !getIsDeleted(); + } + + @Override + public boolean isCredentialsNonExpired() { + return !getIsDeleted(); + } + + @Override + public boolean isEnabled() { + return !getIsDeleted(); + } +} diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/AlreadyExistException.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/AlreadyExistException.java new file mode 100644 index 0000000..435ca63 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/AlreadyExistException.java @@ -0,0 +1,8 @@ +package com.opsbeach.sharedlib.exception; + +public class AlreadyExistException extends ServicesException { + + public AlreadyExistException(final ErrorCode code, String message) { + super(code, message); + } +} diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/BadRequestException.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/BadRequestException.java new file mode 100644 index 0000000..839de19 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/BadRequestException.java @@ -0,0 +1,8 @@ +package com.opsbeach.sharedlib.exception; + +public class BadRequestException extends ServicesException { + + public BadRequestException(final ErrorCode code, String message) { + super(code, message); + } +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/BaseExceptionHandler.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/BaseExceptionHandler.java new file mode 100644 index 0000000..d2ad5f2 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/BaseExceptionHandler.java @@ -0,0 +1,185 @@ +package com.opsbeach.sharedlib.exception; + +import com.opsbeach.sharedlib.dto.UserDto; +import com.opsbeach.sharedlib.response.ResponseMessage; +import com.opsbeach.sharedlib.security.SecurityUtil; +import 
org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.security.access.AccessDeniedException; +import org.springframework.web.bind.annotation.ControllerAdvice; +import org.springframework.web.bind.annotation.ExceptionHandler; +import org.springframework.web.multipart.MultipartException; +import org.springframework.web.servlet.mvc.method.annotation.ResponseEntityExceptionHandler; +import org.springframework.web.servlet.mvc.support.RedirectAttributes; + +import java.util.Optional; + +/** + *

+ * Base exception handler used across the application. + *

+ */ +@ControllerAdvice +public class BaseExceptionHandler extends ResponseEntityExceptionHandler { + + private final ExceptionResponseCreator exceptionResponseCreator; + private final ResponseMessage responseMessage; + + @Autowired + public BaseExceptionHandler(ExceptionResponseCreator exceptionResponseCreator, ResponseMessage responseMessage) { + this.exceptionResponseCreator = exceptionResponseCreator; + this.responseMessage = responseMessage; + } + + @ExceptionHandler(value = UnAuthorizedException.class) + protected final ResponseEntity handleException(final UnAuthorizedException unAuthorizedException) { + return exceptionResponseCreator.getExceptionResponseEntity(HttpStatus.UNAUTHORIZED, unAuthorizedException.getErrorCode(), unAuthorizedException); + } + + @ExceptionHandler(value = UserExistException.class) + protected final ResponseEntity handleException(final UserExistException userExistException) { + return exceptionResponseCreator.getExceptionResponseEntity(HttpStatus.CONFLICT, userExistException.getErrorCode(), userExistException); + } + + @ExceptionHandler(value = AccessDeniedException.class) + protected final ResponseEntity handleException(final AccessDeniedException accessDeniedException) { + Optional optionalUserDetails = SecurityUtil.getOptionalUserDetails(); + if (optionalUserDetails.isPresent()) { + return exceptionResponseCreator.getExceptionResponseEntity(HttpStatus.FORBIDDEN, ErrorCode.INSUFFICIENT_PRIVILEGES, accessDeniedException, responseMessage.getErrorMessage(ErrorCode.INSUFFICIENT_PRIVILEGES)); + } else { + return exceptionResponseCreator.getExceptionResponseEntity(HttpStatus.UNAUTHORIZED, ErrorCode.UNAUTHORIZED_USER, accessDeniedException); + } + } + + @ExceptionHandler(value = CompletableFutureException.class) + protected final ResponseEntity handleException(final CompletableFutureException completableFutureException) { + return exceptionResponseCreator.getExceptionResponseEntity(HttpStatus.PRECONDITION_FAILED, 
completableFutureException.getErrorCode(), completableFutureException); + } + + /*@ExceptionHandler(value = ConstraintViolationException.class) + protected final ResponseEntity handleException(final ConstraintViolationException constraintViolationException) { + var errorMessage = new StringJoiner(Constants.COMMA); + constraintViolationException.getConstraintViolations().forEach(constraintViolation -> errorMessage.add(constraintViolation.getPropertyPath() + Constants.COLON + constraintViolation.getMessage())); + return exceptionResponseCreator.getExceptionResponseEntity(HttpStatus.BAD_REQUEST, ErrorCode.ARGUMENT_NOT_VALID_EXCEPTION, constraintViolationException, responseMessage.getErrorMessage(ErrorCode.ARGUMENT_NOT_VALID_EXCEPTION, errorMessage.toString())); + }*/ + + @ExceptionHandler(value = RecordNotFoundException.class) + protected final ResponseEntity handleException(final RecordNotFoundException recordNotFoundException) { + return exceptionResponseCreator.getExceptionResponseEntity(HttpStatus.PRECONDITION_FAILED, recordNotFoundException.getErrorCode(), recordNotFoundException); + } + + @ExceptionHandler(value = InvalidDataException.class) + protected final ResponseEntity handleException(final InvalidDataException invalidDataException) { + return exceptionResponseCreator.getExceptionResponseEntity(HttpStatus.PRECONDITION_FAILED, invalidDataException.getErrorCode(), invalidDataException); + } + + @ExceptionHandler(MultipartException.class) + protected final ResponseEntity handleException(MultipartException exception, RedirectAttributes redirectAttributes) { + return exceptionResponseCreator.getExceptionResponseEntity(HttpStatus.PRECONDITION_FAILED, ErrorCode.INVALID_FILE, exception); + } + + @ExceptionHandler(value = ClientEmptyException.class) + protected final ResponseEntity handleException(final ClientEmptyException tenantEmptyException) { + return exceptionResponseCreator.getExceptionResponseEntity(HttpStatus.PRECONDITION_FAILED, 
tenantEmptyException.getErrorCode(), tenantEmptyException); + } + + @ExceptionHandler(value = Exception.class) + protected final ResponseEntity handleException(final Exception exception) { + return exceptionResponseCreator.getExceptionResponseEntity(HttpStatus.INTERNAL_SERVER_ERROR, ErrorCode.GENERAL_ERROR, exception); + } + + @ExceptionHandler(value = BadRequestException.class) + protected final ResponseEntity handleException(final BadRequestException badRequestException) { + return exceptionResponseCreator.getExceptionResponseEntity(HttpStatus.BAD_REQUEST, ErrorCode.BAD_REQUEST, badRequestException); + } + + @ExceptionHandler(value = PreConditionException.class) + protected final ResponseEntity handleException(final PreConditionException preConditionException) { + return exceptionResponseCreator.getExceptionResponseEntity(HttpStatus.PRECONDITION_FAILED, preConditionException.getErrorCode(), preConditionException); + } + + @ExceptionHandler(value = EncodeException.class) + protected final ResponseEntity handleException(final EncodeException encodeException) { + return exceptionResponseCreator.getExceptionResponseEntity(HttpStatus.NOT_ACCEPTABLE, encodeException.getErrorCode(), encodeException); + } + + /*@Override + public ResponseEntity handleMethodArgumentNotValid(MethodArgumentNotValidException methodArgumentNotValidException, HttpHeaders headers, HttpStatus status, WebRequest request) { + var errorMessage = new StringJoiner(Constants.COMMA); + if (methodArgumentNotValidException.getBindingResult().getFieldErrors().isEmpty()) { + if (!methodArgumentNotValidException.getBindingResult().getAllErrors().isEmpty()) { + ObjectError error = methodArgumentNotValidException.getBindingResult().getAllErrors().get(0); + return exceptionResponseCreator.getExceptionResponseEntity(HttpStatus.PRECONDITION_FAILED, ErrorCode.ARGUMENT_NOT_VALID_EXCEPTION, methodArgumentNotValidException, responseMessage.getErrorMessage(ErrorCode.ARGUMENT_NOT_VALID_EXCEPTION, 
error.getDefaultMessage())); + } + } else { + for (FieldError error : methodArgumentNotValidException.getBindingResult().getFieldErrors()) { + errorMessage.add(error.getField() + Constants.COLON + error.getDefaultMessage()); + } + } + return exceptionResponseCreator.getExceptionResponseEntity(HttpStatus.BAD_REQUEST, ErrorCode.ARGUMENT_NOT_VALID_EXCEPTION, methodArgumentNotValidException, responseMessage.getErrorMessage(ErrorCode.ARGUMENT_NOT_VALID_EXCEPTION, errorMessage.toString())); + } + + @Override + protected ResponseEntity handleHttpMessageNotReadable(HttpMessageNotReadableException httpMessageNotReadableException, HttpHeaders headers, HttpStatus status, WebRequest request) { + var enumValidationServiceException = (EnumValidationException) httpMessageNotReadableException.getMostSpecificCause(); + String errorMessage; + if (enumValidationServiceException.getEnumValue().isEmpty()) { + errorMessage = enumValidationServiceException.getEnumName() + " must not be empty"; + } else { + errorMessage = enumValidationServiceException.getEnumValue() + " is an invalid " + enumValidationServiceException.getEnumName(); + } + return exceptionResponseCreator.getExceptionResponseEntity(HttpStatus.BAD_REQUEST, ErrorCode.ARGUMENT_NOT_VALID_EXCEPTION, httpMessageNotReadableException, responseMessage.getErrorMessage(ErrorCode.ARGUMENT_NOT_VALID_EXCEPTION, errorMessage)); + }*/ + + /*@ExceptionHandler(value = RecordMismatchException.class) + protected final ResponseEntity handleException(final RecordMismatchException recordMismatchException) { + return exceptionResponseCreator.getExceptionResponseEntity(HttpStatus.CONFLICT, recordMismatchException.getErrorCode(), recordMismatchException); + }*/ + + /*@ExceptionHandler(value = InvalidFileException.class) + protected final ResponseEntity handleException(final InvalidFileException invalidFileException) { + return exceptionResponseCreator.getExceptionResponseEntity(HttpStatus.PRECONDITION_FAILED, invalidFileException.getErrorCode(), 
invalidFileException); + }*/ + + /*@ExceptionHandler(value = IntegrityViolationException.class) + protected final ResponseEntity handleException(final IntegrityViolationException integrityViolationException) { + return exceptionResponseCreator.getExceptionResponseEntity(HttpStatus.PRECONDITION_FAILED, integrityViolationException.getErrorCode(), integrityViolationException); + }*/ + + /*@@ExceptionHandler(value = InvalidLoginModeException.class) + protected final ResponseEntity handleException(final InvalidLoginModeException invalidLoginModeException) { + return exceptionResponseCreator.getExceptionResponseEntity(HttpStatus.PRECONDITION_FAILED, invalidLoginModeException.getErrorCode(), invalidLoginModeException); + } + + ExceptionHandler(value = AlreadyLoggedOutException.class) + protected final ResponseEntity handleException(final AlreadyLoggedOutException alreadyLoggedOutException) { + return exceptionResponseCreator.getExceptionResponseEntity(HttpStatus.PERMANENT_REDIRECT, alreadyLoggedOutException.getErrorCode(), alreadyLoggedOutException); + } + + @ExceptionHandler(value = MissingCategoryBudgetException.class) + protected final ResponseEntity handleException(final MissingCategoryBudgetException missingCategoryBudgetException) { + return exceptionResponseCreator.getExceptionResponseEntity(HttpStatus.PRECONDITION_FAILED, missingCategoryBudgetException.getErrorCode(), missingCategoryBudgetException); + } + + @ExceptionHandler(value = TenantExistException.class) + protected final ResponseEntity handleException(final TenantExistException tenantExistException) { + return exceptionResponseCreator.getExceptionResponseEntity(HttpStatus.CONFLICT, tenantExistException.getErrorCode(), tenantExistException); + } + + @ExceptionHandler(value = ConnectionException.class) + protected final ResponseEntity handleException(final ConnectionException connectionException) { + return exceptionResponseCreator.getExceptionResponseEntity(HttpStatus.BAD_REQUEST, 
connectionException.getErrorCode(), connectionException); + }*/ + + /*@ExceptionHandler(value = UnprocessableEntity.class) + protected final ResponseEntity handleException(final UnProcessableEntity unProcessableEntity) { + return exceptionResponseCreator.getExceptionResponseEntity(HttpStatus.UNPROCESSABLE_ENTITY, unProcessableEntity.getErrorCode(), unProcessableEntity); + }*/ + + /*@ExceptionHandler(value = NotImplementedException.class) + protected final ResponseEntity handleException(final NotImplementedException functionalityNotImplementedException) { + return exceptionResponseCreator.getExceptionResponseEntity(HttpStatus.NOT_IMPLEMENTED, ErrorCode.GENERAL_ERROR, functionalityNotImplementedException); + }*/ + +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/ClientEmptyException.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/ClientEmptyException.java new file mode 100644 index 0000000..33f8c99 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/ClientEmptyException.java @@ -0,0 +1,8 @@ +package com.opsbeach.sharedlib.exception; + +public class ClientEmptyException extends ServicesException { + + public ClientEmptyException(final ErrorCode code, final String message) { + super(code, message); + } +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/CompletableFutureException.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/CompletableFutureException.java new file mode 100644 index 0000000..03e6ec6 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/CompletableFutureException.java @@ -0,0 +1,8 @@ +package com.opsbeach.sharedlib.exception; + +public class CompletableFutureException extends ServicesException { + + public CompletableFutureException(ErrorCode errorCode, final String message) { + super(errorCode, message); + } +} diff --git 
a/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/EncodeException.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/EncodeException.java new file mode 100644 index 0000000..331a55d --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/EncodeException.java @@ -0,0 +1,8 @@ +package com.opsbeach.sharedlib.exception; + +public class EncodeException extends ServicesException { + + public EncodeException(ErrorCode errorCode, final String message) { + super(errorCode, message); + } +} diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/EnumValidationException.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/EnumValidationException.java new file mode 100644 index 0000000..bf2435b --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/EnumValidationException.java @@ -0,0 +1,32 @@ +package com.opsbeach.sharedlib.exception; + +/** + *

+ * Exception carrying an invalid enum value and its enum name, thrown on enum validation failure. + *

+ */ +public class EnumValidationException extends Exception { + private String enumName; + private String enumValue; + + public EnumValidationException(String enumValue, String enumName) { + super(enumValue); + this.enumName = enumName; + this.enumValue = enumValue; + } + + public EnumValidationException(String enumValue, String enumName, Throwable cause) { + super(enumValue, cause); + this.enumName = enumName; + this.enumValue = enumValue; + } + + public String getEnumValue() { + return enumValue; + } + + public String getEnumName() { + return enumName; + } + +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/ErrorCode.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/ErrorCode.java new file mode 100644 index 0000000..4c1189e --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/ErrorCode.java @@ -0,0 +1,94 @@ +package com.opsbeach.sharedlib.exception; + +public enum ErrorCode { + + GENERAL_ERROR(5000), + ARGUMENT_NOT_VALID_EXCEPTION(5001), + INVALID_FILE(5002), + BAD_REQUEST(5003), + ACCESS_TOKEN_INVALID(5004), + UNABLE_VERIFY_ACCESS_TOKEN(5005), + INVALID_ACCESS_TOKEN(5006), + INVALID_USER(5007), + INVALID_PASSWORD(5008), + TOKEN_GENERATION_ERROR(5009), + INVALID_REFRESH_TOKEN(5010), + INVALID_OTP(5011), + INVALID_VERIFICATION_OTP_TOKEN(5012), + UNAUTHORIZED_USER(5013), + APP_2_APP_ERROR(5014), + UNAUTHORIZED_ONLY_OWNER(5015), + RECORD_NOT_FOUND(5016), + INVALID_JSON_PARSE(5017), + DATA_RESPONSE_ERROR(5018), + INSUFFICIENT_PRIVILEGES(5019), + SAVING_PREFERENCE_NOT_UPDATED(5020), + INPUT_STREAM_ERROR(5021), + MALFORMED_URL_ERROR(5022), + INVALID_TENANT(5023), + EMPTY_TENANT(5024), + HTTP_REST_PRE_CONDITION(5025), + USER_ALREADY_EXISTS(5026), + USER_NOT_FOUND(5027), + CSV_FILE_CREATION_ERROR(5028), + ALREADY_LOGGED_OUT_ERROR(5029), + CONNECTION_CLOSE_ERROR(5030), + BUDGET_ERROR(5031), + SPLIT_TRANSACTION_ERROR(5032), + MISSING_CATEGORY_BUDGET_ERROR(5033), + 
INVALID_LOGIN_MODE(5034), + COMPLETABLE_FUTURE(5035), + CLIENT_EXISTS(5036), + URL_ENCODE(5037), + RECORD_NOT_FOUND_ID(5038), + INVALID_ID(5039), + ALREADY_EXISTS(5040), + EMPTY_FIELD_JIRA(5041), + INVALID_ISSUE_TYPE(5042), + PARENT_KEY_NOT_SPECIFIED(5043), + EPIC_NAME_NOT_SPECIFIED(5044), + INVALID_TASK_TYPE(5045), + INVALID_SERVICE_TYPE(5046), + DOMIN_NOT_FOUND(5047), + INVALID_ACTION_TYPE(5048), + CHANNEL_NOT_FOUND(5049), + NOT_IN_CHANNEL(5050), + SOMETHING_WENT_WRONG(5051), + FILE_NOT_FOUND(5052), + INVALID_REPO_TYPE(5053), + TABLE_ALREADY_MODIFIED(5054), + TABLE_ALREADY_MODIFIED_IN_PR(5055), + TABLE_VALIDATION_ERROR(5056), + TABLE_NAME_CHANGE_NOT_ALLOWED_FROM_TO(5057), + TABLE_NAMESPACE_CHANGE_NOT_ALLOWED_FROM_TO(5058), + JSON_SCHEMA_ID_CHANGE_NOT_ALLOWED_FROM_TO(5059), + FIELD_DELETED_NOT_ALLOWED(5060), + FIELD_NAME_UPDATION_NOT_ALLOWED(5061), + FIELD_DATATYPE_UPDATION_NOT_ALLOWED_FOR_FIELD(5062), + FIELD_NAME_OF_TABLE_HAS_ERROR(5063), + UNION_FIELD_DATATYPE_UPDATION_NOT_ALLOWED_FOR_FIELD(5064), + ARRAY_FIELD_TYPE_UPDATION_NOT_ALLOWED_FOR_FIELD(5065), + MAP_FIELD_TYPE_UPDATION_NOT_ALLOWED_FOR_FIELD(5066), + SCHEMA_CHANGES_NOT_PRESENT_IN_FILE(5067), + SCHEMA_CHANGES_NAME_OF_FILE1_NOT_SAME_AS_SCHEMA_IN_FILE2(5068), + FIELD_VALIDATION_ERROR(5069), + TABLE_METADATA_MISSING_NAME(5070), + FIELD_METADATA_MISSING_NAME_IN_TABLE(5071), + UNRECOGNIZED_COLUMN_IN_CSV_FILE_NAME(5072), + REPO_ORGANIZATION_NOT_FOUND(5073), + OTP_EXPIRED(5074), + CLOUD_TASK_CREATION_ERROR(5075), + TABLE_OWNER_METADATA_CHANGED_FROM_TO_NOT_ALLOWD_TABLE_NAME(5076), + TABLE_DOMAIN_METADATA_CHANGED_FROM_TO_NOT_ALLOWD_TABLE_NAME(5077), + INVALID_PR_STATUS_OR_EMPTY(5078); + + private final int key; + + ErrorCode(int key) { + this.key = key; + } + + public int getKey() { + return this.key; + } +} diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/ErrorSource.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/ErrorSource.java new file mode 100644 index 
0000000..9858fe0 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/ErrorSource.java @@ -0,0 +1,131 @@ +package com.opsbeach.sharedlib.exception; + +import com.fasterxml.jackson.annotation.JsonProperty; +import com.nimbusds.jose.shaded.json.JSONObject; + +public class ErrorSource { + + @JsonProperty("error") + private Error error; + + public Error getError() { + return error; + } + + public void setError(Error error) { + this.error = error; + } + + public static class Error { + @JsonProperty("message") + private String message; + @JsonProperty("type") + private String type; + @JsonProperty("code") + private String code; + @JsonProperty("decline_code") + private String declineCode; + @JsonProperty("param") + private String param; + @JsonProperty("doc_url") + private String docUrl; + @JsonProperty("charge") + private String charge; + @JsonProperty("payment_intent") + private JSONObject paymentIntent; + @JsonProperty("payment_method") + private JSONObject paymentMethod; + @JsonProperty("source") + private String source; + @JsonProperty("setup_intent") + private JSONObject setupIntent; + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getMessage() { + return message; + } + + public void setMessage(String message) { + this.message = message; + } + + public String getCode() { + return code; + } + + public void setCode(String code) { + this.code = code; + } + + public String getDeclineCode() { + return declineCode; + } + + public void setDeclineCode(String declineCode) { + this.declineCode = declineCode; + } + + public String getParam() { + return param; + } + + public void setParam(String param) { + this.param = param; + } + + public String getDocUrl() { + return docUrl; + } + + public void setDocUrl(String docUrl) { + this.docUrl = docUrl; + } + + public String getCharge() { + return charge; + } + + public void setCharge(String charge) { + this.charge = charge; + } 
+ + public String getSource() { + return source; + } + + public void setSource(String source) { + this.source = source; + } + + public JSONObject getPaymentIntent() { + return paymentIntent; + } + + public void setPaymentIntent(JSONObject paymentIntent) { + this.paymentIntent = paymentIntent; + } + + public JSONObject getPaymentMethod() { + return paymentMethod; + } + + public void setPaymentMethod(JSONObject paymentMethod) { + this.paymentMethod = paymentMethod; + } + + public JSONObject getSetupIntent() { + return setupIntent; + } + + public void setSetupIntent(JSONObject setupIntent) { + this.setupIntent = setupIntent; + } + } +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/ExceptionDetail.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/ExceptionDetail.java new file mode 100644 index 0000000..ade0092 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/ExceptionDetail.java @@ -0,0 +1,140 @@ +package com.opsbeach.sharedlib.exception; + +import java.util.Objects; + +/** + *

+ * Generic error response. + *

+ */ +public class ExceptionDetail { + + private final long dateTime; + private final String status; + private final String message; + private final String exception; + private final String messageCode; + private final Integer responseCode; + + public ExceptionDetail(final long dateTime, final String status, final Integer errorCode, + final String message, final String messageCode, final String exception) { + this.status = status; + this.message = message; + this.dateTime = dateTime; + this.exception = exception; + this.responseCode = errorCode; + this.messageCode = messageCode; + } + + public String getStatus() { + return status; + } + + public Integer getResponseCode() { + return responseCode; + } + + public String getMessage() { + return message; + } + + public String getException() { + return exception; + } + + public String getMessageCode() { + return messageCode; + } + + public long getDateTime() { + return dateTime; + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return Boolean.TRUE; + } + if (o instanceof ExceptionDetail) { + final ExceptionDetail error = (ExceptionDetail) o; + return Objects.equals(getStatus(), error.getStatus()) + && getResponseCode().equals(error.getResponseCode()) + && Objects.equals(getMessage(), error.getMessage()) + && Objects.equals(getMessageCode(), error.getMessageCode()) + && Objects.equals(getException(), error.getException()); + } + return Boolean.FALSE; + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#hashCode() + */ + @Override + public int hashCode() { + return Objects.hash(getStatus(), getResponseCode(), getMessage(), getMessageCode(), getException()); + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + // noinspection StringBufferReplaceableByString + return new StringBuilder().append(getStatus()).toString(); + } + + public static class Builder { + + private long dateTime; + private String status; + private String 
message; + private String exception; + private String messageCode; + private Integer responseCode; + + public Builder() { + // Exception builder. + } + + /* + * Setters + */ + public Builder setDateTime(final long dateTime) { + this.dateTime = dateTime; + return this; + } + + public Builder setStatus(final String status) { + this.status = status; + return this; + } + + public Builder setResponseCode(final Integer responseCode) { + this.responseCode = responseCode; + return this; + } + + public Builder setMessage(final String message) { + this.message = message; + return this; + } + + public Builder setMessageCode(final String messageCode) { + this.messageCode = messageCode; + return this; + } + + public Builder setException(final String exception) { + this.exception = exception; + return this; + } + + public ExceptionDetail build() { + return new ExceptionDetail(this.dateTime, this.status, this.responseCode, this.message, this.messageCode, this.exception); + } + } +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/ExceptionResolver.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/ExceptionResolver.java new file mode 100644 index 0000000..4cadf44 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/ExceptionResolver.java @@ -0,0 +1,29 @@ +package com.opsbeach.sharedlib.exception; + +import com.opsbeach.sharedlib.utils.Constants; +import com.opsbeach.sharedlib.utils.DateUtil; +import com.opsbeach.sharedlib.utils.StringUtil; +import org.springframework.http.HttpStatus; +import org.springframework.stereotype.Component; + +import java.time.ZoneId; + +/** + *

+ * Component - resolves a generic exception response message into an ExceptionDetail. + *

+ */ +@Component +public class ExceptionResolver { + + public ExceptionDetail resolveError(final HttpStatus statusCode, final ErrorCode messageCode, final String message, final String error, final String gid) { + final var builder = new ExceptionDetail.Builder(); + builder.setMessage(message); + builder.setException(error); + builder.setResponseCode(statusCode.value()); + builder.setMessageCode(String.valueOf(messageCode.getKey())); + builder.setStatus(StringUtil.constructStringEmptySeparator(gid, Constants.COLON, Constants.FAILED)); + builder.setDateTime(DateUtil.currentDateTime().atZone(ZoneId.systemDefault()).toInstant().toEpochMilli()); + return builder.build(); + } +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/ExceptionResponseCreator.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/ExceptionResponseCreator.java new file mode 100644 index 0000000..e619d12 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/ExceptionResponseCreator.java @@ -0,0 +1,91 @@ +package com.opsbeach.sharedlib.exception; + +import com.opsbeach.sharedlib.utils.Constants; +import lombok.extern.slf4j.Slf4j; +import org.springframework.core.env.Environment; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.stereotype.Component; + +import java.io.PrintWriter; +import java.io.StringWriter; +import java.util.UUID; + +@Slf4j +@Component +public class ExceptionResponseCreator { + + private static final String MESSAGE_FOLLOWING_REASON = "Failed to process the request for the following reason :"; + private static final String MESSAGE_NEW_LINE = "\r\n\t"; + Environment environment; + ExceptionResolver exceptionResolver; + + public ExceptionResponseCreator(Environment environment, ExceptionResolver exceptionResolver) { + this.environment = environment; + this.exceptionResolver = exceptionResolver; + } + + /** + * Util Function to 
create response entity to be used by TribetterExceptionHandler + * and Authentication Filters unsuccessfulAuthentication overrides. + */ + public ResponseEntity getExceptionResponseEntity(HttpStatus status, ErrorCode errorCode, + Exception exception, String message) { + String uuid = getUUId(); + String errorKey = "OPSBEACH-" + errorCode.getKey(); + var errorAsString = getErrorAsString(exception); + var exceptionLog = String.join(" - ", errorKey, message); + var errorLog = String.join(": ", uuid, exceptionLog); + log.error(errorLog); + log.error("{} {} {} {} {} ", uuid, Constants.COLON, MESSAGE_FOLLOWING_REASON, errorAsString, errorKey); + var resolver = new ExceptionResolver(); + String errorStack = isProdProfile().equals(Boolean.TRUE) ? Constants.EMPTY : errorAsString; + return new ResponseEntity<>(resolver.resolveError(status, errorCode, message, errorStack, uuid), status); + } + + public ResponseEntity getExceptionResponseEntity(HttpStatus status, ErrorCode errorCode, + Exception exception) { + return getExceptionResponseEntity(status, errorCode, exception, exception.getMessage()); + } + + public String getErrorAsString(Exception exception) { + return getErrorStack(exception) + .replace(MESSAGE_NEW_LINE, " ") + .replace("\r", " ") + .replace("\t", " "); + } + + /** + *

+ * Generate a java.util.UUID string with the hyphens removed. + *

+ * + * @return UUID as string + */ + public String getUUId() { + return UUID.randomUUID().toString().replace(Constants.HYPHEN, Constants.EMPTY); + } + + /** + *

+ * Convert an exception stack trace into a string and return it. + *

+ * + * @param ex - Used to convert as string + * @return Stack value as string + */ + public String getErrorStack(Exception ex) { + var errors = new StringWriter(); + ex.printStackTrace(new PrintWriter(errors)); + return errors.toString(); + } + + private Boolean isProdProfile() { + for (String profile : environment.getActiveProfiles()) { + if (profile.equals(Constants.PROFILE_ACTIVE_PRODUCTION)) { + return Boolean.TRUE; + } + } + return Boolean.FALSE; + } +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/FileNotFoundException.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/FileNotFoundException.java new file mode 100644 index 0000000..ed036ef --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/FileNotFoundException.java @@ -0,0 +1,8 @@ +package com.opsbeach.sharedlib.exception; + +public class FileNotFoundException extends ServicesException { + + public FileNotFoundException(ErrorCode code, String message) { + super(code, message); + } +} diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/GoogleCloudException.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/GoogleCloudException.java new file mode 100644 index 0000000..89725d8 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/GoogleCloudException.java @@ -0,0 +1,8 @@ +package com.opsbeach.sharedlib.exception; + +public class GoogleCloudException extends ServicesException { + + public GoogleCloudException(ErrorCode errorCode, String message) { + super(errorCode, message); + } +} diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/InvalidDataException.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/InvalidDataException.java new file mode 100644 index 0000000..737c07b --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/InvalidDataException.java @@ -0,0 +1,13 @@ +package 
com.opsbeach.sharedlib.exception; + +/** + *

+ * User-defined exception thrown when invalid data is encountered. + *

+ */ +public class InvalidDataException extends ServicesException { + + public InvalidDataException(final ErrorCode code, final String message) { + super(code, message); + } +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/LoggedOutException.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/LoggedOutException.java new file mode 100644 index 0000000..5185c07 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/LoggedOutException.java @@ -0,0 +1,8 @@ +package com.opsbeach.sharedlib.exception; + +public class LoggedOutException extends ServicesException { + + public LoggedOutException(final ErrorCode code, final String message) { + super(code, message); + } +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/PreConditionException.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/PreConditionException.java new file mode 100644 index 0000000..bd764f6 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/PreConditionException.java @@ -0,0 +1,8 @@ +package com.opsbeach.sharedlib.exception; + +public class PreConditionException extends ServicesException { + + public PreConditionException(ErrorCode errorCode, final String message) { + super(errorCode, message); + } +} diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/RecordNotFoundException.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/RecordNotFoundException.java new file mode 100644 index 0000000..5b1d86c --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/RecordNotFoundException.java @@ -0,0 +1,8 @@ +package com.opsbeach.sharedlib.exception; + +public class RecordNotFoundException extends ServicesException { + + public RecordNotFoundException(final ErrorCode code, final String message) { + super(code, message); + } +} \ No newline at end of file diff --git 
a/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/SchemaNotFoundException.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/SchemaNotFoundException.java new file mode 100644 index 0000000..0e5f0f6 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/SchemaNotFoundException.java @@ -0,0 +1,24 @@ +package com.opsbeach.sharedlib.exception; + +public class SchemaNotFoundException extends RuntimeException { + public SchemaNotFoundException() { + } + + public SchemaNotFoundException(String message) { + super(message); + } + + public SchemaNotFoundException(String message, Throwable cause) { + super(message, cause); + } + + public SchemaNotFoundException(Throwable cause) { + super(cause); + } + + public SchemaNotFoundException(String message, Throwable cause, boolean enableSuppression, + boolean writableStackTrace) { + super(message, cause, enableSuppression, writableStackTrace); + } + } + diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/SchemaParserException.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/SchemaParserException.java new file mode 100644 index 0000000..73e8c41 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/SchemaParserException.java @@ -0,0 +1,11 @@ +package com.opsbeach.sharedlib.exception; + +public class SchemaParserException extends RuntimeException { + public SchemaParserException(String message) { + super(message); + } + + public SchemaParserException(String message, Throwable cause) { + super(message, cause); + } +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/ServicesException.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/ServicesException.java new file mode 100644 index 0000000..a642405 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/ServicesException.java @@ -0,0 +1,27 @@ +package com.opsbeach.sharedlib.exception; + +/** + *

+ * Common service exception that attaches the error code + and reads the message from properties using the message validator. + *

+ */ +public class ServicesException extends RuntimeException { + + private final ErrorCode errorCode; + private final String message; + + public ServicesException(final ErrorCode code, final String message) { + this.errorCode = code; + this.message = message; + } + + public ErrorCode getErrorCode() { + return errorCode; + } + + @Override + public String getMessage() { + return message; + } +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/UnAuthorizedException.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/UnAuthorizedException.java new file mode 100644 index 0000000..848b5a3 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/UnAuthorizedException.java @@ -0,0 +1,29 @@ +package com.opsbeach.sharedlib.exception; + +import org.springframework.security.core.AuthenticationException; + +public class UnAuthorizedException extends AuthenticationException { + ErrorCode errorCode; + + public UnAuthorizedException(ErrorCode errorCode, final String message) { + super(message); + this.errorCode = errorCode; + } + + public UnAuthorizedException(final ErrorCode code) { + super(null); + this.errorCode = errorCode; + } + + public UnAuthorizedException(String msg, Throwable t) { + super(msg, t); + } + + public ErrorCode getErrorCode() { + return errorCode; + } + + public void setErrorCode(ErrorCode errorCode) { + this.errorCode = errorCode; + } +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/UserExistException.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/UserExistException.java new file mode 100644 index 0000000..0317756 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/exception/UserExistException.java @@ -0,0 +1,8 @@ +package com.opsbeach.sharedlib.exception; + +public class UserExistException extends ServicesException { + + public UserExistException(final ErrorCode code, final String message) 
{ + super(code, message); + } +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/repository/CacheRepository.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/repository/CacheRepository.java new file mode 100644 index 0000000..7d0129f --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/repository/CacheRepository.java @@ -0,0 +1,82 @@ +package com.opsbeach.sharedlib.repository; + +import com.opsbeach.sharedlib.utils.Constants; +import com.opsbeach.sharedlib.utils.StringUtil; +import lombok.extern.slf4j.Slf4j; +//import org.springframework.data.redis.RedisConnectionFailureException; +//import org.springframework.data.redis.connection.jedis.JedisConnectionFactory; +//import org.springframework.data.redis.core.HashOperations; +//import org.springframework.data.redis.core.RedisTemplate; +import org.springframework.stereotype.Repository; +import org.springframework.util.CollectionUtils; + +import java.util.List; +import java.util.Objects; + +@Slf4j +@Repository +public class CacheRepository { + + /* private final RedisTemplate redisTemplate; + private final HashOperations hashOperations; + + public CacheRepository(RedisTemplate redisTemplate) { + this.redisTemplate = redisTemplate; + this.hashOperations = this.redisTemplate.opsForHash(); + } + + public Boolean save(String hashKey, String key, Object value) { + hashOperations.put(hashKey, key, value); + closeConnection(redisTemplate); + return Boolean.TRUE; + } + + public Object get(String hashKey, String key) { + Object object = hashOperations.get(hashKey, key); + closeConnection(redisTemplate); + return object; + } + + public Boolean delete(String hashKey) { + var deletionFlag = redisTemplate.delete(hashKey); + closeConnection(redisTemplate); + return deletionFlag; + } + + public List getAll(String hashKey) { + List cacheValues = hashOperations.values(hashKey); + closeConnection(redisTemplate); + return cacheValues; + } + + private void 
closeConnection(RedisTemplate stringRedisTemplate) { + try { + JedisConnectionFactory connectionFactory = (JedisConnectionFactory) stringRedisTemplate.getConnectionFactory(); + Objects.requireNonNull(connectionFactory).getConnection().close(); + connectionFactory.destroy(); + log.info("Redis Connection has been closed"); + } catch (RedisConnectionFailureException e) { + log.info("Connection closed already"); + } finally { + closeClients(redisTemplate); + } + } + + private void closeClients(RedisTemplate stringRedisTemplate) { + try { + if (!CollectionUtils.isEmpty(stringRedisTemplate.getClientList())) { + stringRedisTemplate.getClientList().remove(0); + stringRedisTemplate.getClientList().remove(1); + stringRedisTemplate.getClientList().forEach(redisClientInfo -> { + String address = redisClientInfo.getAddressPort(); + if (StringUtil.isEmpty(address).equals(Boolean.FALSE)) { + String[] addressList = address.split(Constants.COMMA); + stringRedisTemplate.killClient(addressList[0], Integer.parseInt(addressList[1])); + } + }); + } + } catch (Exception e) { + log.error("Unable to close redis cache client connection"); + } + }*/ +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/response/ResponseMessage.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/response/ResponseMessage.java new file mode 100644 index 0000000..6faaa58 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/response/ResponseMessage.java @@ -0,0 +1,60 @@ +package com.opsbeach.sharedlib.response; + +import com.opsbeach.sharedlib.exception.ErrorCode; +import com.opsbeach.sharedlib.utils.YamlPropertySourceFactory; +import lombok.Getter; +import lombok.Setter; +import org.springframework.boot.context.properties.ConfigurationProperties; +import org.springframework.boot.context.properties.EnableConfigurationProperties; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.PropertySource; + 
+import java.text.MessageFormat; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +@Getter +@Setter +@Configuration +@EnableConfigurationProperties +@ConfigurationProperties("response-messages") +@PropertySource(value = "classpath:application-messages.yaml", factory = YamlPropertySourceFactory.class) +public class ResponseMessage { + + Map success = new HashMap<>(); + + Map error = new HashMap<>(); + + public String getErrorMessage(ErrorCode code) { + return getError().get(Integer.toString(code.getKey())); + } + + public String getErrorMessage(ErrorCode code, String arg) { + return MessageFormat.format(getErrorMessage(code), arg); + } + + public String getErrorMessage(ErrorCode code, List args) { + return MessageFormat.format(getErrorMessage(code), args); + } + + public String getErrorMessage(ErrorCode code, String... args) { + return MessageFormat.format(getErrorMessage(code), args); + } + + public String getErrorMessage(ErrorCode code, Long args) { + return MessageFormat.format(getErrorMessage(code), args); + } + + public String getErrorMessage(ErrorCode code, Integer args) { + return MessageFormat.format(getErrorMessage(code), args); + } + + public String getSuccessMessage(SuccessCode code) { + return getSuccess().get(Integer.toString(code.getKey())); + } + + public String getSuccessMessage(SuccessCode code, String args) { + return MessageFormat.format(getSuccessMessage(code), args); + } +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/response/SuccessCode.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/response/SuccessCode.java new file mode 100644 index 0000000..ee90e9e --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/response/SuccessCode.java @@ -0,0 +1,21 @@ +package com.opsbeach.sharedlib.response; + +public enum SuccessCode { + + CREATED(1001), + UPDATED(1002), + FETCHED(1003), + DELETED(1004), + FETCHED_ALL_DATA(1005), + OTP_SENT_SUCCESSFULLY(1006); + + private 
final int key; + + SuccessCode(int key) { + this.key = key; + } + + public int getKey() { + return this.key; + } +} diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/response/SuccessMessage.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/response/SuccessMessage.java new file mode 100644 index 0000000..1f4bec3 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/response/SuccessMessage.java @@ -0,0 +1,34 @@ +package com.opsbeach.sharedlib.response; + +import com.opsbeach.sharedlib.utils.Constants; +import lombok.Getter; +import lombok.Setter; + +/** + *

+ * Generic success message. + *

+ * @param + */ +@Getter +@Setter +public class SuccessMessage { + private Object entity; + private String status; + private String message; + private Integer responseCode; + + /** + *

Success message.

+ * + * @param message - Message to be displayed to the user is passed in this attribute. + * @param entity - object is passed in this attribute. + * @param responseCode - response code is passed in this attribute. + */ + public SuccessMessage(String message, Object entity, Integer responseCode) { + this.setMessage(message); + this.setEntity(entity); + this.setResponseCode(responseCode); + this.setStatus(Constants.SUCCESS); + } +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/response/SuccessResponse.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/response/SuccessResponse.java new file mode 100644 index 0000000..9d6917a --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/response/SuccessResponse.java @@ -0,0 +1,41 @@ +package com.opsbeach.sharedlib.response; + +import com.opsbeach.sharedlib.utils.Constants; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; + +/** + *

+ * Generic success response + *

+ * @param + */ +public class SuccessResponse extends ResponseEntity { + + private SuccessResponse(Object entity, HttpStatus responseCode) { + this(Constants.OK, entity, responseCode); + } + + private SuccessResponse(String message, Object entity, HttpStatus httpStatus) { + super(new SuccessMessage(message, entity, httpStatus.value()), httpStatus); + } + + public static SuccessResponse statusOk(Object entity) { + return new SuccessResponse<>(entity, HttpStatus.OK); + } + + public static SuccessResponse statusCreated(Object entity) { + return new SuccessResponse<>(entity, HttpStatus.CREATED); + } + + public static SuccessResponse statusNoContent(Object entity) { + return new SuccessResponse<>(entity, HttpStatus.NO_CONTENT); + } + + public static SuccessResponse statusAccepted(Object entity) { + return new SuccessResponse<>(entity, HttpStatus.ACCEPTED); + } + public static SuccessResponse of(String message, Object entity, HttpStatus httpStatus) { + return new SuccessResponse<>(message, entity, httpStatus); + } +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/security/ApplicationConfig.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/security/ApplicationConfig.java new file mode 100644 index 0000000..5f1b241 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/security/ApplicationConfig.java @@ -0,0 +1,30 @@ +package com.opsbeach.sharedlib.security; + +import com.opsbeach.sharedlib.utils.YamlPropertySourceFactory; +import lombok.Getter; +import lombok.Setter; +import org.springframework.boot.context.properties.ConfigurationProperties; +import org.springframework.boot.context.properties.EnableConfigurationProperties; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.PropertySource; + +import java.util.HashMap; +import java.util.Map; + +@Configuration +@Getter +@Setter +@EnableConfigurationProperties +@ConfigurationProperties(prefix = "spring") 
+@PropertySource(value = "classpath:application.yaml", factory = YamlPropertySourceFactory.class) +public class ApplicationConfig { + private Integer threadingFutureTimeout; + //private Map redis = new HashMap<>(); + private Map user = new HashMap<>(); + + private Map github = new HashMap<>(); + + private Map gcloud = new HashMap<>(); + + private Map smtp = new HashMap<>(); +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/security/AuthenticationProvider.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/security/AuthenticationProvider.java new file mode 100644 index 0000000..c116a40 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/security/AuthenticationProvider.java @@ -0,0 +1,133 @@ +package com.opsbeach.sharedlib.security; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.nimbusds.jose.JOSEException; +import com.opsbeach.sharedlib.service.JwtTokenService; +import com.opsbeach.sharedlib.utils.Constants; +import com.opsbeach.sharedlib.dto.UserDto; +import com.opsbeach.sharedlib.exception.ErrorCode; +import com.opsbeach.sharedlib.exception.RecordNotFoundException; +import com.opsbeach.sharedlib.exception.UnAuthorizedException; +import com.opsbeach.sharedlib.response.ResponseMessage; +import com.opsbeach.sharedlib.service.App2AppService; +import com.opsbeach.sharedlib.utils.DateUtil; +import com.opsbeach.sharedlib.utils.StringUtil; +import lombok.extern.slf4j.Slf4j; +import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; +import org.springframework.security.authentication.dao.AbstractUserDetailsAuthenticationProvider; +import org.springframework.security.core.GrantedAuthority; +import org.springframework.security.core.userdetails.UserDetails; +import org.springframework.stereotype.Component; +import org.springframework.util.CollectionUtils; + +import java.text.ParseException; +import java.util.List; +import java.util.Objects; +import 
java.util.stream.Collectors; + +@Slf4j +@Component +public class AuthenticationProvider extends AbstractUserDetailsAuthenticationProvider { + private static final String USER = "user"; + private final HMac hMac; + private final JweLibrary jweLibrary; + private final JwtTokenService jwtService; + private final App2AppService app2AppService; + private final ResponseMessage responseMessage; + private final ApplicationConfig applicationConfig; + private final RSAMechanism rsaMechanism; + + public AuthenticationProvider(ResponseMessage responseMessage, JweLibrary jweLibrary, HMac hMac, JwtTokenService jwtService, + App2AppService app2AppService, ApplicationConfig applicationConfig, RSAMechanism rsaMechanism) { + this.hMac = hMac; + this.jweLibrary = jweLibrary; + this.jwtService = jwtService; + this.app2AppService = app2AppService; + this.responseMessage = responseMessage; + this.applicationConfig = applicationConfig; + this.rsaMechanism = rsaMechanism; + } + + @Override + protected void additionalAuthenticationChecks(UserDetails userDetails, UsernamePasswordAuthenticationToken usernamePasswordAuthenticationToken) throws org.springframework.security.core.AuthenticationException { + var userDto = (UserDto) userDetails; + try { + if (Objects.isNull(userDto)) { + throw new UnAuthorizedException(ErrorCode.ACCESS_TOKEN_INVALID, responseMessage.getErrorMessage(ErrorCode.ACCESS_TOKEN_INVALID)); + } + if (CollectionUtils.isEmpty(userDto.getRoles())) { + throw new UnAuthorizedException(ErrorCode.ACCESS_TOKEN_INVALID, responseMessage.getErrorMessage(ErrorCode.ACCESS_TOKEN_INVALID)); + } + List authorities = userDto.getRoles().stream().flatMap(role -> role.getPermissions().stream()).collect(Collectors.toList()); + userDto.setAuthorities(authorities); + } catch (RecordNotFoundException recordNotFound) { + throw new UnAuthorizedException(ErrorCode.ACCESS_TOKEN_INVALID, responseMessage.getErrorMessage(ErrorCode.ACCESS_TOKEN_INVALID)); + } + } + + @Override + protected UserDetails 
retrieveUser(String userName, UsernamePasswordAuthenticationToken usernamePasswordAuthenticationToken) throws org.springframework.security.core.AuthenticationException { + var authenticationToken = (AuthenticationToken) usernamePasswordAuthenticationToken; + String accessToken = (String) authenticationToken.getCredentials(); + try { + //var jwtDto = (JwtDto) cacheService.get(accessToken, Constants.TOKEN); + var jwtDto = jwtService.getByAccessToken(accessToken); + if (Objects.isNull(jwtDto)) { + throw new UnAuthorizedException(ErrorCode.UNAUTHORIZED_USER, responseMessage.getErrorMessage(ErrorCode.UNAUTHORIZED_USER)); + } + if (!accessToken.equals(jwtDto.getAccessToken())) { + throw new UnAuthorizedException(ErrorCode.UNAUTHORIZED_USER, responseMessage.getErrorMessage(ErrorCode.UNAUTHORIZED_USER)); + } + var jweDto = jweLibrary.decrypt(accessToken, RSAKeyGen.getStringToKeys(jwtDto.getPublicKey(), jwtDto.getPrivateKey())); + var client = jweDto.getClient(); + if (jweDto.getIsRefresh().equals(Boolean.TRUE)) { + throw new UnAuthorizedException(ErrorCode.INVALID_ACCESS_TOKEN, responseMessage.getErrorMessage(ErrorCode.INVALID_ACCESS_TOKEN)); + } + UserDto userDto = getUser(jweDto.getUsername(), rsaMechanism.encrypt(String.valueOf(client))); + if (Objects.isNull(userDto)) { + throw new UnAuthorizedException(ErrorCode.UNAUTHORIZED_USER, responseMessage.getErrorMessage(ErrorCode.UNAUTHORIZED_USER)); + } + if (userDto.getIsDeleted().equals(Boolean.TRUE)) { + throw new UnAuthorizedException(ErrorCode.INVALID_USER, responseMessage.getErrorMessage(ErrorCode.INVALID_USER)); + } + if (DateUtil.validateExpiration(jwtDto.getExpireAt()).equals(Boolean.TRUE)) { + throw new UnAuthorizedException(ErrorCode.UNAUTHORIZED_USER, responseMessage.getErrorMessage(ErrorCode.UNAUTHORIZED_USER)); + } + userDto.setAccessToken(accessToken); + return userDto; + } catch (ParseException | JOSEException | JsonProcessingException e) { + throw new UnAuthorizedException(ErrorCode.INVALID_ACCESS_TOKEN, 
responseMessage.getErrorMessage(ErrorCode.INVALID_ACCESS_TOKEN)); + } + } + + /** + *

+ * Returns true if this AuthenticationProvider supports the indicated + * Authentication object. + *

+ * + * @param authentication - Authentication object. + * @return true if the implementation can more closely evaluate the Authentication class + * presented + */ + @Override + public boolean supports(Class authentication) { + return UsernamePasswordAuthenticationToken.class.isAssignableFrom(authentication); + } + + /*private UserDto getUser(String username, String clientId) { + log.info("Fetching the Details of the User [{}]", username); + var getUserDetailsUrl = CommonUtil.constructStringEmptySeparator(applicationConfig.getUserService().get(Constants.BASE_URL), applicationConfig.getUserService().get(USER_DETAIL_URL), "?searchValue=", username, "&type=", type); + return app2AppService.httpGet(getUserDetailsUrl, app2AppService.setHeaders(App2AppService.formClientHeader(tenant), null), UserDto.class); + }*/ + + private UserDto getUser(String username, String client) { + log.info("Fetching the Details of the User [{}]", username); + String getUserDetailsUrl = StringUtil.constructStringEmptySeparator(getUserServiceBaseUrl(), applicationConfig.getUser().get(USER), "?username=", username); + return app2AppService.httpGet(getUserDetailsUrl, app2AppService.setHeaders(App2AppService.clientHeader(client), null), UserDto.class); + } + + private String getUserServiceBaseUrl() { + return applicationConfig.getUser().get(Constants.BASE_URL); + } +} diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/security/AuthenticationToken.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/security/AuthenticationToken.java new file mode 100644 index 0000000..0c78a3c --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/security/AuthenticationToken.java @@ -0,0 +1,17 @@ +package com.opsbeach.sharedlib.security; + +import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; +import org.springframework.security.core.GrantedAuthority; + +import java.util.Collection; + +public class AuthenticationToken extends 
UsernamePasswordAuthenticationToken { + + public AuthenticationToken(Object principal, Object credentials) { + super(principal, credentials); + } + + public AuthenticationToken(Object principal, Object credentials, Collection authorities) { + super(principal, credentials, authorities); + } +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/security/BaseAuthentication.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/security/BaseAuthentication.java new file mode 100644 index 0000000..1c26679 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/security/BaseAuthentication.java @@ -0,0 +1,61 @@ +package com.opsbeach.sharedlib.security; + +import org.springframework.security.core.Authentication; +import org.springframework.security.core.GrantedAuthority; +import org.springframework.security.core.userdetails.UserDetails; + +import java.util.Collection; +import java.util.Collections; + +/** + *

+ * Basic authentication entity + *

+ */ +public class BaseAuthentication implements Authentication { + + private final UserDetails userDetails; + + public BaseAuthentication(UserDetails userDetails) { + this.userDetails = userDetails; + } + + public static BaseAuthentication getInstance(UserDetails userDetails) { + return new BaseAuthentication(userDetails); + } + + @Override + public Collection getAuthorities() { + return Collections.emptyList(); + } + + @Override + public Object getCredentials() { + return null; + } + + @Override + public Object getDetails() { + return null; + } + + @Override + public Object getPrincipal() { + return userDetails; + } + + @Override + public boolean isAuthenticated() { + return false; + } + + @Override + public void setAuthenticated(boolean b) throws IllegalArgumentException { + //Override method + } + + @Override + public String getName() { + return null; + } +} diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/security/CacheBeanConfig.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/security/CacheBeanConfig.java new file mode 100644 index 0000000..d41e52b --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/security/CacheBeanConfig.java @@ -0,0 +1,34 @@ +package com.opsbeach.sharedlib.security; + +import com.opsbeach.sharedlib.utils.Constants; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +//import org.springframework.data.redis.connection.RedisStandaloneConfiguration; +//import org.springframework.data.redis.connection.jedis.JedisConnectionFactory; +//import org.springframework.data.redis.core.RedisTemplate; +//import org.springframework.data.redis.serializer.GenericToStringSerializer; + +@Configuration +public class CacheBeanConfig { + + /*private final ApplicationConfig applicationConfig; + + public CacheBeanConfig(ApplicationConfig applicationConfig) { + this.applicationConfig = applicationConfig; + } + + @Bean + JedisConnectionFactory jedisConnectionFactory() 
{ + var redisStandaloneConfiguration = new RedisStandaloneConfiguration(applicationConfig.getRedis().get(Constants.HOST), Integer.parseInt(applicationConfig.getRedis().get(Constants.PORT))); + return new JedisConnectionFactory(redisStandaloneConfiguration); + } + + @Bean + public RedisTemplate redisTemplate() { + RedisTemplate redisTemplate = new RedisTemplate<>(); + redisTemplate.setConnectionFactory(jedisConnectionFactory()); + redisTemplate.setKeySerializer(new GenericToStringSerializer<>(Object.class)); + redisTemplate.setValueSerializer(new GenericToStringSerializer<>(Object.class)); + return redisTemplate; + }*/ +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/security/EmailConfig.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/security/EmailConfig.java new file mode 100644 index 0000000..ab709b3 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/security/EmailConfig.java @@ -0,0 +1,35 @@ +package com.opsbeach.sharedlib.security; + +import java.util.Properties; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.Configuration; +import org.springframework.mail.javamail.JavaMailSender; +import org.springframework.mail.javamail.JavaMailSenderImpl; + +@Configuration +@ComponentScan +public class EmailConfig { + + @Autowired + private ApplicationConfig applicationConfig; + + @Bean + public JavaMailSender getJavaMailSender() { + JavaMailSenderImpl mailSender = new JavaMailSenderImpl(); + mailSender.setHost("smtp.gmail.com"); + mailSender.setPort(587); + mailSender.setUsername(applicationConfig.getSmtp().get("username")); + mailSender.setPassword(applicationConfig.getSmtp().get("password")); + + Properties props = mailSender.getJavaMailProperties(); + props.put("mail.transport.protocol", "smtp"); + props.put("mail.smtp.auth", 
"true"); + props.put("mail.smtp.starttls.enable", "true"); + props.put("mail.debug", "true"); + + return mailSender; + } +} diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/security/HMac.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/security/HMac.java new file mode 100644 index 0000000..313dfb9 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/security/HMac.java @@ -0,0 +1,41 @@ +package com.opsbeach.sharedlib.security; + +import com.opsbeach.sharedlib.exception.ErrorCode; +import com.opsbeach.sharedlib.exception.UnAuthorizedException; +import com.opsbeach.sharedlib.response.ResponseMessage; +import org.springframework.stereotype.Component; + +import javax.crypto.Mac; +import javax.crypto.spec.SecretKeySpec; +import java.nio.charset.StandardCharsets; +import java.util.Base64; + +@Component +public class HMac { + + private static final String H_MAC_SHA256 = "HmacSHA256"; + + private final ResponseMessage responseMessage; + + public HMac(ResponseMessage responseMessage) { + this.responseMessage = responseMessage; + } + + private byte[] calcHmacSha256(byte[] secretKey, byte[] message) { + byte[] hmacSha256; + try { + var mac = Mac.getInstance(H_MAC_SHA256); + var secretKeySpec = new SecretKeySpec(secretKey, H_MAC_SHA256); + mac.init(secretKeySpec); + hmacSha256 = mac.doFinal(message); + } catch (Exception e) { + throw new UnAuthorizedException(ErrorCode.INVALID_ACCESS_TOKEN, responseMessage.getErrorMessage(ErrorCode.INVALID_ACCESS_TOKEN)); + } + return hmacSha256; + } + + public String getPayload(String request, String secret) { + byte[] hmacSha256 = calcHmacSha256(secret.getBytes(StandardCharsets.UTF_8), request.getBytes(StandardCharsets.UTF_8)); + return Base64.getEncoder().encodeToString(hmacSha256); + } +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/security/JweLibrary.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/security/JweLibrary.java new file mode 100644 index 
0000000..0dfc628 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/security/JweLibrary.java @@ -0,0 +1,38 @@ +package com.opsbeach.sharedlib.security; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.nimbusds.jose.EncryptionMethod; +import com.nimbusds.jose.JOSEException; +import com.nimbusds.jose.JWEAlgorithm; +import com.nimbusds.jose.JWEHeader; +import com.nimbusds.jose.JWEObject; +import com.nimbusds.jose.Payload; +import com.nimbusds.jose.crypto.RSADecrypter; +import com.nimbusds.jose.crypto.RSAEncrypter; +import com.opsbeach.sharedlib.dto.JweDto; +import com.opsbeach.sharedlib.dto.KeyStoreDto; +import org.springframework.stereotype.Component; + +import java.security.interfaces.RSAPublicKey; +import java.text.ParseException; + +@Component +public class JweLibrary { + private static final JWEAlgorithm ALGORITHM = JWEAlgorithm.RSA_OAEP_256; + private static final EncryptionMethod ENCRYPTION = EncryptionMethod.A128CBC_HS256; + + public String encrypt(JweDto jweDto, RSAPublicKey publicKey) throws JOSEException, JsonProcessingException { + var jweJsonObject = JweDto.asJsonObject(jweDto); + var payload = new Payload(jweJsonObject); + var jwe = new JWEObject(new JWEHeader(ALGORITHM, ENCRYPTION), payload); + jwe.encrypt(new RSAEncrypter(publicKey)); + return jwe.serialize(); + } + + public JweDto decrypt(String token, KeyStoreDto keyStoreDto) throws JOSEException, ParseException, JsonProcessingException { + var jweObject = JWEObject.parse(token); + jweObject.decrypt(new RSADecrypter(keyStoreDto.getPrivateKey())); + var payload = jweObject.getPayload(); + return JweDto.fromJsonObject(payload.toJSONObject()); + } +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/security/JwtAuthenticationFilter.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/security/JwtAuthenticationFilter.java new file mode 100644 index 0000000..fc628f2 --- /dev/null +++ 
b/shared-lib/src/main/java/com/opsbeach/sharedlib/security/JwtAuthenticationFilter.java @@ -0,0 +1,157 @@ +package com.opsbeach.sharedlib.security; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.opsbeach.sharedlib.dto.SessionDto; +import com.opsbeach.sharedlib.exception.ErrorCode; +import com.opsbeach.sharedlib.exception.ExceptionResponseCreator; +import com.opsbeach.sharedlib.exception.UnAuthorizedException; +import com.opsbeach.sharedlib.response.ResponseMessage; +import com.opsbeach.sharedlib.service.App2AppService; +import com.opsbeach.sharedlib.service.AuthService; +import com.opsbeach.sharedlib.utils.Constants; +import com.opsbeach.sharedlib.utils.StringUtil; +import jakarta.servlet.FilterChain; +import jakarta.servlet.ServletException; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; +import lombok.extern.slf4j.Slf4j; +import org.springframework.http.HttpStatus; +import org.springframework.http.MediaType; +import org.springframework.http.ResponseEntity; +import org.springframework.security.core.Authentication; +import org.springframework.security.core.context.SecurityContextHolder; +import org.springframework.security.web.authentication.AbstractAuthenticationProcessingFilter; +import org.springframework.security.web.util.matcher.AntPathRequestMatcher; +import org.springframework.security.web.util.matcher.RequestMatcher; + +import java.io.IOException; +import java.util.HashMap; +import java.util.List; + +/** + *

+ * Authentication Filter for Processing JWT Access Token and Authenticating User + * Other custom Authentication Filters will be before this filter in chain as this covers most end points + * and will resolve here that if it doesn't have Authorization Header Bearer, no other authorizations are in header. + *

+ */ +@Slf4j +public class JwtAuthenticationFilter extends AbstractAuthenticationProcessingFilter { + + private static final String AUDIT = "audit"; + private static final String X_FORWARDED_FOR = "X-FORWARDED-FOR"; + private final App2AppService app2AppService; + private final ResponseMessage responseMessage; + private final ApplicationConfig applicationConfig; + private final AuthService authService; + private final ExceptionResponseCreator exceptionResponseCreator; + + public JwtAuthenticationFilter(ExceptionResponseCreator exceptionResponseCreator, ResponseMessage responseMessage, App2AppService app2AppService, ApplicationConfig applicationConfig, AuthService authService) { + super("/**"); + this.app2AppService = app2AppService; + this.responseMessage = responseMessage; + this.applicationConfig = applicationConfig; + this.authService = authService; + this.exceptionResponseCreator = exceptionResponseCreator; + } + + @Override + protected boolean requiresAuthentication(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse) { + Boolean isAuthenticated = SecurityUtil.getOptionalUserDetails().isPresent(); + if (isAuthenticated.equals(Boolean.TRUE) || StringUtil.isEmpty(httpServletRequest.getHeader(Constants.AUTHORIZATION_HEADER)).equals(Boolean.TRUE)) { + return Boolean.FALSE; + } + List matchers = List.of(new AntPathRequestMatcher(Constants.AUTHORIZED_PATH_PREFIX + "/**")); + for (RequestMatcher matcher : matchers) { + if (matcher.matches(httpServletRequest)) { + return Boolean.TRUE; + } + } + return Boolean.FALSE; + } + + @Override + public Authentication attemptAuthentication(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse) throws org.springframework.security.core.AuthenticationException { + String header = httpServletRequest.getHeader(Constants.AUTHORIZATION_HEADER); + if (!header.startsWith(Constants.PREFIX)) { + throw new UnAuthorizedException(ErrorCode.UNABLE_VERIFY_ACCESS_TOKEN, 
responseMessage.getErrorMessage(ErrorCode.UNABLE_VERIFY_ACCESS_TOKEN)); + } + var accessToken = header.substring(Constants.PREFIX.length()); + var authenticationToken = new AuthenticationToken(null, accessToken); + var authenticationManager = getAuthenticationManager(); + return authenticationManager.authenticate(authenticationToken); + } + + @Override + protected void successfulAuthentication(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse, FilterChain chain, Authentication authResult) throws IOException, ServletException { + SecurityContextHolder.getContext().setAuthentication(authResult); + log.info("Authenticated using - {}", JwtAuthenticationFilter.class.getSimpleName()); + chain.doFilter(httpServletRequest, httpServletResponse); + var jweDto = authService.decryptToken(httpServletRequest.getHeader(Constants.AUTHORIZATION_HEADER).substring(Constants.PREFIX.length())); + addSessionAudit(httpServletRequest, Boolean.TRUE); + //updateTokenExpiry(jweDto); + } + + /** + *

+ * Common method to identify the client IP address + *

+ * + * @param request - HttpServletRequest + * @return deviceType - Client IP Address. + */ + public String getClientIp(HttpServletRequest request) { + String remoteAddress = Constants.EMPTY; + if (request != null) { + remoteAddress = request.getHeader(X_FORWARDED_FOR); + if (StringUtil.isEmpty(remoteAddress).equals(Boolean.TRUE)) { + remoteAddress = request.getRemoteAddr(); + } + } + return remoteAddress; + } + + private String getModule(String requestUri) { + String module; + module = requestUri.substring(requestUri.indexOf(Constants.AUTHORIZED_PATH_PREFIX) + (Constants.AUTHORIZED_PATH_PREFIX + Constants.FORWARD_SLASH).length()); + module = module + Constants.FORWARD_SLASH; + return module.substring(0, module.indexOf(Constants.FORWARD_SLASH)).replace(Constants.HYPHEN, Constants.EMPTY_SPACE); + } + + private void addSessionAudit(HttpServletRequest httpServletRequest, Boolean successLogin) { + var userDto = SecurityUtil.getLoggedInUserDetail(); + var sessionAuditUrl = applicationConfig.getUser().get(Constants.BASE_URL) + applicationConfig.getUser().get(AUDIT); + var sessionDto = SessionDto.builder() + .action(httpServletRequest.getMethod()) + .ipAddress(getClientIp(httpServletRequest)) + .uri(httpServletRequest.getRequestURI()) + .userId(userDto.getId()) + .type("USER") + .module(StringUtil.capitalizeWord(getModule(httpServletRequest.getRequestURI()))) + .successLogin(successLogin).build(); + app2AppService.httpPost(sessionAuditUrl, app2AppService.setHeaders(new HashMap<>(), sessionDto), SessionDto.class); + } + + /*private void updateTokenExpiry(JweDto jweDto) { + var sessionAuditUrl = applicationConfig.getUser().get(Constants.BASE_URL) + applicationConfig.getUser().get("expiry-update") + "?userId=" + jweDto.getUserId(); + app2AppService.httpPatch(sessionAuditUrl, app2AppService.setHeaders(App2AppService.formClientHeader(jweDto.getClient()), null), GenericResponseDto.class); + }*/ + + @Override + protected void unsuccessfulAuthentication(HttpServletRequest 
request, HttpServletResponse response, org.springframework.security.core.AuthenticationException failed) throws IOException { + var jweDto = authService.decryptToken(request.getHeader(Constants.AUTHORIZATION_HEADER).substring(Constants.PREFIX.length())); + //addSessionAudit(request, Boolean.FALSE); + ResponseEntity responseEntity; + if (failed instanceof UnAuthorizedException) { + UnAuthorizedException gatewayAuthenticationException = (UnAuthorizedException) failed; + responseEntity = exceptionResponseCreator.getExceptionResponseEntity(HttpStatus.UNAUTHORIZED, gatewayAuthenticationException.getErrorCode(), gatewayAuthenticationException); + } else { + responseEntity = exceptionResponseCreator.getExceptionResponseEntity(HttpStatus.UNAUTHORIZED, ErrorCode.UNAUTHORIZED_USER, failed); + } + var objectMapper = new ObjectMapper(); + var json = objectMapper.writeValueAsString(responseEntity.getBody()); + response.setStatus(HttpStatus.UNAUTHORIZED.value()); + response.setContentType(MediaType.APPLICATION_JSON_VALUE); + response.getWriter().write(json); + } +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/security/RSAKeyGen.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/security/RSAKeyGen.java new file mode 100644 index 0000000..3491036 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/security/RSAKeyGen.java @@ -0,0 +1,72 @@ +package com.opsbeach.sharedlib.security; + +import com.opsbeach.sharedlib.dto.KeyStoreDto; +import lombok.AccessLevel; +import lombok.NoArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +import java.security.KeyFactory; +import java.security.KeyPairGenerator; +import java.security.NoSuchAlgorithmException; +import java.security.interfaces.RSAPublicKey; +import java.security.spec.InvalidKeySpecException; +import java.security.spec.PKCS8EncodedKeySpec; +import java.security.spec.X509EncodedKeySpec; +import java.util.Base64; + +/** + *

+ * Generate new RSAKey Gen + *

+ */ +@Slf4j +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public class RSAKeyGen { + + private static final String RSA = "RSA"; + + /** + * Generates Public and Private key. + * + * @return KeyStoreDto - Dto with Public and Private key. + */ + public static KeyStoreDto getPublicAndPrivateKey() { + try { + var keyPairGenerator = KeyPairGenerator.getInstance(RSA); + keyPairGenerator.initialize(2048); + var keyPair = keyPairGenerator.generateKeyPair(); + var publicKeyContent = keyPair.getPublic(); + var privateKeyContent = keyPair.getPrivate(); + var keySpecPKCS8 = new PKCS8EncodedKeySpec(privateKeyContent.getEncoded()); + var keyFactory = KeyFactory.getInstance(RSA); + var privateKey = keyFactory.generatePrivate(keySpecPKCS8); + var keySpecX509 = new X509EncodedKeySpec(publicKeyContent.getEncoded()); + var publicKey = (RSAPublicKey) keyFactory.generatePublic(keySpecX509); + return KeyStoreDto.builder().privateKey(privateKey).publicKey(publicKey).build(); + } catch (InvalidKeySpecException | NoSuchAlgorithmException e) { + log.error("There is an generating Private and Public Key - {}", e.getMessage()); + } + return KeyStoreDto.builder().build(); + } + + /** + * Returns the Public and Private key from the String content. + * + * @param privateKeyContent - Private Key content. + * @param publicKeyContent - Public Key content. + * @return KeyStoreDto - Dto with Public and Private key. 
+ */ + public static KeyStoreDto getStringToKeys(String publicKeyContent, String privateKeyContent) { + try { + var keySpecPKCS8 = new PKCS8EncodedKeySpec(Base64.getDecoder().decode(privateKeyContent)); + var keyFactory = KeyFactory.getInstance(RSA); + var privateKey = keyFactory.generatePrivate(keySpecPKCS8); + var keySpecX509 = new X509EncodedKeySpec(Base64.getDecoder().decode(publicKeyContent)); + var publicKey = (RSAPublicKey) keyFactory.generatePublic(keySpecX509); + return KeyStoreDto.builder().privateKey(privateKey).publicKey(publicKey).build(); + } catch (InvalidKeySpecException | NoSuchAlgorithmException e) { + log.error("There is an generating Private and Public Key - {}", e.getMessage()); + } + return KeyStoreDto.builder().build(); + } +} diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/security/RSAMechanism.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/security/RSAMechanism.java new file mode 100644 index 0000000..994b407 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/security/RSAMechanism.java @@ -0,0 +1,66 @@ +package com.opsbeach.sharedlib.security; + +import com.nimbusds.jose.EncryptionMethod; +import com.nimbusds.jose.JOSEException; +import com.nimbusds.jose.JWEAlgorithm; +import com.nimbusds.jose.JWEHeader; +import com.nimbusds.jose.JWEObject; +import com.nimbusds.jose.Payload; +import com.nimbusds.jose.crypto.RSADecrypter; +import com.nimbusds.jose.crypto.RSAEncrypter; +import com.opsbeach.sharedlib.utils.Constants; +import org.springframework.core.io.ClassPathResource; +import org.springframework.stereotype.Component; +import org.springframework.util.FileCopyUtils; + +import java.io.IOException; +import java.security.KeyFactory; +import java.security.NoSuchAlgorithmException; +import java.security.PrivateKey; +import java.security.interfaces.RSAPublicKey; +import java.security.spec.InvalidKeySpecException; +import java.security.spec.PKCS8EncodedKeySpec; +import java.security.spec.X509EncodedKeySpec; 
+import java.text.ParseException; +import java.util.Base64; + +/** + *

+ * RSA Encryption Algorithm. + *

+ */ +@Component +public class RSAMechanism { + public static final String RSA = "RSA"; + private static final String PUBLIC_KEY_FILE = "public.pem"; + private static final String PRIVATE_KEY_FILE = "private_pkcs8.pem"; + private static final JWEAlgorithm ALGORITHM = JWEAlgorithm.RSA_OAEP_256; + private static final EncryptionMethod ENCRYPTION = EncryptionMethod.A128CBC_HS256; + private final PrivateKey privateKey; + private final RSAPublicKey publicKey; + + public RSAMechanism() throws IOException, NoSuchAlgorithmException, InvalidKeySpecException { + byte[] privateKeyBytes = FileCopyUtils.copyToByteArray(new ClassPathResource(PRIVATE_KEY_FILE).getInputStream()); + String privateKeyContent = new String(privateKeyBytes).replaceAll("\\n", Constants.EMPTY).replaceAll("\\r", Constants.EMPTY).replace("-----BEGIN PRIVATE KEY-----", Constants.EMPTY).replace("-----END PRIVATE KEY-----", Constants.EMPTY); + byte[] publicKeyBytes = FileCopyUtils.copyToByteArray(new ClassPathResource(PUBLIC_KEY_FILE).getInputStream()); + String publicKeyContent = new String(publicKeyBytes).replaceAll("\\n", Constants.EMPTY).replaceAll("\\r", Constants.EMPTY).replace("-----BEGIN PUBLIC KEY-----", Constants.EMPTY).replace("-----END PUBLIC KEY-----", Constants.EMPTY); + var keySpecPKCS8 = new PKCS8EncodedKeySpec(Base64.getDecoder().decode(privateKeyContent)); + var kf = KeyFactory.getInstance(RSA); + privateKey = kf.generatePrivate(keySpecPKCS8); + var keySpecX509 = new X509EncodedKeySpec(Base64.getDecoder().decode(publicKeyContent)); + publicKey = (RSAPublicKey) kf.generatePublic(keySpecX509); + } + + public String encrypt(String encryptString) throws JOSEException { + var payload = new Payload(encryptString); + var jwe = new JWEObject(new JWEHeader(ALGORITHM, ENCRYPTION), payload); + jwe.encrypt(new RSAEncrypter(publicKey)); + return jwe.serialize(); + } + + public String decrypt(String encryptedString) throws JOSEException, ParseException { + var jweObject = JWEObject.parse(encryptedString); 
+ jweObject.decrypt(new RSADecrypter(privateKey)); + return jweObject.getPayload().toString(); + } +} diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/security/SecurityConfiguration.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/security/SecurityConfiguration.java new file mode 100644 index 0000000..716f867 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/security/SecurityConfiguration.java @@ -0,0 +1,119 @@ +package com.opsbeach.sharedlib.security; + +import com.opsbeach.sharedlib.exception.ExceptionResponseCreator; +import com.opsbeach.sharedlib.response.ResponseMessage; +import com.opsbeach.sharedlib.service.App2AppService; +import com.opsbeach.sharedlib.service.AuthService; +import com.opsbeach.sharedlib.utils.Constants; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.http.HttpMethod; +import org.springframework.security.authentication.AuthenticationManager; +import org.springframework.security.authentication.AuthenticationProvider; +import org.springframework.security.config.annotation.authentication.configuration.AuthenticationConfiguration; +import org.springframework.security.config.annotation.method.configuration.EnableGlobalMethodSecurity; +import org.springframework.security.config.annotation.web.builders.HttpSecurity; +import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity; +//import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter; +import org.springframework.security.config.annotation.web.configuration.WebSecurityCustomizer; +import org.springframework.security.config.http.SessionCreationPolicy; +import org.springframework.security.web.SecurityFilterChain; +import org.springframework.security.web.authentication.UsernamePasswordAuthenticationFilter; +import 
org.springframework.web.cors.CorsConfiguration; +import org.springframework.web.cors.UrlBasedCorsConfigurationSource; +import org.springframework.web.filter.CorsFilter; + +import java.util.Collections; + +@EnableWebSecurity +@EnableGlobalMethodSecurity(prePostEnabled = true) +@Configuration +public class SecurityConfiguration { + + private static final String[] AUTH_WHITELIST = { + "/v1/auth/**", + "/user/actuator/health", + "/connect/actuator/health", + "/actuator/health", + "/v1/github/signin/callback", + "/v1/github/market-place-events", + "/v1/schema/organization" + }; + + private final App2AppService app2AppService; + private final ResponseMessage responseMessage; + private final ApplicationConfig applicationConfig; + private final AuthService authService; + private final AuthenticationProvider authenticationProvider; + private final ExceptionResponseCreator exceptionResponseCreator; + private final AuthenticationConfiguration authenticationConfiguration; + + @Autowired + public SecurityConfiguration(AuthenticationProvider authenticationProvider, ExceptionResponseCreator exceptionResponseCreator, + ResponseMessage responseMessage, App2AppService app2AppService, ApplicationConfig applicationConfig, + AuthService authService, AuthenticationConfiguration authenticationConfiguration) { + this.app2AppService = app2AppService; + this.responseMessage = responseMessage; + this.applicationConfig = applicationConfig; + this.authService = authService; + this.authenticationProvider = authenticationProvider; + this.exceptionResponseCreator = exceptionResponseCreator; + this.authenticationConfiguration = authenticationConfiguration; + } + + @Bean + public AuthenticationManager authenticationManagerBean() throws Exception { + //return authenticationConfiguration.getAuthenticationManager(); + return authenticationConfiguration.getAuthenticationManager(); + } + + /*protected void configure(AuthenticationManagerBuilder auth) { + 
auth.authenticationProvider(authenticationProvider); + }*/ + + @Bean + public SecurityFilterChain filterChain(HttpSecurity http) throws Exception { + var jwtAuthenticationFilter = new JwtAuthenticationFilter(exceptionResponseCreator, responseMessage, app2AppService, applicationConfig, authService); + jwtAuthenticationFilter.setAuthenticationManager(authenticationManagerBean()); + http.headers().frameOptions().disable(); // Added to enable viewing H2 database in EC2 console + http.cors() + .and() + .csrf() + .disable() + .sessionManagement() + .sessionCreationPolicy(SessionCreationPolicy.STATELESS) + .and() + .addFilterBefore(jwtAuthenticationFilter, UsernamePasswordAuthenticationFilter.class) + //.authorizeHttpRequests((authz) -> authz.anyRequest().authenticated()) + .authorizeRequests() + .requestMatchers(AUTH_WHITELIST).permitAll() + //.antMatchers(AUTH_WHITELIST).permitAll() + .anyRequest().authenticated(); + return http.build(); + } + + @Bean + public WebSecurityCustomizer webSecurityCustomizer() { + //web.ignoring().antMatchers("/v1/auth/logout"); + return (web) -> web.ignoring().requestMatchers("/v1/auth/logout"); + } + + @Bean + public CorsFilter corsFilter() { + final var source = new UrlBasedCorsConfigurationSource(); + final var config = new CorsConfiguration(); + config.addAllowedMethod(HttpMethod.GET); + config.addAllowedMethod(HttpMethod.PUT); + config.setAllowCredentials(Boolean.TRUE); + config.addAllowedMethod(HttpMethod.HEAD); + config.addAllowedMethod(HttpMethod.POST); + config.addAllowedMethod(HttpMethod.PATCH); + config.addAllowedMethod(HttpMethod.DELETE); + config.addAllowedMethod(HttpMethod.OPTIONS); + config.addAllowedHeader(Constants.ASTERISK_SYMBOL); + config.setAllowedOriginPatterns(Collections.singletonList(Constants.ASTERISK_SYMBOL)); + source.registerCorsConfiguration(Constants.FORWARD_SLASH + Constants.ASTERISK_SYMBOL + Constants.ASTERISK_SYMBOL, config); + return new CorsFilter(source); + } +} diff --git 
a/shared-lib/src/main/java/com/opsbeach/sharedlib/security/SecurityUtil.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/security/SecurityUtil.java new file mode 100644 index 0000000..b019042 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/security/SecurityUtil.java @@ -0,0 +1,90 @@ +package com.opsbeach.sharedlib.security; + + +import com.opsbeach.sharedlib.utils.Constants; +import com.opsbeach.sharedlib.dto.UserDto; +import com.opsbeach.sharedlib.exception.ErrorCode; +import com.opsbeach.sharedlib.exception.UnAuthorizedException; +import lombok.AccessLevel; +import lombok.NoArgsConstructor; + +import org.springframework.security.core.context.SecurityContextHolder; +import org.springframework.stereotype.Component; + +import java.util.Objects; +import java.util.Optional; + +@Component +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public class SecurityUtil { + + public static UserDto getLoggedInUserDetail() { + var securityContext = SecurityContextHolder.getContext(); + if (Objects.isNull(securityContext) || Objects.isNull(securityContext.getAuthentication()) || Objects.isNull(securityContext.getAuthentication().getPrincipal())) { + return null; + } + if (securityContext.getAuthentication().getPrincipal().equals(Constants.ANONYMOUS_USER)) { + return null; + } + if (Objects.isNull(securityContext.getAuthentication())) { + throw new UnAuthorizedException(ErrorCode.UNAUTHORIZED_ONLY_OWNER); + } + return (UserDto) securityContext.getAuthentication().getPrincipal(); + } + + public static UserDto getUserDetails() { + return getOptionalUserDetails().orElse(null); + } + + public static Long getClientId() { + var userDto = getLoggedInUserDetail(); + return Objects.nonNull(userDto) ? 
userDto.getClientId() : null; + } + + public static void setClientId(Long clientId) { + var userDto = getLoggedInUserDetail(); + userDto.setClientId(clientId); + setAuthenticationContext(userDto); + } + + public static void setCurrentLoggedInUser(UserDto userDto) { + setAuthenticationContext(userDto); + } + + public static String getAccessToken() { + var userDto = getLoggedInUserDetail(); + return Objects.nonNull(userDto) ? userDto.getAccessToken() : null; + } + + public static Optional getOptionalUserDetails() { + var securityContext = SecurityContextHolder.getContext(); + var authentication = securityContext.getAuthentication(); + if (Objects.nonNull(authentication)) { + Object principal = authentication.getPrincipal(); + if (principal instanceof UserDto) { + return Optional.of((UserDto) (principal)); + } + } + return Optional.empty(); + } + + public static String getHashKey() { + var userDto = getLoggedInUserDetail(); + return Objects.nonNull(userDto) ? userDto.getUsername() : Constants.EMPTY; + } + + public static void setAuthenticationContext(UserDto userDto) { + var userContext = new UserDto(); + userContext.setId(userDto.getId()); + userContext.setRoles(userDto.getRoles()); + userContext.setMobile(userDto.getMobile()); + userContext.setClientId(userDto.getClientId()); + userContext.setIsDeleted(userDto.getIsDeleted()); + userContext.setOnboardStatus(userDto.getOnboardStatus()); + userContext.setAccessToken(userDto.getAccessToken()); + var securityContext = SecurityContextHolder.getContext(); + var authentication = BaseAuthentication.getInstance(userContext); + securityContext.setAuthentication(authentication); + SecurityContextHolder.setContext(securityContext); + } +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/service/App2AppService.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/service/App2AppService.java new file mode 100644 index 0000000..2361ab6 --- /dev/null +++ 
b/shared-lib/src/main/java/com/opsbeach/sharedlib/service/App2AppService.java @@ -0,0 +1,182 @@ +package com.opsbeach.sharedlib.service; + +import com.opsbeach.sharedlib.exception.EncodeException; +import com.opsbeach.sharedlib.security.RSAMechanism; +import com.opsbeach.sharedlib.utils.Constants; +import com.opsbeach.sharedlib.exception.BadRequestException; +import com.opsbeach.sharedlib.exception.ErrorCode; +import com.opsbeach.sharedlib.exception.RecordNotFoundException; +import com.opsbeach.sharedlib.exception.UnAuthorizedException; +import com.opsbeach.sharedlib.response.ResponseMessage; +import com.opsbeach.sharedlib.utils.JsonUtil; +import lombok.extern.slf4j.Slf4j; +import org.springframework.boot.web.client.RestTemplateBuilder; +import org.springframework.http.HttpEntity; +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpMethod; +import org.springframework.http.HttpStatus; +import org.springframework.http.MediaType; +import org.springframework.stereotype.Service; +import org.springframework.web.client.HttpStatusCodeException; +import org.springframework.web.client.RestTemplate; + +import java.io.UnsupportedEncodingException; +import java.net.URI; +import java.net.URLDecoder; +import java.net.URLEncoder; +import java.net.http.HttpClient; +import java.net.http.HttpRequest; +import java.net.http.HttpResponse; +import java.nio.charset.StandardCharsets; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +@Slf4j +@Service +public class App2AppService { + + private static final String CURRENT_DEVICE = "currentDevice"; + + private final RestTemplate restTemplate; + private final ResponseMessage responseMessage; + + public App2AppService(RestTemplateBuilder restTemplateBuilder, ResponseMessage responseMessage, RSAMechanism rsaMechanism) { + this.restTemplate = restTemplateBuilder.build(); + this.responseMessage = responseMessage; + } + + private String encode(String value) { + 
log.info("encodeValue value: [{}] ", value); + try { + return URLEncoder.encode(value, StandardCharsets.UTF_8.toString()); + } catch (UnsupportedEncodingException e) { + throw new EncodeException(ErrorCode.URL_ENCODE, responseMessage.getErrorMessage(ErrorCode.URL_ENCODE, e.getMessage())); + } + } + + private String decode(String value) { + log.info("decodeValue value: [{}] ", value); + try { + return URLDecoder.decode(value, StandardCharsets.UTF_8.toString()); + } catch (UnsupportedEncodingException e) { + throw new EncodeException(ErrorCode.URL_ENCODE, responseMessage.getErrorMessage(ErrorCode.URL_ENCODE, e.getMessage())); + } + } + + public static Map authorizationHeader(String authToken) { + Map header = new HashMap<>(); + header.put(Constants.AUTHORIZATION_HEADER, authToken); + return header; + } + + public static Map clientHeader(String authToken) { + Map header = new HashMap<>(); + header.put(Constants.CLIENT_ID_HEADER, authToken); + return header; + } + + public HttpEntity setHeaders(Map token, Object requestBody) { + var headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + token.forEach(headers::set); + if (Objects.isNull(requestBody)) return new HttpEntity<>(headers); + return new HttpEntity<>(requestBody, headers); + } + + public HttpEntity setHeaders(Object requestBody) { + var headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + if (Objects.isNull(requestBody)) return new HttpEntity<>(headers); + return new HttpEntity<>(requestBody, headers); + } + + public T httpPatch(String resourceUrl, String body, Map headers, Class responseClass) { + var client = HttpClient.newHttpClient(); + var request = HttpRequest.newBuilder(URI.create(resourceUrl)).method(HttpMethod.PATCH.name(), HttpRequest.BodyPublishers.ofString(body)); + headers.entrySet().forEach(header -> request.header(header.getKey(), header.getValue())); + String response = ""; + try { + var res = client.send(request.build(), 
HttpResponse.BodyHandlers.ofString()); + response = res.body().toString(); + } catch (Exception e) { + throw new UnAuthorizedException(ErrorCode.HTTP_REST_PRE_CONDITION, this.responseMessage.getErrorMessage(ErrorCode.HTTP_REST_PRE_CONDITION, e.getMessage())); + } + var object = JsonUtil.parseJsonResponse(response); + return getResponseAsObject(responseClass, object); + } + + private T getResponseAsObject(Class responseClass, Object object) { + return JsonUtil.convertJsonIntoObject(object.toString(), responseClass); + } + + public T httpGet(String resourceUrl, HttpEntity entity, Class responseClass) { + var response = getHttpResponse(resourceUrl, HttpMethod.GET, entity); + var object = JsonUtil.parseJsonResponse(response); + return getResponseAsObject(responseClass, object); + } + + public List httpGetEntities(String resourceUrl, HttpEntity entity, Class responseClass) { + var response = getHttpResponse(resourceUrl, HttpMethod.GET, entity); + return JsonUtil.jsonArrayToObjectList(response, responseClass); + } + + public T httpPut(String resourceUrl, HttpEntity entity, Class responseClass) { + var response = getHttpResponse(resourceUrl, HttpMethod.PUT, entity); + var object = JsonUtil.parseJsonResponse(response); + return getResponseAsObject(responseClass, object); + } + + public T httpDelete(String resourceUrl, HttpEntity entity, Class responseClass) { + var response = getHttpResponse(resourceUrl, HttpMethod.DELETE, entity); + if (Objects.isNull(response)) return null; + var object = JsonUtil.parseJsonResponse(response); + return getResponseAsObject(responseClass, object); + } + + public T httpPost(String resourceUrl, HttpEntity entity, Class responseClass) { + var response = getHttpResponse(resourceUrl, HttpMethod.POST, entity); + var object = JsonUtil.parseJsonResponse(response); + return getResponseAsObject(responseClass, object); + } + + public List httpPostEntities(String resourceUrl, HttpEntity entity, Class responseClass) { + var response = 
getHttpResponse(resourceUrl, HttpMethod.POST, entity); + var object = JsonUtil.parseJsonResponse(response); + return JsonUtil.jsonArrayToObjectList(object.toString(), responseClass); + } + + public T httpPostWithoutParsingJson(String resourceUrl, HttpEntity entity, Class responseClass) { + var response = getHttpResponse(resourceUrl, HttpMethod.POST, entity); + return getResponseAsObject(responseClass, response); + } + + public String getHttpResponse(String resourceUrl, HttpMethod httpMethod, HttpEntity httpEntity) { + String httpMethodName = httpMethod.name(); + log.info("Accessing {} with HttpMethod {}", resourceUrl, httpMethodName); + var response = restTemplateExchange(resourceUrl, httpMethod, httpEntity, String.class); + if(Objects.isNull(response)) return null; + return String.valueOf(response); + } + + public T restTemplateExchange(String resourceUrl, HttpMethod httpMethod, HttpEntity httpEntity, Class responseClass) { + try { + return restTemplate.exchange(resourceUrl, httpMethod, httpEntity, responseClass).getBody(); + } catch (HttpStatusCodeException exception) { + if (exception.getStatusCode().value() == HttpStatus.UNAUTHORIZED.value()) { + throw new UnAuthorizedException(ErrorCode.ACCESS_TOKEN_INVALID, responseMessage.getErrorMessage(ErrorCode.ACCESS_TOKEN_INVALID, exception.getMessage())); + } + if (exception.getStatusCode().value() == HttpStatus.FORBIDDEN.value()) { + throw new UnAuthorizedException(ErrorCode.ACCESS_TOKEN_INVALID, exception.getMessage()); + } + if (exception.getStatusCode().value() == HttpStatus.BAD_REQUEST.value()) { + throw new BadRequestException(ErrorCode.BAD_REQUEST, this.responseMessage.getErrorMessage(ErrorCode.BAD_REQUEST, exception.getMessage())); + } + if (exception.getStatusCode().value() == HttpStatus.NOT_FOUND.value()) { + throw new RecordNotFoundException(ErrorCode.DOMIN_NOT_FOUND, responseMessage.getErrorMessage(ErrorCode.DOMIN_NOT_FOUND, exception.getMessage())); + } + throw new 
UnAuthorizedException(ErrorCode.HTTP_REST_PRE_CONDITION, this.responseMessage.getErrorMessage(ErrorCode.HTTP_REST_PRE_CONDITION, exception.getMessage())); + } + } +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/service/AuthService.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/service/AuthService.java new file mode 100644 index 0000000..e76c802 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/service/AuthService.java @@ -0,0 +1,52 @@ +package com.opsbeach.sharedlib.service; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.nimbusds.jose.JOSEException; +import com.opsbeach.sharedlib.dto.JweDto; +import com.opsbeach.sharedlib.exception.ErrorCode; +import com.opsbeach.sharedlib.exception.InvalidDataException; +import com.opsbeach.sharedlib.response.ResponseMessage; +import com.opsbeach.sharedlib.security.JweLibrary; +import com.opsbeach.sharedlib.security.RSAKeyGen; +import lombok.extern.slf4j.Slf4j; +import org.springframework.stereotype.Service; + +import java.text.ParseException; + +@Slf4j +@Service +public class AuthService { + + private static final String CLIENT = "client"; + private static final String USER_CHECK_URL = "user-check"; + private static final String USER_DETAIL_URL = "user-detail"; + private static final String SESSION_UPDATE = "session-update"; + private static final String USER_LOGIN = "user-login"; + private static final String CLIENT_REGISTER = "client-register"; + private static final String USER_REGISTER = "user-register"; + private static final String USER = "user"; + private static final String JWT_ADD = "jwt-add"; + + private final JweLibrary jweLibrary; + private final JwtTokenService jwtService; + private final ResponseMessage responseMessage; + + public AuthService(ResponseMessage responseMessage, JweLibrary jweLibrary, + JwtTokenService jwtService) { + this.jweLibrary = jweLibrary; + this.responseMessage = responseMessage; + this.jwtService = 
jwtService; + } + + public JweDto decryptToken(String authToken) { + log.info("Decrypting the token"); + try { + //var jwtTokenDto = (JwtDto) cacheService.get(authToken, Constants.TOKEN); + var jwtTokenDto = jwtService.getByAccessToken(authToken); + return jweLibrary.decrypt(authToken, RSAKeyGen.getStringToKeys(jwtTokenDto.getPublicKey(), jwtTokenDto.getPrivateKey())); + } catch (JOSEException | JsonProcessingException | ParseException jsonProcessingException) { + throw new InvalidDataException(ErrorCode.INVALID_JSON_PARSE, responseMessage.getErrorMessage(ErrorCode.INVALID_JSON_PARSE)); + } + } + +} diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/service/CacheService.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/service/CacheService.java new file mode 100644 index 0000000..f04f4c7 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/service/CacheService.java @@ -0,0 +1,37 @@ +package com.opsbeach.sharedlib.service; + +import com.opsbeach.sharedlib.repository.CacheRepository; +import org.springframework.stereotype.Service; + +import java.util.List; + +/** + *

+ * Cache service is a key-value storage. + *

+ */ +@Service +public class CacheService { + + /*private final CacheRepository cacheRepository; + + public CacheService(CacheRepository cacheRepository) { + this.cacheRepository = cacheRepository; + } + + public Boolean save(String hashKey, String key, Object value) { + return cacheRepository.save(hashKey, key, value); + } + + public Object get(String hashKey, String key) { + return cacheRepository.get(hashKey, key); + } + + public Boolean delete(String hashKey) { + return cacheRepository.delete(hashKey); + } + + public List getAll(String hashKey) { + return cacheRepository.getAll(hashKey); + }*/ +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/service/EmailService.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/service/EmailService.java new file mode 100644 index 0000000..896e5d9 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/service/EmailService.java @@ -0,0 +1,23 @@ +package com.opsbeach.sharedlib.service; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.mail.SimpleMailMessage; +import org.springframework.mail.javamail.JavaMailSender; +import org.springframework.scheduling.annotation.Async; +import org.springframework.stereotype.Service; + +@Service +public class EmailService { + + @Autowired + private JavaMailSender javaMailSender; + + @Async + public void sendMail(String toMailId, String subject, String messageBody){ + SimpleMailMessage message = new SimpleMailMessage(); + message.setTo(toMailId); + message.setSubject(subject); + message.setText(messageBody); + javaMailSender.send(message); + } +} diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/service/GoogleCloudService.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/service/GoogleCloudService.java new file mode 100644 index 0000000..35fab3d --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/service/GoogleCloudService.java @@ -0,0 +1,108 @@ +package 
com.opsbeach.sharedlib.service; + +import com.google.auth.Credentials; +import com.google.auth.oauth2.GoogleCredentials; +import com.google.cloud.storage.*; +import com.google.cloud.tasks.v2.CloudTasksClient; +import com.google.cloud.tasks.v2.HttpRequest; +import com.google.cloud.tasks.v2.QueueName; +import com.google.cloud.tasks.v2.Task; +import com.opsbeach.sharedlib.exception.ErrorCode; +import com.opsbeach.sharedlib.exception.FileNotFoundException; +import com.opsbeach.sharedlib.response.ResponseMessage; +import com.opsbeach.sharedlib.security.ApplicationConfig; +import com.opsbeach.sharedlib.utils.Constants; + +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +import org.springframework.scheduling.annotation.Async; +import org.springframework.stereotype.Service; +import org.springframework.util.ObjectUtils; + +import java.io.FileInputStream; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.List; + +@Slf4j +@Service +@RequiredArgsConstructor +public class GoogleCloudService { + + private final ResponseMessage responseMessage; + private final ApplicationConfig applicationConfig; + + private Credentials getCredentials() throws IOException { + return GoogleCredentials.fromStream(new FileInputStream(System.getenv("BUCKET_SA_CREDENTIALS"))); + } + + private Storage getStorage() throws IOException { + var projectId = applicationConfig.getGcloud().get(Constants.PROJECT_ID); + return StorageOptions.newBuilder().setCredentials(getCredentials()).setProjectId(projectId).build().getService(); + } + + public void publish(String bucketName, String objectName, String filePath, byte[] content) { + + // Optional: set a generation-match precondition to avoid potential race + // conditions and data corruptions. The request returns a 412 error if the + // preconditions are not met. + // For a target object that does not yet exist, set the DoesNotExist precondition. 
+ Storage.BlobTargetOption precondition = Storage.BlobTargetOption.doesNotExist(); + // If the destination already exists in your bucket, instead set a generation-match + // precondition: + BlobId blobId = BlobId.of(bucketName, objectName); + BlobInfo blobInfo = BlobInfo.newBuilder(blobId).build(); + try { + if (Boolean.FALSE.equals(ObjectUtils.isEmpty(filePath))) { + content = Files.readAllBytes(Paths.get(filePath)); + } + var storage = getStorage(); + storage.delete(blobId); + storage.create(blobInfo, content, precondition); + } catch (IOException e) { + throw new FileNotFoundException(ErrorCode.FILE_NOT_FOUND, responseMessage.getErrorMessage(ErrorCode.FILE_NOT_FOUND, e.getMessage())); + } + } + + public void downloadFile(String bucketName, String objectName, String downloadPath) { + var blob = pull(bucketName, objectName); + blob.downloadTo(Path.of(downloadPath)); + } + + public Blob pull(String bucketName, String objectName) { + try { + return getStorage().get(BlobId.of(bucketName, objectName)); + } catch (IOException e) { + throw new FileNotFoundException(ErrorCode.FILE_NOT_FOUND, responseMessage.getErrorMessage(ErrorCode.FILE_NOT_FOUND, e.getMessage())); + } + } + + @Async + public void pushRequestInTask(List httpRequests) throws IOException { + CloudTasksClient client = CloudTasksClient.create(); + String projectId = applicationConfig.getGcloud().get(Constants.PROJECT_ID); + String locationId = applicationConfig.getGcloud().get(Constants.LOCATION_ID); + String queueId = applicationConfig.getGcloud().get(Constants.QUEUE_ID); + // https://cloud.google.com/kubernetes-engine/docs/tutorials/authenticating-to-cloud-platform#importing_credentials_as_a_secret + // set GOOGLE_APPLICATION_CREDENTIALS = path/to/cloud_tasks-sa.json env var when running from local + // run ./ngrok http 7081 in terminal and set that address in here + + // Construct the fully qualified queue name. 
+ String queuePath = QueueName.of(projectId, locationId, queueId).toString(); + + log.info("Google Task Queue Path : "+ queuePath); + // Add your service account email to construct the OIDC token. + // in order to add an authentication header to the request. + + httpRequests.forEach(httpRequest -> { + Task.Builder taskBuilder = Task.newBuilder().setHttpRequest(httpRequest); + // Send create task request. + Task task = client.createTask(queuePath, taskBuilder.build()); + log.info("Task created: " + task.getName()); + }); + client.close(); + } +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/service/JwtTokenService.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/service/JwtTokenService.java new file mode 100644 index 0000000..c5c02d3 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/service/JwtTokenService.java @@ -0,0 +1,39 @@ +package com.opsbeach.sharedlib.service; + +import com.opsbeach.sharedlib.dto.JwtDto; +import com.opsbeach.sharedlib.security.ApplicationConfig; +import com.opsbeach.sharedlib.utils.Constants; +import com.opsbeach.sharedlib.utils.StringUtil; +import lombok.extern.slf4j.Slf4j; +import org.springframework.stereotype.Service; + +import java.util.HashMap; + +@Slf4j +@Service +public class JwtTokenService { + + private static final String JWT_ACCESS = "jwt-access"; + + private final App2AppService app2AppService; + private final ApplicationConfig applicationConfig; + + public JwtTokenService(App2AppService app2AppService, ApplicationConfig applicationConfig) { + this.app2AppService = app2AppService; + this.applicationConfig = applicationConfig; + } + + private String getUserServiceBaseUrl() { + return applicationConfig.getUser().get(Constants.BASE_URL); + } + + public JwtDto getByAccessToken(String authenticationToken) { + JwtDto requestJwtDto = new JwtDto(); + requestJwtDto.setAccessToken(authenticationToken); + + return 
app2AppService.httpPost(StringUtil.constructStringEmptySeparator(getUserServiceBaseUrl(), + applicationConfig.getUser().get(JWT_ACCESS)), app2AppService.setHeaders(new HashMap<>(), requestJwtDto), JwtDto.class); + /*return app2AppService.httpGet(StringUtil.constructStringEmptySeparator(getUserServiceBaseUrl(), + applicationConfig.getUser().get(JWT_ADD),"/",authenticationToken,"/access"), app2AppService.setHeaders(new HashMap<>(), null), JwtDto.class);*/ + } +} diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/service/SecretService.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/service/SecretService.java new file mode 100644 index 0000000..90145e0 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/service/SecretService.java @@ -0,0 +1,19 @@ +package com.opsbeach.sharedlib.service; + +import lombok.RequiredArgsConstructor; +import org.springframework.stereotype.Service; + +import java.util.Base64; + +@Service +@RequiredArgsConstructor +public class SecretService { + + public String decodeMySecret(String encodedString) { + return new String(Base64.getDecoder().decode(encodedString)); + } + + public String encodeMySecret(String stringToEncode) { + return new String(Base64.getEncoder().encode(stringToEncode.getBytes())); + } +} diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/utils/Base62Util.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/utils/Base62Util.java new file mode 100644 index 0000000..0dde1f2 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/utils/Base62Util.java @@ -0,0 +1,17 @@ +package com.opsbeach.sharedlib.utils; + +public class Base62Util { + public static final String ENCODEDSTR = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"; + + public static final int DIGITS = 7; + + public static String base62Encode(long num) { + StringBuilder result = new StringBuilder(); + while (num > 0) { + int reminder = (int) (num%62); + result.append(ENCODEDSTR.charAt(reminder)); + 
num = num/62; + } + return result.substring(0, DIGITS); + } +} diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/utils/CacheUtil.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/utils/CacheUtil.java new file mode 100644 index 0000000..ce7ec6e --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/utils/CacheUtil.java @@ -0,0 +1,247 @@ +package com.opsbeach.sharedlib.utils; + +import com.opsbeach.sharedlib.security.SecurityUtil; +//import com.opsbeach.sharedlib.service.CacheService; +import lombok.AccessLevel; +import lombok.NoArgsConstructor; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import java.util.Objects; + +/** + *

+ * Util class to retrieve values from Cache. + *

+ */ + +@Service +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public class CacheUtil { + + /*private static final String USER_ID = "userId"; + private static final String MOBILE = "mobile"; + private static final String SECRET = "secret"; + private static final String TIME_ZONE = "timeZone"; + private static final String LOGIN_MODE = "loginMode"; + private static final String IS_LOGGED_IN = "isLoggedIn"; + private static final String CUSTOMER_VUA = "customerVua"; + private static final String PARTNER_EMAIL = "partnerEmail"; + private static final String ACC_REF_NUMBER = "accRefNumber"; + private static final String CONSENT_HANDLE = "consentHandle"; + private static final String CONSENT_TO = "consentToDateRange"; + private static final String GOAL_EVENT_FLAG = "goalEventFlag"; + private static final String USER_SESSION_ID = "userSessionId"; + private static final String CONSENT_FROM = "consentFromDateRange"; + private static final String BUDGET_EVENT_FLAG = "budgetEventFlag"; + private static final String PRIMARY_ACCOUNT_ID = "primaryAccountId"; + private static final String ON_BOARDING_EVENT_FLAG = "onboardingEventFlag"; + private static final String IS_DATA_CATEGORIZATION_SUBSCRIBED = "isDataCategorizationSubscribed"; + private static CacheService cacheService; + + @Autowired + public CacheUtil(CacheService cacheService) { + CacheUtil.cacheService = cacheService; + } + + *//** + * Fetch User ID from Cache. + * + * @return long - ID of current logged-in user. + *//* + public static long getUserId() { + return (long) cacheService.get(SecurityUtil.getHashKey(), USER_ID); + } + + *//** + * Fetch Consent Handle from Cache. + * + * @return String - Consent Handle. + *//* + public static String getConsentHandle() { + return (String) cacheService.get(SecurityUtil.getHashKey(), CONSENT_HANDLE); + } + + *//** + * Find whether a user has logged in (or) not. + * + * @return Boolean - Boolean value. 
+ *//* + public static Boolean checkUserIsLoggedIn() { + return (boolean) cacheService.get(SecurityUtil.getHashKey(), IS_LOGGED_IN); + } + + *//** + * Fetches from date of a Consent. + * + * @return String - String value. + *//* + public static String getConsentFromDate() { + return (String) cacheService.get(SecurityUtil.getHashKey(), CONSENT_FROM); + } + + *//** + * Fetches the To date of a Consent. + * + * @return String - String value. + *//* + public static String getConsentToDate() { + return (String) cacheService.get(SecurityUtil.getHashKey(), CONSENT_TO); + } + + *//** + * Fetch Primary account ID from Cache. + * + * @return long - Primary Account ID of logged-in user. + *//* + public static long getPrimaryAccountId() { + return (long) cacheService.get(SecurityUtil.getHashKey(), PRIMARY_ACCOUNT_ID); + } + + *//** + * Fetch Budget Event Flag from Cache. + * + * @return Boolean - Budget Event Flag of logged in user. + *//* + public static Boolean getBudgetEventFlag() { + var object = cacheService.get(SecurityUtil.getHashKey(), BUDGET_EVENT_FLAG); + if (Objects.nonNull(object)) { + return (boolean) object; + } + return Boolean.FALSE; + } + + *//** + * Fetch On-boarding Event Flag from Cache. + * + * @return Boolean - On-boarding Event Flag of logged-in user. + *//* + public static Boolean getOnboardingEventFlag() { + var object = cacheService.get(SecurityUtil.getHashKey(), ON_BOARDING_EVENT_FLAG); + if (Objects.nonNull(object)) { + return (boolean) object; + } + return Boolean.FALSE; + } + + *//** + * Fetch Goal Event Flag from Cache. + * + * @return Boolean - Goal Event Flag of logged-in user. + *//* + public static Boolean getGoalEventFlag() { + var object = cacheService.get(SecurityUtil.getHashKey(), GOAL_EVENT_FLAG); + if (Objects.nonNull(object)) { + return (boolean) object; + } + return Boolean.FALSE; + } + + *//** + * Fetch Session Id from Cache. + * + * @return String - Session ID of One Money. 
+ *//* + public static String getSessionId() { + return (String) cacheService.get(SecurityUtil.getHashKey(), Constants.SESSION_ID); + } + + *//** + * Fetches the CustomerVua of current user. + * + * @return String - CustomerVua value. + *//* + public static String getCustomerVua() { + return (String) cacheService.get(SecurityUtil.getHashKey(), CUSTOMER_VUA); + } + + *//** + * Fetches the Account Reference number. + * + * @return String - Account Reference value. + *//* + public static String getAccountReferenceNumber() { + return (String) cacheService.get(SecurityUtil.getHashKey(), ACC_REF_NUMBER); + } + + *//** + * Fetches the Mobile number. + * + * @return String - Mobile value. + *//* + public static String getMobile() { + return (String) cacheService.get(SecurityUtil.getHashKey(), MOBILE); + } + + *//** + * Fetches the TimeZone of the user. + * + * @return String - TimeZone. + *//* + public static String getCustomerTimeZone() { + return (String) cacheService.get(SecurityUtil.getHashKey(), TIME_ZONE); + } + + *//** + * Find whether the has subscribed to Data Categorization (or) not. + * + * @return Boolean - Boolean value. + *//* + public static Boolean checkDataCategorizationSubscriptionInCustomer() { + return (boolean) cacheService.get(SecurityUtil.getHashKey(), IS_DATA_CATEGORIZATION_SUBSCRIBED); + } + + *//** + * Find whether the has subscribed to Data Categorization (or) not. + * + * @return Boolean - Boolean value. + *//* + public static Boolean checkDataCategorizationSubscriptionInPartner() { + return (boolean) cacheService.get(SecurityUtil.getHashKey(), IS_DATA_CATEGORIZATION_SUBSCRIBED); + } + + *//** + * Fetch the email id of the logged in partner + * + * @return String - Email. + *//* + public static String getPartnerEmail() { + return (String) cacheService.get(SecurityUtil.getHashKey(), PARTNER_EMAIL); + } + + *//** + * Find whether the has subscribed to Data Categorization (or) not. + * + * @return Boolean - Boolean value. 
+ *//* + public static Boolean checkAlreadyLoggedIn(String hashKey) { + return (boolean) cacheService.get(hashKey, Constants.IS_ALREADY_LOGGED_IN); + } + + *//** + * Fetches the TimeZone of the user. + * + * @return String - TimeZone. + *//* + public static String getPartnerTimeZone() { + return (String) cacheService.get(SecurityUtil.getHashKey(), TIME_ZONE); + } + + *//** + * Fetch user session Id + * + * @return long - Session ID of logged-in user. + *//* + public static Long getUserSessionId(String hashKey) { + return (Long) cacheService.get(hashKey, USER_SESSION_ID); + } + + *//** + * Fetch secret + * + * @return String - Secret for auth token. + *//* + public static String getSecret() { + return (String) cacheService.get(SecurityUtil.getHashKey(), SECRET); + }*/ +} diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/utils/Constants.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/utils/Constants.java new file mode 100644 index 0000000..ff8b9d0 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/utils/Constants.java @@ -0,0 +1,74 @@ +package com.opsbeach.sharedlib.utils; + +import lombok.AccessLevel; +import lombok.NoArgsConstructor; + +/** + *

+ * Common constant variables used all over the application. + *

+ */ +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public final class Constants { + + //Symbols + public static final String AT = "@"; + public static final String DOT = "."; + public static final String EMPTY = ""; + public static final String COLON = ":"; + public static final String COMMA = ","; + public static final String HYPHEN = "-"; + public static final String UNDERSCORE = "_"; + public static final String SEMI_COLON = ";"; + public static final String PERCENTAGE = "%"; + public static final String EMPTY_SPACE = " "; + public static final String COMMA_SPACE = ", "; + public static final String FORWARD_SLASH = "/"; + public static final String ASTERISK_SYMBOL = "*"; + public static final String SPACE_HYPHEN = EMPTY_SPACE + HYPHEN; + public static final String AUTHORIZATION_HEADER = "Authorization"; + public static final String AUTHORIZED_PATH_PREFIX = FORWARD_SLASH + "v1"; + + //common + public static final String OK = "OK"; + public static final String PORT = "port"; + public static final String HOST = "host"; + public static final String TOKEN = "token"; + public static final String FAILED = "FAILED"; + public static final String SUCCESS = "SUCCESS"; + public static final String BEARER = "Bearer"; + public static final String ENABLED = "enabled"; + public static final String BASE_URL = "base-url"; + public static final String GOAL_FOLDER = "goal"; + public static final String DISABLED = "disabled"; + public static final String CUSTOMER = "customer"; + public static final String PREFIX = BEARER + " "; + public static final String COMMON_FOLDER = "common"; + public static final String PROFILE_FOLDER = "profile"; + public static final String ANONYMOUS_USER = "anonymousUser"; + public static final String TRANSACTION_FOLDER = "transaction"; + public static final String PROFILE_ACTIVE_PRODUCTION = "production"; + public static final String EXECUTED_TENANT_SCHEMA_FOLDER = "executedTenantSchema"; + + //URL + public static final String ENTITY = "entity"; + 
public static final String STATUS = "status"; + public static final String MESSAGE = "message"; + public static final String SESSION_ID = "sessionId"; + public static final String CLIENT_ID_HEADER = "X-ClientId"; + public static final String IS_ALREADY_LOGGED_IN = "isAlreadyLoggedIn"; + + //aws + public static final String REGION = "region"; + public static final String ACCESS_KEY_ID = "access_key_id"; + public static final String SECRET_ACCESS_KEY = "secret_access_key"; + + //github + public static final String CLIENT_ID = "client-id"; + public static final String CLIENT_SECRET = "client-secret"; + + //google + public static final String PROJECT_ID = "project-id"; + public static final String LOCATION_ID = "location-id"; + public static final String QUEUE_ID = "queue-id"; +} diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/utils/DateUtil.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/utils/DateUtil.java new file mode 100644 index 0000000..395a7e1 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/utils/DateUtil.java @@ -0,0 +1,247 @@ +package com.opsbeach.sharedlib.utils; + +import com.opsbeach.sharedlib.security.SecurityUtil; + +import java.text.DateFormat; +import java.text.SimpleDateFormat; +import java.time.Duration; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.YearMonth; +import java.time.ZoneId; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; +import java.time.temporal.ChronoUnit; +import java.util.Calendar; +import java.util.Date; +import java.util.TimeZone; + +/** + *

+ * Common DateUtil class. + *

+ */ +public class DateUtil { + private static final String UTC = "UTC"; + private static final String MMM_YYYY = "MMM, yyyy"; + + public static LocalDateTime convertTimeZoneToDifferentZone(LocalDateTime localDateTime) { + ZonedDateTime zonedUTC = localDateTime.atZone(ZoneId.of(UTC)); + var timeZone = SecurityUtil.getLoggedInUserDetail().getTimeZone(); + return zonedUTC.withZoneSameInstant(ZoneId.of(timeZone)).toLocalDateTime(); + } + + public static LocalDate currentDate() { + return ZonedDateTime.now(ZoneId.of(UTC)).toLocalDate(); + } + + public static LocalDateTime currentDateTime() { + return LocalDateTime.now(); + } + + public static LocalDate currentTimeZoneDate() { + var zonedUTC = ZonedDateTime.now(ZoneId.of(UTC)); + var timeZone = SecurityUtil.getLoggedInUserDetail().getTimeZone(); + var zonedDate = zonedUTC.withZoneSameInstant(ZoneId.of(timeZone)); + return zonedDate.toLocalDate(); + } + + public static LocalDateTime currentTimeZoneDateTime() { + var zonedUTC = ZonedDateTime.now(ZoneId.of(UTC)); + var timeZone = SecurityUtil.getLoggedInUserDetail().getTimeZone(); + var zonedDate = zonedUTC.withZoneSameInstant(ZoneId.of(timeZone)); + return zonedDate.toLocalDateTime(); + } + public static LocalDateTime plusHours(LocalDateTime localDateTime, long value) { + return localDateTime.plusHours(value); + } + + public static LocalDate plusMonths(LocalDate localDate, long value) { + return localDate.plusMonths(value); + } + + public static LocalDate minusMonths(LocalDate localDate, long value) { + return localDate.minusMonths(value); + } + + public static LocalDate plusDays(LocalDate localDate, long value) { + return localDate.plusDays(value); + } + + public static LocalDate minusDays(LocalDate localDate, long value) { + return localDate.minusDays(value); + } + + public static LocalDateTime plusMonths(LocalDateTime localDateTime, long value) { + return localDateTime.plusMonths(value); + } + + public static LocalDateTime minusMonths(LocalDateTime localDateTime, long 
value) { + return localDateTime.minusMonths(value); + } + + public static LocalDateTime plusDays(LocalDateTime localDateTime, long value) { + return localDateTime.plusDays(value); + } + + public static LocalDateTime minusDays(LocalDateTime localDateTime, long value) { + return localDateTime.minusDays(value); + } + + public static LocalDateTime plusMinutes(LocalDateTime localDateTime, long value) { + return localDateTime.plusMinutes(value); + } + + public static LocalDate plusYear(LocalDate localDate, long value) { + return localDate.plusYears(value); + } + + public static LocalDate minusYear(LocalDate localDate, long value) { + return localDate.minusYears(value); + } + + public static LocalDateTime plusYear(LocalDateTime localDateTime, long value) { + return localDateTime.plusYears(value); + } + + public static LocalDateTime minusYear(LocalDateTime localDateTime, long value) { + return localDateTime.minusYears(value); + } + + public static LocalDate plusWeeks(LocalDate localDate, long value) { + return localDate.plusWeeks(value); + } + + public static LocalDate minusWeeks(LocalDate localDate, long value) { + return localDate.minusWeeks(value); + } + + public static LocalDateTime plusWeeks(LocalDateTime localDateTime, long value) { + return localDateTime.plusWeeks(value); + } + + public static LocalDateTime minusWeeks(LocalDateTime localDateTime, long value) { + return localDateTime.minusWeeks(value); + } + + public static Boolean isBefore(LocalDate localDateA, LocalDate localDateB) { + return localDateA.isBefore(localDateB); + } + + public static Boolean isBefore(LocalDateTime localDateTimeA, LocalDateTime localDateTimeB) { + return localDateTimeA.isBefore(localDateTimeB); + } + + public static Boolean isAfter(LocalDate localDateA, LocalDate localDateB) { + return localDateA.isAfter(localDateB); + } + + public static Boolean isAfter(LocalDateTime localDateTimeA, LocalDateTime localDateTimeB) { + return localDateTimeA.isAfter(localDateTimeB); + } + + public static 
Boolean equals(LocalDate localDateA, LocalDate localDateB) { + return localDateA.equals(localDateB); + } + + public static Boolean equals(LocalDateTime localDateTimeA, LocalDateTime localDateTimeB) { + return localDateTimeA.equals(localDateTimeB); + } + + public static String format(LocalDate localDate, DateTimeFormatter dateTimeFormatter) { + return dateTimeFormatter.format(localDate); + } + + public static String format(LocalDateTime localDateTime, DateTimeFormatter dateTimeFormatter) { + return dateTimeFormatter.format(localDateTime); + } + + public static LocalDate convertToLocalDate(LocalDateTime localDateTime) { + return localDateTime.toLocalDate(); + } + + public static Long daysBetweenTwoDates(LocalDate localDateA, LocalDate localDateB) { + return (long) Period.between(localDateA, localDateB).getDays(); + } + + public static LocalDate withDayOfMonth(LocalDate localDate, int dayOfMonth) { + return localDate.withDayOfMonth(dayOfMonth); + } + + public static LocalDate minusMonthsFromFirstDay(LocalDate localDate, long months) { + return localDate.withDayOfMonth(1).minusMonths(months); + } + + public static LocalDate minusMonthsFromLastDay(LocalDate localDate, long months) { + return localDate.minusMonths(months).withDayOfMonth(localDate.minusMonths(months).lengthOfMonth()); + } + + public static Long durationBetween(LocalDate localDateA, LocalDate localDateB) { + return Duration.between(localDateA.atStartOfDay(), localDateB.atStartOfDay()).toDays(); + } + + public static long minutesBetweenTime(LocalDateTime localDateTimeA, LocalDateTime localDateTimeB) { + return Duration.between(localDateTimeA, localDateTimeB).toMinutes(); + } + + public static long hoursBetweenTime(LocalDateTime localDateTimeA, LocalDateTime localDateTimeB) { + return Duration.between(localDateTimeA, localDateTimeB).toHours(); + } + + public static Long secondsBetweenDate(LocalDateTime localDateTimeA, LocalDateTime localDateTimeB) { + return ChronoUnit.SECONDS.between(localDateTimeA, 
localDateTimeB); + } + + public static String getFormattedYearMonth(int year, int month) { + YearMonth thisYearMonth = YearMonth.of(year, month); + DateTimeFormatter formatter = DateTimeFormatter.ofPattern(MMM_YYYY); + return thisYearMonth.format(formatter); + } + + public static DateFormat getDateFormat(String pattern) { + var timeZone = TimeZone.getTimeZone("UTC"); + DateFormat dateFormat = new SimpleDateFormat(pattern); + dateFormat.setTimeZone(timeZone); + return dateFormat; + } + + public static Calendar getCalendar() { + var date = new Date(); + var calendar = Calendar.getInstance(); + calendar.setTime(date); + return calendar; + } + + public static String formatDate(DateFormat dateFormat, Date date) { + return dateFormat.format(date); + } + + public static Boolean validateExpiration(LocalDateTime expirationTime) { + var valid = Boolean.FALSE; + if (expirationTime != null && LocalDateTime.now().isAfter(expirationTime)) { + valid = Boolean.TRUE; + } + return valid; + } + + public static LocalDateTime convertDatetoLocalDateTimeUTC(Date date) { + return date.toInstant().atZone(ZoneId.of(UTC)).toLocalDateTime(); + } + + public static LocalDateTime convertDatetoLocalDateTime(Date date) { + return date.toInstant().atZone(ZoneId.systemDefault()).toLocalDateTime(); + } + + public static LocalDateTime currentDateTimeUTC() { + return ZonedDateTime.now(ZoneId.of(UTC)).toLocalDateTime(); + } + + public static Date convertLocalDateTimeToDateUTC(LocalDateTime localDateTime) { + return Date.from(localDateTime.atZone(ZoneId.of(UTC)).toInstant()); + } + + public static Date convertLocalDateTimeToDate(LocalDateTime localDateTime) { + return Date.from(localDateTime.atZone(ZoneId.systemDefault()).toInstant()); + } +} diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/utils/FileUtil.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/utils/FileUtil.java new file mode 100644 index 0000000..f88cfba --- /dev/null +++ 
b/shared-lib/src/main/java/com/opsbeach/sharedlib/utils/FileUtil.java @@ -0,0 +1,221 @@ +package com.opsbeach.sharedlib.utils; + +import java.io.BufferedInputStream; +import java.io.BufferedOutputStream; +import java.io.BufferedReader; +import java.io.ByteArrayInputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.List; +import java.util.stream.Stream; +import java.util.zip.ZipEntry; +import java.util.zip.ZipInputStream; + +import org.apache.commons.codec.digest.DigestUtils; +import org.apache.commons.compress.archivers.tar.TarArchiveEntry; +import org.apache.commons.compress.archivers.tar.TarArchiveInputStream; +import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream; +import org.apache.commons.csv.CSVFormat; +import org.apache.commons.csv.CSVParser; +import org.apache.commons.io.FileUtils; +import org.springframework.web.multipart.MultipartFile; + +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.JsonNodeFactory; +import com.opsbeach.sharedlib.exception.ErrorCode; +import com.opsbeach.sharedlib.exception.FileNotFoundException; +import com.opsbeach.sharedlib.exception.InvalidDataException; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class FileUtil { + + public static void deleteFile(String path) { + try { + FileUtils.delete(new File(path)); + } catch (IOException e) { + throw new FileNotFoundException(ErrorCode.FILE_NOT_FOUND, e.getMessage()); + } + } + + public static void deleteDirectory(String path) { + try { + FileUtils.cleanDirectory(new File(path)); + FileUtils.deleteDirectory(new File(path)); + } catch (IOException e) { + throw new FileNotFoundException(ErrorCode.FILE_NOT_FOUND, e.getMessage()); 
+ } + } + + public static void uncompressTarGZ(String folderPath, String filePath) { + File dest = new File(folderPath); + try (var buf = new BufferedInputStream(new FileInputStream(filePath)); + TarArchiveInputStream tarIn = new TarArchiveInputStream(new GzipCompressorInputStream(buf));) { + + TarArchiveEntry tarEntry = tarIn.getNextTarEntry(); + // tarIn is a TarArchiveInputStream + while (tarEntry != null) { + // create a file with the same name as the tarEntry + File destPath = new File(dest, tarEntry.getName()); + log.info("working: " + destPath.getCanonicalPath()); + if (tarEntry.isDirectory()) { + destPath.mkdirs(); + } else { + destPath.createNewFile(); + write(destPath, tarIn); + } + tarEntry = tarIn.getNextTarEntry(); + } + } catch (IOException e) { + throw new FileNotFoundException(ErrorCode.FILE_NOT_FOUND, e.getMessage()); + } + } + + private static void write(File destPath, TarArchiveInputStream tarIn) { + byte [] btoRead = new byte[1024]; + try (BufferedOutputStream bout = new BufferedOutputStream(new FileOutputStream(destPath));) { + int len = 0; + while((len = tarIn.read(btoRead)) != -1) + { + bout.write(btoRead,0,len); + } + } catch (IOException e) { + throw new FileNotFoundException(ErrorCode.FILE_NOT_FOUND, e.getMessage()); + } + btoRead = null; + } + + public static String getChecksum(String filePath) { + try (InputStream is = Files.newInputStream(Paths.get(filePath))){ + return DigestUtils.md5Hex(is); + } catch (IOException e) { + e.printStackTrace(); + return null; + } + } + + public static String getChecksum(byte[] bytes) { + return DigestUtils.md5Hex(bytes); + } + + public static List deepSearchFiles(String folderPath, String fileType) { + try (Stream walk = Files.walk(Paths.get(folderPath))) { + return walk.filter(p -> !Files.isDirectory(p)) + .map(Path::toString) + .filter(f -> (f.endsWith(fileType))) + .toList(); + } catch (IOException e) { + throw new FileNotFoundException(ErrorCode.FILE_NOT_FOUND, e.getMessage()); + } + } + + public 
static boolean isCSVFormat(MultipartFile file) { + if (("text/csv").equals(file.getContentType()) || ("application/vnd.ms-excel").equals(file.getContentType())) { + return true; + } + return false; + } + + public static List getCsvFileHeaders(MultipartFile multipartFile) { + try (BufferedReader fileReader = new BufferedReader(new InputStreamReader(multipartFile.getInputStream(), StandardCharsets.UTF_8)); + CSVParser csvParser = new CSVParser(fileReader, CSVFormat.DEFAULT.builder().setHeader().setSkipHeaderRecord(true).setIgnoreHeaderCase(true).setTrim(true).build());) { + return csvParser.getHeaderNames(); + } catch (IOException e) { + throw new InvalidDataException(ErrorCode.INVALID_FILE, e.getMessage()); + } + } + + public static ArrayNode readCsvFile(MultipartFile multipartFile) { + try { + return readCsvFile(multipartFile.getInputStream()); + } catch (IOException e) { + throw new InvalidDataException(ErrorCode.INVALID_FILE, e.getMessage()); + } + } + + public static ArrayNode readCsvFile(byte[] bytes) { + return readCsvFile(new ByteArrayInputStream(bytes)); + } + + public static ArrayNode readCsvFile(InputStream file){ + try (BufferedReader fileReader = new BufferedReader(new InputStreamReader(file, StandardCharsets.UTF_8)); + CSVParser csvParser = new CSVParser(fileReader, CSVFormat.DEFAULT.builder().setHeader().setSkipHeaderRecord(true).setIgnoreHeaderCase(true).setTrim(true).build());) { + var headers = csvParser.getHeaderNames(); + var size = csvParser.getHeaderNames().size(); + var records = csvParser.getRecords(); + ArrayNode arrayNode = JsonNodeFactory.instance.arrayNode(records.size()); + records.forEach(record -> { + var payload = JsonNodeFactory.instance.objectNode(); + for (int i=0; i 0) { + bos.write(buffer, 0, len); + } + } + } + zipIn.closeEntry(); + entry = zipIn.getNextEntry(); + } + } catch (IOException e) { + throw new FileNotFoundException(ErrorCode.FILE_NOT_FOUND, e.getMessage()); + } + } +} diff --git 
a/shared-lib/src/main/java/com/opsbeach/sharedlib/utils/FutureUtil.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/utils/FutureUtil.java new file mode 100644 index 0000000..53ee601 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/utils/FutureUtil.java @@ -0,0 +1,51 @@ +package com.opsbeach.sharedlib.utils; + +import com.opsbeach.sharedlib.exception.CompletableFutureException; +import com.opsbeach.sharedlib.exception.ErrorCode; +import com.opsbeach.sharedlib.response.ResponseMessage; +import com.opsbeach.sharedlib.security.ApplicationConfig; +import lombok.extern.slf4j.Slf4j; +import org.springframework.stereotype.Component; + +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; + +@Slf4j +@Component +public class FutureUtil { + + private final ResponseMessage responseMessage; + private final ApplicationConfig applicationConfig; + + public FutureUtil(ResponseMessage responseMessage, ApplicationConfig applicationConfig) { + this.responseMessage = responseMessage; + this.applicationConfig = applicationConfig; + } + + /** + * Cancel the {@link Future} if it is still executing. The {@link Future} will be interrupted. + */ + public void tryCancelFuture(Future future) { + if (!(future.isDone() || future.isCancelled())) { + future.cancel(Boolean.TRUE); + } + } + + /** + * Get value from the {@link Future} {@code get} method with a timeout (configured in application properties), defaulted to 60 seconds. + * Encapsulates any thrown exception in a {@link RuntimeException} so that this method can be used as an action to {@code Iterable.forEach}. 
+ * + * @return Return value from {@code get} call on the {@link Future} + */ + public T safeGet(Future future) { + try { + return future.get(applicationConfig.getThreadingFutureTimeout(), TimeUnit.MILLISECONDS); + } catch (InterruptedException | ExecutionException | TimeoutException e) { + log.error("Error in retrieving future - {}", e.getMessage()); + Thread.currentThread().interrupt(); + throw new CompletableFutureException(ErrorCode.COMPLETABLE_FUTURE, responseMessage.getErrorMessage(ErrorCode.COMPLETABLE_FUTURE, e.getMessage())); + } + } +} diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/utils/JsonUtil.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/utils/JsonUtil.java new file mode 100644 index 0000000..3ce6318 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/utils/JsonUtil.java @@ -0,0 +1,115 @@ +package com.opsbeach.sharedlib.utils; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.MapperFeature; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.json.JsonMapper; +import com.nimbusds.jose.util.JSONObjectUtils; +import com.opsbeach.sharedlib.exception.ErrorCode; +import com.opsbeach.sharedlib.exception.InvalidDataException; +import com.opsbeach.sharedlib.response.ResponseMessage; +import lombok.AccessLevel; +import lombok.NoArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.util.ObjectUtils; + +import java.text.ParseException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +/** + *

+ * Json convertor util. + *

+ */ +@Slf4j +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public class JsonUtil { + + private static final String JSON_CONVERSION_ERROR = "There is an error in converting the Json to Object - {}"; + private static ResponseMessage responseMessage; + + @Autowired + public JsonUtil(ResponseMessage responseMessage) { + JsonUtil.responseMessage = responseMessage; + } + + public static Object parseJsonResponse(String response) { + Map jsonObject; + try { + jsonObject = JSONObjectUtils.parse(response); + } catch (ParseException e) { + log.error("Error occurred during processing the response - {}", e.getMessage()); + throw new InvalidDataException(ErrorCode.INVALID_JSON_PARSE, responseMessage.getErrorMessage(ErrorCode.INVALID_JSON_PARSE)); + } + if(!ObjectUtils.isEmpty(jsonObject.get(Constants.STATUS)) && !ObjectUtils.isEmpty(jsonObject.get(Constants.ENTITY))) { + var status = jsonObject.get(Constants.STATUS).toString(); + if (!status.equals(Constants.SUCCESS)) { + throw new InvalidDataException(ErrorCode.DATA_RESPONSE_ERROR, responseMessage.getErrorMessage(ErrorCode.DATA_RESPONSE_ERROR, String.valueOf(jsonObject.get(Constants.MESSAGE)))); + } + return jsonObject.get(Constants.ENTITY); + } + return jsonObject; + } + + public static T convertJsonIntoObject(String object, Class clazz) { + JsonMapper jsonMapper = JsonMapper.builder().enable(MapperFeature.ACCEPT_CASE_INSENSITIVE_ENUMS).build(); + var javaType = jsonMapper.getTypeFactory().constructType(clazz); + T result; + try { + result = jsonMapper.readValue(object, javaType); + } catch (JsonProcessingException e) { + log.error(JSON_CONVERSION_ERROR, e.getMessage()); + throw new InvalidDataException(ErrorCode.INVALID_JSON_PARSE, responseMessage.getErrorMessage(ErrorCode.INVALID_JSON_PARSE)); + } + return result; + } + + public static List jsonArrayToObjectList(String json, Class tClass) { + var mapper = new ObjectMapper(); + var listType = mapper.getTypeFactory().constructCollectionType(ArrayList.class, tClass); + 
List result; + try { + result = mapper.readValue(json, listType); + } catch (JsonProcessingException e) { + log.error(JSON_CONVERSION_ERROR, e.getMessage()); + throw new InvalidDataException(ErrorCode.INVALID_JSON_PARSE, responseMessage.getErrorMessage(ErrorCode.INVALID_JSON_PARSE)); + } + return result; + } + + public static Map convertJsonToMap(String json) { + Map convertedJson; + TypeReference> typeRef = new TypeReference<>() { + }; + var mapper = new ObjectMapper(); + try { + convertedJson = mapper.readValue(json, typeRef); + } catch (JsonProcessingException e) { + log.error(JSON_CONVERSION_ERROR, e.getMessage()); + throw new InvalidDataException(ErrorCode.INVALID_JSON_PARSE, responseMessage.getErrorMessage(ErrorCode.INVALID_JSON_PARSE)); + } + return convertedJson; + } + + /** + * Converts Object into Json. + * + * @param object - Object which needs to be converted to Json. + * @return String - Converted Json. + */ + public static String convertObjectIntoJson(Object object) { + var mapper = new ObjectMapper(); + String convertedJson; + try { + convertedJson = mapper.writeValueAsString(object); + } catch (JsonProcessingException e) { + log.error(JSON_CONVERSION_ERROR, e.getMessage()); + throw new InvalidDataException(ErrorCode.INVALID_JSON_PARSE, responseMessage.getErrorMessage(ErrorCode.INVALID_JSON_PARSE)); + } + return convertedJson; + } +} diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/utils/OnboardStatus.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/utils/OnboardStatus.java new file mode 100644 index 0000000..3a69f31 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/utils/OnboardStatus.java @@ -0,0 +1,47 @@ +package com.opsbeach.sharedlib.utils; + +import org.springframework.lang.Nullable; + +import java.util.Objects; + +/** + *

+ * Enum of on-Boarding Status of an user. + *

+ */ +public enum OnboardStatus { + ONBOARDED(0), + ACCOUNT_LINKED(1), + APPROVED(2), + REGISTRATION_COMPLETED(3), + COMPLETED(4), + DEMO_USER(5); + + private final int code; + + OnboardStatus(int code) { + this.code = code; + } + + public static OnboardStatus valueOf(int code) { + var onboardStatus = resolve(code); + if (Objects.isNull(onboardStatus)) { + throw new IllegalArgumentException("No matching constant found for [" + code + "]"); + } + return onboardStatus; + } + + @Nullable + public static OnboardStatus resolve(int code) { + for (OnboardStatus onboardStatus : values()) { + if (onboardStatus.code() == code) { + return onboardStatus; + } + } + return null; + } + + public int code() { + return this.code; + } +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/utils/Status.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/utils/Status.java new file mode 100644 index 0000000..22a646b --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/utils/Status.java @@ -0,0 +1,36 @@ +package com.opsbeach.sharedlib.utils; + +import org.springframework.lang.Nullable; + +public enum Status { + ACTIVE(1), + IN_ACTIVE(2); + + private final int code; + + Status(int code) { + this.code = code; + } + + public static Status valueOf(int code) { + var userActivity = resolve(code); + if (userActivity == null) { + throw new IllegalArgumentException("No matching constant found for [" + code + "]"); + } + return userActivity; + } + + @Nullable + public static Status resolve(int code) { + for (Status status : values()) { + if (status.code() == code) { + return status; + } + } + return null; + } + + public int code() { + return this.code; + } +} diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/utils/StringUtil.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/utils/StringUtil.java new file mode 100644 index 0000000..d88ef3a --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/utils/StringUtil.java 
@@ -0,0 +1,92 @@ +package com.opsbeach.sharedlib.utils; + +import lombok.AccessLevel; +import lombok.NoArgsConstructor; + +import java.util.Objects; +import java.util.StringJoiner; + +import com.fasterxml.jackson.databind.node.ObjectNode; + +/** + *

+ * String operation reusable functions + *

+ */ +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public class StringUtil { + + /** + *

+ * Common method to append string using the ${@code StringJoiner} and return as combine string. + *

+ * + * @param args - Get list of string. + * @return Combine the string using String Joiner and return as string. + */ + public static String constructStringEmptySeparator(String... args) { + var buildString = new StringJoiner(Constants.EMPTY); + for (String arg : args) { + buildString.add(Objects.nonNull(arg) ? arg : Constants.EMPTY); + } + return buildString.toString(); + } + + /** + * Returns a String with capitalizing every word starting letter. + * + * @param value - Target String in which every word needs to be capitalized. + * @return String - Final String with capitalizing every word starting letter. + */ + public static String capitalizeWord(String value) { + if (StringUtil.isEmpty(value).equals(Boolean.TRUE)) { + return Constants.EMPTY; + } + var words = value.split("\\s"); + var capitalizeWord = new StringBuilder(); + for (String word : words) { + var firstLetter = word.substring(0, 1); + var wordAfterFirstLetter = word.substring(1); + capitalizeWord.append(firstLetter.toUpperCase()).append(wordAfterFirstLetter).append(" "); + } + return capitalizeWord.toString().trim(); + } + + public static Boolean isBlank(final CharSequence cs) { + final int strLen = length(cs); + if (strLen == 0) { + return true; + } + for (var i = 0; i < strLen; i++) { + if (!Character.isWhitespace(cs.charAt(i))) { + return Boolean.FALSE; + } + } + return Boolean.TRUE; + } + + private static int length(final CharSequence cs) { + return cs == null ? 
0 : cs.length(); + } + + public static Boolean isEmpty(final CharSequence cs) { + return cs == null || cs.length() == 0; + } + + public static void addToJsonNode(ObjectNode node, String key, String value) { + if (value == null) { + node.putNull(key); + } else { + node.put(key, value); + } + } + + public static void addArrayToJsonNode(ObjectNode node, String key, String[] value) { + if (value == null) { + node.putNull(key); + } else { + var arrayNode = node.putArray(key); + for (String val : value) arrayNode.add(val); + } + } +} \ No newline at end of file diff --git a/shared-lib/src/main/java/com/opsbeach/sharedlib/utils/YamlPropertySourceFactory.java b/shared-lib/src/main/java/com/opsbeach/sharedlib/utils/YamlPropertySourceFactory.java new file mode 100644 index 0000000..bb574a8 --- /dev/null +++ b/shared-lib/src/main/java/com/opsbeach/sharedlib/utils/YamlPropertySourceFactory.java @@ -0,0 +1,55 @@ +package com.opsbeach.sharedlib.utils; + +import org.springframework.beans.factory.config.YamlPropertiesFactoryBean; +import org.springframework.core.env.PropertiesPropertySource; +import org.springframework.core.env.PropertySource; +import org.springframework.core.io.support.EncodedResource; +import org.springframework.core.io.support.PropertySourceFactory; +import org.springframework.lang.NonNull; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.util.Objects; +import java.util.Properties; + +/** + *

+ * YamlPropertySourceFactory class for loading properties. + *

+ */ +public class YamlPropertySourceFactory implements PropertySourceFactory { + + /** + * Create a {@link PropertySource} that wraps the given resource. + * + * @param name the name of the property source + * @param resource the resource (potentially encoded) to wrap + * @return the new {@link PropertySource} (never {@code null}) + * @throws IOException if resource resolution failed + */ + @Override + public PropertySource createPropertySource(String name, @NonNull EncodedResource resource) throws IOException { + var properties = load(resource); + return new PropertiesPropertySource(name != null ? name : Objects.requireNonNull(resource.getResource().getFilename(), "File not found"), Objects.requireNonNullElse(properties, null)); + } + + /** + * Load properties from the YAML file. + * + * @param resource Instance of {@link EncodedResource} + * @return instance of properties + */ + private Properties load(EncodedResource resource) throws FileNotFoundException { + try { + var factory = new YamlPropertiesFactoryBean(); + factory.setResources(resource.getResource()); + factory.afterPropertiesSet(); + return factory.getObject(); + } catch (IllegalStateException ex) { + Throwable cause = ex.getCause(); + if (cause instanceof FileNotFoundException) + throw (FileNotFoundException) cause; + throw ex; + } + } +} \ No newline at end of file diff --git a/shared-lib/src/main/resources/application-messages.yaml b/shared-lib/src/main/resources/application-messages.yaml new file mode 100644 index 0000000..999cdfb --- /dev/null +++ b/shared-lib/src/main/resources/application-messages.yaml @@ -0,0 +1,88 @@ +response-messages: + success: + 1001: Created successfully {0} ..! + 1002: Updated successfully {0} ..! + 1003: Fetched successfully ..! + 1004: Deleted successfully {0} ..! + 1005: Fetched all data successfully..! + 1006: OTP sent successfully..! + error: + 5000: Unexcepted internal server error..! 
+ 5001: Request validation failed - {0} + 5002: Invalid file {0} provided + 5003: Bad Request {0} + 5004: Access Token Invalid, Access Token is {0} + 5005: Unable to verify access token + 5006: Invalid Access Token. + 5007: Invalid User. + 5008: Invalid Password. + 5009: Error while generating token. + 5010: Invalid Refresh Token. + 5011: Invalid OTP. + 5012: Invalid verification otp token. + 5013: Unauthorized user. + 5014: Rest app 2 app call. + 5015: Only Owner of the account can update a member but not oneself. + 5016: Record Not Found - {0}. + 5017: Invalid data while parse JSON Response. + 5018: Error on API response - {0} + 5019: Insufficient Privileges. + 5020: Saving Preference is not updated. + 5021: Input Stream exception. + 5022: Malformed URL exception. + 5023: Invalid Tenant. + 5024: X-TenantId not present in the Request Header. + 5025: Authentication PreCondition - {0} + 5026: User already exists. Please login. + 5027: User %s does not exist + 5028: Error in creating CSV file. + 5029: You have been automatically logged out from the system. + 5030: Unable to close {0} connection. + 5031: Budget amount for {0} category is not in-sync with the {1}% share out of total budget amount. + 5032: Actual transaction amount {0} does not match splitted transaction amount {1} + 5033: Category wise Budget should be set before Merchant level budget for the user. + 5034: Invalid Login Mode + 5035: Completable future Async error - {0} + 5036: Client already exists. + 5037: URL encoded error + 5038: Record not found {1} Id - {0} + 5039: Invalid {0} Id + 5040: Already exists - {0} + 5041: Project Id, Issue Type, description, reporter Id and Summary should not be empty or null + 5042: Invalid Issue Type + 5043: Issue type is a Sub-task but parent issue key not specified. + 5044: Issue type is Epic but Epic name is not specified. 
+ 5045: Invalid Task Type - {0} + 5046: Invalid Service Type - {0} + 5047: Domin Not Found - {0} - messaage - {1} + 5048: Invalid Action Type - {0} + 5049: Channel not found - {0} + 5050: Not in channel - {0} + 5051: Something went wrong - {0} + 5052: File Not Found - {0} + 5053: Invalid Repo Type - {0} + 5054: Table Already Modified - {0} + 5055: Table '{0}' Already Modified In PR - {1} + 5056: Table Validation Error - {0} + 5057: Table name changed not allowed (from -{0} to - {1}) + 5058: Table namespace changed not allowed (from -{0} to - {1}) + 5059: Json Schema Id changed not allowed (from -{0} to - {1}) + 5060: Field deleted not allowed + 5061: Field name {0} changed not allowed + 5062: Field DataType updation not allowed for field - {0} + 5063: Field {0} of table - {1} has error + 5064: Field datatype updation for union field - {0} + 5065: Element type of array field - {0} + 5066: Value type of Map field - {0} + 5067: Changes of Schema {0} is not present in file - {1} + 5068: Changes of Schema {0} of file1 - {1} and file2 - {2} are not same + 5069: Field Validation Error - {0} + 5070: -{0} in Table {1} + 5071: -{0} in {1} of Table {2} + 5072: Unrecognized Column In CSV - {0} - in File - {1} + 5073: Repo Organization Not Found + 5074: OTP expired. login again. 
+ 5075: Error Occured while creating Task on Gcloud - {0} + 5076: Table metadata owner changed not allowed from {0} to {1} in table - {2} + 5077: Table metadata domain changed not allowed from {0} to {1} in table - {2} + 5078: Invalid Pr Status or Empty - {0} \ No newline at end of file diff --git a/shared-lib/src/main/resources/application.yaml b/shared-lib/src/main/resources/application.yaml new file mode 100644 index 0000000..5632b67 --- /dev/null +++ b/shared-lib/src/main/resources/application.yaml @@ -0,0 +1,22 @@ +spring: + application: + name: shared-lib + jackson: + parser: + allow-numeric-leading-zeros: true + connect: + + user: + base-url: https://api.aldefi.io/user/v1/auth/ + client-register: client/register + user-register: user/register + user: user + client: client + user-login: user/login + jwt-add: jwt + audit: audit + jwt-access: access + Private String: + host: localhost + port: 6379 + threading-future-timeout: 10000 \ No newline at end of file diff --git a/shared-lib/src/main/resources/private_pkcs8.pem b/shared-lib/src/main/resources/private_pkcs8.pem new file mode 100644 index 0000000..43e5a16 --- /dev/null +++ b/shared-lib/src/main/resources/private_pkcs8.pem @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDEvBre40pjSlQD +ptcvth0cjHyIY41/go1IxSSlVKy8sXDSKVwiILliMv/eW0SxNAp9B5x+YqncAGgy +QymKBPSIQoX56yw14c/8vGrwrwimJ4SgXAwMzzJgj3wRc7PTfS1EmpLVS6LCyMNr +XEJQQ4LYqGxuDeUhrtfoeRrON7PIH5EIbyW2Y38Ew/KErlHJ2sPSR742NzbudTtt +7Cfpnyvv+meH741x8Iw2IhIXGBbXedtkcJaC5eUe6SZ+eBv1yzEZElij+ZSJyy0h +zoX4PFRS0s265lGLit4dq9GOr5aVH15ejCbPOsglr7dZ0739m63WR4LIuP2bbFHt +0wzfNHabAgMBAAECggEAKZdW2u4USN9q2YbJEVYf22w7252oSV+xPMQo1icLiay9 +nECQMMrYRlFf1INZB1loKjyUsgdrv436eT3Hp10a0T+3ETnNk+qUTOsMdKucWXwb +RseO6JpZHR7YbNwYFUruagV/q0YdN6632ZeckVlkbSdWHQIlAbT0AjqwFw6hnFnp +JAGpSmizR4A965jD6WHhqFxtOnZ4ABsD+d1tpSzvcsetmquVZRdbYuoV+0lOfyk3 +E5hjy6ELXijbl4zjuIhhw9E7wbJYvVqnukcNfbwdvM2JquStKVZClAp4Byw1UpQW 
+2FAYcvCM8y0iRKcS+ZZVcAnyPVMuRRPfq92hP+eRIQKBgQDmh+yOurDY+3v3kTwN +JCGsXJoWx8Kcy5MH1cI5gkj+1LfROuofvYVgL0rKhUQQtNKl2lVZsItY8I2ILngF +5eUdaDVCJ7iU3K+BhpKMeki6W8iIpP4U+/zvhKsUzPvMX9bhlkKKPzv5O7Xzc7gu +K3t0UVSCkb3Q4Yw0DwOkybRisQKBgQDaeFQT1OCuRfgA8MXi07KkdC4dQwo4kQ/p +jxWF7E4zd8XEboRFw2h75eytjmeWAf2yK8D4RXgJfSiSIOltorv2TwJO/VU5G8dN +bg/WTA1eRFyTmu+SKjMn2kNYj+I7a9HkyE6fjhxWMJM1jis8vFE0/E3Y9uUVZzmM +52o5U7wJCwKBgB47x090torOeJSgTILgsTXcsyy3qBNOM4s8gotuRcp99VmQY3Ik +NeDGcwWacrHKNTIhLz5jA/N3IOiyR3VmkCTKLOnvMar8Qlw3XPLlerJOJT+ee+Ae +qC51dMZ2l7NXAmR1g2aEIoZuSU+TZp1PgJJhtDanjIBfhDaznWO4PFsRAoGBAMO3 +HmMxxyJ4ridxi2WcDyLCrfkq8MIiOmYa98YlHx1+hB5of3jcCQNNxIzq0SOSnwr5 +pYMFAPdHN4xJjbTZH5gVydSnoKxrlU1pdyicszEq/uCA4XRI99EQ+FojdCGVgtB5 +He5UalBQbeCBf3yGnB1ilf7zl+BnvCfdvMyklgRnAoGBANqKWatq+7JRpXKVViSy +kCspBpNY1D9bRiu0hV65O51s5753YMzmmWp/yXT9zR6ZdQlVhpK7LsRliDs3A/to +gwXXgMED0jyfvok4MCC0mjYaxkrQ/XzEE723+GJ5+AT1mytq3tdjhmkriIp4H+gQ +cjASxE5PGhMXKj8EsOL8gUTs +-----END PRIVATE KEY----- diff --git a/shared-lib/src/main/resources/public.pem b/shared-lib/src/main/resources/public.pem new file mode 100644 index 0000000..6a36c0d --- /dev/null +++ b/shared-lib/src/main/resources/public.pem @@ -0,0 +1,9 @@ +-----BEGIN PUBLIC KEY----- +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxLwa3uNKY0pUA6bXL7Yd +HIx8iGONf4KNSMUkpVSsvLFw0ilcIiC5YjL/3ltEsTQKfQecfmKp3ABoMkMpigT0 +iEKF+essNeHP/Lxq8K8IpieEoFwMDM8yYI98EXOz030tRJqS1UuiwsjDa1xCUEOC +2Khsbg3lIa7X6HkazjezyB+RCG8ltmN/BMPyhK5RydrD0ke+Njc27nU7bewn6Z8r +7/pnh++NcfCMNiISFxgW13nbZHCWguXlHukmfngb9csxGRJYo/mUicstIc6F+DxU +UtLNuuZRi4reHavRjq+WlR9eXowmzzrIJa+3WdO9/Zut1keCyLj9m2xR7dMM3zR2 +mwIDAQAB +-----END PUBLIC KEY----- diff --git a/src/main/java/org/schemata/SchemataExecutor.java b/src/main/java/org/schemata/SchemataExecutor.java deleted file mode 100644 index 8bc7503..0000000 --- a/src/main/java/org/schemata/SchemataExecutor.java +++ /dev/null @@ -1,92 +0,0 @@ -package org.schemata; - -import org.schemata.app.DocumentApp; -import 
org.schemata.app.SchemaScoreApp; -import org.schemata.app.SchemaValidatorApp; -import org.schemata.compatibility.SchemaCompatibilityChecker; -import org.schemata.compatibility.Summary; -import org.schemata.provider.SchemaParser; -import org.schemata.provider.avro.AvroSchemaCompatibilityChecker; -import org.schemata.provider.avro.AvroSchemaParser; -import org.schemata.provider.protobuf.ProtoSchemaCompatibilityChecker; -import org.schemata.provider.protobuf.ProtoSchemaParser; -import picocli.CommandLine.Option; -import picocli.CommandLine.ScopeType; - -import java.util.Set; - -import static picocli.CommandLine.Command; -import static picocli.CommandLine.Parameters; - - -@Command(name = "protocol", mixinStandardHelpOptions = true, description = "Schemata commandline tool") -public class SchemataExecutor { - - enum Provider { - PROTOBUF, AVRO - } - - @Option(names = {"-s", "--source"}, description = "Path to schema file", scope = ScopeType.INHERIT) - private String path; - - @Option(names = {"-p", "--provider"}, description = "Valid provider values: ${COMPLETION-CANDIDATES}", scope = - ScopeType.INHERIT) - private Provider provider; - - @Option(names = {"-b", "--base"}, description = "Base Path to schema file", scope = ScopeType.INHERIT) - private String basePath; - - @Command(description = "Validate schema") - public int validate() - throws Exception { - var parser = getSchemaParser(); - return new SchemaValidatorApp(parser.getSchemaList(path)).call(); - } - - @Command(description = "Calculate protocol score") - public int score(@Parameters(paramLabel = "", description = "fully qualified message name") String schema) - throws Exception { - var parser = getSchemaParser(); - return new SchemaScoreApp(parser.getSchemaList(path), schema).call(); - } - - @Command(description = "Document a schema as JSON") - public int document() - throws Exception { - var parser = getSchemaParser(); - return new DocumentApp(parser.getSchemaList(path)).call(); - } - - @Command(description = 
"Check if schema is backward compatible") - public int isBackwardCompatible() { - var checker = getSchemaCompatibilityChecker(); - return checker.check(basePath, path).isCompatible() ? 0 : 1; - } - - @Command(description = "Print the backward compatibility summary with incompatible fields") - public int compatibilitySummary() { - var checker = getSchemaCompatibilityChecker().check(basePath, path); - if (checker.isCompatible()) { - System.out.println("Schema is backward compatible"); - return 0; - } else { - System.out.println("Incompatible fields:"); - checker.summary().forEach(System.out::println); - return 1; - } - } - - public SchemaParser getSchemaParser() { - return switch (provider) { - case PROTOBUF -> new ProtoSchemaParser(); - case AVRO -> new AvroSchemaParser(); - }; - } - - public SchemaCompatibilityChecker getSchemaCompatibilityChecker() { - return switch (provider) { - case PROTOBUF -> new ProtoSchemaCompatibilityChecker(); - case AVRO -> new AvroSchemaCompatibilityChecker(); - }; - } -} diff --git a/src/main/java/org/schemata/SchemataMain.java b/src/main/java/org/schemata/SchemataMain.java deleted file mode 100644 index 8386672..0000000 --- a/src/main/java/org/schemata/SchemataMain.java +++ /dev/null @@ -1,16 +0,0 @@ -package org.schemata; - -import picocli.CommandLine; - - -public class SchemataMain { - - public static void main(String... 
args) { - var cmd = new CommandLine(new SchemataExecutor()) - .setOptionsCaseInsensitive(true) - .setCaseInsensitiveEnumValuesAllowed(true); - - int exitCode = cmd.execute(args); - System.exit(exitCode); - } -} diff --git a/src/main/java/org/schemata/app/DocumentApp.java b/src/main/java/org/schemata/app/DocumentApp.java deleted file mode 100644 index 8709b24..0000000 --- a/src/main/java/org/schemata/app/DocumentApp.java +++ /dev/null @@ -1,27 +0,0 @@ -package org.schemata.app; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.SerializationFeature; -import org.schemata.domain.Schema; - -import java.util.List; -import java.util.concurrent.Callable; - -public class DocumentApp implements Callable { - - private final List schemaList; - - public DocumentApp(List schemaList) { - this.schemaList = schemaList; - } - - @Override - public Integer call() throws Exception { - var mapper = new ObjectMapper(); - mapper.enable(SerializationFeature.INDENT_OUTPUT); // pretty print - var out = mapper.writeValueAsString(schemaList); - System.out.println(out); - - return 0; - } -} diff --git a/src/main/java/org/schemata/app/SchemaScoreApp.java b/src/main/java/org/schemata/app/SchemaScoreApp.java deleted file mode 100644 index a0df11e..0000000 --- a/src/main/java/org/schemata/app/SchemaScoreApp.java +++ /dev/null @@ -1,40 +0,0 @@ -package org.schemata.app; - -import java.util.List; -import java.util.concurrent.Callable; -import org.apache.commons.lang3.StringUtils; -import org.schemata.domain.Schema; -import org.schemata.exception.SchemaNotFoundException; -import org.schemata.graph.SchemaGraph; -import org.schemata.printer.Console; - - -public class SchemaScoreApp implements Callable { - - private List schemaList; - private String schemaName; - - public SchemaScoreApp(List schemaList, String schemaName) { - this.schemaList = schemaList; - this.schemaName = schemaName; - } - - @Override - public Integer call() throws Exception { - - if 
(StringUtils.isBlank(schemaName)) { - Console.printError("Invalid schema name:" + schemaName); - return -1; - } - - var graph = new SchemaGraph(this.schemaList); - try { - double value = graph.getSchemataScore(schemaName); - Console.printSuccess("Schemata score for " + schemaName + " : " + value); - } catch (SchemaNotFoundException e) { - Console.printError(e.getMessage()); - return -1; - } - return 0; - } -} diff --git a/src/main/java/org/schemata/app/SchemaValidatorApp.java b/src/main/java/org/schemata/app/SchemaValidatorApp.java deleted file mode 100644 index c23e610..0000000 --- a/src/main/java/org/schemata/app/SchemaValidatorApp.java +++ /dev/null @@ -1,46 +0,0 @@ -package org.schemata.app; - -import java.util.List; -import java.util.concurrent.Callable; -import org.schemata.domain.Field; -import org.schemata.domain.Schema; -import org.schemata.printer.Console; -import org.schemata.validate.FieldValidator; -import org.schemata.validate.SchemaValidator; -import org.schemata.validate.Status; - - -public class SchemaValidatorApp implements Callable { - - private List schemaList; - - public SchemaValidatorApp(List schemaList) { - this.schemaList = schemaList; - } - - @Override - public Integer call() - throws Exception { - var schemaValidator = new SchemaValidator(); - var fieldValidator = new FieldValidator(); - for (Schema schema : schemaList) { - var schemaResult = schemaValidator.apply(schema); - if (schemaResult.status() == Status.ERROR) { - Console.printError("Error parsing Schema " + schema.name() + "Error Message:" + schemaResult.errorMessages()); - return -1; - } - - for (Field field : schema.fieldList()) { - var fieldResult = fieldValidator.apply(field); - if (fieldResult.status() == Status.ERROR) { - Console.printError( - "Error parsing Schema Fields in schema:" + schema.name() + " on field:" + field.name() + " Error Message:" - + fieldResult.errorMessages()); - return -1; - } - } - } - Console.printSuccess("Schema validation success. 
No error to report"); - return 0; - } -} diff --git a/src/main/java/org/schemata/compatibility/Result.java b/src/main/java/org/schemata/compatibility/Result.java deleted file mode 100644 index 0d51cb5..0000000 --- a/src/main/java/org/schemata/compatibility/Result.java +++ /dev/null @@ -1,7 +0,0 @@ -package org.schemata.compatibility; - - -import java.util.Set; - -public record Result(Boolean isCompatible, Set summary) { -} diff --git a/src/main/java/org/schemata/compatibility/SchemaCompatibilityChecker.java b/src/main/java/org/schemata/compatibility/SchemaCompatibilityChecker.java deleted file mode 100644 index bc16dda..0000000 --- a/src/main/java/org/schemata/compatibility/SchemaCompatibilityChecker.java +++ /dev/null @@ -1,7 +0,0 @@ -package org.schemata.compatibility; - - -public interface SchemaCompatibilityChecker { - Result check(String baseSchemaPath, String changeSchemaPath); -} - diff --git a/src/main/java/org/schemata/compatibility/Summary.java b/src/main/java/org/schemata/compatibility/Summary.java deleted file mode 100644 index b6d9607..0000000 --- a/src/main/java/org/schemata/compatibility/Summary.java +++ /dev/null @@ -1,40 +0,0 @@ -package org.schemata.compatibility; - -public record Summary(String filename, String schemaName, String fieldName, String fieldType) { - - private Summary(Builder builder) { - this(builder.filename, builder.schemaName, builder.fieldName, builder.fieldType); - } - - public static class Builder { - protected String filename; - protected String schemaName; - protected String fieldName; - protected String fieldType; - - public Builder filename(String filename) { - this.filename = filename; - return this; - } - - public Builder schemaName(String schemaName) { - this.schemaName = schemaName; - return this; - } - - public Builder fieldName(String fieldName) { - this.fieldName = fieldName; - return this; - } - - public Builder fieldType(String fieldType) { - this.fieldType = fieldType; - return this; - } - - public Summary build() 
{ - return new Summary(filename, schemaName, fieldName, fieldType); - } - } -} - diff --git a/src/main/java/org/schemata/domain/Constraints.java b/src/main/java/org/schemata/domain/Constraints.java deleted file mode 100644 index de20d27..0000000 --- a/src/main/java/org/schemata/domain/Constraints.java +++ /dev/null @@ -1,34 +0,0 @@ -package org.schemata.domain; - -import java.util.List; -import java.util.Map; - -public record Constraints(String name, String description, Map constraintMap) { - - public record Constraint(String key, Object value, DataType dataType, List listValue) { - - public static Constraint primitiveConstraints(String key, Object value, DataType dataType) { - return new Constraint(key, value, dataType, null); - } - - public static Constraint listConstraints(String key, List listValue, DataType dataType) { - return new Constraint(key, null, dataType, listValue); - } - - } - - public enum DataType { - NULL, NUMBER, STRING, BOOLEAN, LIST; // STRUCT Type is not supported - - public static DataType fromString(String dataType) { - return switch (dataType.toUpperCase()) { - case "NULL_VALUE", "NULL" -> NULL; - case "NUMBER_VALUE", "NUMBER" -> NUMBER; - case "STRING_VALUE", "STRING" -> STRING; - case "BOOL_VALUE", "BOOLEAN" -> BOOLEAN; - case "LIST_VALUE", "LIST" -> LIST; - default -> throw new IllegalArgumentException("Invalid data type: " + dataType); - }; - } - } -} diff --git a/src/main/java/org/schemata/domain/Depends.java b/src/main/java/org/schemata/domain/Depends.java deleted file mode 100644 index 58c758c..0000000 --- a/src/main/java/org/schemata/domain/Depends.java +++ /dev/null @@ -1,4 +0,0 @@ -package org.schemata.domain; - -public record Depends(String model, String column) { -} diff --git a/src/main/java/org/schemata/domain/Field.java b/src/main/java/org/schemata/domain/Field.java deleted file mode 100644 index 9cd456d..0000000 --- a/src/main/java/org/schemata/domain/Field.java +++ /dev/null @@ -1,110 +0,0 @@ -package org.schemata.domain; - 
-import java.util.List; - - -public record Field(String schema, String name, String dataType, boolean isPrimitiveType, String description, - String comment, String seeAlso, String reference, boolean isClassified, String classificationLevel, - boolean isPrimaryKey, String productType, Link link, List depends) { - - private Field(Builder builder) { - this(builder.schema, builder.name, builder.dataType, builder.isPrimitiveType, builder.description, builder.comment, - builder.seeAlso, builder.reference, builder.isClassified, builder.classificationLevel, builder.isPrimaryKey, - builder.productType, builder.link, builder.depends); - } - - public static class Builder { - - String schema; - String name; - String dataType; - boolean isPrimitiveType; - String description; - String comment; - String seeAlso; - String reference; - boolean isClassified; - String classificationLevel; - boolean isPrimaryKey; - String productType; - Link link; - List depends; - - public Builder(String schema, String name, String dataType, boolean isPrimitiveType) { - this.schema = schema; - this.name = name; - this.dataType = dataType; - this.isPrimitiveType = isPrimitiveType; - } - - public Builder description(String description) { - this.description = description; - return this; - } - - public Builder comment(String comment) { - this.comment = comment; - return this; - } - - public Builder seeAlso(String seeAlso) { - this.seeAlso = seeAlso; - return this; - } - - public Builder reference(String reference) { - this.reference = reference; - return this; - } - - public Builder isClassified(boolean classified) { - this.isClassified = classified; - return this; - } - - public Builder classificationLevel(String classifiedLevel) { - this.classificationLevel = classifiedLevel; - return this; - } - - public Builder primaryKey(boolean primaryKey) { - this.isPrimaryKey = primaryKey; - return this; - } - - public Builder productType(String productType) { - this.productType = productType; - return this; - } 
- - public Builder link(Link link) { - this.link = link; - return this; - } - - public Builder depends(List depends) { - this.depends = depends; - return this; - } - - public Field build() { - return new Field(this); - } - } - - public static class Prop { - public static final String DESC = "desc"; - public static final String DESCRIPTION = "description"; - public static final String COMMENT = "comment"; - public static final String SEE_ALSO = "see_also"; - public static final String REFERENCE = "reference"; - public static final String IS_CLASSIFIED = "is_classified"; - public static final String IS_PRIMARY_KEY = "is_primary_key"; - public static final String PRODUCT_TYPE = "product_type"; - public static final String LINK = "link"; - public static final String DEPENDS = "depends"; - public static final String MODEL = "model"; - - public static final String COLUMN = "column"; - } -} diff --git a/src/main/java/org/schemata/domain/Link.java b/src/main/java/org/schemata/domain/Link.java deleted file mode 100644 index ee4b93b..0000000 --- a/src/main/java/org/schemata/domain/Link.java +++ /dev/null @@ -1,4 +0,0 @@ -package org.schemata.domain; - -public record Link(String model, String column) { -} diff --git a/src/main/java/org/schemata/domain/Schema.java b/src/main/java/org/schemata/domain/Schema.java deleted file mode 100644 index 9037c08..0000000 --- a/src/main/java/org/schemata/domain/Schema.java +++ /dev/null @@ -1,157 +0,0 @@ -package org.schemata.domain; - -import java.util.List; - - -public record Schema(String name, String description, String comment, String seeAlso, String reference, String owner, - String domain, String status, String type, String eventType, String modelType, String teamChannel, - String alertChannel, String complianceOwner, String complianceChannel, - List downstreamSubscribersList, List upstreamSubscribersList, - List fieldList, List constraintsList) { - - private Schema(Builder builder) { - this(builder.name, builder.description, 
builder.comment, builder.seeAlso, builder.reference, builder.owner, - builder.domain, builder.status, builder.schemaType.name(), builder.eventType.name(), builder.modelType.name(), - builder.teamChannel, builder.alertChannel, builder.complianceOwner, builder.complianceChannel, - builder.downstreamSubscribersList, builder.upstreamSubscribersList, - builder.fieldList, builder.constraintsList); - } - - public static class Builder { - String name; - String description; - String comment; - String seeAlso; - String reference; - String owner; - String domain; - String status; - SchemaType schemaType; - EventType eventType; - ModelType modelType; - String teamChannel; - String alertChannel; - String complianceOwner; - String complianceChannel; - List fieldList; - List downstreamSubscribersList; - List upstreamSubscribersList; - List constraintsList; - - public Builder(String name, List fieldList) { - this.name = name; - this.fieldList = fieldList; - this.eventType = EventType.NONE; - this.modelType = ModelType.NONE; - } - - public Builder description(String description) { - this.description = description; - return this; - } - - public Builder comment(String comment) { - this.comment = comment; - return this; - } - - public Builder seeAlso(String seeAlso) { - this.seeAlso = seeAlso; - return this; - } - - public Builder reference(String reference) { - this.reference = reference; - return this; - } - - public Builder owner(String owner) { - this.owner = owner; - return this; - } - - public Builder domain(String domain) { - this.domain = domain; - return this; - } - - public Builder status(String status) { - this.status = status; - return this; - } - - public Builder schemaType(String schemaTypeValue) { - this.schemaType = SchemaType.get(schemaTypeValue); - return this; - } - - public Builder eventType(String eventTypeValue) { - this.eventType = EventType.get(eventTypeValue); - return this; - } - - public Builder modelType(String modelTypeValue) { - this.modelType = 
ModelType.get(modelTypeValue); - return this; - } - - public Builder teamChannel(String teamChannel) { - this.teamChannel = teamChannel; - return this; - } - - public Builder alertChannel(String alertChannel) { - this.alertChannel = alertChannel; - return this; - } - - public Builder complianceOwner(String complianceOwner) { - this.complianceOwner = complianceOwner; - return this; - } - - public Builder complianceChannel(String complianceChannel) { - this.complianceChannel = complianceChannel; - return this; - } - - public Builder downstreamSubscribersList(List subscribersList) { - this.downstreamSubscribersList = subscribersList; - return this; - } - - public Builder upstreamSubscribersList(List subscribersList) { - this.upstreamSubscribersList = subscribersList; - return this; - } - - public Builder constraintsList(List constraints) { - this.constraintsList = constraints; - return this; - } - - public Schema build() { - return new Schema(this); - } - } - - public static final class Prop { - - public static final String DESC = "desc"; - - public static final String DESCRIPTION = "description"; - public static final String COMMENT = "comment"; - public static final String SEE_ALSO = "see_also"; - public static final String REFERENCE = "reference"; - public static final String OWNER = "owner"; - public static final String DOMAIN = "domain"; - public static final String STATUS = "status"; - public static final String SCHEMA_TYPE = "schema_type"; - public static final String EVENT_TYPE = "event_type"; - public static final String MODEL_TYPE = "model_type"; - public static final String TEAM_CHANNEL = "team_channel"; - public static final String ALERT_CHANNEL = "alert_channel"; - public static final String COMPLIANCE_OWNER = "compliance_owner"; - public static final String COMPLIANCE_CHANNEL = "compliance_channel"; - } -} diff --git a/src/main/java/org/schemata/domain/Subscribers.java b/src/main/java/org/schemata/domain/Subscribers.java deleted file mode 100644 index 
22a4fad..0000000 --- a/src/main/java/org/schemata/domain/Subscribers.java +++ /dev/null @@ -1,4 +0,0 @@ -package org.schemata.domain; - -public record Subscribers(String name, String usage) { -} diff --git a/src/main/java/org/schemata/exception/SchemaNotFoundException.java b/src/main/java/org/schemata/exception/SchemaNotFoundException.java deleted file mode 100644 index 20da351..0000000 --- a/src/main/java/org/schemata/exception/SchemaNotFoundException.java +++ /dev/null @@ -1,23 +0,0 @@ -package org.schemata.exception; - -public class SchemaNotFoundException extends RuntimeException { - public SchemaNotFoundException() { - } - - public SchemaNotFoundException(String message) { - super(message); - } - - public SchemaNotFoundException(String message, Throwable cause) { - super(message, cause); - } - - public SchemaNotFoundException(Throwable cause) { - super(cause); - } - - public SchemaNotFoundException(String message, Throwable cause, boolean enableSuppression, - boolean writableStackTrace) { - super(message, cause, enableSuppression, writableStackTrace); - } -} diff --git a/src/main/java/org/schemata/exception/SchemaParserException.java b/src/main/java/org/schemata/exception/SchemaParserException.java deleted file mode 100644 index 75a62f7..0000000 --- a/src/main/java/org/schemata/exception/SchemaParserException.java +++ /dev/null @@ -1,11 +0,0 @@ -package org.schemata.exception; - -public class SchemaParserException extends RuntimeException { - public SchemaParserException(String message) { - super(message); - } - - public SchemaParserException(String message, Throwable cause) { - super(message, cause); - } -} diff --git a/src/main/java/org/schemata/graph/SchemaGraph.java b/src/main/java/org/schemata/graph/SchemaGraph.java deleted file mode 100644 index 0d4fb27..0000000 --- a/src/main/java/org/schemata/graph/SchemaGraph.java +++ /dev/null @@ -1,176 +0,0 @@ -package org.schemata.graph; - -import java.math.BigDecimal; -import java.math.MathContext; -import 
java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import org.apache.commons.collections4.SetUtils; -import org.apache.commons.lang3.StringUtils; -import org.jgrapht.alg.scoring.PageRank; -import org.jgrapht.graph.DirectedWeightedMultigraph; -import org.jgrapht.util.SupplierUtil; -import org.schemata.domain.Field; -import org.schemata.domain.Schema; -import org.schemata.exception.SchemaNotFoundException; - - -public final class SchemaGraph { - - private final DirectedWeightedMultigraph graph = - new DirectedWeightedMultigraph<>(SupplierUtil.createSupplier(Schema.class), - SupplierUtil.createSupplier(WeightedSchemaEdge.class)); - - private final List schemaList; - Map schemaMap; - private final PageRank pageRank; - - public SchemaGraph(List schemaList) { - this.schemaList = schemaList; - this.schemaMap = buildGraph(); - this.buildEdge(); - pageRank = new PageRank<>(graph); - } - - private Map buildGraph() { - Map schemaMap = new HashMap<>(); - for (Schema schema : schemaList) { - schemaMap.put(schema.name(), schema); - this.addVertex(schema); - } - return schemaMap; - } - - private void buildEdge() - throws SchemaNotFoundException { - for (Schema schema : this.schemaList) { - for (Field field : schema.fieldList()) { - if (!field.isPrimitiveType()) { - findVertex(field.dataType()).ifPresentOrElse( - value -> this.addEdge(new WeightedSchemaEdge(schema, value, field)), () -> { - throw new SchemaNotFoundException("DataType " + field.dataType() + " Not found in the graph"); - }); - } - } - } - } - - private void addVertex(Schema schema) { - graph.addVertex(schema); - } - - private void addEdge(WeightedSchemaEdge edge) { - if (edge == null) { - throw new IllegalArgumentException("Edge can't be null"); - } - graph.addEdge(edge.getSource(), edge.getTarget(), edge); - } - - public Set 
incomingEdgesOf(String vertex) - throws SchemaNotFoundException { - return graph.incomingEdgesOf(getSchema(vertex)); - } - - public Set incomingVertexOf(String vertex) { - Set incomingSchemaSet = new HashSet<>(); - incomingEdgesOf(vertex).forEach(e -> incomingSchemaSet.add(e.getSource())); - return incomingSchemaSet; - } - - public Set outgoingEdgesOf(String vertex) - throws SchemaNotFoundException { - return graph.outgoingEdgesOf(getSchema(vertex)); - } - - public Set outgoingVertexOf(String vertex) { - Set outgoingSchemaSet = new HashSet<>(); - outgoingEdgesOf(vertex).forEach(e -> outgoingSchemaSet.add(e.getTarget())); - return outgoingSchemaSet; - } - - public Set outgoingEntityVertexOf(String vertex) { - return outgoingVertexOf(vertex).stream().filter(f -> "ENTITY".equalsIgnoreCase(f.type())) - .collect(Collectors.toSet()); - } - - public Set getAllEntityVertex() { - return graph.vertexSet().stream().filter(f -> "ENTITY".equalsIgnoreCase(f.type())).collect(Collectors.toSet()); - } - - public Double getVertexPageRankScore(String vertex) { - return pageRank.getVertexScore(getSchema(vertex)); - } - - public Double getSchemataScore(String vertex) { - var schema = getSchema(vertex); - double score = switch (schema.type().toUpperCase()) { - case "ENTITY" -> computeEntityScore(vertex); - case "EVENT" -> computeEventScore(vertex, schema.eventType()); - default -> 0.0; - }; - return roundUp(score); - } - - private double computeEntityScore(String vertex) { - double totalEdges = graph.edgeSet().size(); - if (totalEdges == 0) { - return 0.0; - } - - double referenceEdges = referenceEdges(vertex).size(); - return 1 - ((totalEdges - referenceEdges) / totalEdges); - } - - public Set referenceEdges(String vertex) { - return SetUtils.union(incomingEdgesOf(vertex), outgoingEdgesOf(vertex)); - } - - private double computeEventScore(String vertex, String eventType) { - double score = switch (eventType) { - case "LIFECYCLE" -> outgoingEntityVertexOf(vertex).size() > 0 ? 
1.0 : 0.0; - case "ACTIVITY", "AGGREGATED" -> computeNonLifecycleScore(vertex); - default -> 0.0; - }; - return score; - } - - private double computeNonLifecycleScore(String vertex) { - double totalVertex = getAllEntityVertex().size(); - if (totalVertex == 0) { - return 0.0; - } - Set referenceVertex = - outgoingEntityVertexOf(vertex).stream().map(v -> outgoingEntityVertexOf(v.name())).flatMap(Collection::stream) - .collect(Collectors.toSet()); - Set outgoingVertex = outgoingEntityVertexOf(vertex); - double vertexCount = SetUtils.union(referenceVertex, outgoingVertex).size(); - - return 1 - ((totalVertex - vertexCount) / totalVertex); - } - - public Schema getSchema(String vertex) - throws SchemaNotFoundException { - return findVertex(vertex).orElseThrow( - () -> new SchemaNotFoundException("Vertex " + vertex + " Not found in the graph")); - } - - public Optional findVertex(String vertex) { - if (StringUtils.isBlank(vertex)) { - return Optional.empty(); - } - if (this.schemaMap.containsKey(vertex)) { - return Optional.of(this.schemaMap.get(vertex)); - } - return Optional.empty(); - } - - private double roundUp(double value) { - return new BigDecimal(value, new MathContext(3)).doubleValue(); - } -} diff --git a/src/main/java/org/schemata/json/Json.java b/src/main/java/org/schemata/json/Json.java deleted file mode 100644 index cdd4d0c..0000000 --- a/src/main/java/org/schemata/json/Json.java +++ /dev/null @@ -1,43 +0,0 @@ -package org.schemata.json; - -import com.google.gson.JsonArray; -import com.google.gson.JsonObject; -import java.util.Locale; -import java.util.Optional; -import org.apache.commons.lang3.BooleanUtils; -import org.apache.commons.lang3.math.NumberUtils; - - -public final class Json { - - public static Boolean containsField(JsonObject obj, String element) { - return obj.has(element) && !obj.get(element).isJsonNull(); - } - - public static String getAsString(JsonObject obj, String element) { - return getAsStringCaseSensitive(obj, element).toLowerCase(); 
- } - - public static String getAsStringCaseSensitive(JsonObject obj, String element) { - return containsField(obj, element) ? obj.get(element).getAsString() : ""; - } - - public static Long getAsLong(JsonObject obj, String element) { - return NumberUtils.isParsable(getAsString(obj, element)) ? - obj.get(element).getAsLong() : Long.MIN_VALUE; - } - - public static boolean getAsBoolean(JsonObject obj, String element) { - return BooleanUtils.toBoolean(getAsString(obj, element)); - } - - public static Optional getAsJsonObject(JsonObject obj, String element) { - return containsField(obj, element) && obj.isJsonObject() ? - Optional.of(obj.get(element).getAsJsonObject()) : Optional.empty(); - } - - public static Optional getAsJsonArray(JsonObject obj, String element) { - return containsField(obj, element) ? - Optional.of(obj.getAsJsonArray(element)) : Optional.empty(); - } -} diff --git a/src/main/java/org/schemata/printer/Console.java b/src/main/java/org/schemata/printer/Console.java deleted file mode 100644 index 7bae39b..0000000 --- a/src/main/java/org/schemata/printer/Console.java +++ /dev/null @@ -1,16 +0,0 @@ -package org.schemata.printer; - -public class Console { - - public static final String TEXT_RED = "\033[0;31m"; - public static final String TEXT_GREEN = "\033[0;32m"; - public static final String TEXT_RESET = "\u001B[0m"; - - public static void printSuccess(String message) { - System.out.println(TEXT_GREEN + message + TEXT_RESET); - } - - public static void printError(String message) { - System.out.println(TEXT_RED + message + TEXT_RESET); - } -} diff --git a/src/main/java/org/schemata/provider/SchemaParser.java b/src/main/java/org/schemata/provider/SchemaParser.java deleted file mode 100644 index 4257390..0000000 --- a/src/main/java/org/schemata/provider/SchemaParser.java +++ /dev/null @@ -1,10 +0,0 @@ -package org.schemata.provider; - -import java.util.List; -import org.schemata.domain.Schema; - - -public interface SchemaParser { - - List 
getSchemaList(String path); -} diff --git a/src/main/java/org/schemata/provider/avro/AvroSchemaCompatibilityChecker.java b/src/main/java/org/schemata/provider/avro/AvroSchemaCompatibilityChecker.java deleted file mode 100644 index f49dc88..0000000 --- a/src/main/java/org/schemata/provider/avro/AvroSchemaCompatibilityChecker.java +++ /dev/null @@ -1,14 +0,0 @@ -package org.schemata.provider.avro; - -import org.schemata.compatibility.Result; -import org.schemata.compatibility.SchemaCompatibilityChecker; - -public class AvroSchemaCompatibilityChecker implements SchemaCompatibilityChecker { - - // TODO: Implement this method - - @Override - public Result check(String baseSchemaPath, String changeSchemaPath) { - return null; - } -} diff --git a/src/main/java/org/schemata/provider/avro/AvroSchemaParser.java b/src/main/java/org/schemata/provider/avro/AvroSchemaParser.java deleted file mode 100644 index 612c73c..0000000 --- a/src/main/java/org/schemata/provider/avro/AvroSchemaParser.java +++ /dev/null @@ -1,123 +0,0 @@ -package org.schemata.provider.avro; - -import java.io.File; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.stream.Stream; -import org.schemata.domain.EventType; -import org.schemata.domain.Field; -import org.schemata.domain.Schema; -import org.schemata.domain.SchemaType; -import org.schemata.exception.SchemaParserException; -import org.schemata.provider.SchemaParser; - - -public class AvroSchemaParser implements SchemaParser { - - static final Map PRIMITIVES = new HashMap<>(); - - static { - PRIMITIVES.put("string", org.apache.avro.Schema.Type.STRING); - PRIMITIVES.put("bytes", org.apache.avro.Schema.Type.BYTES); - PRIMITIVES.put("int", org.apache.avro.Schema.Type.INT); - PRIMITIVES.put("long", org.apache.avro.Schema.Type.LONG); - PRIMITIVES.put("float", 
org.apache.avro.Schema.Type.FLOAT); - PRIMITIVES.put("double", org.apache.avro.Schema.Type.DOUBLE); - PRIMITIVES.put("boolean", org.apache.avro.Schema.Type.BOOLEAN); - PRIMITIVES.put("null", org.apache.avro.Schema.Type.NULL); - } - - @Override - public List getSchemaList(String path) { - try { - var schemaFileList = listAvroSchemaFiles(path); - return schemaFileList.stream().map(s -> { - try { - return buildSchema(s); - } catch (IOException e) { - throw new SchemaParserException("Error while parsing Avro schema", e); - } - }).toList(); - } catch (IOException e) { - throw new SchemaParserException("Error while parsing Avro schema", e); - } - } - - public List listAvroSchemaFiles(String path) - throws IOException { - try (Stream walk = Files.walk(Paths.get(path))) { - return walk - .filter(p -> !Files.isDirectory(p)) - .map(p -> p.toString().toLowerCase()) - .filter(f -> f.endsWith("avsc")) - .toList(); - } - } - - public Schema buildSchema(String path) - throws IOException { - var avroSchema = compileAvroSchema(path); - List fields = new ArrayList<>(); - var avroFields = avroSchema.getFields(); - for (org.apache.avro.Schema.Field avroField : avroFields) { - fields.add(parseField(avroSchema.getFullName(), avroField)); - } - return parseSchema(avroSchema, fields); - } - - public org.apache.avro.Schema compileAvroSchema(String path) - throws IOException { - return new org.apache.avro.Schema.Parser().parse(new File(path)); - } - - public Schema parseSchema(org.apache.avro.Schema schema, List fields) { - Schema.Builder builder = new Schema.Builder(schema.getFullName(), fields); - builder.description(schema.getProp(Schema.Prop.DESC)); - builder.comment(schema.getProp(Schema.Prop.COMMENT)); - builder.seeAlso(schema.getProp(Schema.Prop.SEE_ALSO)); - builder.reference(schema.getProp(Schema.Prop.REFERENCE)); - builder.owner(schema.getProp(Schema.Prop.OWNER)); - builder.domain(schema.getProp(Schema.Prop.DOMAIN)); - builder.status(schema.getProp(Schema.Prop.STATUS)); - 
builder.schemaType(handleEmptySchemaType(schema)); - builder.eventType(handleEmptyEventType(schema)); - builder.teamChannel(schema.getProp(Schema.Prop.TEAM_CHANNEL)); - builder.alertChannel(schema.getProp(Schema.Prop.ALERT_CHANNEL)); - builder.complianceOwner(schema.getProp(Schema.Prop.COMPLIANCE_OWNER)); - builder.complianceChannel(schema.getProp(Schema.Prop.COMPLIANCE_CHANNEL)); - return builder.build(); - } - - public Field parseField(String schemaName, org.apache.avro.Schema.Field avroField) { - String dataType = avroField.schema().getType().getName(); - var builder = new Field.Builder(schemaName, avroField.name(), dataType, isPrimitiveType(dataType)); - builder.description(avroField.getProp(Field.Prop.DESC)); - builder.comment(avroField.getProp(Field.Prop.COMMENT)); - builder.seeAlso(avroField.getProp(Field.Prop.SEE_ALSO)); - builder.reference(avroField.getProp(Field.Prop.REFERENCE)); - builder.isClassified(Boolean.parseBoolean(avroField.getProp(Field.Prop.IS_CLASSIFIED))); - builder.primaryKey(Boolean.parseBoolean(avroField.getProp(Field.Prop.IS_PRIMARY_KEY))); - builder.productType(avroField.getProp(Field.Prop.PRODUCT_TYPE)); - return builder.build(); - } - - private String handleEmptyEventType(org.apache.avro.Schema schema) { - return schema.getProp(Schema.Prop.EVENT_TYPE) == null ? EventType.NONE.name() - : schema.getProp(Schema.Prop.EVENT_TYPE); - } - - private String handleEmptySchemaType(org.apache.avro.Schema schema) { - return schema.getProp(Schema.Prop.SCHEMA_TYPE) == null ? 
SchemaType.UNKNOWN.name() - : schema.getProp(Schema.Prop.SCHEMA_TYPE); - } - - private boolean isPrimitiveType(String name) { - return PRIMITIVES.containsKey(name); - } -} diff --git a/src/main/java/org/schemata/provider/dbt/DbtCatalogMetadata.java b/src/main/java/org/schemata/provider/dbt/DbtCatalogMetadata.java deleted file mode 100644 index a60ddc5..0000000 --- a/src/main/java/org/schemata/provider/dbt/DbtCatalogMetadata.java +++ /dev/null @@ -1,19 +0,0 @@ -package org.schemata.provider.dbt; - -import java.util.List; - - -public interface DbtCatalogMetadata { - record Table(String namespace, String modelName, String dbtModelFullName, String comment, String owner) { - } - - record Column(String name, String dataType, long index, String comment) { - } - - record Catalog(Table table, List column) { - } - - String TARGET_PATH = "target"; - String MANIFEST_FILE = "manifest.json"; - String CATALOG_FILE = "catalog.json"; -} diff --git a/src/main/java/org/schemata/provider/dbt/DbtCatalogParser.java b/src/main/java/org/schemata/provider/dbt/DbtCatalogParser.java deleted file mode 100644 index ea6e17d..0000000 --- a/src/main/java/org/schemata/provider/dbt/DbtCatalogParser.java +++ /dev/null @@ -1,108 +0,0 @@ -package org.schemata.provider.dbt; - -import com.google.gson.JsonElement; -import com.google.gson.JsonObject; -import com.google.gson.JsonParser; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Paths; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import org.schemata.exception.SchemaParserException; -import org.schemata.json.Json; - -import static org.schemata.provider.dbt.DbtCatalogMetadata.CATALOG_FILE; - - -public class DbtCatalogParser { - - public Map parse(String path) { - Map catalogMap = new HashMap<>(); - - var jsonParser = getCatalogJsonParser(path); - var nodes = getNodes(jsonParser); - 
nodes.entrySet().forEach(entry -> { - var table = extractTable(entry.getKey(), entry.getValue()); - var columnList = extractColumn(entry.getValue()); - catalogMap.put(entry.getKey(), new DbtCatalogMetadata.Catalog(table, columnList)); - }); - return catalogMap; - } - - public JsonElement getCatalogJsonParser(String path) { - try (Stream lines = Files.lines(Paths.get(path, CATALOG_FILE))) { - String data = lines.collect(Collectors.joining("\n")); - return JsonParser.parseString(data); - } catch (IOException e) { - throw new SchemaParserException("Error while parsing getCatalogJsonParser:", e); - } - } - - public List extractColumn(JsonElement element) { - var columns = getColumnJsonElement(element); - List fieldList = new ArrayList<>(); - columns.entrySet().forEach(column -> { - var columnObj = column.getValue().getAsJsonObject(); - fieldList.add(new DbtCatalogMetadata.Column(getColumnName(column), getDataType(columnObj), - getColumnIndex(columnObj), getColumnComment(columnObj))); - }); - return fieldList; - } - - public JsonObject getColumnJsonElement(JsonElement element) { - return Json.getAsJsonObject(element.getAsJsonObject(), "columns") - .orElseThrow(() -> new SchemaParserException("Error parsing dbt catalog: columns is empty")); - } - - public String getColumnComment(JsonObject columnObj) { - return Json.getAsString(columnObj, "comment"); - } - - public Long getColumnIndex(JsonObject columnObj) { - return Json.getAsLong(columnObj, "index"); - } - - public String getDataType(JsonObject columnObj) { - return Json.getAsString(columnObj, "type"); - } - - public String getColumnName(Map.Entry column) { - return column.getKey().toLowerCase(); - } - - public DbtCatalogMetadata.Table extractTable(String modelName, JsonElement element) { - var metadata = getMetadata(element); - return new DbtCatalogMetadata.Table(getNamespace(metadata), getModelName(metadata), modelName, - getComment(metadata), getOwner(metadata)); - } - - public JsonObject getMetadata(JsonElement 
element) { - return Json.getAsJsonObject(element.getAsJsonObject(), "metadata") - .orElseThrow(() -> new SchemaParserException("Error parsing dbt catalog: Metadata Object is Empty")); - } - - public JsonObject getNodes(JsonElement jsonParser) { - return Json.getAsJsonObject(jsonParser.getAsJsonObject(), "nodes") - .orElseThrow(() -> new SchemaParserException("Error parsing dbt catalog: Nodes is empty")); - } - - public String getOwner(JsonObject metadata) { - return Json.getAsString(metadata, "owner"); - } - - public String getComment(JsonObject metadata) { - return Json.getAsString(metadata, "comment"); - } - - public String getModelName(JsonObject metadata) { - return Json.getAsString(metadata, "name"); - } - - public String getNamespace(JsonObject metadata) { - return Json.getAsString(metadata, "schema"); - } -} diff --git a/src/main/java/org/schemata/provider/dbt/DbtManifestParser.java b/src/main/java/org/schemata/provider/dbt/DbtManifestParser.java deleted file mode 100644 index 2e52b1d..0000000 --- a/src/main/java/org/schemata/provider/dbt/DbtManifestParser.java +++ /dev/null @@ -1,159 +0,0 @@ -package org.schemata.provider.dbt; - -import com.google.gson.JsonElement; -import com.google.gson.JsonObject; -import com.google.gson.JsonParser; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Paths; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import org.schemata.domain.Depends; -import org.schemata.domain.Field; -import org.schemata.domain.Link; -import org.schemata.domain.Schema; -import org.schemata.exception.SchemaParserException; -import org.schemata.json.Json; - -import static org.schemata.provider.dbt.DbtCatalogMetadata.MANIFEST_FILE; - - -public class DbtManifestParser { - - public List parse(Map catalog, String path) { - if (catalog == null) { - return 
Collections.emptyList(); - } - - List schemaList = new ArrayList<>(); - - try { - var jsonParser = getManifestJsonParser(path); - var nodes = getNodes(jsonParser); - - catalog.forEach((key, value) -> { - var node = nodes.get(key); - if (node != null && isModel(node.getAsJsonObject())) { - var nodeObj = node.getAsJsonObject(); - if (getConfig(nodeObj).isPresent()) { - - var fields = extractFields(nodeObj, value); - var schema = new Schema.Builder(key, fields); - - var config = getConfig(nodeObj).get(); - - var builder = schema.domain(Json.getAsString(config, Schema.Prop.DOMAIN)) - .schemaType("model") // We parse only the dbt model type. so all dbt we set it as model - .modelType(Json.getAsString(config, Schema.Prop.MODEL_TYPE)) - .description(Json.getAsString(nodeObj, Schema.Prop.DESCRIPTION)) - .reference(Json.getAsString(config, Schema.Prop.REFERENCE)) - .seeAlso(Json.getAsString(config, Schema.Prop.SEE_ALSO)) - .comment(value.table().comment()) - .owner(value.table().owner()) - .status(Json.getAsString(config, Schema.Prop.STATUS)) - .alertChannel(Json.getAsString(config, Schema.Prop.ALERT_CHANNEL)) - .teamChannel(Json.getAsString(config, Schema.Prop.TEAM_CHANNEL)) - .complianceChannel(Json.getAsString(config, Schema.Prop.COMPLIANCE_CHANNEL)) - .complianceOwner(Json.getAsString(config, Schema.Prop.COMPLIANCE_OWNER)); - schemaList.add(builder.build()); - } - } - }); - } catch (IOException e) { - throw new SchemaParserException("Error while parsing DbtManifestParser", e); - } - return schemaList; - } - - public JsonObject getNodes(JsonElement jsonParser) { - return Json.getAsJsonObject(jsonParser.getAsJsonObject(), "nodes") - .orElseThrow(() -> new SchemaParserException("Error parsing DbtManifestParser: Nodes is empty")); - } - - public JsonElement getManifestJsonParser(String path) - throws IOException { - Stream lines = Files.lines(Paths.get(path, MANIFEST_FILE)); - String data = lines.collect(Collectors.joining("\n")); - return JsonParser.parseString(data); - } 
- - public String getResourceType(JsonObject node) { - return Json.getAsString(node, "resource_type"); - } - - public boolean isModel(JsonObject node) { - return getResourceType(node).equalsIgnoreCase("model"); - } - - public Optional getConfig(JsonObject node) { - return Json.getAsJsonObject(node, "config"); - } - - public List extractFields(JsonObject nodeObj, DbtCatalogMetadata.Catalog catalog) { - var columns = Json.getAsJsonObject(nodeObj, "columns"); - if (columns.isEmpty()) { - return extractFieldsWithManifestMetadataNotDetected(catalog); - } - return enrichColumnMetadataWithManifest(catalog, columns.get()); - } - - private List enrichColumnMetadataWithManifest(DbtCatalogMetadata.Catalog catalog, JsonObject columns) { - List fieldList = new ArrayList<>(); - catalog.column().forEach(column -> { - var columnObj = columns.getAsJsonObject(column.name()); - var builder = - new Field.Builder(catalog.table().modelName(), column.name(), column.dataType(), true); - - if (isMetaObjExist(columnObj)) { - var metaObj = columnObj.getAsJsonObject("meta"); - builder.description(Json.getAsString(metaObj, Field.Prop.DESCRIPTION)) - .link(getFieldLink(metaObj)) - .depends(getDepends(metaObj)) - .primaryKey(Json.getAsBoolean(metaObj, Field.Prop.IS_PRIMARY_KEY)); - } - fieldList.add(builder.build()); - }); - return fieldList; - } - - public boolean isMetaObjExist(JsonObject columnObj) { - return columnObj != null && columnObj.getAsJsonObject("meta") != null; - } - - private Link getFieldLink(JsonObject obj) { - var linkObj = Json.getAsJsonObject(obj, Field.Prop.LINK); - - if (linkObj.isEmpty()) { - return null; - } - var linkPropObj = linkObj.get(); - return new Link(Json.getAsString(linkPropObj, Field.Prop.MODEL), Json.getAsString(linkPropObj, Field.Prop.COLUMN)); - } - - private List getDepends(JsonObject metaObj) { - List dependsList = new ArrayList<>(); - var obj = Json.getAsJsonArray(metaObj, Field.Prop.DEPENDS); - if (obj.isEmpty()) { - return dependsList; - } - for 
(JsonElement depends : obj.get()) { - dependsList.add(new Depends(Json.getAsString(depends.getAsJsonObject(), Field.Prop.MODEL), - Json.getAsString(depends.getAsJsonObject(), Field.Prop.COLUMN))); - } - return dependsList; - } - - public List extractFieldsWithManifestMetadataNotDetected(DbtCatalogMetadata.Catalog catalog) { - List fieldList = new ArrayList<>(); - catalog.column().forEach(column -> { - fieldList.add(new Field.Builder(catalog.table().modelName(), column.name(), column.dataType(), true) - .build()); - }); - return fieldList; - } -} diff --git a/src/main/java/org/schemata/provider/dbt/DbtSchemaCompatibilityChecker.java b/src/main/java/org/schemata/provider/dbt/DbtSchemaCompatibilityChecker.java deleted file mode 100644 index 82fb420..0000000 --- a/src/main/java/org/schemata/provider/dbt/DbtSchemaCompatibilityChecker.java +++ /dev/null @@ -1,90 +0,0 @@ -package org.schemata.provider.dbt; - -import org.schemata.compatibility.Result; -import org.schemata.compatibility.SchemaCompatibilityChecker; -import org.schemata.compatibility.Summary; -import org.schemata.domain.Schema; - - -import java.util.*; - -/** - * Compare the base schema with the change schema and return the incompatible changes. 
- */ -public class DbtSchemaCompatibilityChecker implements SchemaCompatibilityChecker { - @Override - public Result check(String baseSchemaPath, String changeSchemaPath) { - var baseSchema = new DbtSchemaParser().getSchemaList(baseSchemaPath); - var changeSchema = new DbtSchemaParser().getSchemaList(changeSchemaPath); - var summaries = compare(buildSchemaMap(baseSchema), buildSchemaMap(changeSchema)); - return new Result(summaries.size() == 0, summaries); - } - - private Map buildSchemaMap(List schemaList) { - Map schemaMap = new HashMap<>(); - for (var schema : schemaList) { - for (var field : schema.fieldList()) { - schemaMap.put(new SchemaKey(schema.name(), field.name()), new SchemaValue(field.dataType())); - } - } - return schemaMap; - } - - /** - * The current data type validation is a 'strict type' validation. It doesn't support `type boxing` support. - * - * @param base base schema value - * @param change change schema value - * @return Summary of Incompatible changes - */ - private Set compare(Map base, Map change) { - - for (var entry : change.entrySet()) { - if (base.containsKey(entry.getKey()) - && isDataTypeCompatible(base.get(entry.getKey()), entry.getValue())) { - base.remove(entry.getKey()); - } - } - if (base.size() > 0) { - return getIncompatibleSchemaChanges(base); - } - return Set.of(); // return empty set - } - - /** - * The current data type validation is a 'strict type' validation. It doesn't support `type boxing` support. - * We intend to enrich this compatibility check in the future. For example, `int32` and `int64` are compatible. - * - * @param baseValue base schema value - * @param changeValue change schema value - * @return true if compatible, false otherwise - */ - private boolean isDataTypeCompatible(SchemaValue baseValue, SchemaValue changeValue) { - return baseValue.type.equalsIgnoreCase(changeValue.type); - } - - /** - * Loop through the base schema map and build a set of incompatible schema changes. 
- * - * @param base base schema map - * @return set of incompatible schema changes - */ - private static Set getIncompatibleSchemaChanges(Map base) { - Set summaries = new HashSet<>(); - for (var entry : base.entrySet()) { - var key = entry.getKey(); - var value = entry.getValue(); - summaries.add(new Summary.Builder().fieldName(key.fieldName) - .schemaName(key.table) - .fieldType(value.type).build()); - } - return summaries; - } - - - record SchemaKey(String table, String fieldName) { - } - - record SchemaValue(String type) { - } -} diff --git a/src/main/java/org/schemata/provider/dbt/DbtSchemaParser.java b/src/main/java/org/schemata/provider/dbt/DbtSchemaParser.java deleted file mode 100644 index faeb33b..0000000 --- a/src/main/java/org/schemata/provider/dbt/DbtSchemaParser.java +++ /dev/null @@ -1,24 +0,0 @@ -package org.schemata.provider.dbt; - -import java.util.List; -import org.schemata.domain.Schema; -import org.schemata.provider.SchemaParser; - - -/** - * Parse the dbt generated catalog.json and manifest.json to gather metadata - * - * Steps: - * ====== - * 1. Parse the catalog.json and gather the list of models, columns and types & index - * 2. 
Parse manifest.json for each model and gather additional metadata - */ -public class DbtSchemaParser implements SchemaParser { - - @Override - public List getSchemaList(String path) { - var dbtCatalogParser = new DbtCatalogParser(); - var catalog = dbtCatalogParser.parse(path); - return new DbtManifestParser().parse(catalog, path); - } -} diff --git a/src/main/java/org/schemata/provider/protobuf/Loader.java b/src/main/java/org/schemata/provider/protobuf/Loader.java deleted file mode 100644 index 000e6e3..0000000 --- a/src/main/java/org/schemata/provider/protobuf/Loader.java +++ /dev/null @@ -1,13 +0,0 @@ -package org.schemata.provider.protobuf; - -import com.google.protobuf.Descriptors; -import com.google.protobuf.Descriptors.Descriptor; - -import java.util.List; - -/** - * Abstracts loading of Descriptor objects from various sources - */ -public interface Loader { - List loadDescriptors() throws Descriptors.DescriptorValidationException; -} diff --git a/src/main/java/org/schemata/provider/protobuf/ProtoProcessor.java b/src/main/java/org/schemata/provider/protobuf/ProtoProcessor.java deleted file mode 100644 index fcdab7a..0000000 --- a/src/main/java/org/schemata/provider/protobuf/ProtoProcessor.java +++ /dev/null @@ -1,163 +0,0 @@ -package org.schemata.provider.protobuf; - -import com.google.protobuf.Descriptors; -import com.google.protobuf.Value; -import org.schemata.domain.Constraints; -import org.schemata.domain.Field; -import org.schemata.domain.Schema; -import org.schemata.domain.Subscribers; -import org.schemata.schema.SchemataBuilder; -import org.schemata.schema.SchemataConstraintsBuilder; -import org.schemata.schema.SchemataSubscribersBuilder; - - -import java.util.*; -import java.util.stream.Collectors; - - -public class ProtoProcessor { - - private static final Set INCLUDED_PRIMITIVE_TYPES = Set.of("google.protobuf.Timestamp"); - - public List parse(List descriptors) { - return descriptors - .stream() - .filter(this::isAnnotated) - 
.map(this::parseSingleSchema) - .toList(); - } - - public Schema parseSingleSchema(Descriptors.Descriptor descriptor) { - String schemaName = descriptor.getFullName(); - // Extract all the metadata for the fieldList - var fieldList = extractFields(descriptor.getFields(), schemaName); - return extractSchema(descriptor, descriptor.getFullName(), fieldList); - } - - public Schema extractSchema(Descriptors.Descriptor descriptorType, String schema, List fieldList) { - Schema.Builder builder = new Schema.Builder(schema, fieldList); - for (Map.Entry entry : descriptorType.getOptions().getAllFields().entrySet()) { - switch (entry.getKey().getName()) { - case "message_core" -> { - SchemataBuilder.CoreMetadata coreMetadata = (SchemataBuilder.CoreMetadata) entry.getValue(); - builder.description(coreMetadata.getDescription()); - builder.comment(coreMetadata.getComment()); - builder.seeAlso(coreMetadata.getSeeAlso()); - builder.reference(coreMetadata.getReference()); - } - case "owner" -> builder.owner(Objects.toString(entry.getValue(), "")); - case "domain" -> builder.domain(Objects.toString(entry.getValue(), "")); - case "schema_type" -> builder.schemaType(entry.getValue().toString()); - case "event_type" -> builder.eventType(entry.getValue().toString()); - case "status" -> builder.status(Objects.toString(entry.getValue(), "")); - case "team_channel" -> builder.teamChannel(Objects.toString(entry.getValue(), "")); - case "alert_channel" -> builder.alertChannel(Objects.toString(entry.getValue(), "")); - case "compliance_owner" -> builder.complianceOwner(Objects.toString(entry.getValue(), "")); - case "compliance_channel" -> builder.complianceChannel(Objects.toString(entry.getValue(), "")); - case "downstream" -> builder.downstreamSubscribersList(extractDownstreamConsumers(entry)); - case "upstream" -> builder.upstreamSubscribersList(extractUpstreamConsumers(entry)); - case "constraints" -> builder.constraintsList(extractConstraintsList(entry)); - } - } - return 
builder.build(); - } - - private static List extractConstraintsList(Map.Entry entry) { - SchemataConstraintsBuilder.Constraints constraints = (SchemataConstraintsBuilder.Constraints) entry.getValue(); - List constraintsList = new ArrayList<>(); - for (var constraint : constraints.getConstraintList()) { - Map constraintMap = processConstraintConfig(constraint); - constraintsList.add(new Constraints(constraint.getName(), constraint.getDescription(), constraintMap)); - } - return constraintsList; - } - - private static Map processConstraintConfig(SchemataConstraintsBuilder.Constraint constraint) { - Map constraintMap = new HashMap<>(); - for (String key : constraint.getConfigMap().keySet()) { - var value = constraint.getConfigMap().get(key); - if (value.hasListValue()) { - var listValueConstraints = value.getListValue().getValuesList().stream() - .map(Value::getStringValue) - .collect(Collectors.toList()); - constraintMap.put(key, (Constraints.Constraint.listConstraints(key, listValueConstraints, - Constraints.DataType.LIST))); - } else { - var dataType = Constraints.DataType.fromString(value.getKindCase().name()); - constraintMap.put(key, (Constraints.Constraint.primitiveConstraints(key, getConstraintValue(value), - dataType))); - } - } - return constraintMap; - } - - private static Object getConstraintValue(Value value) { - return switch (value.getKindCase()) { - case NUMBER_VALUE -> value.getNumberValue(); - case STRING_VALUE -> value.getStringValue(); - case BOOL_VALUE -> value.getBoolValue(); - default -> null; - }; - } - - - private static List extractUpstreamConsumers(Map.Entry entry) { - SchemataSubscribersBuilder.Upstream upstream = (SchemataSubscribersBuilder.Upstream) entry.getValue(); - return upstream - .getSubscribersList() - .stream() - .map(subscribe -> new Subscribers(subscribe.getName(), subscribe.getUsage())) - .collect(Collectors.toList()); - } - - private static List extractDownstreamConsumers(Map.Entry entry) { - 
SchemataSubscribersBuilder.Downstream downstream = (SchemataSubscribersBuilder.Downstream) entry.getValue(); - return downstream - .getSubscribersList() - .stream() - .map(subscribe -> new Subscribers(subscribe.getName(), subscribe.getUsage())) - .collect(Collectors.toList()); - } - - public List extractFields(List fieldDescriptorList, String schema) { - List fields = new ArrayList<>(); - - for (Descriptors.FieldDescriptor entry : fieldDescriptorList) { - String type = entry.getType() == Descriptors.FieldDescriptor.Type.MESSAGE ? entry.getMessageType().getFullName() - : entry.getType().name(); - Field.Builder builder = new Field.Builder(schema, entry.getName(), type, isPrimitiveType(entry.getType(), type)); - - for (Map.Entry fieldEntry : entry.getOptions().getAllFields().entrySet()) { - switch (fieldEntry.getKey().getName()) { - case "field_core" -> { - SchemataBuilder.CoreMetadata coreMetadata = (SchemataBuilder.CoreMetadata) fieldEntry.getValue(); - builder.description(coreMetadata.getDescription()); - builder.comment(coreMetadata.getComment()); - builder.seeAlso(coreMetadata.getSeeAlso()); - builder.reference(coreMetadata.getReference()); - } - case "is_classified" -> builder.isClassified( - fieldEntry.getValue() != null && Boolean.parseBoolean(fieldEntry.getValue().toString())); - case "classification_level" -> - builder.classificationLevel(Objects.toString(fieldEntry.getValue(), "")); - case "product_type" -> builder.productType(Objects.toString(fieldEntry.getValue(), "")); - case "is_primary_key" -> builder.primaryKey( - fieldEntry.getValue() != null && Boolean.parseBoolean(fieldEntry.getValue().toString())); - } - } - fields.add(builder.build()); - } - - return fields; - } - - private boolean isAnnotated(Descriptors.Descriptor descriptor) { - return !descriptor.getOptions().getExtension(org.schemata.schema.SchemataBuilder.schemaType) - .equals(SchemataBuilder.SchemaType.UNKNOWN); - } - - private boolean isPrimitiveType(Descriptors.FieldDescriptor.Type type, 
String typeName) { - return type != Descriptors.FieldDescriptor.Type.MESSAGE || INCLUDED_PRIMITIVE_TYPES.contains(typeName); - } - -} diff --git a/src/main/java/org/schemata/provider/protobuf/ProtoSchemaCompatibilityChecker.java b/src/main/java/org/schemata/provider/protobuf/ProtoSchemaCompatibilityChecker.java deleted file mode 100644 index e876c00..0000000 --- a/src/main/java/org/schemata/provider/protobuf/ProtoSchemaCompatibilityChecker.java +++ /dev/null @@ -1,120 +0,0 @@ -package org.schemata.provider.protobuf; - - -import com.google.protobuf.DescriptorProtos; -import org.schemata.compatibility.Result; -import org.schemata.compatibility.SchemaCompatibilityChecker; -import org.schemata.compatibility.Summary; - -import java.io.File; -import java.io.FileInputStream; -import java.util.*; - - -public class ProtoSchemaCompatibilityChecker implements SchemaCompatibilityChecker { - - - static Set EXCLUDED_FILES = Set.of("protobuf/schemata.proto", - "protobuf/constraints.proto", "protobuf/struct.proto", "protobuf/timestamp.proto", - "protobuf/descriptor.proto", "protobuf/subscribers.proto", "protobuf/empty.proto"); - - - static Map COMPATIBLE_TYPES = Map.of("TYPE_INT32", "TYPE_INT64", "TYPE_UINT32", "TYPE_UINT64"); - - @Override - public Result check(String baseSchemaPath, String changeSchemaPath) { - var baseSchemaMap = getSchemaMap(baseSchemaPath); - var changeSchemaMap = getSchemaMap(changeSchemaPath); - var summaries = compare(baseSchemaMap, changeSchemaMap); - return new Result(summaries.size() == 0, summaries); - } - - private Set compare(Map base, Map change) { - for (var changeEntry : change.entrySet()) { - if (base.containsKey(changeEntry.getKey()) - && isDataTypeCompatible(base.get(changeEntry.getKey()), changeEntry.getValue())) { - base.remove(changeEntry.getKey()); - } - } - if (base.size() > 0) { - return getIncompatibleSchemaChanges(base); - } - return Set.of(); // return empty set - } - - private static Set getIncompatibleSchemaChanges(Map base) { - Set 
summaries = new HashSet<>(); - for (var entry : base.entrySet()) { - var key = entry.getKey(); - var value = entry.getValue(); - summaries.add(new Summary.Builder().filename(key.filename) - .schemaName(key.messageName) - .fieldName(key.fieldName) - .fieldType(value.type) - .build()); - summaries.add(new Summary(key.filename, key.messageName(), key.fieldName(), value.type())); - } - return summaries; - } - - private static boolean isDataTypeCompatible(SchemaValue baseValue, SchemaValue changeValue) { - return baseValue.type.equalsIgnoreCase(changeValue.type) || - changeValue.type.equalsIgnoreCase(COMPATIBLE_TYPES.get(baseValue.type.toUpperCase())); - } - - private Map getSchemaMap(String path) { - Map schemaValueMap = new HashMap<>(); - - try (FileInputStream stream = new FileInputStream(path)) { - ProtoFileDescriptorSetLoader loader = new ProtoFileDescriptorSetLoader(stream); - var fileDescriptorProtoMap = loader.indexFileDescriptorProtoByFilename(loader.getDescriptorSet()); - - for (var entry : fileDescriptorProtoMap.entrySet()) { - var descriptor = entry.getValue(); - String filename = getLeafDirectoryAndFileName(descriptor.getName()); - if (EXCLUDED_FILES.contains(filename)) { - continue; - } - processDescriptorMessages(schemaValueMap, descriptor, filename); - } - } catch (Exception e) { - throw new RuntimeException(e); - } - - return schemaValueMap; - } - - private static void processDescriptorMessages(Map schemaValueMap, - DescriptorProtos.FileDescriptorProto descriptor, String filename) { - for (var message : descriptor.getMessageTypeList()) { - for (var field : message.getFieldList()) { - var key = new SchemaKey(filename, message.getName(), field.getNumber(), field.getName()); - var value = new SchemaValue(field.getType().name()); - schemaValueMap.put(key, value); - } - } - } - - - private static String getLeafDirectoryAndFileName(String filePath) { - File file = new File(filePath); - String parent = file.getParent(); - String fileName = file.getName(); - if 
(parent != null && parent.trim().length() > 0) { - var dirSplit = parent.split("[\\\\/]"); - if (dirSplit.length > 0) { - return dirSplit[dirSplit.length - 1] + File.separator + fileName; - } - } - return fileName; - } - - - record SchemaKey(String filename, String messageName, int fieldNumber, String fieldName) { - } - - record SchemaValue(String type) { - } - - -} diff --git a/src/main/java/org/schemata/provider/protobuf/ProtoSchemaParser.java b/src/main/java/org/schemata/provider/protobuf/ProtoSchemaParser.java deleted file mode 100644 index 49503cb..0000000 --- a/src/main/java/org/schemata/provider/protobuf/ProtoSchemaParser.java +++ /dev/null @@ -1,34 +0,0 @@ -package org.schemata.provider.protobuf; - -import com.google.protobuf.Descriptors; -import java.io.FileInputStream; -import java.io.IOException; -import java.util.List; -import org.schemata.domain.Schema; -import org.schemata.exception.SchemaParserException; -import org.schemata.provider.SchemaParser; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - - -/** - * Parse .desc proto descriptor file - */ -public class ProtoSchemaParser implements SchemaParser { - - private static final Logger logger = LoggerFactory.getLogger(ProtoSchemaParser.class); - - @Override - public List getSchemaList(String path) - throws SchemaParserException { - try { - var stream = new FileInputStream(path); - var loader = new ProtoFileDescriptorSetLoader(stream); - var descriptors = loader.loadDescriptors(); - return new ProtoProcessor().parse(descriptors); - } catch (IOException | Descriptors.DescriptorValidationException e) { - logger.error("Error finding file:", e); - throw new SchemaParserException("Error parsing Proto Schema", e); - } - } -} diff --git a/src/main/java/org/schemata/validate/FieldTrigger.java b/src/main/java/org/schemata/validate/FieldTrigger.java deleted file mode 100644 index 84ce6b1..0000000 --- a/src/main/java/org/schemata/validate/FieldTrigger.java +++ /dev/null @@ -1,13 +0,0 @@ -package 
org.schemata.validate; - -import java.util.function.Predicate; -import org.apache.commons.lang3.StringUtils; -import org.schemata.domain.Field; - - -public interface FieldTrigger extends Predicate { - - FieldTrigger isDescriptionEmpty = field -> StringUtils.isBlank(field.description()); - FieldTrigger isClassificationLevelEmpty = - field -> field.isClassified() && StringUtils.isBlank(field.classificationLevel()); -} diff --git a/src/main/java/org/schemata/validate/FieldValidator.java b/src/main/java/org/schemata/validate/FieldValidator.java deleted file mode 100644 index 4e717cc..0000000 --- a/src/main/java/org/schemata/validate/FieldValidator.java +++ /dev/null @@ -1,29 +0,0 @@ -package org.schemata.validate; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.function.Function; -import org.schemata.domain.Field; - -import static org.schemata.validate.FieldTrigger.isClassificationLevelEmpty; -import static org.schemata.validate.FieldTrigger.isDescriptionEmpty; - - -public class FieldValidator implements Function, Validator { - @Override - public Result apply(Field schema) { - - List errors = new ArrayList<>(); - for (Map.Entry ruleTrigger : fieldValidatorMap().entrySet()) { - var result = test(ruleTrigger.getKey(), ruleTrigger.getValue(), schema); - result.ifPresent(errors::add); - } - return errors.size() == 0 ? 
new Result(Status.SUCCESS, errors) : new Result(Status.ERROR, errors); - } - - private Map fieldValidatorMap() { - return Map.of(Rules.FIELD_DESCRIPTION_EMPTY, isDescriptionEmpty, Rules.FIELD_CLASSIFICATION_EMPTY, - isClassificationLevelEmpty); - } -} diff --git a/src/main/java/org/schemata/validate/Result.java b/src/main/java/org/schemata/validate/Result.java deleted file mode 100644 index 0d8c25c..0000000 --- a/src/main/java/org/schemata/validate/Result.java +++ /dev/null @@ -1,7 +0,0 @@ -package org.schemata.validate; - -import java.util.List; - - -public record Result(Status status, List errorMessages) { -} diff --git a/src/main/java/org/schemata/validate/SchemaTrigger.java b/src/main/java/org/schemata/validate/SchemaTrigger.java deleted file mode 100644 index 562f8d2..0000000 --- a/src/main/java/org/schemata/validate/SchemaTrigger.java +++ /dev/null @@ -1,20 +0,0 @@ -package org.schemata.validate; - -import java.util.function.Predicate; -import org.apache.commons.lang3.StringUtils; -import org.schemata.domain.Field; -import org.schemata.domain.Schema; -import org.schemata.domain.SchemaType; - - -public interface SchemaTrigger extends Predicate { - - SchemaTrigger isDescriptionEmpty = schema -> StringUtils.isBlank(schema.description()); - - SchemaTrigger isOwnerEmpty = schema -> StringUtils.isBlank(schema.owner()); - - SchemaTrigger isDomainEmpty = schema -> StringUtils.isBlank(schema.domain()); - - SchemaTrigger isInValidType = schema -> SchemaType.UNKNOWN.name().equalsIgnoreCase(schema.type()); - -} diff --git a/src/main/java/org/schemata/validate/SchemaValidator.java b/src/main/java/org/schemata/validate/SchemaValidator.java deleted file mode 100644 index ee5cc89..0000000 --- a/src/main/java/org/schemata/validate/SchemaValidator.java +++ /dev/null @@ -1,30 +0,0 @@ -package org.schemata.validate; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.function.Function; -import org.schemata.domain.Schema; - -import 
static org.schemata.validate.SchemaTrigger.*; - - -public class SchemaValidator implements Function, Validator { - - @Override - public Result apply(Schema schema) { - - List errors = new ArrayList<>(); - for (Map.Entry ruleTrigger : schemaValidatorMap().entrySet()) { - var result = test(ruleTrigger.getKey(), ruleTrigger.getValue(), schema); - result.ifPresent(errors::add); - } - - return errors.size() == 0 ? new Result(Status.SUCCESS, errors) : new Result(Status.ERROR, errors); - } - - private Map schemaValidatorMap() { - return Map.of(Rules.SCHEMA_DESCRIPTION_EMPTY, isDescriptionEmpty, Rules.SCHEMA_OWNER_EMPTY, isOwnerEmpty, - Rules.SCHEMA_DOMAIN_EMPTY, isDomainEmpty, Rules.SCHEMA_UNKNOWN_TYPE, isInValidType); - } -} diff --git a/src/main/java/org/schemata/validate/Status.java b/src/main/java/org/schemata/validate/Status.java deleted file mode 100644 index 397635e..0000000 --- a/src/main/java/org/schemata/validate/Status.java +++ /dev/null @@ -1,5 +0,0 @@ -package org.schemata.validate; - -public enum Status { - SUCCESS, ERROR -} diff --git a/src/main/resources/avro/brand.avsc b/src/main/resources/avro/brand.avsc deleted file mode 100644 index 48bc2f4..0000000 --- a/src/main/resources/avro/brand.avsc +++ /dev/null @@ -1,9 +0,0 @@ -{"namespace": "org.schemata.schema", - "type": "record", - "name": "Brand", - "fields": [ - {"name": "id", "type": "int"}, - {"name": "name", "type": ["string", "null"]}, - {"name": "is_active", "type": "boolean"} - ] -} \ No newline at end of file diff --git a/src/main/resources/logback.xml b/src/main/resources/logback.xml deleted file mode 100644 index b4e56d4..0000000 --- a/src/main/resources/logback.xml +++ /dev/null @@ -1,24 +0,0 @@ - - # Console appender - - - # Pattern of log message for console appender - %d{yyyy-MM-dd HH:mm:ss} %-5p %m%n - - - - # File appender - - - - - - - \ No newline at end of file diff --git a/src/main/resources/schema/brand.proto b/src/main/resources/schema/brand.proto deleted file mode 100644 index 
2e34b37..0000000 --- a/src/main/resources/schema/brand.proto +++ /dev/null @@ -1,49 +0,0 @@ -syntax = "proto3"; -package org.schemata.schema; - -import "schemata/protobuf/schemata.proto"; - -option java_package = "org.protocol.schema"; -option java_outer_classname = "BrandBuilder"; - - -message Brand { - - option(message_core).description = "This is the description of the Brand table"; - option(message_core).comment = "The comment added after thought"; - option(message_core).see_also = "db.brand MySQL table"; - option(owner) = "Platform"; - option(domain) = "Core"; - option(schema_type) = ENTITY; - option(team_channel) = "#team-platform"; - option(alert_channel) = "#alerts-platform"; - - int32 id = 1 - [(field_core).description = "Unique identifier for Brand", (is_primary_key) = true]; - - string name = 2 - [(field_core).description = "Name of the Brand"]; - - bool is_active = 3 - [(field_core).description = "define the active status of the Brand. `true` == active; `false` = inactive`", (field_core).comment = "should refactor to non-binary status"]; - -} - -message BrandEvent { - option(message_core).description = "This is the description of the brand activity table"; - option(owner) = "Platform"; - option(domain) = "Core"; - option(schema_type) = EVENT; - option(event_type) = LIFECYCLE; - option(team_channel) = "#team-platform"; - option(alert_channel) = "#alerts-platform"; - - Brand previous_brand_state = 1 - [(field_core).description = "Previous version of the Brand entity before the mutation"]; - - Brand current_brand_state = 2 - [(field_core).description = "Current version of the Brand entity before the mutation"]; - - ActivityType activity_type = 3 - [(field_core).description = "Lifecycle event type for the Brand table"]; -} \ No newline at end of file diff --git a/src/main/resources/schema/campaign.proto b/src/main/resources/schema/campaign.proto deleted file mode 100644 index 2c38ac7..0000000 --- a/src/main/resources/schema/campaign.proto +++ /dev/null @@ 
-1,90 +0,0 @@ -syntax = "proto3"; -package org.schemata.schema; - -import "schemata/protobuf/schemata.proto"; -import "product.proto"; -import "google/protobuf/timestamp.proto"; -import "category.proto"; - -option java_package = "org.protocol.schema"; -option java_outer_classname = "CampaignBuilder"; - - -enum CampaignOrigin { - EMAIL = 0; - SOCIAL_MEDIA = 1; -} - -message Campaign { - - option(message_core).description = "This is the description of the Campaign table"; - option(message_core).comment = "The comment added after thought"; - option(message_core).see_also = "db.campaign MySQL table"; - option(owner) = "Marketing"; - option(domain) = "Growth"; - option(schema_type) = ENTITY; - option(team_channel) = "#team-growth"; - option(alert_channel) = "#alerts-growth"; - - int32 id = 1 - [(field_core).description = "Unique identifier for Campaign", (is_primary_key) = true]; - - string name = 2 - [(field_core).description = "Name of the Campaign"]; - - bool is_active = 3 - [(field_core).description = "define the active status of the Campaign. 
`true` == active; `false` = inactive`", (field_core).comment = "should refactor to non-binary status"]; - -} - -message CampaignEvent { - option(message_core).description = "This is the description of the Campaign activity table"; - option(owner) = "Marketing"; - option(domain) = "Growth"; - option(schema_type) = EVENT; - option(event_type) = LIFECYCLE; - option(team_channel) = "#team-growth"; - option(alert_channel) = "#alerts-growth"; - - Campaign previous_campaign_state = 1 - [(field_core).description = "Previous version of the Campaign entity before the mutation"]; - - Campaign current_campaign_state = 2 - [(field_core).description = "Current version of the Campaign entity before the mutation"]; - - ActivityType activity_type = 3 - [(field_core).description = "Lifecycle event type for the Campaign table"]; -} - -message CampaignCategoryTrackerEvent { - option(message_core).description = "This is the description of the Campaign activity table"; - option(owner) = "Marketing"; - option(domain) = "Growth"; - option(schema_type) = EVENT; - option(event_type) = ACTIVITY; - option(team_channel) = "#team-growth"; - option(alert_channel) = "#alerts-growth"; - - Campaign campaign = 1 [(field_core).description = "Campaign entity"]; - Category category = 2 [(field_core).description = "Category of the targeted campaign"]; - CampaignOrigin origin = 3 [(field_core).description = "origin source of the campaign"] ; - google.protobuf.Timestamp timestamp = 4 [(field_core).description = "Timestamp of the activity"]; - -} - -message CampaignProductTrackerEvent { - - option(message_core).description = "This is the description of the Campaign activity table"; - option(owner) = "Marketing"; - option(domain) = "Growth"; - option(schema_type) = EVENT; - option(event_type) = ACTIVITY; - option(team_channel) = "#team-growth"; - option(alert_channel) = "#alerts-growth"; - - Campaign campaign = 1 [(field_core).description = "Campaign entity"]; - Product product = 2 [(field_core).description 
= "Product of the targeted campaign"]; - CampaignOrigin origin = 3 [(field_core).description = "origin source of the campaign"] ; - google.protobuf.Timestamp timestamp = 4 [(field_core).description = "Timestamp of the activity"]; - -} \ No newline at end of file diff --git a/src/main/resources/schema/category.proto b/src/main/resources/schema/category.proto deleted file mode 100644 index 515611f..0000000 --- a/src/main/resources/schema/category.proto +++ /dev/null @@ -1,49 +0,0 @@ -syntax = "proto3"; -package org.schemata.schema; - -import "schemata/protobuf/schemata.proto"; - -option java_package = "org.protocol.schema"; -option java_outer_classname = "CategoryBuilder"; - - -message Category { - - option(message_core).description = "This is the description of the Category table"; - option(message_core).comment = "The comment added after thought"; - option(message_core).see_also = "db.category MySQL table"; - option(owner) = "Platform"; - option(domain) = "Core"; - option(schema_type) = ENTITY; - option(team_channel) = "#team-platform"; - option(alert_channel) = "#alerts-platform"; - - int32 id = 1 - [(field_core).description = "Unique identifier for Category", (is_primary_key) = true]; - - string name = 2 - [(field_core).description = "Name of the Category"]; - - bool is_active = 3 - [(field_core).description = "define the active status of the Category. 
`true` == active; `false` = inactive`", (field_core).comment = "should refactor to non-binary status"]; - -} - -message CategoryEvent { - option(message_core).description = "This is the description of the Category activity table"; - option(owner) = "Platform"; - option(domain) = "Core"; - option(schema_type) = EVENT; - option(event_type) = LIFECYCLE; - option(team_channel) = "#team-platform"; - option(alert_channel) = "#alerts-platform"; - - Category previous_category_state = 1 - [(field_core).description = "Previous version of the Category entity before the mutation"]; - - Category current_category_state = 2 - [(field_core).description = "Current version of the Category entity before the mutation"]; - - ActivityType activity_type = 3 - [(field_core).description = "Lifecycle event type for the Category table"]; -} \ No newline at end of file diff --git a/src/main/resources/schema/product.proto b/src/main/resources/schema/product.proto deleted file mode 100644 index 563dbcd..0000000 --- a/src/main/resources/schema/product.proto +++ /dev/null @@ -1,57 +0,0 @@ -syntax = "proto3"; -package org.schemata.schema; - -import "schemata/protobuf/schemata.proto"; -import "category.proto"; -import "brand.proto"; - -option java_package = "org.protocol.schema"; -option java_outer_classname = "ProductBuilder"; - - -message Product { - - option(message_core).description = "This is the description of the Product table"; - option(message_core).comment = "The comment added after thought"; - option(message_core).see_also = "db.product MySQL table"; - option(owner) = "Platform"; - option(domain) = "Core"; - option(schema_type) = ENTITY; - option(team_channel) = "#team-platform"; - option(alert_channel) = "#alerts-platform"; - - int32 id = 1 - [(field_core).description = "Unique identifier for Product", (is_primary_key) = true]; - - string name = 2 - [(field_core).description = "Name of the Product"]; - - Category category = 3 - [(field_core).description = "Category name of the 
product"]; - - Brand brand = 4 - [(field_core).description = "Brand name of the product"]; - - bool is_active = 5 - [(field_core).description = "define the active status of the Product. `true` == active; `false` = inactive`", (field_core).comment = "should refactor to non-binary status"]; - -} - -message ProductEvent { - option(message_core).description = "This is the description of the Product activity table"; - option(owner) = "Platform"; - option(domain) = "Core"; - option(schema_type) = EVENT; - option(event_type) = LIFECYCLE; - option(team_channel) = "#team-platform"; - option(alert_channel) = "#alerts-platform"; - - Product previous_brand_state = 1 - [(field_core).description = "Previous version of the Product entity before the mutation"]; - - Product current_brand_state = 2 - [(field_core).description = "Current version of the Product entity before the mutation"]; - - ActivityType activity_type = 3 - [(field_core).description = "Lifecycle event type for the Product table"]; -} diff --git a/src/main/resources/schema/user.proto b/src/main/resources/schema/user.proto deleted file mode 100644 index 7c4d2bf..0000000 --- a/src/main/resources/schema/user.proto +++ /dev/null @@ -1,161 +0,0 @@ -syntax = "proto3"; -package org.schemata.schema; - -import "schemata/protobuf/schemata.proto"; -import "schemata/protobuf/subscribers.proto"; -import "schemata/protobuf/constraints.proto"; -import "product.proto"; -import "google/protobuf/timestamp.proto"; - - -option java_package = "org.protocol.schema"; -option java_outer_classname = "UserBuilder"; - -enum UserActivityType { - VIEW = 0; - READ_REVIEW = 1; - VIEW_DESCRIPTION = 2; -} - -message User { - - option(message_core).description = "This is the description of the users table"; - option(message_core).comment = "The comment added after thought"; - option(message_core).see_also = "db.user MySQL table"; - option(owner) = "Platform"; - option(domain) = "Core"; - option(schema_type) = ENTITY; - option(team_channel) = 
"#team-platform"; - option(alert_channel) = "#alerts-platform"; - - option (upstream) = { - subscribers: {name: "Upstream Team A" usage: "Upstream Usage A"} - subscribers: {name: "Upstream Team B" usage: "Upstream Usage B"} - }; - - option (downstream) = { - subscribers: {name: "Downstream Team A" usage: "Downstream Usage A"} - subscribers: {name: "Downstream Team B" usage: "Downstream Usage B"} - }; - - option (constraints) = { - - constraint: [{ - name: "age range", - description: "age range constraint", - config: { - key: "column", - value: {string_value: "age"} - }, - config: { - key: "class_name", - value: {string_value: "great_expectations.core.validation_operators.expect_column_values_to_be_between"} - }, - config: { - key: "min_value", - value: {number_value: 18} - }, - config: { - key: "max_value", - value: {number_value: 150} - }, - config: { - key: "mostly", - value: {number_value: 0.95} - } - }], - constraint: [{ - name: "Timezone Constraint", - description: "Timezone should be either EST or PST", - config: { - key: "column", - value: {string_value: "timezone"} - }, - config: { - key: "class_name", - value: {string_value: "expect_column_values_to_be_in_set"} - }, - config: { - key: "value_set", - value: { - list_value: { - values: {string_value: "EST"} - values: {string_value: "PST"} - } - } - } - }] - }; - - int32 id = 1 - [(field_core).description = "Unique identifier for User", (is_primary_key) = true]; - - string name = 2 - [(field_core).description = "Name of the user"] ; - - string email = 3 - [(field_core).description = "email id for the user", (product_type) = "username", (is_classified) = true, (classification_level) = "LEVEL1"] ; - - bool is_active = 4 - [(field_core).description = "define the active status of the user. 
`true` == active; `false` = inactive`", (field_core).comment = "should refactor to non-binary status"]; - - string timezone = 5 - [(field_core).description = "preferred time zone for the user"] ; - - string age = 6 - [(field_core).description = "Age of the user"]; -} - -message UserEvent { - option(message_core).description = "This is the description of the users table"; - option(owner) = "Platform"; - option(domain) = "Core"; - option(schema_type) = EVENT; - option(event_type) = LIFECYCLE; - option(team_channel) = "#team-platform"; - option(alert_channel) = "#alerts-platform"; - - User previous_user_state = 1 - [(field_core).description = "Previous version of the user entity before the mutation"]; - - User current_user_state = 2 - [(field_core).description = "Current version of the user entity before the mutation"]; - - ActivityType activity_type = 3 - [(field_core).description = "Lifecycle event type for the Users table"]; - - google.protobuf.Timestamp timestamp = 4 [(field_core).description = "Timestamp of the activity"]; -} - -message UserActivityEvent { - option(message_core).description = "This is the description of the users table"; - option(owner) = "Product"; - option(domain) = "Growth"; - option(schema_type) = EVENT; - option(event_type) = ACTIVITY; - option(team_channel) = "#team-growth"; - option(alert_channel) = "#alerts-growth"; - User user = 1 [(field_core).description = "User entity reference"]; - Product product = 2 [(field_core).description = "Product entity reference"]; - UserActivityType activity_type = 3 [(field_core).description = "Type of the user activity"]; - google.protobuf.Timestamp timestamp = 4 [(field_core).description = "Timestamp of the activity"]; -} - -message UserActivityAggregate { - - option(message_core).description = "This is the aggregated user activity view count. 
The event aggregated by user & product"; - option(owner) = "Product"; - option(domain) = "Growth"; - option(schema_type) = EVENT; - option(event_type) = AGGREGATED; - option(team_channel) = "#team-growth"; - option(alert_channel) = "#alerts-growth"; - - User user = 1[(field_core).description = "User entity reference"]; - Product product = 2 [(field_core).description = "Product entity reference"]; - int64 count = 3 [(field_core).description = "Aggregated count of the user activity per product", (product_type) = "activity_count"]; - int32 windowTime = 4 [(field_core).description = "Max window time for the aggregation"]; - TimeUnit window_time_unit = 5 [(field_core).description = "TimeUnit of window for the aggregation"]; - google.protobuf.Timestamp timestamp = 6 [(field_core).description = "Timestamp of the activity"]; - -} \ No newline at end of file diff --git a/src/opencontract/v1/org/schemata/protobuf/constraints.proto b/src/opencontract/v1/org/schemata/protobuf/constraints.proto deleted file mode 100644 index 6572b3c..0000000 --- a/src/opencontract/v1/org/schemata/protobuf/constraints.proto +++ /dev/null @@ -1,31 +0,0 @@ -syntax = "proto3"; - -package org.schemata.schema; - -import "google/protobuf/descriptor.proto"; -import "google/protobuf/struct.proto"; - -option java_package = "org.schemata.schema"; -option java_outer_classname = "SchemataConstraintsBuilder"; - - - -// The Constraints message is used to define the constraints that are -// applied to a message. -message Constraint { - // The list of constraints that are applied to the message. - string name = 1; - string description = 2; - map config = 3; -} - -// You can define N number of constraints for a message or a field. -message Constraints { - // The list of constraints that are applied to the message. - repeated Constraint constraint = 1; -} - -// Add the Constraints rules to the MessageOptions. 
-extend google.protobuf.MessageOptions { - Constraints constraints = 800001; -} diff --git a/src/opencontract/v1/org/schemata/protobuf/subscribers.proto b/src/opencontract/v1/org/schemata/protobuf/subscribers.proto deleted file mode 100644 index 1c1efdb..0000000 --- a/src/opencontract/v1/org/schemata/protobuf/subscribers.proto +++ /dev/null @@ -1,32 +0,0 @@ -syntax = "proto3"; - -package org.schemata.schema; - -import "google/protobuf/descriptor.proto"; - -option java_package = "org.schemata.schema"; -option java_outer_classname = "SchemataSubscribersBuilder"; - -// The Consumers subscribe a stream by providing the name of the consumer & the usage of the stream. -message Subscribers { - string name = 1; - string usage = 2; -} - -// The Upstream message is used to send the list of subscribers to the publisher. -message Upstream { - repeated Subscribers subscribers = 2; -} - -// The Downstream message is used to send the list of subscribers to the publisher. -message Downstream { - repeated Subscribers subscribers = 1; -} - -// Add the Downstream Subscribers to the MessageOptions. 
-extend google.protobuf.MessageOptions { - Downstream downstream = 700001; - Upstream upstream = 700002; -} - - diff --git a/src/test/java/org/schemata/ResourceLoader.java b/src/test/java/org/schemata/ResourceLoader.java deleted file mode 100644 index 3bae391..0000000 --- a/src/test/java/org/schemata/ResourceLoader.java +++ /dev/null @@ -1,56 +0,0 @@ -package org.schemata; - -import java.nio.file.Path; -import java.nio.file.Paths; - - -public class ResourceLoader { - - public static String getDescriptorsPath() { - Path resourceDirectory = Paths.get("src", "test", "resources"); - String absolutePath = resourceDirectory.toFile().getAbsolutePath(); - return absolutePath + "/descriptors/model.desc"; - } - - public static String getChangedDescriptorsPath() { - Path resourceDirectory = Paths.get("src", "test", "resources"); - String absolutePath = resourceDirectory.toFile().getAbsolutePath(); - return absolutePath + "/descriptors/changed_model.desc"; - } - - public static String getProtoEntitiesPath() { - Path resourceDirectory = Paths.get("src", "test", "resources"); - String absolutePath = resourceDirectory.toFile().getAbsolutePath(); - return absolutePath + "/schema/entities.proto"; - } - - public static String getAvroSchemaPath() { - Path resourceDirectory = Paths.get("src", "test", "resources"); - return resourceDirectory.toFile().getAbsolutePath(); - } - - public static String getBrandSchemaPath() { - Path resourceDirectory = Paths.get("src", "test", "resources"); - return resourceDirectory.toFile().getAbsolutePath() + "/avro_schema/brand.avsc"; - } - - public static String getInValidBrandSchemaPath() { - Path resourceDirectory = Paths.get("src", "test", "resources"); - return resourceDirectory.toFile().getAbsolutePath() + "/avro_schema/brand_dummy.avsc"; - } - - public static String getDbtBasePath() { - Path resourceDirectory = Paths.get("src", "test", "resources"); - return resourceDirectory.toFile().getAbsolutePath() + "/dbt"; - } - - public static String 
getChangedDbtBasePath() { - Path resourceDirectory = Paths.get("src", "test", "resources"); - return resourceDirectory.toFile().getAbsolutePath() + "/dbt_change"; - } - - public static String getInvalidDbtBasePath() { - Path resourceDirectory = Paths.get("src", "main", "dbt"); - return resourceDirectory.toFile().getAbsolutePath(); - } -} diff --git a/src/test/java/org/schemata/SchemataExecutorTest.java b/src/test/java/org/schemata/SchemataExecutorTest.java deleted file mode 100644 index e39a242..0000000 --- a/src/test/java/org/schemata/SchemataExecutorTest.java +++ /dev/null @@ -1,49 +0,0 @@ -package org.schemata; - -import jdk.jfr.Description; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; -import picocli.CommandLine; - -import static org.junit.jupiter.api.Assertions.assertEquals; - - -public class SchemataExecutorTest { - - static CommandLine cmd; - - @BeforeAll - static void setup() { - var executor = new SchemataExecutor(); - cmd = new CommandLine(executor); - } - - @Test - @Description("Run schema validate function to run all the schema and column validation rules") - public void testSchemaValidateCmd() { - int exitCode = cmd.execute("validate", "-s=" + ResourceLoader.getDescriptorsPath(), "-p=PROTOBUF"); - assertEquals(0, exitCode); - } - - @Test - @Description("Test Schema score with an invalid schema name") - public void testScoreWithInvalidSchema() { - int exitCode = cmd.execute("score", "-s=" + ResourceLoader.getDescriptorsPath(), "User", "-p=PROTOBUF"); - assertEquals(-1, exitCode); - } - - @Test - @Description("Test Schema score with an valid schema name") - public void testScoreWithValidSchema() { - int exitCode = cmd.execute("score", "-s=" + ResourceLoader.getDescriptorsPath(), "-p=PROTOBUF", - "org.schemata.schema.CampaignCategoryTrackerEvent"); - assertEquals(0, exitCode); - } - - @Test - @Description("Test Schema documentation") - public void testSchemaDocumentationCmd() { - int exitCode = cmd.execute("document", "-s=" 
+ ResourceLoader.getDescriptorsPath(), "-p=PROTOBUF"); - assertEquals(0, exitCode); - } -} diff --git a/src/test/java/org/schemata/graph/SchemaGraphTest.java b/src/test/java/org/schemata/graph/SchemaGraphTest.java deleted file mode 100644 index af122f5..0000000 --- a/src/test/java/org/schemata/graph/SchemaGraphTest.java +++ /dev/null @@ -1,125 +0,0 @@ -package org.schemata.graph; - -import com.google.protobuf.Descriptors; -import java.io.FileInputStream; -import java.io.IOException; -import java.util.Set; -import java.util.stream.Collectors; - -import org.apache.commons.collections4.SetUtils; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; - -import org.schemata.ResourceLoader; -import org.schemata.domain.Field; -import org.schemata.domain.Schema; -import org.schemata.exception.SchemaNotFoundException; -import org.schemata.provider.protobuf.ProtoFileDescriptorSetLoader; -import org.schemata.provider.protobuf.ProtoProcessor; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; - - -public class SchemaGraphTest { - - static SchemaGraph graph; - - @BeforeAll - static void setUp() - throws IOException, Descriptors.DescriptorValidationException { - var stream = new FileInputStream(ResourceLoader.getDescriptorsPath()); - var protoFileDescriptorLoader = new ProtoFileDescriptorSetLoader(stream); - var parser = new ProtoProcessor(); - var schemaList = parser.parse(protoFileDescriptorLoader.loadDescriptors()); - graph = new SchemaGraph(schemaList); - } - - @Test - public void testWithInvalidSchema() { - Assertions.assertThrows(SchemaNotFoundException.class, () -> graph.getSchema("User"), - "Schema not found was expected"); - } - - @Test - public void testWithValidSchema() { - assertEquals("org.schemata.schema.UserEvent", graph.getSchema("org.schemata.schema.UserEvent").name()); - } - - @Test - public void testIncomingEdges() { - var 
incomingEdges = graph.incomingEdgesOf("org.schemata.schema.User"); - var expectedEdges = Set.of(newUserEdge("org.schemata.schema.UserEvent", "previous_user_state"), - newUserEdge("org.schemata.schema.UserEvent", "current_user_state"), - newUserEdge("org.schemata.schema.UserActivityEvent", "user"), - newUserEdge("org.schemata.schema.UserActivityAggregate", "user")); - assertEquals(4, incomingEdges.size()); - var actualEdges = incomingEdges.stream().map(WeightedSchemaEdge::summaryPrint).collect(Collectors.toSet()); - assertTrue(SetUtils.isEqualSet(expectedEdges, actualEdges)); - } - - @Test - public void testIncomingVertex() { - var incomingSchemaSet = graph.incomingVertexOf("org.schemata.schema.User"); - var expectedVertex = Set.of("org.schemata.schema.UserActivityAggregate", "org.schemata.schema.UserActivityEvent", - "org.schemata.schema.UserEvent"); - var actualVertex = incomingSchemaSet.stream().map(Schema::name).collect(Collectors.toSet()); - assertEquals(3, incomingSchemaSet.size()); - assertTrue(SetUtils.isEqualSet(expectedVertex, actualVertex)); - } - - @Test - public void testOutgoingEdges() { - var outgoingEdges = graph.outgoingEdgesOf("org.schemata.schema.UserActivityAggregate"); - var expectedEdges = Set.of(newUserActivityAggregateEdge("org.schemata.schema.User", "user"), - newUserActivityAggregateEdge("org.schemata.schema.Product", "product")); - assertEquals(2, outgoingEdges.size()); - var actualEdges = outgoingEdges.stream().map(WeightedSchemaEdge::summaryPrint).collect(Collectors.toSet()); - assertTrue(SetUtils.isEqualSet(expectedEdges, actualEdges)); - } - - @Test - public void testOutgoingVertex() { - var outgoingSchemaSet = graph.outgoingVertexOf("org.schemata.schema.UserActivityAggregate"); - var expectedVertex = Set.of("org.schemata.schema.Product", "org.schemata.schema.User"); - var actualVertex = outgoingSchemaSet.stream().map(Schema::name).collect(Collectors.toSet()); - assertEquals(2, outgoingSchemaSet.size()); - 
assertTrue(SetUtils.isEqualSet(expectedVertex, actualVertex)); - } - - @Test - public void testOutgoingEntityVertex() { - var outgoingSchemaSet = graph.outgoingEntityVertexOf("org.schemata.schema.UserActivityAggregate"); - for (var schema : outgoingSchemaSet) { - System.out.println(schema.name()); - } - } - - @Test - public void testPageRankScore() { - assertTrue(graph.getVertexPageRankScore("org.schemata.schema.Product") > graph.getVertexPageRankScore( - "org.schemata.schema.User")); - } - - @Test - public void testSchemataScore() { - assertTrue( - graph.getSchemataScore("org.schemata.schema.Product") > graph.getSchemataScore("org.schemata.schema.User")); - assertTrue(graph.getSchemataScore("org.schemata.schema.CampaignProductTrackerEvent") > graph.getSchemataScore( - "org.schemata.schema.CampaignCategoryTrackerEvent")); - } - - private String newUserEdge(String source, String edge) { - return newEdge(source, "org.schemata.schema.User", edge); - } - - private String newUserActivityAggregateEdge(String target, String edge) { - return newEdge("org.schemata.schema.UserActivityAggregate", target, edge); - } - - private String newEdge(String source, String target, String edge) { - Field field = new Field.Builder(source, edge, target, false).build(); - return new WeightedSchemaEdge(graph.getSchema(source), graph.getSchema(target), field).summaryPrint(); - } -} \ No newline at end of file diff --git a/src/test/java/org/schemata/provider/avro/AvroSchemaParserTest.java b/src/test/java/org/schemata/provider/avro/AvroSchemaParserTest.java deleted file mode 100644 index 5c90a56..0000000 --- a/src/test/java/org/schemata/provider/avro/AvroSchemaParserTest.java +++ /dev/null @@ -1,66 +0,0 @@ -package org.schemata.provider.avro; - -import java.io.IOException; -import org.junit.jupiter.api.Test; -import org.schemata.ResourceLoader; -import org.schemata.domain.EventType; -import org.schemata.domain.SchemaType; - -import static org.junit.jupiter.api.Assertions.assertAll; -import 
static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; - - - -public class AvroSchemaParserTest { - - @Test - public void testListAvroSchemaFilesWithInvalidPath() { - assertThrows(IOException.class, () -> { - String avroSchemaPath = ResourceLoader.getAvroSchemaPath() + "dummy_path"; - var parser = new AvroSchemaParser(); - parser.listAvroSchemaFiles(avroSchemaPath); - }); - } - - @Test - public void testListAvroSchemaFiles() - throws IOException { - String avroSchemaPath = ResourceLoader.getAvroSchemaPath(); - var parser = new AvroSchemaParser(); - var schemaFileList = parser.listAvroSchemaFiles(avroSchemaPath); - assertAll("Assert list of schema files", - () -> assertEquals(1, schemaFileList.size())); - } - - @Test - public void testCompileAvroSchemaWithInvalidFile() { - assertThrows(IOException.class, () -> { - var parser = new AvroSchemaParser(); - parser.compileAvroSchema(ResourceLoader.getInValidBrandSchemaPath()); - }); - } - - @Test - public void testCompileAvroSchema() - throws IOException { - var parser = new AvroSchemaParser(); - var schema = parser.compileAvroSchema(ResourceLoader.getBrandSchemaPath()); - assertEquals("org.schemata.schema.Brand", schema.getFullName()); - } - - @Test - public void testBuildSchema() - throws IOException { - var parser = new AvroSchemaParser(); - var schema = parser.buildSchema(ResourceLoader.getBrandSchemaPath()); - assertAll("Assert schema properties", - () -> assertEquals("org.schemata.schema.Brand", schema.name()), - () -> assertEquals("This is the description of the Brand table", schema.description()), - () -> assertEquals(SchemaType.ENTITY.name(), schema.type()), - () -> assertEquals(EventType.NONE.name(), schema.eventType()), - () -> assertEquals(3, schema.fieldList().size()) - ); - System.out.println(schema); - } -} diff --git a/src/test/java/org/schemata/provider/dbt/DbtCatalogParserTest.java 
b/src/test/java/org/schemata/provider/dbt/DbtCatalogParserTest.java deleted file mode 100644 index 06d990e..0000000 --- a/src/test/java/org/schemata/provider/dbt/DbtCatalogParserTest.java +++ /dev/null @@ -1,68 +0,0 @@ -package org.schemata.provider.dbt; - -import com.google.gson.JsonElement; -import java.util.List; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.schemata.ResourceLoader; -import org.schemata.exception.SchemaParserException; - -import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; - - -public class DbtCatalogParserTest { - - JsonElement element; - DbtCatalogParser parser; - - @BeforeEach - public void init() { - parser = new DbtCatalogParser(); - element = parser.getCatalogJsonParser(ResourceLoader.getDbtBasePath()); - } - - @Test - public void testGetCatalogParserWithInvalidPath() { - assertThrows(SchemaParserException.class, - () -> new DbtCatalogParser().getCatalogJsonParser(ResourceLoader.getInvalidDbtBasePath())); - } - - @Test - public void testGetCatalogParser() { - assertDoesNotThrow(() -> new DbtCatalogParser().getCatalogJsonParser(ResourceLoader.getDbtBasePath())); - } - - @Test - public void testGetNodes() { - assertTrue(parser.getNodes(element).isJsonObject()); - } - - @Test - public void testExtractTable() { - var nodes = parser.getNodes(element); - String modelName = "model.dbtlearn.fct_reviews"; - var expected = new DbtCatalogMetadata.Table("dev", "fct_reviews", - "model.dbtlearn.fct_reviews", "", "transform"); - assertEquals(expected, parser.extractTable(modelName, nodes.get(modelName))); - } - - @Test - public void testExtractColumn() { - var nodes = parser.getNodes(element); - String modelName = "model.dbtlearn.fct_reviews"; - - var element = nodes.get(modelName); - - var expected = List.of(new 
DbtCatalogMetadata.Column("listing_id", "number", 1, ""), - new DbtCatalogMetadata.Column("review_date", "timestamp_ntz", 2, ""), - new DbtCatalogMetadata.Column("reviewer_name", "text", 3, ""), - new DbtCatalogMetadata.Column("review_text", "text", 4, ""), - new DbtCatalogMetadata.Column("review_sentiment", "text", 5, "") - ); - - assertEquals(expected, parser.extractColumn(element)); - } -} diff --git a/src/test/java/org/schemata/provider/dbt/DbtManifestParserTest.java b/src/test/java/org/schemata/provider/dbt/DbtManifestParserTest.java deleted file mode 100644 index 3c46e53..0000000 --- a/src/test/java/org/schemata/provider/dbt/DbtManifestParserTest.java +++ /dev/null @@ -1,64 +0,0 @@ -package org.schemata.provider.dbt; - -import java.util.List; -import org.junit.jupiter.api.Test; -import java.util.Map; -import org.junit.jupiter.api.BeforeEach; -import org.schemata.ResourceLoader; -import org.schemata.domain.Depends; -import org.schemata.domain.EventType; -import org.schemata.domain.Link; -import org.schemata.domain.ModelType; -import org.schemata.domain.SchemaType; - -import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; - - -public class DbtManifestParserTest { - Map catalog; - DbtManifestParser manifestParser; - - @BeforeEach - public void init() { - catalog = new DbtCatalogParser().parse(ResourceLoader.getDbtBasePath()); - manifestParser = new DbtManifestParser(); - } - - @Test - public void testParse() { - assertDoesNotThrow(() -> manifestParser.parse(catalog, ResourceLoader.getDbtBasePath())); - } - - @Test - public void testSchemaTable() { - var schemaList = manifestParser.parse(catalog, ResourceLoader.getDbtBasePath()); - assertEquals(schemaList.size(), 7); - } - - @Test - public void testReviewsSchema() { - var schemaList = manifestParser.parse(catalog, ResourceLoader.getDbtBasePath()); - var schema = 
schemaList.stream().filter(f -> f.name().equals("model.dbtlearn.src_reviews")).toList(); - assertEquals(schema.size(), 1); - var reviewsSchema = schemaList.get(0); - assertEquals(reviewsSchema.domain(), "core"); - assertEquals(reviewsSchema.modelType(), ModelType.DIMENSION.name()); - assertEquals(reviewsSchema.eventType(), EventType.NONE.name()); - assertEquals(reviewsSchema.type(), SchemaType.MODEL.name()); - } - - @Test - public void testReviewColumns() { - var schemaList = manifestParser.parse(catalog, ResourceLoader.getDbtBasePath()); - var schema = schemaList.stream().filter(f -> f.name().equals("model.dbtlearn.src_reviews")).toList().get(0); - var fieldList = schema.fieldList(); - assertEquals(5, fieldList.size()); - var field = fieldList.stream().filter(f -> f.name().equals("listing_id")).toList().get(0); - assertTrue(field.isPrimaryKey()); - assertEquals(new Link("src_listings", "id"), field.link()); - List dependsList = List.of(new Depends("listings", "id")); - assertEquals(dependsList, field.depends()); - } -} diff --git a/src/test/java/org/schemata/provider/dbt/DbtSchemaCompatibilityCheckerTest.java b/src/test/java/org/schemata/provider/dbt/DbtSchemaCompatibilityCheckerTest.java deleted file mode 100644 index 8968a2e..0000000 --- a/src/test/java/org/schemata/provider/dbt/DbtSchemaCompatibilityCheckerTest.java +++ /dev/null @@ -1,23 +0,0 @@ -package org.schemata.provider.dbt; - -import org.junit.jupiter.api.Test; -import org.schemata.ResourceLoader; - -import static org.junit.jupiter.api.Assertions.*; - -public class DbtSchemaCompatibilityCheckerTest { - - @Test - public void testValidSchemaChangesCheck() { - var checker = new DbtSchemaCompatibilityChecker(); - var result = checker.check(ResourceLoader.getDbtBasePath(), ResourceLoader.getDbtBasePath()); - assertTrue(result.isCompatible()); - } - - @Test - public void testInValidSchemaChangesCheck() { - var checker = new DbtSchemaCompatibilityChecker(); - var result = 
checker.check(ResourceLoader.getDbtBasePath(), ResourceLoader.getChangedDbtBasePath()); - assertFalse(result.isCompatible()); - } -} diff --git a/src/test/java/org/schemata/provider/dbt/DbtSchemaParserTest.java b/src/test/java/org/schemata/provider/dbt/DbtSchemaParserTest.java deleted file mode 100644 index 30c3c10..0000000 --- a/src/test/java/org/schemata/provider/dbt/DbtSchemaParserTest.java +++ /dev/null @@ -1,14 +0,0 @@ -package org.schemata.provider.dbt; - -import org.junit.jupiter.api.Test; -import org.schemata.ResourceLoader; - - -public class DbtSchemaParserTest { - - @Test - public void testMe() { - DbtSchemaParser parser = new DbtSchemaParser(); - parser.getSchemaList(ResourceLoader.getDbtBasePath()); - } -} diff --git a/src/test/java/org/schemata/provider/protobuf/ProtoProcessorTest.java b/src/test/java/org/schemata/provider/protobuf/ProtoProcessorTest.java deleted file mode 100644 index cc349f3..0000000 --- a/src/test/java/org/schemata/provider/protobuf/ProtoProcessorTest.java +++ /dev/null @@ -1,97 +0,0 @@ -package org.schemata.provider.protobuf; - -import com.google.protobuf.Descriptors; - -import java.io.FileInputStream; -import java.io.IOException; -import java.util.List; - -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; -import org.schemata.ResourceLoader; -import org.schemata.domain.Constraints; -import org.schemata.domain.Schema; - -import static org.junit.jupiter.api.Assertions.assertAll; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertTrue; - - -public class ProtoProcessorTest { - - private static Schema userSchema; - - @BeforeAll - static void setUp() - throws IOException, Descriptors.DescriptorValidationException { - - var stream = new FileInputStream(ResourceLoader.getDescriptorsPath()); - var protoFileDescriptorLoader = new 
ProtoFileDescriptorSetLoader(stream); - var parser = new ProtoProcessor(); - var schemaList = parser.parse(protoFileDescriptorLoader.loadDescriptors()); - assertAll("User Schema Sanity Check", () -> assertNotNull(schemaList), () -> assertEquals(14, schemaList.size())); - userSchema = schemaList.stream().filter(s -> s.name().equalsIgnoreCase("org.schemata.schema.User")).toList().get(0); - assertNotNull(userSchema); - } - - @Test - @DisplayName("Test User Schema metadata") - public void checkSchema() { - assertAll("User Schema properties", () -> assertNotNull(userSchema), - () -> assertEquals("org.schemata.schema.User", userSchema.name())); - } - - @Test - @DisplayName("Test User Fields metadata") - public void checkFields() { - assertAll("User Schema Fields Sanity Check", () -> assertNotNull(userSchema.fieldList()), - () -> assertTrue(userSchema.fieldList().size() > 1)); - var fieldList = userSchema.fieldList(); - assertEquals(6, fieldList.size()); - } - - @Test - @DisplayName("Test Downstream Subscribers List metadata") - public void checkDownstreamSubscribersList() { - assertAll("User Schema Downstream Subscribers Sanity Check", () -> assertNotNull(userSchema.downstreamSubscribersList()), - () -> assertTrue(userSchema.downstreamSubscribersList().size() > 1)); - var subscribersList = userSchema.downstreamSubscribersList(); - assertEquals(2, subscribersList.size()); - } - - @Test - @DisplayName("Test Upstream Subscribers List metadata") - public void checkUpstreamSubscribersList() { - assertAll("User Schema Upstream Subscribers Sanity Check", () -> assertNotNull(userSchema.upstreamSubscribersList()), - () -> assertTrue(userSchema.upstreamSubscribersList().size() > 1)); - var subscribersList = userSchema.upstreamSubscribersList(); - assertEquals(2, subscribersList.size()); - } - - @Test - public void checkConstraintsList() { - assertAll("User Schema Constraints Sanity Check", () -> assertNotNull(userSchema.constraintsList()), - () -> 
assertTrue(userSchema.constraintsList().size() > 1)); - var constraintsList = userSchema.constraintsList(); - assertEquals(2, constraintsList.size()); - } - - @Test - public void checkPrimitiveConstraints() { - var constraints = userSchema.constraintsList().stream().filter(v -> v.name().equals("age range")).toList(); - assertEquals(1, constraints.size()); - var constraint = constraints.get(0); - assertEquals(18.0, constraint.constraintMap().get("min_value").value()); - } - - @Test - public void checkListConstraints() { - var constraints = userSchema.constraintsList().stream().filter(v -> v.name().equals("Timezone Constraint")).toList(); - assertEquals(1, constraints.size()); - var constraint = constraints.get(0); - List expected = List.of("EST", "PST"); - assertEquals(expected, constraint.constraintMap().get("value_set").listValue()); - } -} diff --git a/src/test/java/org/schemata/provider/protobuf/ProtoSchemaCompatibilityCheckerTest.java b/src/test/java/org/schemata/provider/protobuf/ProtoSchemaCompatibilityCheckerTest.java deleted file mode 100644 index 789f7d8..0000000 --- a/src/test/java/org/schemata/provider/protobuf/ProtoSchemaCompatibilityCheckerTest.java +++ /dev/null @@ -1,21 +0,0 @@ -package org.schemata.provider.protobuf; - - -import org.junit.jupiter.api.Test; -import org.schemata.ResourceLoader; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; - -public class ProtoSchemaCompatibilityCheckerTest { - - - @Test - public void testCheck() { - var checker = new ProtoSchemaCompatibilityChecker(); - var result = checker.check(ResourceLoader.getDescriptorsPath(), ResourceLoader.getChangedDescriptorsPath()); - assertFalse(result.isCompatible()); - assertEquals(2, result.summary().size()); - assertEquals(1, result.summary().stream().filter(summary -> summary.fieldName().equals("name")).toList().size()); - } -} diff --git a/src/test/java/org/schemata/validate/FieldValidatorTest.java 
b/src/test/java/org/schemata/validate/FieldValidatorTest.java deleted file mode 100644 index 5c8dd1e..0000000 --- a/src/test/java/org/schemata/validate/FieldValidatorTest.java +++ /dev/null @@ -1,46 +0,0 @@ -package org.schemata.validate; - -import org.junit.jupiter.api.Test; -import org.schemata.domain.Field; - -import static org.junit.jupiter.api.Assertions.assertEquals; - - -public class FieldValidatorTest { - - @Test - public void testWithEmptyDescriptor() { - Field.Builder builder = new Field.Builder("TestSchema", "TestField", "STRING", true); - builder.isClassified(true); - builder.classificationLevel("LEVEL3"); - var result = new FieldValidator().apply(builder.build()); - assertEquals(Status.ERROR, result.status()); - } - - @Test - public void testWithEmptyClassificationLevel() { - Field.Builder builder = new Field.Builder("TestSchema", "TestField", "STRING", true); - builder.isClassified(true); - builder.description("Field Description"); - var result = new FieldValidator().apply(builder.build()); - assertEquals(Status.ERROR, result.status()); - } - - @Test - public void testSuccessStatus() { - Field.Builder builder = new Field.Builder("TestSchema", "TestField", "STRING", true); - builder.isClassified(true); - builder.description("Field Description"); - builder.classificationLevel("LEVEL3"); - var result = new FieldValidator().apply(builder.build()); - assertEquals(Status.SUCCESS, result.status()); - } - - @Test - public void testSuccessStatusForNonClassifiedField() { - Field.Builder builder = new Field.Builder("TestSchema", "TestField", "STRING", true); - builder.description("Field Description"); - var result = new FieldValidator().apply(builder.build()); - assertEquals(Status.SUCCESS, result.status()); - } -} diff --git a/src/test/java/org/schemata/validate/RulesTest.java b/src/test/java/org/schemata/validate/RulesTest.java deleted file mode 100644 index b99a266..0000000 --- a/src/test/java/org/schemata/validate/RulesTest.java +++ /dev/null @@ -1,14 +0,0 
@@ -package org.schemata.validate; - -import org.junit.jupiter.api.Test; - -import static org.junit.jupiter.api.Assertions.assertEquals; - - -public class RulesTest { - - @Test - public void testDescription() { - assertEquals("Schema domain metadata is null or empty", Rules.SCHEMA_DOMAIN_EMPTY.errorMessage); - } -} diff --git a/src/test/java/org/schemata/validate/SchemaValidatorTest.java b/src/test/java/org/schemata/validate/SchemaValidatorTest.java deleted file mode 100644 index a800075..0000000 --- a/src/test/java/org/schemata/validate/SchemaValidatorTest.java +++ /dev/null @@ -1,66 +0,0 @@ -package org.schemata.validate; - -import java.util.List; -import org.junit.jupiter.api.Test; -import org.schemata.domain.Field; -import org.schemata.domain.Schema; - -import static org.junit.jupiter.api.Assertions.assertEquals; - - -public class SchemaValidatorTest { - - - @Test - public void testEntityWithValidPrimaryKey() { - Field.Builder fieldBuilder = new Field.Builder("SchemaName", "FieldName", "STRING", true); - fieldBuilder.primaryKey(true); - Schema.Builder builder = new Schema.Builder("SchemaName", List.of(fieldBuilder.build())); - builder.description("Schema Description"); - builder.owner("Growth"); - builder.domain("Core"); - builder.schemaType("ENTITY"); - assertEquals(Status.SUCCESS, new SchemaValidator().apply(builder.build()).status()); - } - - @Test - public void testValidEvent() { - Field.Builder fieldBuilder = new Field.Builder("SchemaName", "FieldName", "STRING", true); - Schema.Builder builder = new Schema.Builder("SchemaName", List.of(fieldBuilder.build())); - builder.description("Schema Description"); - builder.owner("Growth"); - builder.domain("Core"); - builder.schemaType("EVENT"); - assertEquals(Status.SUCCESS, new SchemaValidator().apply(builder.build()).status()); - } - - @Test - public void testWithEmptyDescriptor() { - Field.Builder fieldBuilder = new Field.Builder("SchemaName", "FieldName", "STRING", true); - Schema.Builder builder = new 
Schema.Builder("SchemaName", List.of(fieldBuilder.build())); - builder.owner("Growth"); - builder.domain("Core"); - builder.schemaType("EVENT"); - assertEquals(Status.ERROR, new SchemaValidator().apply(builder.build()).status()); - } - - @Test - public void testWithEmptyOwner() { - Field.Builder fieldBuilder = new Field.Builder("SchemaName", "FieldName", "STRING", true); - Schema.Builder builder = new Schema.Builder("SchemaName", List.of(fieldBuilder.build())); - builder.description("Schema Description"); - builder.domain("Core"); - builder.schemaType("EVENT"); - assertEquals(Status.ERROR, new SchemaValidator().apply(builder.build()).status()); - } - - @Test - public void testWithEmptyDomain() { - Field.Builder fieldBuilder = new Field.Builder("SchemaName", "FieldName", "STRING", true); - Schema.Builder builder = new Schema.Builder("SchemaName", List.of(fieldBuilder.build())); - builder.description("Schema Description"); - builder.owner("Growth"); - builder.schemaType("EVENT"); - assertEquals(Status.ERROR, new SchemaValidator().apply(builder.build()).status()); - } -} diff --git a/src/test/resources/avro_schema/brand.avsc b/src/test/resources/avro_schema/brand.avsc deleted file mode 100644 index 70db88f..0000000 --- a/src/test/resources/avro_schema/brand.avsc +++ /dev/null @@ -1,35 +0,0 @@ -{ - "namespace": "org.schemata.schema", - "type": "record", - "name": "Brand", - "desc": "This is the description of the Brand table", - "comment": "The comment added after thought", - "see_also": "db.brand MySQL table", - "owner": "Platform", - "domain": "Core", - "schema_type": "ENTITY", - "team_channel": "#team-platform", - "alert_channel": "#alerts-platform", - "fields": [ - { - "name": "id", - "type": "int", - "is_primary_key": "true", - "desc": "Unique identifier for Brand" - }, - { - "name": "name", - "type": [ - "string", - "null" - ], - "desc": "Name of the Brand" - }, - { - "name": "is_active", - "type": "boolean", - "desc": "define the active status of the Brand. 
`true` == active; `false` = inactive`", - "comment": "should refactor to non-binary status" - } - ] -} diff --git a/src/test/resources/dbt/catalog.json b/src/test/resources/dbt/catalog.json deleted file mode 100644 index f300443..0000000 --- a/src/test/resources/dbt/catalog.json +++ /dev/null @@ -1,470 +0,0 @@ -{ - "metadata": { - "dbt_schema_version": "https://schemas.getdbt.com/dbt/catalog/v1.json", - "dbt_version": "1.0.5", - "generated_at": "2022-04-29T04:29:35.188250Z", - "invocation_id": "6f5bd62d-50bb-4d99-b92b-933c06fe7759", - "env": {} - }, - "nodes": { - "model.dbtlearn.dim_listings_cleansed": { - "metadata": { - "type": "VIEW", - "schema": "DEV", - "name": "DIM_LISTINGS_CLEANSED", - "database": "AIRBNB", - "comment": null, - "owner": "TRANSFORM" - }, - "columns": { - "LISTING_ID": { - "type": "NUMBER", - "index": 1, - "name": "LISTING_ID", - "comment": null - }, - "LISTING_NAME": { - "type": "TEXT", - "index": 2, - "name": "LISTING_NAME", - "comment": null - }, - "ROOM_TYPE": { - "type": "TEXT", - "index": 3, - "name": "ROOM_TYPE", - "comment": null - }, - "MINIMUM_NIGHTS": { - "type": "NUMBER", - "index": 4, - "name": "MINIMUM_NIGHTS", - "comment": null - }, - "HOST_ID": { - "type": "NUMBER", - "index": 5, - "name": "HOST_ID", - "comment": null - }, - "PRICE": { - "type": "NUMBER", - "index": 6, - "name": "PRICE", - "comment": null - }, - "CREATED_AT": { - "type": "TIMESTAMP_NTZ", - "index": 7, - "name": "CREATED_AT", - "comment": null - }, - "UPDATED_AT": { - "type": "TIMESTAMP_NTZ", - "index": 8, - "name": "UPDATED_AT", - "comment": null - } - }, - "stats": { - "has_stats": { - "id": "has_stats", - "label": "Has Stats?", - "value": false, - "include": false, - "description": "Indicates whether there are statistics for this table" - } - }, - "unique_id": "model.dbtlearn.dim_listings_cleansed" - }, - "model.dbtlearn.src_hosts": { - "metadata": { - "type": "VIEW", - "schema": "DEV", - "name": "SRC_HOSTS", - "database": "AIRBNB", - "comment": null, - 
"owner": "TRANSFORM" - }, - "columns": { - "HOST_ID": { - "type": "NUMBER", - "index": 1, - "name": "HOST_ID", - "comment": null - }, - "HOST_NAME": { - "type": "TEXT", - "index": 2, - "name": "HOST_NAME", - "comment": null - }, - "IS_SUPERHOST": { - "type": "TEXT", - "index": 3, - "name": "IS_SUPERHOST", - "comment": null - }, - "CREATED_AT": { - "type": "TIMESTAMP_NTZ", - "index": 4, - "name": "CREATED_AT", - "comment": null - }, - "UPDATED_AT": { - "type": "TIMESTAMP_NTZ", - "index": 5, - "name": "UPDATED_AT", - "comment": null - } - }, - "stats": { - "has_stats": { - "id": "has_stats", - "label": "Has Stats?", - "value": false, - "include": false, - "description": "Indicates whether there are statistics for this table" - } - }, - "unique_id": "model.dbtlearn.src_hosts" - }, - "model.dbtlearn.src_reviews": { - "metadata": { - "type": "VIEW", - "schema": "DEV", - "name": "SRC_REVIEWS", - "database": "AIRBNB", - "comment": null, - "owner": "TRANSFORM" - }, - "columns": { - "LISTING_ID": { - "type": "NUMBER", - "index": 1, - "name": "LISTING_ID", - "comment": null - }, - "REVIEW_DATE": { - "type": "TIMESTAMP_NTZ", - "index": 2, - "name": "REVIEW_DATE", - "comment": null - }, - "REVIEWER_NAME": { - "type": "TEXT", - "index": 3, - "name": "REVIEWER_NAME", - "comment": null - }, - "REVIEW_TEXT": { - "type": "TEXT", - "index": 4, - "name": "REVIEW_TEXT", - "comment": null - }, - "REVIEW_SENTIMENT": { - "type": "TEXT", - "index": 5, - "name": "REVIEW_SENTIMENT", - "comment": null - } - }, - "stats": { - "has_stats": { - "id": "has_stats", - "label": "Has Stats?", - "value": false, - "include": false, - "description": "Indicates whether there are statistics for this table" - } - }, - "unique_id": "model.dbtlearn.src_reviews" - }, - "model.dbtlearn.dim_hosts_cleansed": { - "metadata": { - "type": "VIEW", - "schema": "DEV", - "name": "DIM_HOSTS_CLEANSED", - "database": "AIRBNB", - "comment": null, - "owner": "TRANSFORM" - }, - "columns": { - "HOST_ID": { - "type": 
"NUMBER", - "index": 1, - "name": "HOST_ID", - "comment": null - }, - "HOST_NAME": { - "type": "TEXT", - "index": 2, - "name": "HOST_NAME", - "comment": null - }, - "IS_SUPERHOST": { - "type": "TEXT", - "index": 3, - "name": "IS_SUPERHOST", - "comment": null - }, - "CREATED_AT": { - "type": "TIMESTAMP_NTZ", - "index": 4, - "name": "CREATED_AT", - "comment": null - }, - "UPDATED_AT": { - "type": "TIMESTAMP_NTZ", - "index": 5, - "name": "UPDATED_AT", - "comment": null - } - }, - "stats": { - "has_stats": { - "id": "has_stats", - "label": "Has Stats?", - "value": false, - "include": false, - "description": "Indicates whether there are statistics for this table" - } - }, - "unique_id": "model.dbtlearn.dim_hosts_cleansed" - }, - "model.dbtlearn.dim_listings_w_hosts": { - "metadata": { - "type": "VIEW", - "schema": "DEV", - "name": "DIM_LISTINGS_W_HOSTS", - "database": "AIRBNB", - "comment": null, - "owner": "TRANSFORM" - }, - "columns": { - "LISTING_ID": { - "type": "NUMBER", - "index": 1, - "name": "LISTING_ID", - "comment": null - }, - "LISTING_NAME": { - "type": "TEXT", - "index": 2, - "name": "LISTING_NAME", - "comment": null - }, - "ROOM_TYPE": { - "type": "TEXT", - "index": 3, - "name": "ROOM_TYPE", - "comment": null - }, - "MINIMUM_NIGHTS": { - "type": "NUMBER", - "index": 4, - "name": "MINIMUM_NIGHTS", - "comment": null - }, - "PRICE": { - "type": "NUMBER", - "index": 5, - "name": "PRICE", - "comment": null - }, - "HOST_ID": { - "type": "NUMBER", - "index": 6, - "name": "HOST_ID", - "comment": null - }, - "HOST_NAME": { - "type": "TEXT", - "index": 7, - "name": "HOST_NAME", - "comment": null - }, - "HOST_IS_SUPERHOST": { - "type": "TEXT", - "index": 8, - "name": "HOST_IS_SUPERHOST", - "comment": null - }, - "CREATED_AT": { - "type": "TIMESTAMP_NTZ", - "index": 9, - "name": "CREATED_AT", - "comment": null - }, - "UPDATED_AT": { - "type": "TIMESTAMP_NTZ", - "index": 10, - "name": "UPDATED_AT", - "comment": null - } - }, - "stats": { - "has_stats": { - "id": 
"has_stats", - "label": "Has Stats?", - "value": false, - "include": false, - "description": "Indicates whether there are statistics for this table" - } - }, - "unique_id": "model.dbtlearn.dim_listings_w_hosts" - }, - "model.dbtlearn.fct_reviews": { - "metadata": { - "type": "BASE TABLE", - "schema": "DEV", - "name": "FCT_REVIEWS", - "database": "AIRBNB", - "comment": null, - "owner": "TRANSFORM" - }, - "columns": { - "LISTING_ID": { - "type": "NUMBER", - "index": 1, - "name": "LISTING_ID", - "comment": null - }, - "REVIEW_DATE": { - "type": "TIMESTAMP_NTZ", - "index": 2, - "name": "REVIEW_DATE", - "comment": null - }, - "REVIEWER_NAME": { - "type": "TEXT", - "index": 3, - "name": "REVIEWER_NAME", - "comment": null - }, - "REVIEW_TEXT": { - "type": "TEXT", - "index": 4, - "name": "REVIEW_TEXT", - "comment": null - }, - "REVIEW_SENTIMENT": { - "type": "TEXT", - "index": 5, - "name": "REVIEW_SENTIMENT", - "comment": null - } - }, - "stats": { - "bytes": { - "id": "bytes", - "label": "Approximate Size", - "value": 42548736.0, - "include": true, - "description": "Approximate size of the table as reported by Snowflake" - }, - "row_count": { - "id": "row_count", - "label": "Row Count", - "value": 409697.0, - "include": true, - "description": "An approximate count of rows in this table" - }, - "last_modified": { - "id": "last_modified", - "label": "Last Modified", - "value": "2022-04-05 20:47UTC", - "include": true, - "description": "The timestamp for last update/change" - }, - "has_stats": { - "id": "has_stats", - "label": "Has Stats?", - "value": true, - "include": false, - "description": "Indicates whether there are statistics for this table" - } - }, - "unique_id": "model.dbtlearn.fct_reviews" - }, - "model.dbtlearn.src_listings": { - "metadata": { - "type": "VIEW", - "schema": "DEV", - "name": "SRC_LISTINGS", - "database": "AIRBNB", - "comment": null, - "owner": "TRANSFORM" - }, - "columns": { - "LISTING_ID": { - "type": "NUMBER", - "index": 1, - "name": 
"LISTING_ID", - "comment": null - }, - "LISTING_NAME": { - "type": "TEXT", - "index": 2, - "name": "LISTING_NAME", - "comment": null - }, - "LISTING_URL": { - "type": "TEXT", - "index": 3, - "name": "LISTING_URL", - "comment": null - }, - "ROOM_TYPE": { - "type": "TEXT", - "index": 4, - "name": "ROOM_TYPE", - "comment": null - }, - "MINIMUM_NIGHTS": { - "type": "NUMBER", - "index": 5, - "name": "MINIMUM_NIGHTS", - "comment": null - }, - "HOST_ID": { - "type": "NUMBER", - "index": 6, - "name": "HOST_ID", - "comment": null - }, - "PRICE_STR": { - "type": "TEXT", - "index": 7, - "name": "PRICE_STR", - "comment": null - }, - "CREATED_AT": { - "type": "TIMESTAMP_NTZ", - "index": 8, - "name": "CREATED_AT", - "comment": null - }, - "UPDATED_AT": { - "type": "TIMESTAMP_NTZ", - "index": 9, - "name": "UPDATED_AT", - "comment": null - } - }, - "stats": { - "has_stats": { - "id": "has_stats", - "label": "Has Stats?", - "value": false, - "include": false, - "description": "Indicates whether there are statistics for this table" - } - }, - "unique_id": "model.dbtlearn.src_listings" - } - }, - "sources": {}, - "errors": null -} \ No newline at end of file diff --git a/src/test/resources/dbt/manifest.json b/src/test/resources/dbt/manifest.json deleted file mode 100644 index 93d1025..0000000 --- a/src/test/resources/dbt/manifest.json +++ /dev/null @@ -1,4889 +0,0 @@ -{ - "metadata": { - "dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v4.json", - "dbt_version": "1.0.5", - "generated_at": "2022-04-29T04:29:32.010163Z", - "invocation_id": "6f5bd62d-50bb-4d99-b92b-933c06fe7759", - "env": {}, - "project_id": "d996d79890e8636568af20a2a3912013", - "user_id": "36732dd7-1654-4270-920f-689678c03fe2", - "send_anonymous_usage_stats": true, - "adapter_type": "snowflake" - }, - "nodes": { - "model.dbtlearn.dim_listings_w_hosts": { - "raw_sql": "WITH l AS (\n SELECT *\n FROM {{ ref('dim_listings_cleansed') }}\n),\nh AS (\nSELECT *\nFROM {{ ref('dim_hosts_cleansed') }}\n )\nSELECT 
l.listing_id,\n l.listing_name,\n l.room_type,\n l.minimum_nights,\n l.price,\n l.host_id,\n h.host_name,\n h.is_superhost as host_is_superhost,\n l.created_at,\n GREATEST(l.updated_at, h.updated_at) as updated_at\nFROM l\n LEFT JOIN h ON (h.host_id = l.host_id)", - "compiled": true, - "resource_type": "model", - "depends_on": { - "macros": [], - "nodes": [ - "model.dbtlearn.dim_listings_cleansed", - "model.dbtlearn.dim_hosts_cleansed" - ] - }, - "config": { - "enabled": true, - "alias": null, - "schema": null, - "database": null, - "tags": [], - "meta": {}, - "materialized": "view", - "persist_docs": {}, - "quoting": {}, - "column_types": {}, - "full_refresh": null, - "on_schema_change": "ignore", - "post-hook": [], - "pre-hook": [] - }, - "database": "airbnb", - "schema": "dev", - "fqn": [ - "dbtlearn", - "dim", - "dim_listings_w_hosts" - ], - "unique_id": "model.dbtlearn.dim_listings_w_hosts", - "package_name": "dbtlearn", - "root_path": "/Users/ananthpackkildurai/workspace/dbt_tutorial/dbtlearn", - "path": "dim/dim_listings_w_hosts.sql", - "original_file_path": "models/dim/dim_listings_w_hosts.sql", - "name": "dim_listings_w_hosts", - "alias": "dim_listings_w_hosts", - "checksum": { - "name": "sha256", - "checksum": "243e0cabf45a05a6b0577d10399f5a023e43d3cf80398781e5e08b308cf832e9" - }, - "tags": [], - "refs": [ - [ - "dim_listings_cleansed" - ], - [ - "dim_hosts_cleansed" - ] - ], - "sources": [], - "description": "", - "columns": {}, - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "compiled_path": "target/compiled/dbtlearn/models/dim/dim_listings_w_hosts.sql", - "build_path": null, - "deferred": false, - "unrendered_config": {}, - "created_at": 1651204507.097216, - "compiled_sql": "WITH l AS (\n SELECT *\n FROM airbnb.dev.dim_listings_cleansed\n),\nh AS (\nSELECT *\nFROM airbnb.dev.dim_hosts_cleansed\n )\nSELECT l.listing_id,\n l.listing_name,\n l.room_type,\n l.minimum_nights,\n l.price,\n l.host_id,\n h.host_name,\n h.is_superhost as 
host_is_superhost,\n l.created_at,\n GREATEST(l.updated_at, h.updated_at) as updated_at\nFROM l\n LEFT JOIN h ON (h.host_id = l.host_id)", - "extra_ctes_injected": true, - "extra_ctes": [], - "relation_name": "airbnb.dev.dim_listings_w_hosts" - }, - "model.dbtlearn.dim_listings_cleansed": { - "raw_sql": "WITH src_listings AS (\n SELECT *\n FROM {{ ref('src_listings') }}\n)\nSELECT listing_id,\n listing_name,\n room_type,\n CASE\n WHEN minimum_nights = 0 THEN 1\n ELSE minimum_nights\n END AS minimum_nights,\n host_id,\n REPLACE(\n price_str,\n '$'\n ) :: NUMBER(10,\n 2) AS price,\n created_at,\n updated_at\nFROM src_listings", - "compiled": true, - "resource_type": "model", - "depends_on": { - "macros": [], - "nodes": [ - "model.dbtlearn.src_listings" - ] - }, - "config": { - "enabled": true, - "alias": null, - "schema": null, - "database": null, - "tags": [], - "meta": {}, - "materialized": "view", - "persist_docs": {}, - "quoting": {}, - "column_types": {}, - "full_refresh": null, - "on_schema_change": "ignore", - "post-hook": [], - "pre-hook": [] - }, - "database": "airbnb", - "schema": "dev", - "fqn": [ - "dbtlearn", - "dim", - "dim_listings_cleansed" - ], - "unique_id": "model.dbtlearn.dim_listings_cleansed", - "package_name": "dbtlearn", - "root_path": "/Users/ananthpackkildurai/workspace/dbt_tutorial/dbtlearn", - "path": "dim/dim_listings_cleansed.sql", - "original_file_path": "models/dim/dim_listings_cleansed.sql", - "name": "dim_listings_cleansed", - "alias": "dim_listings_cleansed", - "checksum": { - "name": "sha256", - "checksum": "004260ee497b05f21c7efd290cdf2ec5cfa8cf840a5a4b7cd2d0b7e168b7b207" - }, - "tags": [], - "refs": [ - [ - "src_listings" - ] - ], - "sources": [], - "description": "", - "columns": {}, - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "compiled_path": "target/compiled/dbtlearn/models/dim/dim_listings_cleansed.sql", - "build_path": null, - "deferred": false, - "unrendered_config": {}, - "created_at": 
1651204507.1046379, - "compiled_sql": "WITH src_listings AS (\n SELECT *\n FROM airbnb.dev.src_listings\n)\nSELECT listing_id,\n listing_name,\n room_type,\n CASE\n WHEN minimum_nights = 0 THEN 1\n ELSE minimum_nights\n END AS minimum_nights,\n host_id,\n REPLACE(\n price_str,\n '$'\n ) :: NUMBER(10,\n 2) AS price,\n created_at,\n updated_at\nFROM src_listings", - "extra_ctes_injected": true, - "extra_ctes": [], - "relation_name": "airbnb.dev.dim_listings_cleansed" - }, - "model.dbtlearn.dim_hosts_cleansed": { - "raw_sql": "{{ config(\n materialized = 'view'\n) }}\nWITH src_hosts AS (\n SELECT *\n FROM {{ ref('src_hosts') }}\n)\nSELECT host_id,\n NVL(\n host_name,\n 'Anonymous'\n ) AS host_name,\n is_superhost,\n created_at,\n updated_at\nFROM src_hosts", - "compiled": true, - "resource_type": "model", - "depends_on": { - "macros": [], - "nodes": [ - "model.dbtlearn.src_hosts" - ] - }, - "config": { - "enabled": true, - "alias": null, - "schema": null, - "database": null, - "tags": [], - "meta": {}, - "materialized": "view", - "persist_docs": {}, - "quoting": {}, - "column_types": {}, - "full_refresh": null, - "on_schema_change": "ignore", - "post-hook": [], - "pre-hook": [] - }, - "database": "airbnb", - "schema": "dev", - "fqn": [ - "dbtlearn", - "dim", - "dim_hosts_cleansed" - ], - "unique_id": "model.dbtlearn.dim_hosts_cleansed", - "package_name": "dbtlearn", - "root_path": "/Users/ananthpackkildurai/workspace/dbt_tutorial/dbtlearn", - "path": "dim/dim_hosts_cleansed.sql", - "original_file_path": "models/dim/dim_hosts_cleansed.sql", - "name": "dim_hosts_cleansed", - "alias": "dim_hosts_cleansed", - "checksum": { - "name": "sha256", - "checksum": "24b3ba82411e376956843de60351cc0711311ae4623e44da3f38e872ef979615" - }, - "tags": [], - "refs": [ - [ - "src_hosts" - ] - ], - "sources": [], - "description": "", - "columns": {}, - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "compiled_path": 
"target/compiled/dbtlearn/models/dim/dim_hosts_cleansed.sql", - "build_path": null, - "deferred": false, - "unrendered_config": { - "materialized": "view" - }, - "created_at": 1651204507.10578, - "compiled_sql": "\nWITH src_hosts AS (\n SELECT *\n FROM airbnb.dev.src_hosts\n)\nSELECT host_id,\n NVL(\n host_name,\n 'Anonymous'\n ) AS host_name,\n is_superhost,\n created_at,\n updated_at\nFROM src_hosts", - "extra_ctes_injected": true, - "extra_ctes": [], - "relation_name": "airbnb.dev.dim_hosts_cleansed" - }, - "model.dbtlearn.fct_reviews": { - "raw_sql": "{{ config(\n materialized = 'incremental',\n on_schema_change='fail'\n )\n}}\nWITH src_reviews AS (\n SELECT *\n FROM {{ ref('src_reviews') }}\n)\nSELECT *\nFROM src_reviews\nWHERE review_text is not null {% if is_incremental() %}\n AND review_date > (select max(review_date) from {{ this }})\n{% endif %}", - "compiled": true, - "resource_type": "model", - "depends_on": { - "macros": [ - "macro.dbt.is_incremental" - ], - "nodes": [ - "model.dbtlearn.src_reviews" - ] - }, - "config": { - "enabled": true, - "alias": null, - "schema": null, - "database": null, - "tags": [], - "meta": {}, - "materialized": "incremental", - "persist_docs": {}, - "quoting": {}, - "column_types": {}, - "full_refresh": null, - "on_schema_change": "fail", - "post-hook": [], - "pre-hook": [] - }, - "database": "airbnb", - "schema": "dev", - "fqn": [ - "dbtlearn", - "fct", - "fct_reviews" - ], - "unique_id": "model.dbtlearn.fct_reviews", - "package_name": "dbtlearn", - "root_path": "/Users/ananthpackkildurai/workspace/dbt_tutorial/dbtlearn", - "path": "fct/fct_reviews.sql", - "original_file_path": "models/fct/fct_reviews.sql", - "name": "fct_reviews", - "alias": "fct_reviews", - "checksum": { - "name": "sha256", - "checksum": "36107c27a2f78c29d2262ef7fd2aec2329e075380fc5af2578c592bfc78ab851" - }, - "tags": [], - "refs": [ - [ - "src_reviews" - ] - ], - "sources": [], - "description": "", - "columns": {}, - "meta": {}, - "docs": { - "show": 
true - }, - "patch_path": null, - "compiled_path": "target/compiled/dbtlearn/models/fct/fct_reviews.sql", - "build_path": null, - "deferred": false, - "unrendered_config": { - "materialized": "incremental", - "on_schema_change": "fail" - }, - "created_at": 1651204507.107425, - "compiled_sql": "\nWITH src_reviews AS (\n SELECT *\n FROM airbnb.dev.src_reviews\n)\nSELECT *\nFROM src_reviews\nWHERE review_text is not null \n AND review_date > (select max(review_date) from airbnb.dev.fct_reviews)\n", - "extra_ctes_injected": true, - "extra_ctes": [], - "relation_name": "airbnb.dev.fct_reviews" - }, - "model.dbtlearn.src_listings": { - "raw_sql": "WITH raw_listings AS (SELECT *\n FROM AIRBNB.RAW.RAW_LISTINGS\n)\nSELECT id AS listing_id,\n name AS listing_name,\n listing_url,\n room_type,\n minimum_nights,\n host_id,\n price AS price_str,\n created_at,\n updated_at\nFROM raw_listings", - "compiled": true, - "resource_type": "model", - "depends_on": { - "macros": [], - "nodes": [] - }, - "config": { - "enabled": true, - "alias": null, - "schema": null, - "database": null, - "tags": [], - "meta": {}, - "materialized": "view", - "persist_docs": {}, - "quoting": {}, - "column_types": {}, - "full_refresh": null, - "on_schema_change": "ignore", - "post-hook": [], - "pre-hook": [] - }, - "database": "airbnb", - "schema": "dev", - "fqn": [ - "dbtlearn", - "src", - "src_listings" - ], - "unique_id": "model.dbtlearn.src_listings", - "package_name": "dbtlearn", - "root_path": "/Users/ananthpackkildurai/workspace/dbt_tutorial/dbtlearn", - "path": "src/src_listings.sql", - "original_file_path": "models/src/src_listings.sql", - "name": "src_listings", - "alias": "src_listings", - "checksum": { - "name": "sha256", - "checksum": "373867ec1c842c5c2a1841399091a568c1f8f591f80649cb06ace15fff577eb3" - }, - "tags": [], - "refs": [], - "sources": [], - "description": "", - "columns": {}, - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "compiled_path": 
"target/compiled/dbtlearn/models/src/src_listings.sql", - "build_path": null, - "deferred": false, - "unrendered_config": { - "materialized": "view" - }, - "created_at": 1651204507.1133409, - "compiled_sql": "WITH raw_listings AS (SELECT *\n FROM AIRBNB.RAW.RAW_LISTINGS\n)\nSELECT id AS listing_id,\n name AS listing_name,\n listing_url,\n room_type,\n minimum_nights,\n host_id,\n price AS price_str,\n created_at,\n updated_at\nFROM raw_listings", - "extra_ctes_injected": true, - "extra_ctes": [], - "relation_name": "airbnb.dev.src_listings" - }, - "model.dbtlearn.src_hosts": { - "raw_sql": "WITH raw_hosts AS (SELECT *\n FROM AIRBNB.RAW.RAW_HOSTS\n)\nSELECT id AS host_id,\n NAME AS host_name,\n is_superhost,\n created_at,\n updated_at\nFROM raw_hosts", - "compiled": true, - "resource_type": "model", - "depends_on": { - "macros": [], - "nodes": [] - }, - "config": { - "enabled": true, - "alias": null, - "schema": null, - "database": null, - "tags": [], - "meta": {}, - "materialized": "view", - "persist_docs": {}, - "quoting": {}, - "column_types": {}, - "full_refresh": null, - "on_schema_change": "ignore", - "post-hook": [], - "pre-hook": [] - }, - "database": "airbnb", - "schema": "dev", - "fqn": [ - "dbtlearn", - "src", - "src_hosts" - ], - "unique_id": "model.dbtlearn.src_hosts", - "package_name": "dbtlearn", - "root_path": "/Users/ananthpackkildurai/workspace/dbt_tutorial/dbtlearn", - "path": "src/src_hosts.sql", - "original_file_path": "models/src/src_hosts.sql", - "name": "src_hosts", - "alias": "src_hosts", - "checksum": { - "name": "sha256", - "checksum": "032dd8da0ed7461628dcac6681d6880d923af89d3802606039ad20240821fe7d" - }, - "tags": [], - "refs": [], - "sources": [], - "description": "", - "columns": {}, - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "compiled_path": "target/compiled/dbtlearn/models/src/src_hosts.sql", - "build_path": null, - "deferred": false, - "unrendered_config": { - "materialized": "view" - }, - "created_at": 
1651204507.1144252, - "compiled_sql": "WITH raw_hosts AS (SELECT *\n FROM AIRBNB.RAW.RAW_HOSTS\n)\nSELECT id AS host_id,\n NAME AS host_name,\n is_superhost,\n created_at,\n updated_at\nFROM raw_hosts", - "extra_ctes_injected": true, - "extra_ctes": [], - "relation_name": "airbnb.dev.src_hosts" - }, - "model.dbtlearn.src_reviews": { - "raw_sql": "{{ config(\n unique_key=\"column_name\"\n) }}\n\nWITH raw_reviews AS (SELECT *\n FROM AIRBNB.RAW.RAW_REVIEWS\n)\nSELECT listing_id,\n date AS review_date,\n reviewer_name,\n comments AS review_text,\n sentiment AS review_sentiment\nFROM raw_reviews", - "compiled": true, - "resource_type": "model", - "depends_on": { - "macros": [], - "nodes": [] - }, - "config": { - "enabled": true, - "alias": null, - "schema": null, - "database": null, - "tags": [], - "meta": {}, - "materialized": "view", - "persist_docs": {}, - "quoting": {}, - "column_types": {}, - "full_refresh": null, - "on_schema_change": "ignore", - "unique_key": "column_name", - "domain": "core", - "model_type": "dimension", - "post-hook": [], - "pre-hook": [] - }, - "database": "airbnb", - "schema": "dev", - "fqn": [ - "dbtlearn", - "src", - "src_reviews" - ], - "unique_id": "model.dbtlearn.src_reviews", - "package_name": "dbtlearn", - "root_path": "/Users/ananthpackkildurai/workspace/dbt_tutorial/dbtlearn", - "path": "src/src_reviews.sql", - "original_file_path": "models/src/src_reviews.sql", - "name": "src_reviews", - "alias": "src_reviews", - "checksum": { - "name": "sha256", - "checksum": "ca47823e4b83506e5e1e9f28b8316386479dd25f69f274bf2b699e71bb9ee307" - }, - "tags": [], - "refs": [], - "sources": [], - "description": "This is a comment for src_reviews", - "columns": { - "listing_id": { - "name": "listing_id", - "description": "This is listing id name", - "meta": { - "is_primary_key": true, - "link": { - "model": "src_listings", - "column": "id" - }, - "depends": [ - { - "model": "listings", - "column": "id" - } - ] - }, - "data_type": "int", - "quote": null, 
- "tags": [] - }, - "review_date": { - "name": "review_date", - "description": "review date", - "meta": {}, - "data_type": "timestamp", - "quote": null, - "tags": [] - }, - "reviewer_name": { - "name": "reviewer_name", - "description": "some reviewer name", - "meta": {}, - "data_type": "text", - "quote": null, - "tags": [] - }, - "review_text": { - "name": "review_text", - "description": "some review text", - "meta": {}, - "data_type": "text", - "quote": null, - "tags": [] - }, - "review_sentiment": { - "name": "review_sentiment", - "description": "some review sentiment", - "meta": {}, - "data_type": "text", - "quote": null, - "tags": [] - } - }, - "meta": {}, - "docs": { - "show": true - }, - "patch_path": "dbtlearn://models/src/src_reviews.yml", - "compiled_path": "target/compiled/dbtlearn/models/src/src_reviews.sql", - "build_path": null, - "deferred": false, - "unrendered_config": { - "materialized": "view", - "unique_key": "column_name" - }, - "created_at": 1651206572.0454872, - "compiled_sql": "\n\nWITH raw_reviews AS (SELECT *\n FROM AIRBNB.RAW.RAW_REVIEWS\n)\nSELECT listing_id,\n date AS review_date,\n reviewer_name,\n comments AS review_text,\n sentiment AS review_sentiment\nFROM raw_reviews", - "extra_ctes_injected": true, - "extra_ctes": [], - "relation_name": "airbnb.dev.src_reviews" - } - }, - "sources": {}, - "macros": { - "macro.dbt_snowflake.snowflake__get_catalog": { - "unique_id": "macro.dbt_snowflake.snowflake__get_catalog", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/catalog.sql", - "original_file_path": "macros/catalog.sql", - "name": "snowflake__get_catalog", - "macro_sql": "{% macro snowflake__get_catalog(information_schema, schemas) -%}\n {% set query %}\n with tables as (\n\n select\n table_catalog as \"table_database\",\n table_schema as \"table_schema\",\n table_name as \"table_name\",\n table_type as 
\"table_type\",\n comment as \"table_comment\",\n\n -- note: this is the _role_ that owns the table\n table_owner as \"table_owner\",\n\n 'Clustering Key' as \"stats:clustering_key:label\",\n clustering_key as \"stats:clustering_key:value\",\n 'The key used to cluster this table' as \"stats:clustering_key:description\",\n (clustering_key is not null) as \"stats:clustering_key:include\",\n\n 'Row Count' as \"stats:row_count:label\",\n row_count as \"stats:row_count:value\",\n 'An approximate count of rows in this table' as \"stats:row_count:description\",\n (row_count is not null) as \"stats:row_count:include\",\n\n 'Approximate Size' as \"stats:bytes:label\",\n bytes as \"stats:bytes:value\",\n 'Approximate size of the table as reported by Snowflake' as \"stats:bytes:description\",\n (bytes is not null) as \"stats:bytes:include\",\n\n 'Last Modified' as \"stats:last_modified:label\",\n to_varchar(convert_timezone('UTC', last_altered), 'yyyy-mm-dd HH24:MI'||'UTC') as \"stats:last_modified:value\",\n 'The timestamp for last update/change' as \"stats:last_modified:description\",\n (last_altered is not null and table_type='BASE TABLE') as \"stats:last_modified:include\"\n\n from {{ information_schema }}.tables\n\n ),\n\n columns as (\n\n select\n table_catalog as \"table_database\",\n table_schema as \"table_schema\",\n table_name as \"table_name\",\n\n column_name as \"column_name\",\n ordinal_position as \"column_index\",\n data_type as \"column_type\",\n comment as \"column_comment\"\n\n from {{ information_schema }}.columns\n )\n\n select *\n from tables\n join columns using (\"table_database\", \"table_schema\", \"table_name\")\n where (\n {%- for schema in schemas -%}\n upper(\"table_schema\") = upper('{{ schema }}'){%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n order by \"column_index\"\n {%- endset -%}\n\n {{ return(run_query(query)) }}\n\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - 
"macro.dbt.run_query" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.854675 - }, - "macro.dbt_snowflake.snowflake__create_table_as": { - "unique_id": "macro.dbt_snowflake.snowflake__create_table_as", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "snowflake__create_table_as", - "macro_sql": "{% macro snowflake__create_table_as(temporary, relation, sql) -%}\n {%- set transient = config.get('transient', default=true) -%}\n {%- set cluster_by_keys = config.get('cluster_by', default=none) -%}\n {%- set enable_automatic_clustering = config.get('automatic_clustering', default=false) -%}\n {%- set copy_grants = config.get('copy_grants', default=false) -%}\n\n {%- if cluster_by_keys is not none and cluster_by_keys is string -%}\n {%- set cluster_by_keys = [cluster_by_keys] -%}\n {%- endif -%}\n {%- if cluster_by_keys is not none -%}\n {%- set cluster_by_string = cluster_by_keys|join(\", \")-%}\n {% else %}\n {%- set cluster_by_string = none -%}\n {%- endif -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create or replace {% if temporary -%}\n temporary\n {%- elif transient -%}\n transient\n {%- endif %} table {{ relation }} {% if copy_grants and not temporary -%} copy grants {%- endif %} as\n (\n {%- if cluster_by_string is not none -%}\n select * from(\n {{ sql }}\n ) order by ({{ cluster_by_string }})\n {%- else -%}\n {{ sql }}\n {%- endif %}\n );\n {% if cluster_by_string is not none and not temporary -%}\n alter table {{relation}} cluster by ({{cluster_by_string}});\n {%- endif -%}\n {% if enable_automatic_clustering and cluster_by_string is not none and not temporary -%}\n alter table {{relation}} resume 
recluster;\n {%- endif -%}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.869168 - }, - "macro.dbt_snowflake.get_column_comment_sql": { - "unique_id": "macro.dbt_snowflake.get_column_comment_sql", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "get_column_comment_sql", - "macro_sql": "{% macro get_column_comment_sql(column_name, column_dict) %}\n {{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} COMMENT $${{ column_dict[column_name]['description'] | replace('$', '[$]') }}$$\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.8695621 - }, - "macro.dbt_snowflake.get_persist_docs_column_list": { - "unique_id": "macro.dbt_snowflake.get_persist_docs_column_list", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "get_persist_docs_column_list", - "macro_sql": "{% macro get_persist_docs_column_list(model_columns, query_columns) %}\n(\n {% for column_name in query_columns %}\n {% if (column_name|upper in model_columns) or (column_name in model_columns) %}\n {{ get_column_comment_sql(column_name, model_columns) }}\n {% else %}\n {{column_name}}\n {% endif %}\n {{ \", \" if not loop.last else \"\" }}\n {% endfor %}\n)\n{% endmacro %}", - "resource_type": "macro", - 
"tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.get_column_comment_sql" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.870075 - }, - "macro.dbt_snowflake.snowflake__create_view_as": { - "unique_id": "macro.dbt_snowflake.snowflake__create_view_as", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "snowflake__create_view_as", - "macro_sql": "{% macro snowflake__create_view_as(relation, sql) -%}\n {%- set secure = config.get('secure', default=false) -%}\n {%- set copy_grants = config.get('copy_grants', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create or replace {% if secure -%}\n secure\n {%- endif %} view {{ relation }} \n {% if config.persist_column_docs() -%}\n {% set model_columns = model.columns %}\n {% set query_columns = get_columns_in_query(sql) %}\n {{ get_persist_docs_column_list(model_columns, query_columns) }}\n \n {%- endif %}\n {% if copy_grants -%} copy grants {%- endif %} as (\n {{ sql }}\n );\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.get_columns_in_query", - "macro.dbt_snowflake.get_persist_docs_column_list" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.870923 - }, - "macro.dbt_snowflake.snowflake__get_columns_in_relation": { - "unique_id": "macro.dbt_snowflake.snowflake__get_columns_in_relation", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - 
"original_file_path": "macros/adapters.sql", - "name": "snowflake__get_columns_in_relation", - "macro_sql": "{% macro snowflake__get_columns_in_relation(relation) -%}\n {%- set sql -%}\n describe table {{ relation }}\n {%- endset -%}\n {%- set result = run_query(sql) -%}\n\n {% set maximum = 10000 %}\n {% if (result | length) >= maximum %}\n {% set msg %}\n Too many columns in relation {{ relation }}! dbt can only get\n information about relations with fewer than {{ maximum }} columns.\n {% endset %}\n {% do exceptions.raise_compiler_error(msg) %}\n {% endif %}\n\n {% set columns = [] %}\n {% for row in result %}\n {% do columns.append(api.Column.from_description(row['name'], row['type'])) %}\n {% endfor %}\n {% do return(columns) %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.run_query" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.871804 - }, - "macro.dbt_snowflake.snowflake__list_schemas": { - "unique_id": "macro.dbt_snowflake.snowflake__list_schemas", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "snowflake__list_schemas", - "macro_sql": "{% macro snowflake__list_schemas(database) -%}\n {# 10k limit from here: https://docs.snowflake.net/manuals/sql-reference/sql/show-schemas.html#usage-notes #}\n {% set maximum = 10000 %}\n {% set sql -%}\n show terse schemas in database {{ database }}\n limit {{ maximum }}\n {%- endset %}\n {% set result = run_query(sql) %}\n {% if (result | length) >= maximum %}\n {% set msg %}\n Too many schemas in database {{ database }}! 
dbt can only get\n information about databases with fewer than {{ maximum }} schemas.\n {% endset %}\n {% do exceptions.raise_compiler_error(msg) %}\n {% endif %}\n {{ return(result) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.run_query" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.872472 - }, - "macro.dbt_snowflake.snowflake__list_relations_without_caching": { - "unique_id": "macro.dbt_snowflake.snowflake__list_relations_without_caching", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "snowflake__list_relations_without_caching", - "macro_sql": "{% macro snowflake__list_relations_without_caching(schema_relation) %}\n {%- set sql -%}\n show terse objects in {{ schema_relation }}\n {%- endset -%}\n\n {%- set result = run_query(sql) -%}\n {% set maximum = 10000 %}\n {% if (result | length) >= maximum %}\n {% set msg %}\n Too many schemas in schema {{ schema_relation }}! 
dbt can only get\n information about schemas with fewer than {{ maximum }} objects.\n {% endset %}\n {% do exceptions.raise_compiler_error(msg) %}\n {% endif %}\n {%- do return(result) -%}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.run_query" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.873039 - }, - "macro.dbt_snowflake.snowflake__check_schema_exists": { - "unique_id": "macro.dbt_snowflake.snowflake__check_schema_exists", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "snowflake__check_schema_exists", - "macro_sql": "{% macro snowflake__check_schema_exists(information_schema, schema) -%}\n {% call statement('check_schema_exists', fetch_result=True) -%}\n select count(*)\n from {{ information_schema }}.schemata\n where upper(schema_name) = upper('{{ schema }}')\n and upper(catalog_name) = upper('{{ information_schema.database }}')\n {%- endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.statement" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.873407 - }, - "macro.dbt_snowflake.snowflake__current_timestamp": { - "unique_id": "macro.dbt_snowflake.snowflake__current_timestamp", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "snowflake__current_timestamp", - "macro_sql": "{% macro 
snowflake__current_timestamp() -%}\n convert_timezone('UTC', current_timestamp())\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.873486 - }, - "macro.dbt_snowflake.snowflake__snapshot_string_as_time": { - "unique_id": "macro.dbt_snowflake.snowflake__snapshot_string_as_time", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "snowflake__snapshot_string_as_time", - "macro_sql": "{% macro snowflake__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"to_timestamp_ntz('\" ~ timestamp ~ \"')\" -%}\n {{ return(result) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.873683 - }, - "macro.dbt_snowflake.snowflake__snapshot_get_time": { - "unique_id": "macro.dbt_snowflake.snowflake__snapshot_get_time", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "snowflake__snapshot_get_time", - "macro_sql": "{% macro snowflake__snapshot_get_time() -%}\n to_timestamp_ntz({{ current_timestamp() }})\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.current_timestamp" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.873798 - }, - 
"macro.dbt_snowflake.snowflake__rename_relation": { - "unique_id": "macro.dbt_snowflake.snowflake__rename_relation", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "snowflake__rename_relation", - "macro_sql": "{% macro snowflake__rename_relation(from_relation, to_relation) -%}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ to_relation }}\n {%- endcall %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.statement" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.874016 - }, - "macro.dbt_snowflake.snowflake__alter_column_type": { - "unique_id": "macro.dbt_snowflake.snowflake__alter_column_type", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "snowflake__alter_column_type", - "macro_sql": "{% macro snowflake__alter_column_type(relation, column_name, new_column_type) -%}\n {% call statement('alter_column_type') %}\n alter table {{ relation }} alter {{ adapter.quote(column_name) }} set data type {{ new_column_type }};\n {% endcall %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.statement" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.874296 - }, - "macro.dbt_snowflake.snowflake__alter_relation_comment": { - "unique_id": "macro.dbt_snowflake.snowflake__alter_relation_comment", - "package_name": "dbt_snowflake", - 
"root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "snowflake__alter_relation_comment", - "macro_sql": "{% macro snowflake__alter_relation_comment(relation, relation_comment) -%}\n comment on {{ relation.type }} {{ relation }} IS $${{ relation_comment | replace('$', '[$]') }}$$;\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.8745751 - }, - "macro.dbt_snowflake.snowflake__alter_column_comment": { - "unique_id": "macro.dbt_snowflake.snowflake__alter_column_comment", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "snowflake__alter_column_comment", - "macro_sql": "{% macro snowflake__alter_column_comment(relation, column_dict) -%}\n {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute=\"name\") | list %}\n alter {{ relation.type }} {{ relation }} alter\n {% for column_name in column_dict if (column_name in existing_columns) or (column_name|upper in existing_columns) %}\n {{ get_column_comment_sql(column_name, column_dict) }} {{ ',' if not loop.last else ';' }}\n {% endfor %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.get_column_comment_sql" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.875154 - }, - "macro.dbt_snowflake.get_current_query_tag": { - "unique_id": "macro.dbt_snowflake.get_current_query_tag", - 
"package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "get_current_query_tag", - "macro_sql": "{% macro get_current_query_tag() -%}\n {{ return(run_query(\"show parameters like 'query_tag' in session\").rows[0]['value']) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.run_query" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.875356 - }, - "macro.dbt_snowflake.set_query_tag": { - "unique_id": "macro.dbt_snowflake.set_query_tag", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "set_query_tag", - "macro_sql": "{% macro set_query_tag() -%}\n {% set new_query_tag = config.get('query_tag') %}\n {% if new_query_tag %}\n {% set original_query_tag = get_current_query_tag() %}\n {{ log(\"Setting query_tag to '\" ~ new_query_tag ~ \"'. 
Will reset to '\" ~ original_query_tag ~ \"' after materialization.\") }}\n {% do run_query(\"alter session set query_tag = '{}'\".format(new_query_tag)) %}\n {{ return(original_query_tag)}}\n {% endif %}\n {{ return(none)}}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.get_current_query_tag", - "macro.dbt.run_query" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.8758762 - }, - "macro.dbt_snowflake.unset_query_tag": { - "unique_id": "macro.dbt_snowflake.unset_query_tag", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "unset_query_tag", - "macro_sql": "{% macro unset_query_tag(original_query_tag) -%}\n {% set new_query_tag = config.get('query_tag') %}\n {% if new_query_tag %}\n {% if original_query_tag %}\n {{ log(\"Resetting query_tag to '\" ~ original_query_tag ~ \"'.\") }}\n {% do run_query(\"alter session set query_tag = '{}'\".format(original_query_tag)) %}\n {% else %}\n {{ log(\"No original query_tag, unsetting parameter.\") }}\n {% do run_query(\"alter session unset query_tag\") %}\n {% endif %}\n {% endif %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.run_query" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.876409 - }, - "macro.dbt_snowflake.snowflake__alter_relation_add_remove_columns": { - "unique_id": "macro.dbt_snowflake.snowflake__alter_relation_add_remove_columns", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", 
- "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "snowflake__alter_relation_add_remove_columns", - "macro_sql": "{% macro snowflake__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n \n {% if add_columns %}\n \n {% set sql -%}\n alter {{ relation.type }} {{ relation }} add column\n {% for column in add_columns %}\n {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}\n {%- endset -%}\n\n {% do run_query(sql) %}\n\n {% endif %}\n\n {% if remove_columns %}\n \n {% set sql -%}\n alter {{ relation.type }} {{ relation }} drop column\n {% for column in remove_columns %}\n {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n {%- endset -%}\n \n {% do run_query(sql) %}\n \n {% endif %}\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.run_query" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.877253 - }, - "macro.dbt_snowflake.snowflake_dml_explicit_transaction": { - "unique_id": "macro.dbt_snowflake.snowflake_dml_explicit_transaction", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "snowflake_dml_explicit_transaction", - "macro_sql": "{% macro snowflake_dml_explicit_transaction(dml) %}\n {#\n Use this macro to wrap all INSERT, MERGE, UPDATE, DELETE, and TRUNCATE \n statements before passing them into run_query(), or calling in the 'main' statement\n of a materialization\n #}\n {% set dml_transaction -%}\n begin;\n {{ dml }};\n commit;\n {%- endset %}\n \n {% do return(dml_transaction) %}\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - 
"meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.877481 - }, - "macro.dbt_snowflake.snowflake__truncate_relation": { - "unique_id": "macro.dbt_snowflake.snowflake__truncate_relation", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "snowflake__truncate_relation", - "macro_sql": "{% macro snowflake__truncate_relation(relation) -%}\n {% set truncate_dml %}\n truncate table {{ relation }}\n {% endset %}\n {% call statement('truncate_relation') -%}\n {{ snowflake_dml_explicit_transaction(truncate_dml) }}\n {%- endcall %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.statement", - "macro.dbt_snowflake.snowflake_dml_explicit_transaction" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.877745 - }, - "macro.dbt_snowflake.snowflake__get_merge_sql": { - "unique_id": "macro.dbt_snowflake.snowflake__get_merge_sql", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/materializations/merge.sql", - "original_file_path": "macros/materializations/merge.sql", - "name": "snowflake__get_merge_sql", - "macro_sql": "{% macro snowflake__get_merge_sql(target, source_sql, unique_key, dest_columns, predicates) -%}\n\n {#\n Workaround for Snowflake not being happy with a merge on a constant-false predicate.\n When no unique_key is provided, this macro will do a regular insert. 
If a unique_key\n is provided, then this macro will do a proper merge instead.\n #}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute='name')) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {%- set dml -%}\n {%- if unique_key is none -%}\n\n {{ sql_header if sql_header is not none }}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source_sql }}\n )\n\n {%- else -%}\n\n {{ default__get_merge_sql(target, source_sql, unique_key, dest_columns, predicates) }}\n\n {%- endif -%}\n {%- endset -%}\n \n {% do return(snowflake_dml_explicit_transaction(dml)) %}\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.get_quoted_csv", - "macro.dbt.default__get_merge_sql", - "macro.dbt_snowflake.snowflake_dml_explicit_transaction" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.879235 - }, - "macro.dbt_snowflake.snowflake__get_delete_insert_merge_sql": { - "unique_id": "macro.dbt_snowflake.snowflake__get_delete_insert_merge_sql", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/materializations/merge.sql", - "original_file_path": "macros/materializations/merge.sql", - "name": "snowflake__get_delete_insert_merge_sql", - "macro_sql": "{% macro snowflake__get_delete_insert_merge_sql(target, source, unique_key, dest_columns) %}\n {% set dml = default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns) %}\n {% do return(snowflake_dml_explicit_transaction(dml)) %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__get_delete_insert_merge_sql", - "macro.dbt_snowflake.snowflake_dml_explicit_transaction" - ] - }, - "description": "", - "meta": 
{}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.879544 - }, - "macro.dbt_snowflake.snowflake__snapshot_merge_sql": { - "unique_id": "macro.dbt_snowflake.snowflake__snapshot_merge_sql", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/materializations/merge.sql", - "original_file_path": "macros/materializations/merge.sql", - "name": "snowflake__snapshot_merge_sql", - "macro_sql": "{% macro snowflake__snapshot_merge_sql(target, source, insert_cols) %}\n {% set dml = default__snapshot_merge_sql(target, source, insert_cols) %}\n {% do return(snowflake_dml_explicit_transaction(dml)) %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__snapshot_merge_sql", - "macro.dbt_snowflake.snowflake_dml_explicit_transaction" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.879824 - }, - "macro.dbt_snowflake.snowflake__load_csv_rows": { - "unique_id": "macro.dbt_snowflake.snowflake__load_csv_rows", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/materializations/seed.sql", - "original_file_path": "macros/materializations/seed.sql", - "name": "snowflake__load_csv_rows", - "macro_sql": "{% macro snowflake__load_csv_rows(model, agate_table) %}\n {% set batch_size = get_batch_size() %}\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into 
{{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n %s\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query('BEGIN', auto_begin=False) %}\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n {% do adapter.add_query('COMMIT', auto_begin=False) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.get_batch_size", - "macro.dbt.get_seed_column_quoted_csv" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.882269 - }, - "macro.dbt_snowflake.materialization_seed_snowflake": { - "unique_id": "macro.dbt_snowflake.materialization_seed_snowflake", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/materializations/seed.sql", - "original_file_path": "macros/materializations/seed.sql", - "name": "materialization_seed_snowflake", - "macro_sql": "{% materialization seed, adapter='snowflake' %}\n {% set original_query_tag = set_query_tag() %}\n\n {% set relations = materialization_seed_default() %}\n\n {% do unset_query_tag(original_query_tag) %}\n\n {{ return(relations) }}\n{% endmaterialization %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.set_query_tag", - "macro.dbt.materialization_seed_default", - "macro.dbt_snowflake.unset_query_tag" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 
1651204506.8825972 - }, - "macro.dbt_snowflake.materialization_view_snowflake": { - "unique_id": "macro.dbt_snowflake.materialization_view_snowflake", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/materializations/view.sql", - "original_file_path": "macros/materializations/view.sql", - "name": "materialization_view_snowflake", - "macro_sql": "{% materialization view, adapter='snowflake' -%}\n\n {% set original_query_tag = set_query_tag() %}\n {% set to_return = create_or_replace_view() %}\n\n {% set target_relation = this.incorporate(type='view') %}\n {% do persist_docs(target_relation, model, for_columns=false) %}\n\n {% do return(to_return) %}\n\n {% do unset_query_tag(original_query_tag) %}\n\n{%- endmaterialization %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.set_query_tag", - "macro.dbt.create_or_replace_view", - "macro.dbt.persist_docs", - "macro.dbt_snowflake.unset_query_tag" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.883271 - }, - "macro.dbt_snowflake.materialization_table_snowflake": { - "unique_id": "macro.dbt_snowflake.materialization_table_snowflake", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/materializations/table.sql", - "original_file_path": "macros/materializations/table.sql", - "name": "materialization_table_snowflake", - "macro_sql": "{% materialization table, adapter='snowflake' %}\n\n {% set original_query_tag = set_query_tag() %}\n\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set target_relation = 
api.Relation.create(identifier=identifier,\n schema=schema,\n database=database, type='table') -%}\n\n {{ run_hooks(pre_hooks) }}\n\n {#-- Drop the relation if it was a view to \"convert\" it in a table. This may lead to\n -- downtime, but it should be a relatively infrequent occurrence #}\n {% if old_relation is not none and not old_relation.is_table %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ drop_relation_if_exists(old_relation) }}\n {% endif %}\n\n --build model\n {% call statement('main') -%}\n {{ create_table_as(false, target_relation, sql) }}\n {%- endcall %}\n\n {{ run_hooks(post_hooks) }}\n\n {% do persist_docs(target_relation, model) %}\n\n {% do unset_query_tag(original_query_tag) %}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.set_query_tag", - "macro.dbt.run_hooks", - "macro.dbt.drop_relation_if_exists", - "macro.dbt.statement", - "macro.dbt.create_table_as", - "macro.dbt.persist_docs", - "macro.dbt_snowflake.unset_query_tag" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.8848581 - }, - "macro.dbt_snowflake.dbt_snowflake_validate_get_incremental_strategy": { - "unique_id": "macro.dbt_snowflake.dbt_snowflake_validate_get_incremental_strategy", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/materializations/incremental.sql", - "original_file_path": "macros/materializations/incremental.sql", - "name": "dbt_snowflake_validate_get_incremental_strategy", - "macro_sql": "{% macro dbt_snowflake_validate_get_incremental_strategy(config) %}\n {#-- Find and validate the incremental strategy #}\n {%- set strategy = 
config.get(\"incremental_strategy\", default=\"merge\") -%}\n\n {% set invalid_strategy_msg -%}\n Invalid incremental strategy provided: {{ strategy }}\n Expected one of: 'merge', 'delete+insert'\n {%- endset %}\n {% if strategy not in ['merge', 'delete+insert'] %}\n {% do exceptions.raise_compiler_error(invalid_strategy_msg) %}\n {% endif %}\n\n {% do return(strategy) %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.886385 - }, - "macro.dbt_snowflake.dbt_snowflake_get_incremental_sql": { - "unique_id": "macro.dbt_snowflake.dbt_snowflake_get_incremental_sql", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/materializations/incremental.sql", - "original_file_path": "macros/materializations/incremental.sql", - "name": "dbt_snowflake_get_incremental_sql", - "macro_sql": "{% macro dbt_snowflake_get_incremental_sql(strategy, tmp_relation, target_relation, unique_key, dest_columns) %}\n {% if strategy == 'merge' %}\n {% do return(get_merge_sql(target_relation, tmp_relation, unique_key, dest_columns)) %}\n {% elif strategy == 'delete+insert' %}\n {% do return(get_delete_insert_merge_sql(target_relation, tmp_relation, unique_key, dest_columns)) %}\n {% else %}\n {% do exceptions.raise_compiler_error('invalid strategy: ' ~ strategy) %}\n {% endif %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.get_merge_sql", - "macro.dbt.get_delete_insert_merge_sql" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.88697 - }, - "macro.dbt_snowflake.materialization_incremental_snowflake": { - "unique_id": 
"macro.dbt_snowflake.materialization_incremental_snowflake", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/materializations/incremental.sql", - "original_file_path": "macros/materializations/incremental.sql", - "name": "materialization_incremental_snowflake", - "macro_sql": "{% materialization incremental, adapter='snowflake' -%}\n \n {% set original_query_tag = set_query_tag() %}\n\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {% set target_relation = this %}\n {% set existing_relation = load_relation(this) %}\n {% set tmp_relation = make_temp_relation(this) %}\n\n {#-- Validate early so we don't run SQL if the strategy is invalid --#}\n {% set strategy = dbt_snowflake_validate_get_incremental_strategy(config) -%}\n {% set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') %}\n\n {{ run_hooks(pre_hooks) }}\n\n {% if existing_relation is none %}\n {% set build_sql = create_table_as(False, target_relation, sql) %}\n \n {% elif existing_relation.is_view %}\n {#-- Can't overwrite a view with a table - we must drop --#}\n {{ log(\"Dropping relation \" ~ target_relation ~ \" because it is a view and this model is a table.\") }}\n {% do adapter.drop_relation(existing_relation) %}\n {% set build_sql = create_table_as(False, target_relation, sql) %}\n \n {% elif full_refresh_mode %}\n {% set build_sql = create_table_as(False, target_relation, sql) %}\n \n {% else %}\n {% do run_query(create_table_as(True, tmp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=tmp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. 
Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, tmp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n {% set build_sql = dbt_snowflake_get_incremental_sql(strategy, tmp_relation, target_relation, unique_key, dest_columns) %}\n \n {% endif %}\n\n {%- call statement('main') -%}\n {{ build_sql }}\n {%- endcall -%}\n\n {{ run_hooks(post_hooks) }}\n\n {% set target_relation = target_relation.incorporate(type='table') %}\n {% do persist_docs(target_relation, model) %}\n\n {% do unset_query_tag(original_query_tag) %}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.set_query_tag", - "macro.dbt.should_full_refresh", - "macro.dbt.load_relation", - "macro.dbt.make_temp_relation", - "macro.dbt_snowflake.dbt_snowflake_validate_get_incremental_strategy", - "macro.dbt.incremental_validate_on_schema_change", - "macro.dbt.run_hooks", - "macro.dbt.create_table_as", - "macro.dbt.run_query", - "macro.dbt.process_schema_changes", - "macro.dbt_snowflake.dbt_snowflake_get_incremental_sql", - "macro.dbt.statement", - "macro.dbt.persist_docs", - "macro.dbt_snowflake.unset_query_tag" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.889248 - }, - "macro.dbt_snowflake.materialization_snapshot_snowflake": { - "unique_id": "macro.dbt_snowflake.materialization_snapshot_snowflake", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/materializations/snapshot.sql", - "original_file_path": "macros/materializations/snapshot.sql", - "name": "materialization_snapshot_snowflake", - 
"macro_sql": "{% materialization snapshot, adapter='snowflake' %}\n {% set original_query_tag = set_query_tag() %}\n\n {% set relations = materialization_snapshot_default() %}\n\n {% do unset_query_tag(original_query_tag) %}\n\n {{ return(relations) }}\n{% endmaterialization %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.set_query_tag", - "macro.dbt.materialization_snapshot_default", - "macro.dbt_snowflake.unset_query_tag" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.88973 - }, - "macro.dbt.run_hooks": { - "unique_id": "macro.dbt.run_hooks", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/hooks.sql", - "original_file_path": "macros/materializations/hooks.sql", - "name": "run_hooks", - "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.statement" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.890977 - }, - "macro.dbt.make_hook_config": { - "unique_id": "macro.dbt.make_hook_config", - "package_name": "dbt", - "root_path": 
"/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/hooks.sql", - "original_file_path": "macros/materializations/hooks.sql", - "name": "make_hook_config", - "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.891188 - }, - "macro.dbt.before_begin": { - "unique_id": "macro.dbt.before_begin", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/hooks.sql", - "original_file_path": "macros/materializations/hooks.sql", - "name": "before_begin", - "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.make_hook_config" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.891339 - }, - "macro.dbt.in_transaction": { - "unique_id": "macro.dbt.in_transaction", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/hooks.sql", - "original_file_path": "macros/materializations/hooks.sql", - "name": "in_transaction", - "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.make_hook_config" - ] - }, - 
"description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.891488 - }, - "macro.dbt.after_commit": { - "unique_id": "macro.dbt.after_commit", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/hooks.sql", - "original_file_path": "macros/materializations/hooks.sql", - "name": "after_commit", - "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.make_hook_config" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.891636 - }, - "macro.dbt.set_sql_header": { - "unique_id": "macro.dbt.set_sql_header", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/configs.sql", - "original_file_path": "macros/materializations/configs.sql", - "name": "set_sql_header", - "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.8921118 - }, - "macro.dbt.should_full_refresh": { - "unique_id": "macro.dbt.should_full_refresh", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/configs.sql", - "original_file_path": "macros/materializations/configs.sql", - "name": 
"should_full_refresh", - "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.892443 - }, - "macro.dbt.should_store_failures": { - "unique_id": "macro.dbt.should_store_failures", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/configs.sql", - "original_file_path": "macros/materializations/configs.sql", - "name": "should_store_failures", - "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.8928242 - }, - "macro.dbt.snapshot_merge_sql": { - "unique_id": "macro.dbt.snapshot_merge_sql", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/snapshot_merge.sql", - "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", - "name": "snapshot_merge_sql", - "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql', 'dbt')(target, 
source, insert_cols) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__snapshot_merge_sql" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.893415 - }, - "macro.dbt.default__snapshot_merge_sql": { - "unique_id": "macro.dbt.default__snapshot_merge_sql", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/snapshot_merge.sql", - "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", - "name": "default__snapshot_merge_sql", - "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.8937159 - }, - "macro.dbt.strategy_dispatch": { - "unique_id": "macro.dbt.strategy_dispatch", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/strategies.sql", - "original_file_path": 
"macros/materializations/snapshots/strategies.sql", - "name": "strategy_dispatch", - "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.897434 - }, - "macro.dbt.snapshot_hash_arguments": { - "unique_id": "macro.dbt.snapshot_hash_arguments", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/strategies.sql", - "original_file_path": "macros/materializations/snapshots/strategies.sql", - "name": "snapshot_hash_arguments", - "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__snapshot_hash_arguments" - ] - }, - "description": "", - "meta": {}, 
- "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.8976178 - }, - "macro.dbt.default__snapshot_hash_arguments": { - "unique_id": "macro.dbt.default__snapshot_hash_arguments", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/strategies.sql", - "original_file_path": "macros/materializations/snapshots/strategies.sql", - "name": "default__snapshot_hash_arguments", - "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.8978539 - }, - "macro.dbt.snapshot_get_time": { - "unique_id": "macro.dbt.snapshot_get_time", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/strategies.sql", - "original_file_path": "macros/materializations/snapshots/strategies.sql", - "name": "snapshot_get_time", - "macro_sql": "{% macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__snapshot_get_time" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.898006 - }, - "macro.dbt.default__snapshot_get_time": { - "unique_id": "macro.dbt.default__snapshot_get_time", - "package_name": "dbt", - "root_path": 
"/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/strategies.sql", - "original_file_path": "macros/materializations/snapshots/strategies.sql", - "name": "default__snapshot_get_time", - "macro_sql": "{% macro default__snapshot_get_time() -%}\n {{ current_timestamp() }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.current_timestamp" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.898108 - }, - "macro.dbt.snapshot_timestamp_strategy": { - "unique_id": "macro.dbt.snapshot_timestamp_strategy", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/strategies.sql", - "original_file_path": "macros/materializations/snapshots/strategies.sql", - "name": "snapshot_timestamp_strategy", - "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. 
We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/dbt-labs/dbt-core/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.snapshot_hash_arguments" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.898955 - }, - "macro.dbt.snapshot_string_as_time": { - "unique_id": "macro.dbt.snapshot_string_as_time", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/strategies.sql", - "original_file_path": "macros/materializations/snapshots/strategies.sql", - "name": "snapshot_string_as_time", - "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__snapshot_string_as_time" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.89913 - }, - "macro.dbt.default__snapshot_string_as_time": { - "unique_id": "macro.dbt.default__snapshot_string_as_time", - "package_name": "dbt", - "root_path": 
"/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/strategies.sql", - "original_file_path": "macros/materializations/snapshots/strategies.sql", - "name": "default__snapshot_string_as_time", - "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.899321 - }, - "macro.dbt.snapshot_check_all_get_existing_columns": { - "unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/strategies.sql", - "original_file_path": "macros/materializations/snapshots/strategies.sql", - "name": "snapshot_check_all_get_existing_columns", - "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists) -%}\n {%- set query_columns = get_columns_in_query(node['compiled_sql']) -%}\n {%- if not target_exists -%}\n {# no table yet -> return whatever the query does #}\n {{ return([false, query_columns]) }}\n {%- endif -%}\n {# handle any schema changes #}\n {%- set target_table = node.get('alias', node.get('name')) -%}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=target_table) -%}\n {%- set existing_cols = get_columns_in_query('select * from ' ~ target_relation) -%}\n {%- set ns = namespace() -%} {# handle for-loop scoping with a namespace #}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for 
col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(col) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return([ns.column_added, intersection]) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.get_columns_in_query" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9003892 - }, - "macro.dbt.snapshot_check_strategy": { - "unique_id": "macro.dbt.snapshot_check_strategy", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/strategies.sql", - "original_file_path": "macros/materializations/snapshots/strategies.sql", - "name": "snapshot_check_strategy", - "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n \n {% set select_current_time -%}\n select {{ snapshot_get_time() }} as snapshot_start\n {%- endset %}\n\n {#-- don't access the column by name, to avoid dealing with casing issues on snowflake #}\n {%- set now = run_query(select_current_time)[0][0] -%}\n {% if now is none or now is undefined -%}\n {%- do exceptions.raise_compiler_error('Could not get a snapshot start time from the database') -%}\n {%- endif %}\n {% set updated_at = config.get('updated_at', snapshot_string_as_time(now)) %}\n\n {% set column_added = false %}\n\n {% if check_cols_config == 'all' %}\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists) %}\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {% set 
check_cols = check_cols_config %}\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n TRUE\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.snapshot_get_time", - "macro.dbt.run_query", - "macro.dbt.snapshot_string_as_time", - "macro.dbt.snapshot_check_all_get_existing_columns", - "macro.dbt.snapshot_hash_arguments" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9024189 - }, - "macro.dbt.create_columns": { - "unique_id": "macro.dbt.create_columns", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/helpers.sql", - "original_file_path": "macros/materializations/snapshots/helpers.sql", - "name": "create_columns", - "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ adapter.dispatch('create_columns', 'dbt')(relation, columns) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - 
"macro.dbt.default__create_columns" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.906004 - }, - "macro.dbt.default__create_columns": { - "unique_id": "macro.dbt.default__create_columns", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/helpers.sql", - "original_file_path": "macros/materializations/snapshots/helpers.sql", - "name": "default__create_columns", - "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.statement" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.906307 - }, - "macro.dbt.post_snapshot": { - "unique_id": "macro.dbt.post_snapshot", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/helpers.sql", - "original_file_path": "macros/materializations/snapshots/helpers.sql", - "name": "post_snapshot", - "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__post_snapshot" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.906485 - }, - "macro.dbt.default__post_snapshot": { - "unique_id": 
"macro.dbt.default__post_snapshot", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/helpers.sql", - "original_file_path": "macros/materializations/snapshots/helpers.sql", - "name": "default__post_snapshot", - "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.906583 - }, - "macro.dbt.snapshot_staging_table": { - "unique_id": "macro.dbt.snapshot_staging_table", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/helpers.sql", - "original_file_path": "macros/materializations/snapshots/helpers.sql", - "name": "snapshot_staging_table", - "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n {{ adapter.dispatch('snapshot_staging_table', 'dbt')(strategy, source_sql, target_relation) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__snapshot_staging_table" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.906805 - }, - "macro.dbt.default__snapshot_staging_table": { - "unique_id": "macro.dbt.default__snapshot_staging_table", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/helpers.sql", - "original_file_path": "macros/materializations/snapshots/helpers.sql", - 
"name": "default__snapshot_staging_table", - "macro_sql": "{% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select \n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n \n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as 
dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n \n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.snapshot_get_time" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.907786 - }, - "macro.dbt.build_snapshot_table": { - "unique_id": "macro.dbt.build_snapshot_table", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/helpers.sql", - "original_file_path": "macros/materializations/snapshots/helpers.sql", - "name": "build_snapshot_table", - "macro_sql": "{% macro build_snapshot_table(strategy, sql) -%}\n {{ adapter.dispatch('build_snapshot_table', 'dbt')(strategy, sql) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__build_snapshot_table" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.907994 - }, - "macro.dbt.default__build_snapshot_table": { - "unique_id": "macro.dbt.default__build_snapshot_table", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/helpers.sql", - "original_file_path": "macros/materializations/snapshots/helpers.sql", - "name": 
"default__build_snapshot_table", - "macro_sql": "{% macro default__build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.908271 - }, - "macro.dbt.build_snapshot_staging_table": { - "unique_id": "macro.dbt.build_snapshot_staging_table", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/helpers.sql", - "original_file_path": "macros/materializations/snapshots/helpers.sql", - "name": "build_snapshot_staging_table", - "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set tmp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, tmp_relation, select) }}\n {% endcall %}\n\n {% do return(tmp_relation) %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.make_temp_relation", - "macro.dbt.snapshot_staging_table", - "macro.dbt.statement", - "macro.dbt.create_table_as" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.908737 - }, - "macro.dbt.materialization_snapshot_default": { - "unique_id": "macro.dbt.materialization_snapshot_default", - "package_name": "dbt", - "root_path": 
"/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/snapshot.sql", - "original_file_path": "macros/materializations/snapshots/snapshot.sql", - "name": "materialization_snapshot_default", - "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n\n {% if not adapter.check_schema_exists(model.database, model.schema) %}\n {% do create_schema(model.database, model.schema) %}\n {% endif %}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_sql']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | 
rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.create_schema", - "macro.dbt.get_or_create_relation", - "macro.dbt.run_hooks", - "macro.dbt.strategy_dispatch", - "macro.dbt.build_snapshot_table", - "macro.dbt.create_table_as", - "macro.dbt.build_snapshot_staging_table", - "macro.dbt.create_columns", - "macro.dbt.snapshot_merge_sql", - "macro.dbt.statement", - "macro.dbt.persist_docs", - "macro.dbt.create_indexes", - "macro.dbt.post_snapshot" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 
1651204506.915369 - }, - "macro.dbt.materialization_test_default": { - "unique_id": "macro.dbt.materialization_test_default", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/tests/test.sql", - "original_file_path": "macros/materializations/tests/test.sql", - "name": "materialization_test_default", - "macro_sql": "{%- materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type='table') -%} %}\n \n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n \n {% call statement(auto_begin=True) %}\n {{ create_table_as(False, target_relation, sql) }}\n {% endcall %}\n \n {% do relations.append(target_relation) %}\n \n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n \n {{ adapter.commit() }}\n \n {% else %}\n\n {% set main_sql = sql %}\n \n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n \n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.should_store_failures", - "macro.dbt.statement", - "macro.dbt.create_table_as", - "macro.dbt.get_test_sql" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 
1651204506.917732 - }, - "macro.dbt.get_test_sql": { - "unique_id": "macro.dbt.get_test_sql", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/tests/helpers.sql", - "original_file_path": "macros/materializations/tests/helpers.sql", - "name": "get_test_sql", - "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__get_test_sql" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.918294 - }, - "macro.dbt.default__get_test_sql": { - "unique_id": "macro.dbt.default__get_test_sql", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/tests/helpers.sql", - "original_file_path": "macros/materializations/tests/helpers.sql", - "name": "default__get_test_sql", - "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.918626 - }, - "macro.dbt.get_where_subquery": { - "unique_id": "macro.dbt.get_where_subquery", - "package_name": "dbt", - "root_path": 
"/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/tests/where_subquery.sql", - "original_file_path": "macros/materializations/tests/where_subquery.sql", - "name": "get_where_subquery", - "macro_sql": "{% macro get_where_subquery(relation) -%}\n {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__get_where_subquery" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.919103 - }, - "macro.dbt.default__get_where_subquery": { - "unique_id": "macro.dbt.default__get_where_subquery", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/tests/where_subquery.sql", - "original_file_path": "macros/materializations/tests/where_subquery.sql", - "name": "default__get_where_subquery", - "macro_sql": "{% macro default__get_where_subquery(relation) -%}\n {% set where = config.get('where', '') %}\n {% if where %}\n {%- set filtered -%}\n (select * from {{ relation }} where {{ where }}) dbt_subquery\n {%- endset -%}\n {% do return(filtered) %}\n {%- else -%}\n {% do return(relation) %}\n {%- endif -%}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.919512 - }, - "macro.dbt.get_quoted_csv": { - "unique_id": "macro.dbt.get_quoted_csv", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": 
"macros/materializations/models/incremental/column_helpers.sql", - "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", - "name": "get_quoted_csv", - "macro_sql": "{% macro get_quoted_csv(column_names) %}\n \n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.920457 - }, - "macro.dbt.diff_columns": { - "unique_id": "macro.dbt.diff_columns", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/incremental/column_helpers.sql", - "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", - "name": "diff_columns", - "macro_sql": "{% macro diff_columns(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% set source_names = source_columns | map(attribute = 'column') | list %}\n {% set target_names = target_columns | map(attribute = 'column') | list %}\n \n {# --check whether the name attribute exists in the target - this does not perform a data type check #}\n {% for sc in source_columns %}\n {% if sc.name not in target_names %}\n {{ result.append(sc) }}\n {% endif %}\n {% endfor %}\n \n {{ return(result) }}\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.921053 - }, - "macro.dbt.diff_column_data_types": { - "unique_id": "macro.dbt.diff_column_data_types", - "package_name": "dbt", - 
"root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/incremental/column_helpers.sql", - "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", - "name": "diff_column_data_types", - "macro_sql": "{% macro diff_column_data_types(source_columns, target_columns) %}\n \n {% set result = [] %}\n {% for sc in source_columns %}\n {% set tc = target_columns | selectattr(\"name\", \"equalto\", sc.name) | list | first %}\n {% if tc %}\n {% if sc.data_type != tc.data_type %}\n {{ result.append( { 'column_name': tc.name, 'new_type': sc.data_type } ) }} \n {% endif %}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.921717 - }, - "macro.dbt.get_merge_sql": { - "unique_id": "macro.dbt.get_merge_sql", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/incremental/merge.sql", - "original_file_path": "macros/materializations/models/incremental/merge.sql", - "name": "get_merge_sql", - "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, predicates=none) -%}\n {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, predicates) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__get_merge_sql" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9248629 - }, - "macro.dbt.default__get_merge_sql": { - "unique_id": 
"macro.dbt.default__get_merge_sql", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/incremental/merge.sql", - "original_file_path": "macros/materializations/models/incremental/merge.sql", - "name": "default__get_merge_sql", - "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, predicates) -%}\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set update_columns = config.get('merge_update_columns', default = dest_columns | map(attribute=\"quoted\") | list) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{ predicates | join(' and ') }}\n\n {% if unique_key %}\n when matched then update set\n {% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n {% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.get_quoted_csv" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.926126 - }, - "macro.dbt.get_delete_insert_merge_sql": { - "unique_id": "macro.dbt.get_delete_insert_merge_sql", - "package_name": "dbt", - "root_path": 
"/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/incremental/merge.sql", - "original_file_path": "macros/materializations/models/incremental/merge.sql", - "name": "get_delete_insert_merge_sql", - "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__get_delete_insert_merge_sql" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.926379 - }, - "macro.dbt.default__get_delete_insert_merge_sql": { - "unique_id": "macro.dbt.default__get_delete_insert_merge_sql", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/incremental/merge.sql", - "original_file_path": "macros/materializations/models/incremental/merge.sql", - "name": "default__get_delete_insert_merge_sql", - "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key is not none %}\n delete from {{ target }}\n where ({{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n );\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.get_quoted_csv" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - 
"arguments": [], - "created_at": 1651204506.926843 - }, - "macro.dbt.get_insert_overwrite_merge_sql": { - "unique_id": "macro.dbt.get_insert_overwrite_merge_sql", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/incremental/merge.sql", - "original_file_path": "macros/materializations/models/incremental/merge.sql", - "name": "get_insert_overwrite_merge_sql", - "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql', 'dbt')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__get_insert_overwrite_merge_sql" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.927128 - }, - "macro.dbt.default__get_insert_overwrite_merge_sql": { - "unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/incremental/merge.sql", - "original_file_path": "macros/materializations/models/incremental/merge.sql", - "name": "default__get_insert_overwrite_merge_sql", - "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target 
}} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.get_quoted_csv" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.927835 - }, - "macro.dbt.is_incremental": { - "unique_id": "macro.dbt.is_incremental", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/incremental/is_incremental.sql", - "original_file_path": "macros/materializations/models/incremental/is_incremental.sql", - "name": "is_incremental", - "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.should_full_refresh" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.928639 - }, - "macro.dbt.materialization_incremental_default": { - "unique_id": "macro.dbt.materialization_incremental_default", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": 
"macros/materializations/models/incremental/incremental.sql", - "original_file_path": "macros/materializations/models/incremental/incremental.sql", - "name": "materialization_incremental_default", - "macro_sql": "{% materialization incremental, default -%}\n\n {% set unique_key = config.get('unique_key') %}\n\n {% set target_relation = this.incorporate(type='table') %}\n {% set existing_relation = load_relation(this) %}\n {% set tmp_relation = make_temp_relation(target_relation) %}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {% set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') %}\n\n {% set tmp_identifier = model['name'] + '__dbt_tmp' %}\n {% set backup_identifier = model['name'] + \"__dbt_backup\" %}\n\n -- the intermediate_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. 
This has to happen before\n -- BEGIN, in a separate transaction\n {% set preexisting_intermediate_relation = adapter.get_relation(identifier=tmp_identifier, \n schema=schema,\n database=database) %} \n {% set preexisting_backup_relation = adapter.get_relation(identifier=backup_identifier,\n schema=schema,\n database=database) %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {# -- first check whether we want to full refresh for source view or config reasons #}\n {% set trigger_full_refresh = (full_refresh_mode or existing_relation.is_view) %}\n\n {% if existing_relation is none %}\n {% set build_sql = create_table_as(False, target_relation, sql) %}\n{% elif trigger_full_refresh %}\n {#-- Make sure the backup doesn't exist so we don't encounter issues with the rename below #}\n {% set tmp_identifier = model['name'] + '__dbt_tmp' %}\n {% set backup_identifier = model['name'] + '__dbt_backup' %}\n {% set intermediate_relation = existing_relation.incorporate(path={\"identifier\": tmp_identifier}) %}\n {% set backup_relation = existing_relation.incorporate(path={\"identifier\": backup_identifier}) %}\n\n {% set build_sql = create_table_as(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% do to_drop.append(backup_relation) %}\n {% else %}\n {% do run_query(create_table_as(True, tmp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=tmp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. 
Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, tmp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n {% set build_sql = get_delete_insert_merge_sql(target_relation, tmp_relation, unique_key, dest_columns) %}\n \n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %} \n {% do adapter.rename_relation(target_relation, backup_relation) %} \n {% do adapter.rename_relation(intermediate_relation, target_relation) %} \n {% endif %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.load_relation", - "macro.dbt.make_temp_relation", - "macro.dbt.should_full_refresh", - "macro.dbt.incremental_validate_on_schema_change", - "macro.dbt.drop_relation_if_exists", - "macro.dbt.run_hooks", - "macro.dbt.create_table_as", - "macro.dbt.run_query", - "macro.dbt.process_schema_changes", - "macro.dbt.get_delete_insert_merge_sql", - "macro.dbt.statement", - "macro.dbt.persist_docs", - "macro.dbt.create_indexes" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9340842 - }, - "macro.dbt.incremental_validate_on_schema_change": { - "unique_id": "macro.dbt.incremental_validate_on_schema_change", - "package_name": "dbt", - 
"root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/incremental/on_schema_change.sql", - "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", - "name": "incremental_validate_on_schema_change", - "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n \n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n \n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}\n {% do log(log_message) %}\n \n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n \n {% endif %}\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.93955 - }, - "macro.dbt.check_for_schema_changes": { - "unique_id": "macro.dbt.check_for_schema_changes", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/incremental/on_schema_change.sql", - "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", - "name": "check_for_schema_changes", - "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n \n {% set schema_changed = False %}\n \n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n\n {% set 
new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n \n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'source_columns': source_columns,\n 'target_columns': target_columns,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New column types: {{ new_target_types }}\n {% endset %}\n \n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.diff_columns", - "macro.dbt.diff_column_data_types" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9409401 - }, - "macro.dbt.sync_column_schemas": { - "unique_id": "macro.dbt.sync_column_schemas", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/incremental/on_schema_change.sql", - "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", - "name": "sync_column_schemas", - "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n \n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do 
alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n \n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n \n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %} \n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n \n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types changed: {{ new_target_types }}\n {% endset %}\n \n {% do log(schema_change_message) %}\n \n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.alter_relation_add_remove_columns", - "macro.dbt.alter_column_type" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.94226 - }, - "macro.dbt.process_schema_changes": { - "unique_id": "macro.dbt.process_schema_changes", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/incremental/on_schema_change.sql", - "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", - "name": "process_schema_changes", - "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n \n {% if 
on_schema_change == 'ignore' %}\n\n {{ return({}) }}\n\n {% else %}\n \n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n \n {% if schema_changes_dict['schema_changed'] %}\n \n {% if on_schema_change == 'fail' %}\n \n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways: \n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n {% endset %}\n \n {% do exceptions.raise_compiler_error(fail_msg) %}\n \n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n \n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n \n {% endif %}\n \n {% endif %}\n\n {{ return(schema_changes_dict['source_columns']) }}\n \n {% endif %}\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.check_for_schema_changes", - "macro.dbt.sync_column_schemas" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9430559 - }, - "macro.dbt.materialization_table_default": { - "unique_id": "macro.dbt.materialization_table_default", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/table/table.sql", - "original_file_path": "macros/materializations/models/table/table.sql", - "name": "materialization_table_default", - "macro_sql": "{% materialization table, default %}\n {%- set identifier = model['alias'] -%}\n {%- set tmp_identifier = model['name'] + '__dbt_tmp' -%}\n {%- set backup_identifier = model['name'] + '__dbt_backup' 
-%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set target_relation = api.Relation.create(identifier=identifier,\n schema=schema,\n database=database,\n type='table') -%}\n {%- set intermediate_relation = api.Relation.create(identifier=tmp_identifier,\n schema=schema,\n database=database,\n type='table') -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = adapter.get_relation(identifier=tmp_identifier, \n schema=schema,\n database=database) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if old_relation is none else old_relation.type -%}\n {%- set backup_relation = api.Relation.create(identifier=backup_identifier,\n schema=schema,\n database=database,\n type=backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = adapter.get_relation(identifier=backup_identifier,\n schema=schema,\n database=database) -%}\n\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if old_relation is not none %}\n {{ adapter.rename_relation(old_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ 
run_hooks(post_hooks, inside_transaction=True) }}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.drop_relation_if_exists", - "macro.dbt.run_hooks", - "macro.dbt.statement", - "macro.dbt.get_create_table_as_sql", - "macro.dbt.create_indexes", - "macro.dbt.persist_docs" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.947229 - }, - "macro.dbt.get_create_table_as_sql": { - "unique_id": "macro.dbt.get_create_table_as_sql", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/table/create_table_as.sql", - "original_file_path": "macros/materializations/models/table/create_table_as.sql", - "name": "get_create_table_as_sql", - "macro_sql": "{% macro get_create_table_as_sql(temporary, relation, sql) -%}\n {{ adapter.dispatch('get_create_table_as_sql', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__get_create_table_as_sql" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.947777 - }, - "macro.dbt.default__get_create_table_as_sql": { - "unique_id": "macro.dbt.default__get_create_table_as_sql", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", 
- "path": "macros/materializations/models/table/create_table_as.sql", - "original_file_path": "macros/materializations/models/table/create_table_as.sql", - "name": "default__get_create_table_as_sql", - "macro_sql": "{% macro default__get_create_table_as_sql(temporary, relation, sql) -%}\n {{ return(create_table_as(temporary, relation, sql)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.create_table_as" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.947985 - }, - "macro.dbt.create_table_as": { - "unique_id": "macro.dbt.create_table_as", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/table/create_table_as.sql", - "original_file_path": "macros/materializations/models/table/create_table_as.sql", - "name": "create_table_as", - "macro_sql": "{% macro create_table_as(temporary, relation, sql) -%}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__create_table_as" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.948208 - }, - "macro.dbt.default__create_table_as": { - "unique_id": "macro.dbt.default__create_table_as", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/table/create_table_as.sql", - "original_file_path": "macros/materializations/models/table/create_table_as.sql", - "name": "default__create_table_as", - "macro_sql": "{% macro 
default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n \n {{ sql_header if sql_header is not none }}\n \n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n as (\n {{ sql }}\n );\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.94865 - }, - "macro.dbt.materialization_view_default": { - "unique_id": "macro.dbt.materialization_view_default", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/view/view.sql", - "original_file_path": "macros/materializations/models/view/view.sql", - "name": "materialization_view_default", - "macro_sql": "{%- materialization view, default -%}\n\n {%- set identifier = model['alias'] -%}\n {%- set tmp_identifier = model['name'] + '__dbt_tmp' -%}\n {%- set backup_identifier = model['name'] + '__dbt_backup' -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set target_relation = api.Relation.create(identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {%- set intermediate_relation = api.Relation.create(identifier=tmp_identifier,\n schema=schema, database=database, type='view') -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. 
Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = adapter.get_relation(identifier=tmp_identifier, \n schema=schema,\n database=database) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"old_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the old_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the old_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if old_relation is none else old_relation.type -%}\n {%- set backup_relation = api.Relation.create(identifier=backup_identifier,\n schema=schema, database=database,\n type=backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = adapter.get_relation(identifier=backup_identifier,\n schema=schema,\n database=database) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ create_view_as(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if old_relation is not none %}\n {{ adapter.rename_relation(old_relation, backup_relation) 
}}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.run_hooks", - "macro.dbt.drop_relation_if_exists", - "macro.dbt.statement", - "macro.dbt.create_view_as", - "macro.dbt.persist_docs" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.952714 - }, - "macro.dbt.handle_existing_table": { - "unique_id": "macro.dbt.handle_existing_table", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/view/helpers.sql", - "original_file_path": "macros/materializations/models/view/helpers.sql", - "name": "handle_existing_table", - "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__handle_existing_table" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.953145 - }, - "macro.dbt.default__handle_existing_table": { - "unique_id": "macro.dbt.default__handle_existing_table", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/view/helpers.sql", - 
"original_file_path": "macros/materializations/models/view/helpers.sql", - "name": "default__handle_existing_table", - "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.953383 - }, - "macro.dbt.create_or_replace_view": { - "unique_id": "macro.dbt.create_or_replace_view", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/view/create_or_replace_view.sql", - "original_file_path": "macros/materializations/models/view/create_or_replace_view.sql", - "name": "create_or_replace_view", - "macro_sql": "{% macro create_or_replace_view() %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n\n {{ run_hooks(pre_hooks) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. 
This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(target_relation, sql) }}\n {%- endcall %}\n\n {{ run_hooks(post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.run_hooks", - "macro.dbt.handle_existing_table", - "macro.dbt.should_full_refresh", - "macro.dbt.statement", - "macro.dbt.get_create_view_as_sql" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.954754 - }, - "macro.dbt.get_create_view_as_sql": { - "unique_id": "macro.dbt.get_create_view_as_sql", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/view/create_view_as.sql", - "original_file_path": "macros/materializations/models/view/create_view_as.sql", - "name": "get_create_view_as_sql", - "macro_sql": "{% macro get_create_view_as_sql(relation, sql) -%}\n {{ adapter.dispatch('get_create_view_as_sql', 'dbt')(relation, sql) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__get_create_view_as_sql" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.955221 - }, - "macro.dbt.default__get_create_view_as_sql": { - "unique_id": "macro.dbt.default__get_create_view_as_sql", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": 
"macros/materializations/models/view/create_view_as.sql", - "original_file_path": "macros/materializations/models/view/create_view_as.sql", - "name": "default__get_create_view_as_sql", - "macro_sql": "{% macro default__get_create_view_as_sql(relation, sql) -%}\n {{ return(create_view_as(relation, sql)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.create_view_as" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.955395 - }, - "macro.dbt.create_view_as": { - "unique_id": "macro.dbt.create_view_as", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/view/create_view_as.sql", - "original_file_path": "macros/materializations/models/view/create_view_as.sql", - "name": "create_view_as", - "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__create_view_as" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.955586 - }, - "macro.dbt.default__create_view_as": { - "unique_id": "macro.dbt.default__create_view_as", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/view/create_view_as.sql", - "original_file_path": "macros/materializations/models/view/create_view_as.sql", - "name": "default__create_view_as", - "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ 
sql_header if sql_header is not none }}\n create view {{ relation }} as (\n {{ sql }}\n );\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.955868 - }, - "macro.dbt.materialization_seed_default": { - "unique_id": "macro.dbt.materialization_seed_default", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/seeds/seed.sql", - "original_file_path": "macros/materializations/seeds/seed.sql", - "name": "materialization_seed_default", - "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set agate_table = load_agate_table() -%}\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set 
sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ create_table_sql }};\n -- dbt seed --\n {{ sql }}\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.should_full_refresh", - "macro.dbt.run_hooks", - "macro.dbt.reset_csv_table", - "macro.dbt.create_csv_table", - "macro.dbt.load_csv_rows", - "macro.dbt.noop_statement", - "macro.dbt.persist_docs", - "macro.dbt.create_indexes" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9590452 - }, - "macro.dbt.create_csv_table": { - "unique_id": "macro.dbt.create_csv_table", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/seeds/helpers.sql", - "original_file_path": "macros/materializations/seeds/helpers.sql", - "name": "create_csv_table", - "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__create_csv_table" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.963721 - }, - 
"macro.dbt.default__create_csv_table": { - "unique_id": "macro.dbt.default__create_csv_table", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/seeds/helpers.sql", - "original_file_path": "macros/materializations/seeds/helpers.sql", - "name": "default__create_csv_table", - "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.statement" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.964705 - }, - "macro.dbt.reset_csv_table": { - "unique_id": "macro.dbt.reset_csv_table", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/seeds/helpers.sql", - "original_file_path": "macros/materializations/seeds/helpers.sql", - "name": "reset_csv_table", - "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, 
old_relation, agate_table) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__reset_csv_table" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.96496 - }, - "macro.dbt.default__reset_csv_table": { - "unique_id": "macro.dbt.default__reset_csv_table", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/seeds/helpers.sql", - "original_file_path": "macros/materializations/seeds/helpers.sql", - "name": "default__reset_csv_table", - "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.create_csv_table" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9654882 - }, - "macro.dbt.get_binding_char": { - "unique_id": "macro.dbt.get_binding_char", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/seeds/helpers.sql", - "original_file_path": "macros/materializations/seeds/helpers.sql", - "name": "get_binding_char", - "macro_sql": "{% macro get_binding_char() -%}\n {{ adapter.dispatch('get_binding_char', 'dbt')() }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": 
[ - "macro.dbt.default__get_binding_char" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.965648 - }, - "macro.dbt.default__get_binding_char": { - "unique_id": "macro.dbt.default__get_binding_char", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/seeds/helpers.sql", - "original_file_path": "macros/materializations/seeds/helpers.sql", - "name": "default__get_binding_char", - "macro_sql": "{% macro default__get_binding_char() %}\n {{ return('%s') }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.965769 - }, - "macro.dbt.get_batch_size": { - "unique_id": "macro.dbt.get_batch_size", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/seeds/helpers.sql", - "original_file_path": "macros/materializations/seeds/helpers.sql", - "name": "get_batch_size", - "macro_sql": "{% macro get_batch_size() -%}\n {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__get_batch_size" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.965937 - }, - "macro.dbt.default__get_batch_size": { - "unique_id": "macro.dbt.default__get_batch_size", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": 
"macros/materializations/seeds/helpers.sql", - "original_file_path": "macros/materializations/seeds/helpers.sql", - "name": "default__get_batch_size", - "macro_sql": "{% macro default__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.966062 - }, - "macro.dbt.get_seed_column_quoted_csv": { - "unique_id": "macro.dbt.get_seed_column_quoted_csv", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/seeds/helpers.sql", - "original_file_path": "macros/materializations/seeds/helpers.sql", - "name": "get_seed_column_quoted_csv", - "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9665868 - }, - "macro.dbt.load_csv_rows": { - "unique_id": "macro.dbt.load_csv_rows", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/seeds/helpers.sql", - "original_file_path": "macros/materializations/seeds/helpers.sql", - "name": "load_csv_rows", - "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ 
adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__load_csv_rows" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.966848 - }, - "macro.dbt.default__load_csv_rows": { - "unique_id": "macro.dbt.default__load_csv_rows", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/seeds/helpers.sql", - "original_file_path": "macros/materializations/seeds/helpers.sql", - "name": "default__load_csv_rows", - "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n\n {% set batch_size = get_batch_size() %}\n\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.get_batch_size", - "macro.dbt.get_seed_column_quoted_csv", - "macro.dbt.get_binding_char" - ] - }, - "description": "", - "meta": 
{}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.968174 - }, - "macro.dbt.generate_alias_name": { - "unique_id": "macro.dbt.generate_alias_name", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/get_custom_name/get_custom_alias.sql", - "original_file_path": "macros/get_custom_name/get_custom_alias.sql", - "name": "generate_alias_name", - "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__generate_alias_name" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.968678 - }, - "macro.dbt.default__generate_alias_name": { - "unique_id": "macro.dbt.default__generate_alias_name", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/get_custom_name/get_custom_alias.sql", - "original_file_path": "macros/get_custom_name/get_custom_alias.sql", - "name": "default__generate_alias_name", - "macro_sql": "{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name is none -%}\n\n {{ node.name }}\n\n {%- else -%}\n\n {{ custom_alias_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9689212 - }, - "macro.dbt.generate_schema_name": { - "unique_id": "macro.dbt.generate_schema_name", - 
"package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/get_custom_name/get_custom_schema.sql", - "original_file_path": "macros/get_custom_name/get_custom_schema.sql", - "name": "generate_schema_name", - "macro_sql": "{% macro generate_schema_name(custom_schema_name=none, node=none) -%}\n {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__generate_schema_name" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.969553 - }, - "macro.dbt.default__generate_schema_name": { - "unique_id": "macro.dbt.default__generate_schema_name", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/get_custom_name/get_custom_schema.sql", - "original_file_path": "macros/get_custom_name/get_custom_schema.sql", - "name": "default__generate_schema_name", - "macro_sql": "{% macro default__generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.969832 - }, - "macro.dbt.generate_schema_name_for_env": { - "unique_id": "macro.dbt.generate_schema_name_for_env", - "package_name": "dbt", - "root_path": 
"/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/get_custom_name/get_custom_schema.sql", - "original_file_path": "macros/get_custom_name/get_custom_schema.sql", - "name": "generate_schema_name_for_env", - "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.97014 - }, - "macro.dbt.generate_database_name": { - "unique_id": "macro.dbt.generate_database_name", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/get_custom_name/get_custom_database.sql", - "original_file_path": "macros/get_custom_name/get_custom_database.sql", - "name": "generate_database_name", - "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__generate_database_name" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9706528 - }, - "macro.dbt.default__generate_database_name": { - "unique_id": "macro.dbt.default__generate_database_name", - "package_name": "dbt", - "root_path": 
"/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/get_custom_name/get_custom_database.sql", - "original_file_path": "macros/get_custom_name/get_custom_database.sql", - "name": "default__generate_database_name", - "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.970923 - }, - "macro.dbt.default__test_relationships": { - "unique_id": "macro.dbt.default__test_relationships", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/generic_test_sql/relationships.sql", - "original_file_path": "macros/generic_test_sql/relationships.sql", - "name": "default__test_relationships", - "macro_sql": "{% macro default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.971371 - }, - "macro.dbt.default__test_not_null": { - "unique_id": "macro.dbt.default__test_not_null", 
- "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/generic_test_sql/not_null.sql", - "original_file_path": "macros/generic_test_sql/not_null.sql", - "name": "default__test_not_null", - "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\nselect *\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.971654 - }, - "macro.dbt.default__test_unique": { - "unique_id": "macro.dbt.default__test_unique", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/generic_test_sql/unique.sql", - "original_file_path": "macros/generic_test_sql/unique.sql", - "name": "default__test_unique", - "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9720042 - }, - "macro.dbt.default__test_accepted_values": { - "unique_id": "macro.dbt.default__test_accepted_values", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/generic_test_sql/accepted_values.sql", - "original_file_path": "macros/generic_test_sql/accepted_values.sql", - "name": 
"default__test_accepted_values", - "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by {{ column_name }}\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.972692 - }, - "macro.dbt.statement": { - "unique_id": "macro.dbt.statement", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/etc/statement.sql", - "original_file_path": "macros/etc/statement.sql", - "name": "statement", - "macro_sql": "{% macro statement(name=None, fetch_result=False, auto_begin=True) -%}\n {%- if execute: -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- set res, table = adapter.execute(sql, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.973983 - }, - "macro.dbt.noop_statement": { - "unique_id": "macro.dbt.noop_statement", - "package_name": "dbt", - "root_path": 
"/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/etc/statement.sql", - "original_file_path": "macros/etc/statement.sql", - "name": "noop_statement", - "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9746199 - }, - "macro.dbt.run_query": { - "unique_id": "macro.dbt.run_query", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/etc/statement.sql", - "original_file_path": "macros/etc/statement.sql", - "name": "run_query", - "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.statement" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.974944 - }, - "macro.dbt.convert_datetime": { - "unique_id": "macro.dbt.convert_datetime", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - 
"path": "macros/etc/datetime.sql", - "original_file_path": "macros/etc/datetime.sql", - "name": "convert_datetime", - "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9769669 - }, - "macro.dbt.dates_in_range": { - "unique_id": "macro.dbt.dates_in_range", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/etc/datetime.sql", - "original_file_path": "macros/etc/datetime.sql", - "name": "dates_in_range", - "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partiton start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% 
endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.convert_datetime" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.978344 - }, - "macro.dbt.partition_range": { - "unique_id": "macro.dbt.partition_range", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/etc/datetime.sql", - "original_file_path": "macros/etc/datetime.sql", - "name": "partition_range", - "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. 
Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.dates_in_range" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.979158 - }, - "macro.dbt.py_current_timestring": { - "unique_id": "macro.dbt.py_current_timestring", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/etc/datetime.sql", - "original_file_path": "macros/etc/datetime.sql", - "name": "py_current_timestring", - "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.979411 - }, - "macro.dbt.create_schema": { - "unique_id": "macro.dbt.create_schema", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/schema.sql", - "original_file_path": "macros/adapters/schema.sql", - "name": "create_schema", - "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__create_schema" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.979896 - }, - "macro.dbt.default__create_schema": { - "unique_id": 
"macro.dbt.default__create_schema", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/schema.sql", - "original_file_path": "macros/adapters/schema.sql", - "name": "default__create_schema", - "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.statement" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.980093 - }, - "macro.dbt.drop_schema": { - "unique_id": "macro.dbt.drop_schema", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/schema.sql", - "original_file_path": "macros/adapters/schema.sql", - "name": "drop_schema", - "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__drop_schema" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.980265 - }, - "macro.dbt.default__drop_schema": { - "unique_id": "macro.dbt.default__drop_schema", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/schema.sql", - "original_file_path": "macros/adapters/schema.sql", - "name": "default__drop_schema", - "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') 
-%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.statement" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9804611 - }, - "macro.dbt.get_create_index_sql": { - "unique_id": "macro.dbt.get_create_index_sql", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/indexes.sql", - "original_file_path": "macros/adapters/indexes.sql", - "name": "get_create_index_sql", - "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__get_create_index_sql" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.981035 - }, - "macro.dbt.default__get_create_index_sql": { - "unique_id": "macro.dbt.default__get_create_index_sql", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/indexes.sql", - "original_file_path": "macros/adapters/indexes.sql", - "name": "default__get_create_index_sql", - "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.981178 - }, - "macro.dbt.create_indexes": { - "unique_id": 
"macro.dbt.create_indexes", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/indexes.sql", - "original_file_path": "macros/adapters/indexes.sql", - "name": "create_indexes", - "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__create_indexes" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.981345 - }, - "macro.dbt.default__create_indexes": { - "unique_id": "macro.dbt.default__create_indexes", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/indexes.sql", - "original_file_path": "macros/adapters/indexes.sql", - "name": "default__create_indexes", - "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.get_create_index_sql", - "macro.dbt.run_query" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9817758 - }, - "macro.dbt.make_temp_relation": { - "unique_id": "macro.dbt.make_temp_relation", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": 
"macros/adapters/relation.sql", - "original_file_path": "macros/adapters/relation.sql", - "name": "make_temp_relation", - "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix))}}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__make_temp_relation" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9835799 - }, - "macro.dbt.default__make_temp_relation": { - "unique_id": "macro.dbt.default__make_temp_relation", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/relation.sql", - "original_file_path": "macros/adapters/relation.sql", - "name": "default__make_temp_relation", - "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {% set tmp_identifier = base_relation.identifier ~ suffix %}\n {% set tmp_relation = base_relation.incorporate(\n path={\"identifier\": tmp_identifier}) -%}\n\n {% do return(tmp_relation) %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.983905 - }, - "macro.dbt.drop_relation": { - "unique_id": "macro.dbt.drop_relation", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/relation.sql", - "original_file_path": "macros/adapters/relation.sql", - "name": "drop_relation", - "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}\n{% endmacro %}", - 
"resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__drop_relation" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.984101 - }, - "macro.dbt.default__drop_relation": { - "unique_id": "macro.dbt.default__drop_relation", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/relation.sql", - "original_file_path": "macros/adapters/relation.sql", - "name": "default__drop_relation", - "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n {%- endcall %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.statement" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9843352 - }, - "macro.dbt.truncate_relation": { - "unique_id": "macro.dbt.truncate_relation", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/relation.sql", - "original_file_path": "macros/adapters/relation.sql", - "name": "truncate_relation", - "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__truncate_relation" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.98453 - }, - "macro.dbt.default__truncate_relation": { - "unique_id": 
"macro.dbt.default__truncate_relation", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/relation.sql", - "original_file_path": "macros/adapters/relation.sql", - "name": "default__truncate_relation", - "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.statement" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.984697 - }, - "macro.dbt.rename_relation": { - "unique_id": "macro.dbt.rename_relation", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/relation.sql", - "original_file_path": "macros/adapters/relation.sql", - "name": "rename_relation", - "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__rename_relation" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.984915 - }, - "macro.dbt.default__rename_relation": { - "unique_id": "macro.dbt.default__rename_relation", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/relation.sql", - "original_file_path": "macros/adapters/relation.sql", - "name": "default__rename_relation", - 
"macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.statement" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.985218 - }, - "macro.dbt.get_or_create_relation": { - "unique_id": "macro.dbt.get_or_create_relation", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/relation.sql", - "original_file_path": "macros/adapters/relation.sql", - "name": "get_or_create_relation", - "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) -%}\n {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__get_or_create_relation" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.985485 - }, - "macro.dbt.default__get_or_create_relation": { - "unique_id": "macro.dbt.default__get_or_create_relation", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/relation.sql", - "original_file_path": "macros/adapters/relation.sql", - "name": "default__get_or_create_relation", - "macro_sql": "{% macro default__get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = 
adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.986151 - }, - "macro.dbt.load_relation": { - "unique_id": "macro.dbt.load_relation", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/relation.sql", - "original_file_path": "macros/adapters/relation.sql", - "name": "load_relation", - "macro_sql": "{% macro load_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9864 - }, - "macro.dbt.drop_relation_if_exists": { - "unique_id": "macro.dbt.drop_relation_if_exists", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/relation.sql", - "original_file_path": "macros/adapters/relation.sql", - "name": "drop_relation_if_exists", - "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": 
[] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.986603 - }, - "macro.dbt.current_timestamp": { - "unique_id": "macro.dbt.current_timestamp", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/freshness.sql", - "original_file_path": "macros/adapters/freshness.sql", - "name": "current_timestamp", - "macro_sql": "{% macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__current_timestamp" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.987173 - }, - "macro.dbt.default__current_timestamp": { - "unique_id": "macro.dbt.default__current_timestamp", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/freshness.sql", - "original_file_path": "macros/adapters/freshness.sql", - "name": "default__current_timestamp", - "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter '+adapter.type()) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9873269 - }, - "macro.dbt.collect_freshness": { - "unique_id": "macro.dbt.collect_freshness", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - 
"path": "macros/adapters/freshness.sql", - "original_file_path": "macros/adapters/freshness.sql", - "name": "collect_freshness", - "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__collect_freshness" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.987576 - }, - "macro.dbt.default__collect_freshness": { - "unique_id": "macro.dbt.default__collect_freshness", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/freshness.sql", - "original_file_path": "macros/adapters/freshness.sql", - "name": "default__collect_freshness", - "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness').table) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.statement", - "macro.dbt.current_timestamp" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.98803 - }, - "macro.dbt.alter_column_comment": { - "unique_id": "macro.dbt.alter_column_comment", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": 
"macros/adapters/persist_docs.sql", - "original_file_path": "macros/adapters/persist_docs.sql", - "name": "alter_column_comment", - "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__alter_column_comment" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9888341 - }, - "macro.dbt.default__alter_column_comment": { - "unique_id": "macro.dbt.default__alter_column_comment", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/persist_docs.sql", - "original_file_path": "macros/adapters/persist_docs.sql", - "name": "default__alter_column_comment", - "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.989012 - }, - "macro.dbt.alter_relation_comment": { - "unique_id": "macro.dbt.alter_relation_comment", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/persist_docs.sql", - "original_file_path": "macros/adapters/persist_docs.sql", - "name": "alter_relation_comment", - "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 
'dbt')(relation, relation_comment)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__alter_relation_comment" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9892302 - }, - "macro.dbt.default__alter_relation_comment": { - "unique_id": "macro.dbt.default__alter_relation_comment", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/persist_docs.sql", - "original_file_path": "macros/adapters/persist_docs.sql", - "name": "default__alter_relation_comment", - "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.989405 - }, - "macro.dbt.persist_docs": { - "unique_id": "macro.dbt.persist_docs", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/persist_docs.sql", - "original_file_path": "macros/adapters/persist_docs.sql", - "name": "persist_docs", - "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__persist_docs" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - 
"patch_path": null, - "arguments": [], - "created_at": 1651204506.989704 - }, - "macro.dbt.default__persist_docs": { - "unique_id": "macro.dbt.default__persist_docs", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/persist_docs.sql", - "original_file_path": "macros/adapters/persist_docs.sql", - "name": "default__persist_docs", - "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.run_query", - "macro.dbt.alter_relation_comment", - "macro.dbt.alter_column_comment" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.990229 - }, - "macro.dbt.get_catalog": { - "unique_id": "macro.dbt.get_catalog", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/metadata.sql", - "original_file_path": "macros/adapters/metadata.sql", - "name": "get_catalog", - "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__get_catalog" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": 
[], - "created_at": 1651204506.991852 - }, - "macro.dbt.default__get_catalog": { - "unique_id": "macro.dbt.default__get_catalog", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/metadata.sql", - "original_file_path": "macros/adapters/metadata.sql", - "name": "default__get_catalog", - "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.992126 - }, - "macro.dbt.information_schema_name": { - "unique_id": "macro.dbt.information_schema_name", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/metadata.sql", - "original_file_path": "macros/adapters/metadata.sql", - "name": "information_schema_name", - "macro_sql": "{% macro information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__information_schema_name" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.992324 - }, - "macro.dbt.default__information_schema_name": { - "unique_id": "macro.dbt.default__information_schema_name", - "package_name": "dbt", - "root_path": 
"/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/metadata.sql", - "original_file_path": "macros/adapters/metadata.sql", - "name": "default__information_schema_name", - "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.992484 - }, - "macro.dbt.list_schemas": { - "unique_id": "macro.dbt.list_schemas", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/metadata.sql", - "original_file_path": "macros/adapters/metadata.sql", - "name": "list_schemas", - "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__list_schemas" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.992687 - }, - "macro.dbt.default__list_schemas": { - "unique_id": "macro.dbt.default__list_schemas", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/metadata.sql", - "original_file_path": "macros/adapters/metadata.sql", - "name": "default__list_schemas", - "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) 
}}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.information_schema_name", - "macro.dbt.run_query" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.99295 - }, - "macro.dbt.check_schema_exists": { - "unique_id": "macro.dbt.check_schema_exists", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/metadata.sql", - "original_file_path": "macros/adapters/metadata.sql", - "name": "check_schema_exists", - "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__check_schema_exists" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9931688 - }, - "macro.dbt.default__check_schema_exists": { - "unique_id": "macro.dbt.default__check_schema_exists", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/metadata.sql", - "original_file_path": "macros/adapters/metadata.sql", - "name": "default__check_schema_exists", - "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) 
}}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.run_query" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.993557 - }, - "macro.dbt.list_relations_without_caching": { - "unique_id": "macro.dbt.list_relations_without_caching", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/metadata.sql", - "original_file_path": "macros/adapters/metadata.sql", - "name": "list_relations_without_caching", - "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__list_relations_without_caching" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9937558 - }, - "macro.dbt.default__list_relations_without_caching": { - "unique_id": "macro.dbt.default__list_relations_without_caching", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/metadata.sql", - "original_file_path": "macros/adapters/metadata.sql", - "name": "default__list_relations_without_caching", - "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": 
null, - "arguments": [], - "created_at": 1651204506.9939241 - }, - "macro.dbt.get_columns_in_relation": { - "unique_id": "macro.dbt.get_columns_in_relation", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/columns.sql", - "original_file_path": "macros/adapters/columns.sql", - "name": "get_columns_in_relation", - "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__get_columns_in_relation" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.995882 - }, - "macro.dbt.default__get_columns_in_relation": { - "unique_id": "macro.dbt.default__get_columns_in_relation", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/columns.sql", - "original_file_path": "macros/adapters/columns.sql", - "name": "default__get_columns_in_relation", - "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.996049 - }, - "macro.dbt.sql_convert_columns_in_relation": { - "unique_id": "macro.dbt.sql_convert_columns_in_relation", - "package_name": "dbt", - "root_path": 
"/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/columns.sql", - "original_file_path": "macros/adapters/columns.sql", - "name": "sql_convert_columns_in_relation", - "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.996382 - }, - "macro.dbt.get_columns_in_query": { - "unique_id": "macro.dbt.get_columns_in_query", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/columns.sql", - "original_file_path": "macros/adapters/columns.sql", - "name": "get_columns_in_query", - "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__get_columns_in_query" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9965818 - }, - "macro.dbt.default__get_columns_in_query": { - "unique_id": "macro.dbt.default__get_columns_in_query", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/columns.sql", - "original_file_path": "macros/adapters/columns.sql", - "name": "default__get_columns_in_query", - "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% 
call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n {% endcall %}\n\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.statement" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.996951 - }, - "macro.dbt.alter_column_type": { - "unique_id": "macro.dbt.alter_column_type", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/columns.sql", - "original_file_path": "macros/adapters/columns.sql", - "name": "alter_column_type", - "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__alter_column_type" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.997199 - }, - "macro.dbt.default__alter_column_type": { - "unique_id": "macro.dbt.default__alter_column_type", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/columns.sql", - "original_file_path": "macros/adapters/columns.sql", - "name": "default__alter_column_type", - "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. 
Drop the existing column (cascade!)\n 4. Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.statement" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.997854 - }, - "macro.dbt.alter_relation_add_remove_columns": { - "unique_id": "macro.dbt.alter_relation_add_remove_columns", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/columns.sql", - "original_file_path": "macros/adapters/columns.sql", - "name": "alter_relation_add_remove_columns", - "macro_sql": "{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__alter_relation_add_remove_columns" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.998132 - }, - "macro.dbt.default__alter_relation_add_remove_columns": { - "unique_id": "macro.dbt.default__alter_relation_add_remove_columns", - "package_name": "dbt", - 
"root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/columns.sql", - "original_file_path": "macros/adapters/columns.sql", - "name": "default__alter_relation_add_remove_columns", - "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n \n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n \n {% set sql -%}\n \n alter {{ relation.type }} {{ relation }}\n \n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n \n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n \n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.run_query" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.998991 - }, - "macro.dbt.test_unique": { - "unique_id": "macro.dbt.test_unique", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "tests/generic/builtin.sql", - "original_file_path": "tests/generic/builtin.sql", - "name": "test_unique", - "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__test_unique" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.999637 
- }, - "macro.dbt.test_not_null": { - "unique_id": "macro.dbt.test_not_null", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "tests/generic/builtin.sql", - "original_file_path": "tests/generic/builtin.sql", - "name": "test_not_null", - "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__test_not_null" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.999885 - }, - "macro.dbt.test_accepted_values": { - "unique_id": "macro.dbt.test_accepted_values", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "tests/generic/builtin.sql", - "original_file_path": "tests/generic/builtin.sql", - "name": "test_accepted_values", - "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__test_accepted_values" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204507.000203 - }, - "macro.dbt.test_relationships": { - "unique_id": "macro.dbt.test_relationships", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "tests/generic/builtin.sql", - "original_file_path": "tests/generic/builtin.sql", - 
"name": "test_relationships", - "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__test_relationships" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204507.000502 - } - }, - "docs": { - "dbt.__overview__": { - "unique_id": "dbt.__overview__", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "overview.md", - "original_file_path": "docs/overview.md", - "name": "__overview__", - "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. 
By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the\nmodels in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/introduction)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion" - } - }, - "exposures": {}, - "metrics": {}, - "selectors": {}, - "disabled": {}, - "parent_map": { - "model.dbtlearn.dim_listings_w_hosts": [ - "model.dbtlearn.dim_hosts_cleansed", - "model.dbtlearn.dim_listings_cleansed" - ], - "model.dbtlearn.dim_listings_cleansed": [ - "model.dbtlearn.src_listings" - ], - "model.dbtlearn.dim_hosts_cleansed": [ - "model.dbtlearn.src_hosts" - ], - "model.dbtlearn.fct_reviews": [ - "model.dbtlearn.src_reviews" - ], - "model.dbtlearn.src_listings": [], - "model.dbtlearn.src_hosts": [], - "model.dbtlearn.src_reviews": [] - }, - "child_map": { - "model.dbtlearn.dim_listings_w_hosts": [], - "model.dbtlearn.dim_listings_cleansed": [ - "model.dbtlearn.dim_listings_w_hosts" - ], - "model.dbtlearn.dim_hosts_cleansed": [ - "model.dbtlearn.dim_listings_w_hosts" - ], - "model.dbtlearn.fct_reviews": [], - "model.dbtlearn.src_listings": [ - "model.dbtlearn.dim_listings_cleansed" - ], - "model.dbtlearn.src_hosts": [ - "model.dbtlearn.dim_hosts_cleansed" - ], - "model.dbtlearn.src_reviews": [ - "model.dbtlearn.fct_reviews" - ] - } -} \ No newline at end of file diff --git a/src/test/resources/dbt_change/catalog.json 
b/src/test/resources/dbt_change/catalog.json deleted file mode 100644 index f0399fb..0000000 --- a/src/test/resources/dbt_change/catalog.json +++ /dev/null @@ -1,470 +0,0 @@ -{ - "metadata": { - "dbt_schema_version": "https://schemas.getdbt.com/dbt/catalog/v1.json", - "dbt_version": "1.0.5", - "generated_at": "2022-04-29T04:29:35.188250Z", - "invocation_id": "6f5bd62d-50bb-4d99-b92b-933c06fe7759", - "env": {} - }, - "nodes": { - "model.dbtlearn.dim_listings_cleansed_changed": { - "metadata": { - "type": "VIEW", - "schema": "DEV", - "name": "DIM_LISTINGS_CLEANSED_CHANGED", - "database": "AIRBNB", - "comment": null, - "owner": "TRANSFORM" - }, - "columns": { - "LISTING_ID": { - "type": "NUMBER", - "index": 1, - "name": "LISTING_ID", - "comment": null - }, - "LISTING_NAME": { - "type": "TEXT", - "index": 2, - "name": "LISTING_NAME", - "comment": null - }, - "ROOM_TYPE": { - "type": "TEXT", - "index": 3, - "name": "ROOM_TYPE", - "comment": null - }, - "MINIMUM_NIGHTS": { - "type": "NUMBER", - "index": 4, - "name": "MINIMUM_NIGHTS", - "comment": null - }, - "HOST_ID": { - "type": "NUMBER", - "index": 5, - "name": "HOST_ID", - "comment": null - }, - "PRICE": { - "type": "NUMBER", - "index": 6, - "name": "PRICE", - "comment": null - }, - "CREATED_AT": { - "type": "TIMESTAMP_NTZ", - "index": 7, - "name": "CREATED_AT", - "comment": null - }, - "UPDATED_AT": { - "type": "TIMESTAMP_NTZ", - "index": 8, - "name": "UPDATED_AT", - "comment": null - } - }, - "stats": { - "has_stats": { - "id": "has_stats", - "label": "Has Stats?", - "value": false, - "include": false, - "description": "Indicates whether there are statistics for this table" - } - }, - "unique_id": "model.dbtlearn.dim_listings_cleansed_changed" - }, - "model.dbtlearn.src_hosts": { - "metadata": { - "type": "VIEW", - "schema": "DEV", - "name": "SRC_HOSTS", - "database": "AIRBNB", - "comment": null, - "owner": "TRANSFORM" - }, - "columns": { - "HOST_ID": { - "type": "NUMBER", - "index": 1, - "name": "HOST_ID", - 
"comment": null - }, - "HOST_NAME": { - "type": "TEXT", - "index": 2, - "name": "HOST_NAME", - "comment": null - }, - "IS_SUPERHOST": { - "type": "TEXT", - "index": 3, - "name": "IS_SUPERHOST", - "comment": null - }, - "CREATED_AT": { - "type": "TIMESTAMP_NTZ", - "index": 4, - "name": "CREATED_AT", - "comment": null - }, - "UPDATED_AT": { - "type": "TIMESTAMP_NTZ", - "index": 5, - "name": "UPDATED_AT", - "comment": null - } - }, - "stats": { - "has_stats": { - "id": "has_stats", - "label": "Has Stats?", - "value": false, - "include": false, - "description": "Indicates whether there are statistics for this table" - } - }, - "unique_id": "model.dbtlearn.src_hosts" - }, - "model.dbtlearn.src_reviews": { - "metadata": { - "type": "VIEW", - "schema": "DEV", - "name": "SRC_REVIEWS", - "database": "AIRBNB", - "comment": null, - "owner": "TRANSFORM" - }, - "columns": { - "LISTING_ID": { - "type": "NUMBER", - "index": 1, - "name": "LISTING_ID", - "comment": null - }, - "REVIEW_DATE": { - "type": "TIMESTAMP_NTZ", - "index": 2, - "name": "REVIEW_DATE", - "comment": null - }, - "REVIEWER_NAME": { - "type": "TEXT", - "index": 3, - "name": "REVIEWER_NAME", - "comment": null - }, - "REVIEW_TEXT": { - "type": "TEXT", - "index": 4, - "name": "REVIEW_TEXT", - "comment": null - }, - "REVIEW_SENTIMENT": { - "type": "TEXT", - "index": 5, - "name": "REVIEW_SENTIMENT", - "comment": null - } - }, - "stats": { - "has_stats": { - "id": "has_stats", - "label": "Has Stats?", - "value": false, - "include": false, - "description": "Indicates whether there are statistics for this table" - } - }, - "unique_id": "model.dbtlearn.src_reviews" - }, - "model.dbtlearn.dim_hosts_cleansed": { - "metadata": { - "type": "VIEW", - "schema": "DEV", - "name": "DIM_HOSTS_CLEANSED", - "database": "AIRBNB", - "comment": null, - "owner": "TRANSFORM" - }, - "columns": { - "HOST_ID": { - "type": "NUMBER", - "index": 1, - "name": "HOST_ID", - "comment": null - }, - "HOST_NAME": { - "type": "TEXT", - "index": 2, - 
"name": "HOST_NAME", - "comment": null - }, - "IS_SUPERHOST": { - "type": "TEXT", - "index": 3, - "name": "IS_SUPERHOST", - "comment": null - }, - "CREATED_AT": { - "type": "TIMESTAMP_NTZ", - "index": 4, - "name": "CREATED_AT", - "comment": null - }, - "UPDATED_AT": { - "type": "TIMESTAMP_NTZ", - "index": 5, - "name": "UPDATED_AT", - "comment": null - } - }, - "stats": { - "has_stats": { - "id": "has_stats", - "label": "Has Stats?", - "value": false, - "include": false, - "description": "Indicates whether there are statistics for this table" - } - }, - "unique_id": "model.dbtlearn.dim_hosts_cleansed" - }, - "model.dbtlearn.dim_listings_w_hosts": { - "metadata": { - "type": "VIEW", - "schema": "DEV", - "name": "DIM_LISTINGS_W_HOSTS", - "database": "AIRBNB", - "comment": null, - "owner": "TRANSFORM" - }, - "columns": { - "LISTING_ID": { - "type": "NUMBER", - "index": 1, - "name": "LISTING_ID", - "comment": null - }, - "LISTING_NAME": { - "type": "TEXT", - "index": 2, - "name": "LISTING_NAME", - "comment": null - }, - "ROOM_TYPE": { - "type": "TEXT", - "index": 3, - "name": "ROOM_TYPE", - "comment": null - }, - "MINIMUM_NIGHTS": { - "type": "NUMBER", - "index": 4, - "name": "MINIMUM_NIGHTS", - "comment": null - }, - "PRICE": { - "type": "NUMBER", - "index": 5, - "name": "PRICE", - "comment": null - }, - "HOST_ID": { - "type": "NUMBER", - "index": 6, - "name": "HOST_ID", - "comment": null - }, - "HOST_NAME": { - "type": "TEXT", - "index": 7, - "name": "HOST_NAME", - "comment": null - }, - "HOST_IS_SUPERHOST": { - "type": "TEXT", - "index": 8, - "name": "HOST_IS_SUPERHOST", - "comment": null - }, - "CREATED_AT": { - "type": "TIMESTAMP_NTZ", - "index": 9, - "name": "CREATED_AT", - "comment": null - }, - "UPDATED_AT": { - "type": "TIMESTAMP_NTZ", - "index": 10, - "name": "UPDATED_AT", - "comment": null - } - }, - "stats": { - "has_stats": { - "id": "has_stats", - "label": "Has Stats?", - "value": false, - "include": false, - "description": "Indicates whether there are 
statistics for this table" - } - }, - "unique_id": "model.dbtlearn.dim_listings_w_hosts" - }, - "model.dbtlearn.fct_reviews": { - "metadata": { - "type": "BASE TABLE", - "schema": "DEV", - "name": "FCT_REVIEWS", - "database": "AIRBNB", - "comment": null, - "owner": "TRANSFORM" - }, - "columns": { - "LISTING_ID": { - "type": "NUMBER", - "index": 1, - "name": "LISTING_ID", - "comment": null - }, - "REVIEW_DATE": { - "type": "TIMESTAMP_NTZ", - "index": 2, - "name": "REVIEW_DATE", - "comment": null - }, - "REVIEWER_NAME": { - "type": "TEXT", - "index": 3, - "name": "REVIEWER_NAME", - "comment": null - }, - "REVIEW_TEXT": { - "type": "TEXT", - "index": 4, - "name": "REVIEW_TEXT", - "comment": null - }, - "REVIEW_SENTIMENT": { - "type": "TEXT", - "index": 5, - "name": "REVIEW_SENTIMENT", - "comment": null - } - }, - "stats": { - "bytes": { - "id": "bytes", - "label": "Approximate Size", - "value": 42548736.0, - "include": true, - "description": "Approximate size of the table as reported by Snowflake" - }, - "row_count": { - "id": "row_count", - "label": "Row Count", - "value": 409697.0, - "include": true, - "description": "An approximate count of rows in this table" - }, - "last_modified": { - "id": "last_modified", - "label": "Last Modified", - "value": "2022-04-05 20:47UTC", - "include": true, - "description": "The timestamp for last update/change" - }, - "has_stats": { - "id": "has_stats", - "label": "Has Stats?", - "value": true, - "include": false, - "description": "Indicates whether there are statistics for this table" - } - }, - "unique_id": "model.dbtlearn.fct_reviews" - }, - "model.dbtlearn.src_listings": { - "metadata": { - "type": "VIEW", - "schema": "DEV", - "name": "SRC_LISTINGS", - "database": "AIRBNB", - "comment": null, - "owner": "TRANSFORM" - }, - "columns": { - "LISTING_ID": { - "type": "NUMBER", - "index": 1, - "name": "LISTING_ID", - "comment": null - }, - "LISTING_NAME": { - "type": "TEXT", - "index": 2, - "name": "LISTING_NAME", - "comment": null - 
}, - "LISTING_URL": { - "type": "TEXT", - "index": 3, - "name": "LISTING_URL", - "comment": null - }, - "ROOM_TYPE": { - "type": "TEXT", - "index": 4, - "name": "ROOM_TYPE", - "comment": null - }, - "MINIMUM_NIGHTS": { - "type": "NUMBER", - "index": 5, - "name": "MINIMUM_NIGHTS", - "comment": null - }, - "HOST_ID": { - "type": "NUMBER", - "index": 6, - "name": "HOST_ID", - "comment": null - }, - "PRICE_STR": { - "type": "TEXT", - "index": 7, - "name": "PRICE_STR", - "comment": null - }, - "CREATED_AT": { - "type": "TIMESTAMP_NTZ", - "index": 8, - "name": "CREATED_AT", - "comment": null - }, - "UPDATED_AT": { - "type": "TIMESTAMP_NTZ", - "index": 9, - "name": "UPDATED_AT", - "comment": null - } - }, - "stats": { - "has_stats": { - "id": "has_stats", - "label": "Has Stats?", - "value": false, - "include": false, - "description": "Indicates whether there are statistics for this table" - } - }, - "unique_id": "model.dbtlearn.src_listings" - } - }, - "sources": {}, - "errors": null -} \ No newline at end of file diff --git a/src/test/resources/dbt_change/manifest.json b/src/test/resources/dbt_change/manifest.json deleted file mode 100644 index b702426..0000000 --- a/src/test/resources/dbt_change/manifest.json +++ /dev/null @@ -1,4889 +0,0 @@ -{ - "metadata": { - "dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v4.json", - "dbt_version": "1.0.5", - "generated_at": "2022-04-29T04:29:32.010163Z", - "invocation_id": "6f5bd62d-50bb-4d99-b92b-933c06fe7759", - "env": {}, - "project_id": "d996d79890e8636568af20a2a3912013", - "user_id": "36732dd7-1654-4270-920f-689678c03fe2", - "send_anonymous_usage_stats": true, - "adapter_type": "snowflake" - }, - "nodes": { - "model.dbtlearn.dim_listings_w_hosts": { - "raw_sql": "WITH l AS (\n SELECT *\n FROM {{ ref('dim_listings_cleansed_changed') }}\n),\nh AS (\nSELECT *\nFROM {{ ref('dim_hosts_cleansed') }}\n )\nSELECT l.listing_id,\n l.listing_name,\n l.room_type,\n l.minimum_nights,\n l.price,\n l.host_id,\n h.host_name,\n 
h.is_superhost as host_is_superhost,\n l.created_at,\n GREATEST(l.updated_at, h.updated_at) as updated_at\nFROM l\n LEFT JOIN h ON (h.host_id = l.host_id)", - "compiled": true, - "resource_type": "model", - "depends_on": { - "macros": [], - "nodes": [ - "model.dbtlearn.dim_listings_cleansed_changed", - "model.dbtlearn.dim_hosts_cleansed" - ] - }, - "config": { - "enabled": true, - "alias": null, - "schema": null, - "database": null, - "tags": [], - "meta": {}, - "materialized": "view", - "persist_docs": {}, - "quoting": {}, - "column_types": {}, - "full_refresh": null, - "on_schema_change": "ignore", - "post-hook": [], - "pre-hook": [] - }, - "database": "airbnb", - "schema": "dev", - "fqn": [ - "dbtlearn", - "dim", - "dim_listings_w_hosts" - ], - "unique_id": "model.dbtlearn.dim_listings_w_hosts", - "package_name": "dbtlearn", - "root_path": "/Users/ananthpackkildurai/workspace/dbt_tutorial/dbtlearn", - "path": "dim/dim_listings_w_hosts.sql", - "original_file_path": "models/dim/dim_listings_w_hosts.sql", - "name": "dim_listings_w_hosts", - "alias": "dim_listings_w_hosts", - "checksum": { - "name": "sha256", - "checksum": "243e0cabf45a05a6b0577d10399f5a023e43d3cf80398781e5e08b308cf832e9" - }, - "tags": [], - "refs": [ - [ - "dim_listings_cleansed_changed" - ], - [ - "dim_hosts_cleansed" - ] - ], - "sources": [], - "description": "", - "columns": {}, - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "compiled_path": "target/compiled/dbtlearn/models/dim/dim_listings_w_hosts.sql", - "build_path": null, - "deferred": false, - "unrendered_config": {}, - "created_at": 1651204507.097216, - "compiled_sql": "WITH l AS (\n SELECT *\n FROM airbnb.dev.dim_listings_cleansed_changed\n),\nh AS (\nSELECT *\nFROM airbnb.dev.dim_hosts_cleansed\n )\nSELECT l.listing_id,\n l.listing_name,\n l.room_type,\n l.minimum_nights,\n l.price,\n l.host_id,\n h.host_name,\n h.is_superhost as host_is_superhost,\n l.created_at,\n GREATEST(l.updated_at, h.updated_at) as 
updated_at\nFROM l\n LEFT JOIN h ON (h.host_id = l.host_id)", - "extra_ctes_injected": true, - "extra_ctes": [], - "relation_name": "airbnb.dev.dim_listings_w_hosts" - }, - "model.dbtlearn.dim_listings_cleansed_changed": { - "raw_sql": "WITH src_listings AS (\n SELECT *\n FROM {{ ref('src_listings') }}\n)\nSELECT listing_id,\n listing_name,\n room_type,\n CASE\n WHEN minimum_nights = 0 THEN 1\n ELSE minimum_nights\n END AS minimum_nights,\n host_id,\n REPLACE(\n price_str,\n '$'\n ) :: NUMBER(10,\n 2) AS price,\n created_at,\n updated_at\nFROM src_listings", - "compiled": true, - "resource_type": "model", - "depends_on": { - "macros": [], - "nodes": [ - "model.dbtlearn.src_listings" - ] - }, - "config": { - "enabled": true, - "alias": null, - "schema": null, - "database": null, - "tags": [], - "meta": {}, - "materialized": "view", - "persist_docs": {}, - "quoting": {}, - "column_types": {}, - "full_refresh": null, - "on_schema_change": "ignore", - "post-hook": [], - "pre-hook": [] - }, - "database": "airbnb", - "schema": "dev", - "fqn": [ - "dbtlearn", - "dim", - "dim_listings_cleansed_changed" - ], - "unique_id": "model.dbtlearn.dim_listings_cleansed_changed", - "package_name": "dbtlearn", - "root_path": "/Users/ananthpackkildurai/workspace/dbt_tutorial/dbtlearn", - "path": "dim/dim_listings_cleansed_changed.sql", - "original_file_path": "models/dim/dim_listings_cleansed_changed.sql", - "name": "dim_listings_cleansed_changed", - "alias": "dim_listings_cleansed_changed", - "checksum": { - "name": "sha256", - "checksum": "004260ee497b05f21c7efd290cdf2ec5cfa8cf840a5a4b7cd2d0b7e168b7b207" - }, - "tags": [], - "refs": [ - [ - "src_listings" - ] - ], - "sources": [], - "description": "", - "columns": {}, - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "compiled_path": "target/compiled/dbtlearn/models/dim/dim_listings_cleansed_changed.sql", - "build_path": null, - "deferred": false, - "unrendered_config": {}, - "created_at": 1651204507.1046379, - 
"compiled_sql": "WITH src_listings AS (\n SELECT *\n FROM airbnb.dev.src_listings\n)\nSELECT listing_id,\n listing_name,\n room_type,\n CASE\n WHEN minimum_nights = 0 THEN 1\n ELSE minimum_nights\n END AS minimum_nights,\n host_id,\n REPLACE(\n price_str,\n '$'\n ) :: NUMBER(10,\n 2) AS price,\n created_at,\n updated_at\nFROM src_listings", - "extra_ctes_injected": true, - "extra_ctes": [], - "relation_name": "airbnb.dev.dim_listings_cleansed_changed" - }, - "model.dbtlearn.dim_hosts_cleansed": { - "raw_sql": "{{ config(\n materialized = 'view'\n) }}\nWITH src_hosts AS (\n SELECT *\n FROM {{ ref('src_hosts') }}\n)\nSELECT host_id,\n NVL(\n host_name,\n 'Anonymous'\n ) AS host_name,\n is_superhost,\n created_at,\n updated_at\nFROM src_hosts", - "compiled": true, - "resource_type": "model", - "depends_on": { - "macros": [], - "nodes": [ - "model.dbtlearn.src_hosts" - ] - }, - "config": { - "enabled": true, - "alias": null, - "schema": null, - "database": null, - "tags": [], - "meta": {}, - "materialized": "view", - "persist_docs": {}, - "quoting": {}, - "column_types": {}, - "full_refresh": null, - "on_schema_change": "ignore", - "post-hook": [], - "pre-hook": [] - }, - "database": "airbnb", - "schema": "dev", - "fqn": [ - "dbtlearn", - "dim", - "dim_hosts_cleansed" - ], - "unique_id": "model.dbtlearn.dim_hosts_cleansed", - "package_name": "dbtlearn", - "root_path": "/Users/ananthpackkildurai/workspace/dbt_tutorial/dbtlearn", - "path": "dim/dim_hosts_cleansed.sql", - "original_file_path": "models/dim/dim_hosts_cleansed.sql", - "name": "dim_hosts_cleansed", - "alias": "dim_hosts_cleansed", - "checksum": { - "name": "sha256", - "checksum": "24b3ba82411e376956843de60351cc0711311ae4623e44da3f38e872ef979615" - }, - "tags": [], - "refs": [ - [ - "src_hosts" - ] - ], - "sources": [], - "description": "", - "columns": {}, - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "compiled_path": "target/compiled/dbtlearn/models/dim/dim_hosts_cleansed.sql", - 
"build_path": null, - "deferred": false, - "unrendered_config": { - "materialized": "view" - }, - "created_at": 1651204507.10578, - "compiled_sql": "\nWITH src_hosts AS (\n SELECT *\n FROM airbnb.dev.src_hosts\n)\nSELECT host_id,\n NVL(\n host_name,\n 'Anonymous'\n ) AS host_name,\n is_superhost,\n created_at,\n updated_at\nFROM src_hosts", - "extra_ctes_injected": true, - "extra_ctes": [], - "relation_name": "airbnb.dev.dim_hosts_cleansed" - }, - "model.dbtlearn.fct_reviews": { - "raw_sql": "{{ config(\n materialized = 'incremental',\n on_schema_change='fail'\n )\n}}\nWITH src_reviews AS (\n SELECT *\n FROM {{ ref('src_reviews') }}\n)\nSELECT *\nFROM src_reviews\nWHERE review_text is not null {% if is_incremental() %}\n AND review_date > (select max(review_date) from {{ this }})\n{% endif %}", - "compiled": true, - "resource_type": "model", - "depends_on": { - "macros": [ - "macro.dbt.is_incremental" - ], - "nodes": [ - "model.dbtlearn.src_reviews" - ] - }, - "config": { - "enabled": true, - "alias": null, - "schema": null, - "database": null, - "tags": [], - "meta": {}, - "materialized": "incremental", - "persist_docs": {}, - "quoting": {}, - "column_types": {}, - "full_refresh": null, - "on_schema_change": "fail", - "post-hook": [], - "pre-hook": [] - }, - "database": "airbnb", - "schema": "dev", - "fqn": [ - "dbtlearn", - "fct", - "fct_reviews" - ], - "unique_id": "model.dbtlearn.fct_reviews", - "package_name": "dbtlearn", - "root_path": "/Users/ananthpackkildurai/workspace/dbt_tutorial/dbtlearn", - "path": "fct/fct_reviews.sql", - "original_file_path": "models/fct/fct_reviews.sql", - "name": "fct_reviews", - "alias": "fct_reviews", - "checksum": { - "name": "sha256", - "checksum": "36107c27a2f78c29d2262ef7fd2aec2329e075380fc5af2578c592bfc78ab851" - }, - "tags": [], - "refs": [ - [ - "src_reviews" - ] - ], - "sources": [], - "description": "", - "columns": {}, - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "compiled_path": 
"target/compiled/dbtlearn/models/fct/fct_reviews.sql", - "build_path": null, - "deferred": false, - "unrendered_config": { - "materialized": "incremental", - "on_schema_change": "fail" - }, - "created_at": 1651204507.107425, - "compiled_sql": "\nWITH src_reviews AS (\n SELECT *\n FROM airbnb.dev.src_reviews\n)\nSELECT *\nFROM src_reviews\nWHERE review_text is not null \n AND review_date > (select max(review_date) from airbnb.dev.fct_reviews)\n", - "extra_ctes_injected": true, - "extra_ctes": [], - "relation_name": "airbnb.dev.fct_reviews" - }, - "model.dbtlearn.src_listings": { - "raw_sql": "WITH raw_listings AS (SELECT *\n FROM AIRBNB.RAW.RAW_LISTINGS\n)\nSELECT id AS listing_id,\n name AS listing_name,\n listing_url,\n room_type,\n minimum_nights,\n host_id,\n price AS price_str,\n created_at,\n updated_at\nFROM raw_listings", - "compiled": true, - "resource_type": "model", - "depends_on": { - "macros": [], - "nodes": [] - }, - "config": { - "enabled": true, - "alias": null, - "schema": null, - "database": null, - "tags": [], - "meta": {}, - "materialized": "view", - "persist_docs": {}, - "quoting": {}, - "column_types": {}, - "full_refresh": null, - "on_schema_change": "ignore", - "post-hook": [], - "pre-hook": [] - }, - "database": "airbnb", - "schema": "dev", - "fqn": [ - "dbtlearn", - "src", - "src_listings" - ], - "unique_id": "model.dbtlearn.src_listings", - "package_name": "dbtlearn", - "root_path": "/Users/ananthpackkildurai/workspace/dbt_tutorial/dbtlearn", - "path": "src/src_listings.sql", - "original_file_path": "models/src/src_listings.sql", - "name": "src_listings", - "alias": "src_listings", - "checksum": { - "name": "sha256", - "checksum": "373867ec1c842c5c2a1841399091a568c1f8f591f80649cb06ace15fff577eb3" - }, - "tags": [], - "refs": [], - "sources": [], - "description": "", - "columns": {}, - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "compiled_path": "target/compiled/dbtlearn/models/src/src_listings.sql", - "build_path": 
null, - "deferred": false, - "unrendered_config": { - "materialized": "view" - }, - "created_at": 1651204507.1133409, - "compiled_sql": "WITH raw_listings AS (SELECT *\n FROM AIRBNB.RAW.RAW_LISTINGS\n)\nSELECT id AS listing_id,\n name AS listing_name,\n listing_url,\n room_type,\n minimum_nights,\n host_id,\n price AS price_str,\n created_at,\n updated_at\nFROM raw_listings", - "extra_ctes_injected": true, - "extra_ctes": [], - "relation_name": "airbnb.dev.src_listings" - }, - "model.dbtlearn.src_hosts": { - "raw_sql": "WITH raw_hosts AS (SELECT *\n FROM AIRBNB.RAW.RAW_HOSTS\n)\nSELECT id AS host_id,\n NAME AS host_name,\n is_superhost,\n created_at,\n updated_at\nFROM raw_hosts", - "compiled": true, - "resource_type": "model", - "depends_on": { - "macros": [], - "nodes": [] - }, - "config": { - "enabled": true, - "alias": null, - "schema": null, - "database": null, - "tags": [], - "meta": {}, - "materialized": "view", - "persist_docs": {}, - "quoting": {}, - "column_types": {}, - "full_refresh": null, - "on_schema_change": "ignore", - "post-hook": [], - "pre-hook": [] - }, - "database": "airbnb", - "schema": "dev", - "fqn": [ - "dbtlearn", - "src", - "src_hosts" - ], - "unique_id": "model.dbtlearn.src_hosts", - "package_name": "dbtlearn", - "root_path": "/Users/ananthpackkildurai/workspace/dbt_tutorial/dbtlearn", - "path": "src/src_hosts.sql", - "original_file_path": "models/src/src_hosts.sql", - "name": "src_hosts", - "alias": "src_hosts", - "checksum": { - "name": "sha256", - "checksum": "032dd8da0ed7461628dcac6681d6880d923af89d3802606039ad20240821fe7d" - }, - "tags": [], - "refs": [], - "sources": [], - "description": "", - "columns": {}, - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "compiled_path": "target/compiled/dbtlearn/models/src/src_hosts.sql", - "build_path": null, - "deferred": false, - "unrendered_config": { - "materialized": "view" - }, - "created_at": 1651204507.1144252, - "compiled_sql": "WITH raw_hosts AS (SELECT *\n FROM 
AIRBNB.RAW.RAW_HOSTS\n)\nSELECT id AS host_id,\n NAME AS host_name,\n is_superhost,\n created_at,\n updated_at\nFROM raw_hosts", - "extra_ctes_injected": true, - "extra_ctes": [], - "relation_name": "airbnb.dev.src_hosts" - }, - "model.dbtlearn.src_reviews": { - "raw_sql": "{{ config(\n unique_key=\"column_name\"\n) }}\n\nWITH raw_reviews AS (SELECT *\n FROM AIRBNB.RAW.RAW_REVIEWS\n)\nSELECT listing_id,\n date AS review_date,\n reviewer_name,\n comments AS review_text,\n sentiment AS review_sentiment\nFROM raw_reviews", - "compiled": true, - "resource_type": "model", - "depends_on": { - "macros": [], - "nodes": [] - }, - "config": { - "enabled": true, - "alias": null, - "schema": null, - "database": null, - "tags": [], - "meta": {}, - "materialized": "view", - "persist_docs": {}, - "quoting": {}, - "column_types": {}, - "full_refresh": null, - "on_schema_change": "ignore", - "unique_key": "column_name", - "domain": "core", - "model_type": "dimension", - "post-hook": [], - "pre-hook": [] - }, - "database": "airbnb", - "schema": "dev", - "fqn": [ - "dbtlearn", - "src", - "src_reviews" - ], - "unique_id": "model.dbtlearn.src_reviews", - "package_name": "dbtlearn", - "root_path": "/Users/ananthpackkildurai/workspace/dbt_tutorial/dbtlearn", - "path": "src/src_reviews.sql", - "original_file_path": "models/src/src_reviews.sql", - "name": "src_reviews", - "alias": "src_reviews", - "checksum": { - "name": "sha256", - "checksum": "ca47823e4b83506e5e1e9f28b8316386479dd25f69f274bf2b699e71bb9ee307" - }, - "tags": [], - "refs": [], - "sources": [], - "description": "This is a comment for src_reviews", - "columns": { - "listing_id": { - "name": "listing_id", - "description": "This is listing id name", - "meta": { - "is_primary_key": true, - "link": { - "model": "src_listings", - "column": "id" - }, - "depends": [ - { - "model": "listings", - "column": "id" - } - ] - }, - "data_type": "int", - "quote": null, - "tags": [] - }, - "review_date": { - "name": "review_date", - 
"description": "review date", - "meta": {}, - "data_type": "timestamp", - "quote": null, - "tags": [] - }, - "reviewer_name": { - "name": "reviewer_name", - "description": "some reviewer name", - "meta": {}, - "data_type": "text", - "quote": null, - "tags": [] - }, - "review_text": { - "name": "review_text", - "description": "some review text", - "meta": {}, - "data_type": "text", - "quote": null, - "tags": [] - }, - "review_sentiment": { - "name": "review_sentiment", - "description": "some review sentiment", - "meta": {}, - "data_type": "text", - "quote": null, - "tags": [] - } - }, - "meta": {}, - "docs": { - "show": true - }, - "patch_path": "dbtlearn://models/src/src_reviews.yml", - "compiled_path": "target/compiled/dbtlearn/models/src/src_reviews.sql", - "build_path": null, - "deferred": false, - "unrendered_config": { - "materialized": "view", - "unique_key": "column_name" - }, - "created_at": 1651206572.0454872, - "compiled_sql": "\n\nWITH raw_reviews AS (SELECT *\n FROM AIRBNB.RAW.RAW_REVIEWS\n)\nSELECT listing_id,\n date AS review_date,\n reviewer_name,\n comments AS review_text,\n sentiment AS review_sentiment\nFROM raw_reviews", - "extra_ctes_injected": true, - "extra_ctes": [], - "relation_name": "airbnb.dev.src_reviews" - } - }, - "sources": {}, - "macros": { - "macro.dbt_snowflake.snowflake__get_catalog": { - "unique_id": "macro.dbt_snowflake.snowflake__get_catalog", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/catalog.sql", - "original_file_path": "macros/catalog.sql", - "name": "snowflake__get_catalog", - "macro_sql": "{% macro snowflake__get_catalog(information_schema, schemas) -%}\n {% set query %}\n with tables as (\n\n select\n table_catalog as \"table_database\",\n table_schema as \"table_schema\",\n table_name as \"table_name\",\n table_type as \"table_type\",\n comment as \"table_comment\",\n\n -- note: 
this is the _role_ that owns the table\n table_owner as \"table_owner\",\n\n 'Clustering Key' as \"stats:clustering_key:label\",\n clustering_key as \"stats:clustering_key:value\",\n 'The key used to cluster this table' as \"stats:clustering_key:description\",\n (clustering_key is not null) as \"stats:clustering_key:include\",\n\n 'Row Count' as \"stats:row_count:label\",\n row_count as \"stats:row_count:value\",\n 'An approximate count of rows in this table' as \"stats:row_count:description\",\n (row_count is not null) as \"stats:row_count:include\",\n\n 'Approximate Size' as \"stats:bytes:label\",\n bytes as \"stats:bytes:value\",\n 'Approximate size of the table as reported by Snowflake' as \"stats:bytes:description\",\n (bytes is not null) as \"stats:bytes:include\",\n\n 'Last Modified' as \"stats:last_modified:label\",\n to_varchar(convert_timezone('UTC', last_altered), 'yyyy-mm-dd HH24:MI'||'UTC') as \"stats:last_modified:value\",\n 'The timestamp for last update/change' as \"stats:last_modified:description\",\n (last_altered is not null and table_type='BASE TABLE') as \"stats:last_modified:include\"\n\n from {{ information_schema }}.tables\n\n ),\n\n columns as (\n\n select\n table_catalog as \"table_database\",\n table_schema as \"table_schema\",\n table_name as \"table_name\",\n\n column_name as \"column_name\",\n ordinal_position as \"column_index\",\n data_type as \"column_type\",\n comment as \"column_comment\"\n\n from {{ information_schema }}.columns\n )\n\n select *\n from tables\n join columns using (\"table_database\", \"table_schema\", \"table_name\")\n where (\n {%- for schema in schemas -%}\n upper(\"table_schema\") = upper('{{ schema }}'){%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n order by \"column_index\"\n {%- endset -%}\n\n {{ return(run_query(query)) }}\n\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.run_query" - ] - }, - "description": "", - "meta": {}, - 
"docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.854675 - }, - "macro.dbt_snowflake.snowflake__create_table_as": { - "unique_id": "macro.dbt_snowflake.snowflake__create_table_as", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "snowflake__create_table_as", - "macro_sql": "{% macro snowflake__create_table_as(temporary, relation, sql) -%}\n {%- set transient = config.get('transient', default=true) -%}\n {%- set cluster_by_keys = config.get('cluster_by', default=none) -%}\n {%- set enable_automatic_clustering = config.get('automatic_clustering', default=false) -%}\n {%- set copy_grants = config.get('copy_grants', default=false) -%}\n\n {%- if cluster_by_keys is not none and cluster_by_keys is string -%}\n {%- set cluster_by_keys = [cluster_by_keys] -%}\n {%- endif -%}\n {%- if cluster_by_keys is not none -%}\n {%- set cluster_by_string = cluster_by_keys|join(\", \")-%}\n {% else %}\n {%- set cluster_by_string = none -%}\n {%- endif -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create or replace {% if temporary -%}\n temporary\n {%- elif transient -%}\n transient\n {%- endif %} table {{ relation }} {% if copy_grants and not temporary -%} copy grants {%- endif %} as\n (\n {%- if cluster_by_string is not none -%}\n select * from(\n {{ sql }}\n ) order by ({{ cluster_by_string }})\n {%- else -%}\n {{ sql }}\n {%- endif %}\n );\n {% if cluster_by_string is not none and not temporary -%}\n alter table {{relation}} cluster by ({{cluster_by_string}});\n {%- endif -%}\n {% if enable_automatic_clustering and cluster_by_string is not none and not temporary -%}\n alter table {{relation}} resume recluster;\n {%- endif -%}\n{% endmacro %}", - "resource_type": 
"macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.869168 - }, - "macro.dbt_snowflake.get_column_comment_sql": { - "unique_id": "macro.dbt_snowflake.get_column_comment_sql", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "get_column_comment_sql", - "macro_sql": "{% macro get_column_comment_sql(column_name, column_dict) %}\n {{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} COMMENT $${{ column_dict[column_name]['description'] | replace('$', '[$]') }}$$\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.8695621 - }, - "macro.dbt_snowflake.get_persist_docs_column_list": { - "unique_id": "macro.dbt_snowflake.get_persist_docs_column_list", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "get_persist_docs_column_list", - "macro_sql": "{% macro get_persist_docs_column_list(model_columns, query_columns) %}\n(\n {% for column_name in query_columns %}\n {% if (column_name|upper in model_columns) or (column_name in model_columns) %}\n {{ get_column_comment_sql(column_name, model_columns) }}\n {% else %}\n {{column_name}}\n {% endif %}\n {{ \", \" if not loop.last else \"\" }}\n {% endfor %}\n)\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - 
"macro.dbt_snowflake.get_column_comment_sql" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.870075 - }, - "macro.dbt_snowflake.snowflake__create_view_as": { - "unique_id": "macro.dbt_snowflake.snowflake__create_view_as", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "snowflake__create_view_as", - "macro_sql": "{% macro snowflake__create_view_as(relation, sql) -%}\n {%- set secure = config.get('secure', default=false) -%}\n {%- set copy_grants = config.get('copy_grants', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create or replace {% if secure -%}\n secure\n {%- endif %} view {{ relation }} \n {% if config.persist_column_docs() -%}\n {% set model_columns = model.columns %}\n {% set query_columns = get_columns_in_query(sql) %}\n {{ get_persist_docs_column_list(model_columns, query_columns) }}\n \n {%- endif %}\n {% if copy_grants -%} copy grants {%- endif %} as (\n {{ sql }}\n );\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.get_columns_in_query", - "macro.dbt_snowflake.get_persist_docs_column_list" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.870923 - }, - "macro.dbt_snowflake.snowflake__get_columns_in_relation": { - "unique_id": "macro.dbt_snowflake.snowflake__get_columns_in_relation", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", 
- "name": "snowflake__get_columns_in_relation", - "macro_sql": "{% macro snowflake__get_columns_in_relation(relation) -%}\n {%- set sql -%}\n describe table {{ relation }}\n {%- endset -%}\n {%- set result = run_query(sql) -%}\n\n {% set maximum = 10000 %}\n {% if (result | length) >= maximum %}\n {% set msg %}\n Too many columns in relation {{ relation }}! dbt can only get\n information about relations with fewer than {{ maximum }} columns.\n {% endset %}\n {% do exceptions.raise_compiler_error(msg) %}\n {% endif %}\n\n {% set columns = [] %}\n {% for row in result %}\n {% do columns.append(api.Column.from_description(row['name'], row['type'])) %}\n {% endfor %}\n {% do return(columns) %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.run_query" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.871804 - }, - "macro.dbt_snowflake.snowflake__list_schemas": { - "unique_id": "macro.dbt_snowflake.snowflake__list_schemas", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "snowflake__list_schemas", - "macro_sql": "{% macro snowflake__list_schemas(database) -%}\n {# 10k limit from here: https://docs.snowflake.net/manuals/sql-reference/sql/show-schemas.html#usage-notes #}\n {% set maximum = 10000 %}\n {% set sql -%}\n show terse schemas in database {{ database }}\n limit {{ maximum }}\n {%- endset %}\n {% set result = run_query(sql) %}\n {% if (result | length) >= maximum %}\n {% set msg %}\n Too many schemas in database {{ database }}! 
dbt can only get\n information about databases with fewer than {{ maximum }} schemas.\n {% endset %}\n {% do exceptions.raise_compiler_error(msg) %}\n {% endif %}\n {{ return(result) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.run_query" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.872472 - }, - "macro.dbt_snowflake.snowflake__list_relations_without_caching": { - "unique_id": "macro.dbt_snowflake.snowflake__list_relations_without_caching", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "snowflake__list_relations_without_caching", - "macro_sql": "{% macro snowflake__list_relations_without_caching(schema_relation) %}\n {%- set sql -%}\n show terse objects in {{ schema_relation }}\n {%- endset -%}\n\n {%- set result = run_query(sql) -%}\n {% set maximum = 10000 %}\n {% if (result | length) >= maximum %}\n {% set msg %}\n Too many schemas in schema {{ schema_relation }}! 
dbt can only get\n information about schemas with fewer than {{ maximum }} objects.\n {% endset %}\n {% do exceptions.raise_compiler_error(msg) %}\n {% endif %}\n {%- do return(result) -%}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.run_query" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.873039 - }, - "macro.dbt_snowflake.snowflake__check_schema_exists": { - "unique_id": "macro.dbt_snowflake.snowflake__check_schema_exists", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "snowflake__check_schema_exists", - "macro_sql": "{% macro snowflake__check_schema_exists(information_schema, schema) -%}\n {% call statement('check_schema_exists', fetch_result=True) -%}\n select count(*)\n from {{ information_schema }}.schemata\n where upper(schema_name) = upper('{{ schema }}')\n and upper(catalog_name) = upper('{{ information_schema.database }}')\n {%- endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.statement" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.873407 - }, - "macro.dbt_snowflake.snowflake__current_timestamp": { - "unique_id": "macro.dbt_snowflake.snowflake__current_timestamp", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "snowflake__current_timestamp", - "macro_sql": "{% macro 
snowflake__current_timestamp() -%}\n convert_timezone('UTC', current_timestamp())\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.873486 - }, - "macro.dbt_snowflake.snowflake__snapshot_string_as_time": { - "unique_id": "macro.dbt_snowflake.snowflake__snapshot_string_as_time", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "snowflake__snapshot_string_as_time", - "macro_sql": "{% macro snowflake__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"to_timestamp_ntz('\" ~ timestamp ~ \"')\" -%}\n {{ return(result) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.873683 - }, - "macro.dbt_snowflake.snowflake__snapshot_get_time": { - "unique_id": "macro.dbt_snowflake.snowflake__snapshot_get_time", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "snowflake__snapshot_get_time", - "macro_sql": "{% macro snowflake__snapshot_get_time() -%}\n to_timestamp_ntz({{ current_timestamp() }})\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.current_timestamp" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.873798 - }, - 
"macro.dbt_snowflake.snowflake__rename_relation": { - "unique_id": "macro.dbt_snowflake.snowflake__rename_relation", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "snowflake__rename_relation", - "macro_sql": "{% macro snowflake__rename_relation(from_relation, to_relation) -%}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ to_relation }}\n {%- endcall %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.statement" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.874016 - }, - "macro.dbt_snowflake.snowflake__alter_column_type": { - "unique_id": "macro.dbt_snowflake.snowflake__alter_column_type", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "snowflake__alter_column_type", - "macro_sql": "{% macro snowflake__alter_column_type(relation, column_name, new_column_type) -%}\n {% call statement('alter_column_type') %}\n alter table {{ relation }} alter {{ adapter.quote(column_name) }} set data type {{ new_column_type }};\n {% endcall %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.statement" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.874296 - }, - "macro.dbt_snowflake.snowflake__alter_relation_comment": { - "unique_id": "macro.dbt_snowflake.snowflake__alter_relation_comment", - "package_name": "dbt_snowflake", - 
"root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "snowflake__alter_relation_comment", - "macro_sql": "{% macro snowflake__alter_relation_comment(relation, relation_comment) -%}\n comment on {{ relation.type }} {{ relation }} IS $${{ relation_comment | replace('$', '[$]') }}$$;\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.8745751 - }, - "macro.dbt_snowflake.snowflake__alter_column_comment": { - "unique_id": "macro.dbt_snowflake.snowflake__alter_column_comment", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "snowflake__alter_column_comment", - "macro_sql": "{% macro snowflake__alter_column_comment(relation, column_dict) -%}\n {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute=\"name\") | list %}\n alter {{ relation.type }} {{ relation }} alter\n {% for column_name in column_dict if (column_name in existing_columns) or (column_name|upper in existing_columns) %}\n {{ get_column_comment_sql(column_name, column_dict) }} {{ ',' if not loop.last else ';' }}\n {% endfor %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.get_column_comment_sql" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.875154 - }, - "macro.dbt_snowflake.get_current_query_tag": { - "unique_id": "macro.dbt_snowflake.get_current_query_tag", - 
"package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "get_current_query_tag", - "macro_sql": "{% macro get_current_query_tag() -%}\n {{ return(run_query(\"show parameters like 'query_tag' in session\").rows[0]['value']) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.run_query" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.875356 - }, - "macro.dbt_snowflake.set_query_tag": { - "unique_id": "macro.dbt_snowflake.set_query_tag", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "set_query_tag", - "macro_sql": "{% macro set_query_tag() -%}\n {% set new_query_tag = config.get('query_tag') %}\n {% if new_query_tag %}\n {% set original_query_tag = get_current_query_tag() %}\n {{ log(\"Setting query_tag to '\" ~ new_query_tag ~ \"'. 
Will reset to '\" ~ original_query_tag ~ \"' after materialization.\") }}\n {% do run_query(\"alter session set query_tag = '{}'\".format(new_query_tag)) %}\n {{ return(original_query_tag)}}\n {% endif %}\n {{ return(none)}}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.get_current_query_tag", - "macro.dbt.run_query" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.8758762 - }, - "macro.dbt_snowflake.unset_query_tag": { - "unique_id": "macro.dbt_snowflake.unset_query_tag", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "unset_query_tag", - "macro_sql": "{% macro unset_query_tag(original_query_tag) -%}\n {% set new_query_tag = config.get('query_tag') %}\n {% if new_query_tag %}\n {% if original_query_tag %}\n {{ log(\"Resetting query_tag to '\" ~ original_query_tag ~ \"'.\") }}\n {% do run_query(\"alter session set query_tag = '{}'\".format(original_query_tag)) %}\n {% else %}\n {{ log(\"No original query_tag, unsetting parameter.\") }}\n {% do run_query(\"alter session unset query_tag\") %}\n {% endif %}\n {% endif %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.run_query" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.876409 - }, - "macro.dbt_snowflake.snowflake__alter_relation_add_remove_columns": { - "unique_id": "macro.dbt_snowflake.snowflake__alter_relation_add_remove_columns", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", 
- "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "snowflake__alter_relation_add_remove_columns", - "macro_sql": "{% macro snowflake__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n \n {% if add_columns %}\n \n {% set sql -%}\n alter {{ relation.type }} {{ relation }} add column\n {% for column in add_columns %}\n {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}\n {%- endset -%}\n\n {% do run_query(sql) %}\n\n {% endif %}\n\n {% if remove_columns %}\n \n {% set sql -%}\n alter {{ relation.type }} {{ relation }} drop column\n {% for column in remove_columns %}\n {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n {%- endset -%}\n \n {% do run_query(sql) %}\n \n {% endif %}\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.run_query" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.877253 - }, - "macro.dbt_snowflake.snowflake_dml_explicit_transaction": { - "unique_id": "macro.dbt_snowflake.snowflake_dml_explicit_transaction", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "snowflake_dml_explicit_transaction", - "macro_sql": "{% macro snowflake_dml_explicit_transaction(dml) %}\n {#\n Use this macro to wrap all INSERT, MERGE, UPDATE, DELETE, and TRUNCATE \n statements before passing them into run_query(), or calling in the 'main' statement\n of a materialization\n #}\n {% set dml_transaction -%}\n begin;\n {{ dml }};\n commit;\n {%- endset %}\n \n {% do return(dml_transaction) %}\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - 
"meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.877481 - }, - "macro.dbt_snowflake.snowflake__truncate_relation": { - "unique_id": "macro.dbt_snowflake.snowflake__truncate_relation", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/adapters.sql", - "original_file_path": "macros/adapters.sql", - "name": "snowflake__truncate_relation", - "macro_sql": "{% macro snowflake__truncate_relation(relation) -%}\n {% set truncate_dml %}\n truncate table {{ relation }}\n {% endset %}\n {% call statement('truncate_relation') -%}\n {{ snowflake_dml_explicit_transaction(truncate_dml) }}\n {%- endcall %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.statement", - "macro.dbt_snowflake.snowflake_dml_explicit_transaction" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.877745 - }, - "macro.dbt_snowflake.snowflake__get_merge_sql": { - "unique_id": "macro.dbt_snowflake.snowflake__get_merge_sql", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/materializations/merge.sql", - "original_file_path": "macros/materializations/merge.sql", - "name": "snowflake__get_merge_sql", - "macro_sql": "{% macro snowflake__get_merge_sql(target, source_sql, unique_key, dest_columns, predicates) -%}\n\n {#\n Workaround for Snowflake not being happy with a merge on a constant-false predicate.\n When no unique_key is provided, this macro will do a regular insert. 
If a unique_key\n is provided, then this macro will do a proper merge instead.\n #}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute='name')) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {%- set dml -%}\n {%- if unique_key is none -%}\n\n {{ sql_header if sql_header is not none }}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source_sql }}\n )\n\n {%- else -%}\n\n {{ default__get_merge_sql(target, source_sql, unique_key, dest_columns, predicates) }}\n\n {%- endif -%}\n {%- endset -%}\n \n {% do return(snowflake_dml_explicit_transaction(dml)) %}\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.get_quoted_csv", - "macro.dbt.default__get_merge_sql", - "macro.dbt_snowflake.snowflake_dml_explicit_transaction" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.879235 - }, - "macro.dbt_snowflake.snowflake__get_delete_insert_merge_sql": { - "unique_id": "macro.dbt_snowflake.snowflake__get_delete_insert_merge_sql", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/materializations/merge.sql", - "original_file_path": "macros/materializations/merge.sql", - "name": "snowflake__get_delete_insert_merge_sql", - "macro_sql": "{% macro snowflake__get_delete_insert_merge_sql(target, source, unique_key, dest_columns) %}\n {% set dml = default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns) %}\n {% do return(snowflake_dml_explicit_transaction(dml)) %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__get_delete_insert_merge_sql", - "macro.dbt_snowflake.snowflake_dml_explicit_transaction" - ] - }, - "description": "", - "meta": 
{}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.879544 - }, - "macro.dbt_snowflake.snowflake__snapshot_merge_sql": { - "unique_id": "macro.dbt_snowflake.snowflake__snapshot_merge_sql", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/materializations/merge.sql", - "original_file_path": "macros/materializations/merge.sql", - "name": "snowflake__snapshot_merge_sql", - "macro_sql": "{% macro snowflake__snapshot_merge_sql(target, source, insert_cols) %}\n {% set dml = default__snapshot_merge_sql(target, source, insert_cols) %}\n {% do return(snowflake_dml_explicit_transaction(dml)) %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__snapshot_merge_sql", - "macro.dbt_snowflake.snowflake_dml_explicit_transaction" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.879824 - }, - "macro.dbt_snowflake.snowflake__load_csv_rows": { - "unique_id": "macro.dbt_snowflake.snowflake__load_csv_rows", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/materializations/seed.sql", - "original_file_path": "macros/materializations/seed.sql", - "name": "snowflake__load_csv_rows", - "macro_sql": "{% macro snowflake__load_csv_rows(model, agate_table) %}\n {% set batch_size = get_batch_size() %}\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into 
{{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n %s\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query('BEGIN', auto_begin=False) %}\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n {% do adapter.add_query('COMMIT', auto_begin=False) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.get_batch_size", - "macro.dbt.get_seed_column_quoted_csv" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.882269 - }, - "macro.dbt_snowflake.materialization_seed_snowflake": { - "unique_id": "macro.dbt_snowflake.materialization_seed_snowflake", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/materializations/seed.sql", - "original_file_path": "macros/materializations/seed.sql", - "name": "materialization_seed_snowflake", - "macro_sql": "{% materialization seed, adapter='snowflake' %}\n {% set original_query_tag = set_query_tag() %}\n\n {% set relations = materialization_seed_default() %}\n\n {% do unset_query_tag(original_query_tag) %}\n\n {{ return(relations) }}\n{% endmaterialization %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.set_query_tag", - "macro.dbt.materialization_seed_default", - "macro.dbt_snowflake.unset_query_tag" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 
1651204506.8825972 - }, - "macro.dbt_snowflake.materialization_view_snowflake": { - "unique_id": "macro.dbt_snowflake.materialization_view_snowflake", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/materializations/view.sql", - "original_file_path": "macros/materializations/view.sql", - "name": "materialization_view_snowflake", - "macro_sql": "{% materialization view, adapter='snowflake' -%}\n\n {% set original_query_tag = set_query_tag() %}\n {% set to_return = create_or_replace_view() %}\n\n {% set target_relation = this.incorporate(type='view') %}\n {% do persist_docs(target_relation, model, for_columns=false) %}\n\n {% do return(to_return) %}\n\n {% do unset_query_tag(original_query_tag) %}\n\n{%- endmaterialization %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.set_query_tag", - "macro.dbt.create_or_replace_view", - "macro.dbt.persist_docs", - "macro.dbt_snowflake.unset_query_tag" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.883271 - }, - "macro.dbt_snowflake.materialization_table_snowflake": { - "unique_id": "macro.dbt_snowflake.materialization_table_snowflake", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/materializations/table.sql", - "original_file_path": "macros/materializations/table.sql", - "name": "materialization_table_snowflake", - "macro_sql": "{% materialization table, adapter='snowflake' %}\n\n {% set original_query_tag = set_query_tag() %}\n\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set target_relation = 
api.Relation.create(identifier=identifier,\n schema=schema,\n database=database, type='table') -%}\n\n {{ run_hooks(pre_hooks) }}\n\n {#-- Drop the relation if it was a view to \"convert\" it in a table. This may lead to\n -- downtime, but it should be a relatively infrequent occurrence #}\n {% if old_relation is not none and not old_relation.is_table %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ drop_relation_if_exists(old_relation) }}\n {% endif %}\n\n --build model\n {% call statement('main') -%}\n {{ create_table_as(false, target_relation, sql) }}\n {%- endcall %}\n\n {{ run_hooks(post_hooks) }}\n\n {% do persist_docs(target_relation, model) %}\n\n {% do unset_query_tag(original_query_tag) %}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.set_query_tag", - "macro.dbt.run_hooks", - "macro.dbt.drop_relation_if_exists", - "macro.dbt.statement", - "macro.dbt.create_table_as", - "macro.dbt.persist_docs", - "macro.dbt_snowflake.unset_query_tag" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.8848581 - }, - "macro.dbt_snowflake.dbt_snowflake_validate_get_incremental_strategy": { - "unique_id": "macro.dbt_snowflake.dbt_snowflake_validate_get_incremental_strategy", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/materializations/incremental.sql", - "original_file_path": "macros/materializations/incremental.sql", - "name": "dbt_snowflake_validate_get_incremental_strategy", - "macro_sql": "{% macro dbt_snowflake_validate_get_incremental_strategy(config) %}\n {#-- Find and validate the incremental strategy #}\n {%- set strategy = 
config.get(\"incremental_strategy\", default=\"merge\") -%}\n\n {% set invalid_strategy_msg -%}\n Invalid incremental strategy provided: {{ strategy }}\n Expected one of: 'merge', 'delete+insert'\n {%- endset %}\n {% if strategy not in ['merge', 'delete+insert'] %}\n {% do exceptions.raise_compiler_error(invalid_strategy_msg) %}\n {% endif %}\n\n {% do return(strategy) %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.886385 - }, - "macro.dbt_snowflake.dbt_snowflake_get_incremental_sql": { - "unique_id": "macro.dbt_snowflake.dbt_snowflake_get_incremental_sql", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/materializations/incremental.sql", - "original_file_path": "macros/materializations/incremental.sql", - "name": "dbt_snowflake_get_incremental_sql", - "macro_sql": "{% macro dbt_snowflake_get_incremental_sql(strategy, tmp_relation, target_relation, unique_key, dest_columns) %}\n {% if strategy == 'merge' %}\n {% do return(get_merge_sql(target_relation, tmp_relation, unique_key, dest_columns)) %}\n {% elif strategy == 'delete+insert' %}\n {% do return(get_delete_insert_merge_sql(target_relation, tmp_relation, unique_key, dest_columns)) %}\n {% else %}\n {% do exceptions.raise_compiler_error('invalid strategy: ' ~ strategy) %}\n {% endif %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.get_merge_sql", - "macro.dbt.get_delete_insert_merge_sql" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.88697 - }, - "macro.dbt_snowflake.materialization_incremental_snowflake": { - "unique_id": 
"macro.dbt_snowflake.materialization_incremental_snowflake", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/materializations/incremental.sql", - "original_file_path": "macros/materializations/incremental.sql", - "name": "materialization_incremental_snowflake", - "macro_sql": "{% materialization incremental, adapter='snowflake' -%}\n \n {% set original_query_tag = set_query_tag() %}\n\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {% set target_relation = this %}\n {% set existing_relation = load_relation(this) %}\n {% set tmp_relation = make_temp_relation(this) %}\n\n {#-- Validate early so we don't run SQL if the strategy is invalid --#}\n {% set strategy = dbt_snowflake_validate_get_incremental_strategy(config) -%}\n {% set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') %}\n\n {{ run_hooks(pre_hooks) }}\n\n {% if existing_relation is none %}\n {% set build_sql = create_table_as(False, target_relation, sql) %}\n \n {% elif existing_relation.is_view %}\n {#-- Can't overwrite a view with a table - we must drop --#}\n {{ log(\"Dropping relation \" ~ target_relation ~ \" because it is a view and this model is a table.\") }}\n {% do adapter.drop_relation(existing_relation) %}\n {% set build_sql = create_table_as(False, target_relation, sql) %}\n \n {% elif full_refresh_mode %}\n {% set build_sql = create_table_as(False, target_relation, sql) %}\n \n {% else %}\n {% do run_query(create_table_as(True, tmp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=tmp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. 
Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, tmp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n {% set build_sql = dbt_snowflake_get_incremental_sql(strategy, tmp_relation, target_relation, unique_key, dest_columns) %}\n \n {% endif %}\n\n {%- call statement('main') -%}\n {{ build_sql }}\n {%- endcall -%}\n\n {{ run_hooks(post_hooks) }}\n\n {% set target_relation = target_relation.incorporate(type='table') %}\n {% do persist_docs(target_relation, model) %}\n\n {% do unset_query_tag(original_query_tag) %}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.set_query_tag", - "macro.dbt.should_full_refresh", - "macro.dbt.load_relation", - "macro.dbt.make_temp_relation", - "macro.dbt_snowflake.dbt_snowflake_validate_get_incremental_strategy", - "macro.dbt.incremental_validate_on_schema_change", - "macro.dbt.run_hooks", - "macro.dbt.create_table_as", - "macro.dbt.run_query", - "macro.dbt.process_schema_changes", - "macro.dbt_snowflake.dbt_snowflake_get_incremental_sql", - "macro.dbt.statement", - "macro.dbt.persist_docs", - "macro.dbt_snowflake.unset_query_tag" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.889248 - }, - "macro.dbt_snowflake.materialization_snapshot_snowflake": { - "unique_id": "macro.dbt_snowflake.materialization_snapshot_snowflake", - "package_name": "dbt_snowflake", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/snowflake", - "path": "macros/materializations/snapshot.sql", - "original_file_path": "macros/materializations/snapshot.sql", - "name": "materialization_snapshot_snowflake", - 
"macro_sql": "{% materialization snapshot, adapter='snowflake' %}\n {% set original_query_tag = set_query_tag() %}\n\n {% set relations = materialization_snapshot_default() %}\n\n {% do unset_query_tag(original_query_tag) %}\n\n {{ return(relations) }}\n{% endmaterialization %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.set_query_tag", - "macro.dbt.materialization_snapshot_default", - "macro.dbt_snowflake.unset_query_tag" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.88973 - }, - "macro.dbt.run_hooks": { - "unique_id": "macro.dbt.run_hooks", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/hooks.sql", - "original_file_path": "macros/materializations/hooks.sql", - "name": "run_hooks", - "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.statement" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.890977 - }, - "macro.dbt.make_hook_config": { - "unique_id": "macro.dbt.make_hook_config", - "package_name": "dbt", - "root_path": 
"/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/hooks.sql", - "original_file_path": "macros/materializations/hooks.sql", - "name": "make_hook_config", - "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.891188 - }, - "macro.dbt.before_begin": { - "unique_id": "macro.dbt.before_begin", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/hooks.sql", - "original_file_path": "macros/materializations/hooks.sql", - "name": "before_begin", - "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.make_hook_config" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.891339 - }, - "macro.dbt.in_transaction": { - "unique_id": "macro.dbt.in_transaction", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/hooks.sql", - "original_file_path": "macros/materializations/hooks.sql", - "name": "in_transaction", - "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.make_hook_config" - ] - }, - 
"description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.891488 - }, - "macro.dbt.after_commit": { - "unique_id": "macro.dbt.after_commit", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/hooks.sql", - "original_file_path": "macros/materializations/hooks.sql", - "name": "after_commit", - "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.make_hook_config" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.891636 - }, - "macro.dbt.set_sql_header": { - "unique_id": "macro.dbt.set_sql_header", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/configs.sql", - "original_file_path": "macros/materializations/configs.sql", - "name": "set_sql_header", - "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.8921118 - }, - "macro.dbt.should_full_refresh": { - "unique_id": "macro.dbt.should_full_refresh", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/configs.sql", - "original_file_path": "macros/materializations/configs.sql", - "name": 
"should_full_refresh", - "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.892443 - }, - "macro.dbt.should_store_failures": { - "unique_id": "macro.dbt.should_store_failures", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/configs.sql", - "original_file_path": "macros/materializations/configs.sql", - "name": "should_store_failures", - "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.8928242 - }, - "macro.dbt.snapshot_merge_sql": { - "unique_id": "macro.dbt.snapshot_merge_sql", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/snapshot_merge.sql", - "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", - "name": "snapshot_merge_sql", - "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql', 'dbt')(target, 
source, insert_cols) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__snapshot_merge_sql" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.893415 - }, - "macro.dbt.default__snapshot_merge_sql": { - "unique_id": "macro.dbt.default__snapshot_merge_sql", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/snapshot_merge.sql", - "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", - "name": "default__snapshot_merge_sql", - "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.8937159 - }, - "macro.dbt.strategy_dispatch": { - "unique_id": "macro.dbt.strategy_dispatch", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/strategies.sql", - "original_file_path": 
"macros/materializations/snapshots/strategies.sql", - "name": "strategy_dispatch", - "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.897434 - }, - "macro.dbt.snapshot_hash_arguments": { - "unique_id": "macro.dbt.snapshot_hash_arguments", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/strategies.sql", - "original_file_path": "macros/materializations/snapshots/strategies.sql", - "name": "snapshot_hash_arguments", - "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__snapshot_hash_arguments" - ] - }, - "description": "", - "meta": {}, 
- "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.8976178 - }, - "macro.dbt.default__snapshot_hash_arguments": { - "unique_id": "macro.dbt.default__snapshot_hash_arguments", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/strategies.sql", - "original_file_path": "macros/materializations/snapshots/strategies.sql", - "name": "default__snapshot_hash_arguments", - "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.8978539 - }, - "macro.dbt.snapshot_get_time": { - "unique_id": "macro.dbt.snapshot_get_time", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/strategies.sql", - "original_file_path": "macros/materializations/snapshots/strategies.sql", - "name": "snapshot_get_time", - "macro_sql": "{% macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__snapshot_get_time" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.898006 - }, - "macro.dbt.default__snapshot_get_time": { - "unique_id": "macro.dbt.default__snapshot_get_time", - "package_name": "dbt", - "root_path": 
"/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/strategies.sql", - "original_file_path": "macros/materializations/snapshots/strategies.sql", - "name": "default__snapshot_get_time", - "macro_sql": "{% macro default__snapshot_get_time() -%}\n {{ current_timestamp() }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.current_timestamp" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.898108 - }, - "macro.dbt.snapshot_timestamp_strategy": { - "unique_id": "macro.dbt.snapshot_timestamp_strategy", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/strategies.sql", - "original_file_path": "macros/materializations/snapshots/strategies.sql", - "name": "snapshot_timestamp_strategy", - "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. 
We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/dbt-labs/dbt-core/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.snapshot_hash_arguments" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.898955 - }, - "macro.dbt.snapshot_string_as_time": { - "unique_id": "macro.dbt.snapshot_string_as_time", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/strategies.sql", - "original_file_path": "macros/materializations/snapshots/strategies.sql", - "name": "snapshot_string_as_time", - "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__snapshot_string_as_time" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.89913 - }, - "macro.dbt.default__snapshot_string_as_time": { - "unique_id": "macro.dbt.default__snapshot_string_as_time", - "package_name": "dbt", - "root_path": 
"/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/strategies.sql", - "original_file_path": "macros/materializations/snapshots/strategies.sql", - "name": "default__snapshot_string_as_time", - "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.899321 - }, - "macro.dbt.snapshot_check_all_get_existing_columns": { - "unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/strategies.sql", - "original_file_path": "macros/materializations/snapshots/strategies.sql", - "name": "snapshot_check_all_get_existing_columns", - "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists) -%}\n {%- set query_columns = get_columns_in_query(node['compiled_sql']) -%}\n {%- if not target_exists -%}\n {# no table yet -> return whatever the query does #}\n {{ return([false, query_columns]) }}\n {%- endif -%}\n {# handle any schema changes #}\n {%- set target_table = node.get('alias', node.get('name')) -%}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=target_table) -%}\n {%- set existing_cols = get_columns_in_query('select * from ' ~ target_relation) -%}\n {%- set ns = namespace() -%} {# handle for-loop scoping with a namespace #}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for 
col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(col) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return([ns.column_added, intersection]) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.get_columns_in_query" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9003892 - }, - "macro.dbt.snapshot_check_strategy": { - "unique_id": "macro.dbt.snapshot_check_strategy", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/strategies.sql", - "original_file_path": "macros/materializations/snapshots/strategies.sql", - "name": "snapshot_check_strategy", - "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n \n {% set select_current_time -%}\n select {{ snapshot_get_time() }} as snapshot_start\n {%- endset %}\n\n {#-- don't access the column by name, to avoid dealing with casing issues on snowflake #}\n {%- set now = run_query(select_current_time)[0][0] -%}\n {% if now is none or now is undefined -%}\n {%- do exceptions.raise_compiler_error('Could not get a snapshot start time from the database') -%}\n {%- endif %}\n {% set updated_at = config.get('updated_at', snapshot_string_as_time(now)) %}\n\n {% set column_added = false %}\n\n {% if check_cols_config == 'all' %}\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists) %}\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {% set 
check_cols = check_cols_config %}\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n TRUE\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.snapshot_get_time", - "macro.dbt.run_query", - "macro.dbt.snapshot_string_as_time", - "macro.dbt.snapshot_check_all_get_existing_columns", - "macro.dbt.snapshot_hash_arguments" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9024189 - }, - "macro.dbt.create_columns": { - "unique_id": "macro.dbt.create_columns", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/helpers.sql", - "original_file_path": "macros/materializations/snapshots/helpers.sql", - "name": "create_columns", - "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ adapter.dispatch('create_columns', 'dbt')(relation, columns) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - 
"macro.dbt.default__create_columns" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.906004 - }, - "macro.dbt.default__create_columns": { - "unique_id": "macro.dbt.default__create_columns", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/helpers.sql", - "original_file_path": "macros/materializations/snapshots/helpers.sql", - "name": "default__create_columns", - "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.statement" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.906307 - }, - "macro.dbt.post_snapshot": { - "unique_id": "macro.dbt.post_snapshot", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/helpers.sql", - "original_file_path": "macros/materializations/snapshots/helpers.sql", - "name": "post_snapshot", - "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__post_snapshot" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.906485 - }, - "macro.dbt.default__post_snapshot": { - "unique_id": 
"macro.dbt.default__post_snapshot", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/helpers.sql", - "original_file_path": "macros/materializations/snapshots/helpers.sql", - "name": "default__post_snapshot", - "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.906583 - }, - "macro.dbt.snapshot_staging_table": { - "unique_id": "macro.dbt.snapshot_staging_table", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/helpers.sql", - "original_file_path": "macros/materializations/snapshots/helpers.sql", - "name": "snapshot_staging_table", - "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n {{ adapter.dispatch('snapshot_staging_table', 'dbt')(strategy, source_sql, target_relation) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__snapshot_staging_table" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.906805 - }, - "macro.dbt.default__snapshot_staging_table": { - "unique_id": "macro.dbt.default__snapshot_staging_table", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/helpers.sql", - "original_file_path": "macros/materializations/snapshots/helpers.sql", - 
"name": "default__snapshot_staging_table", - "macro_sql": "{% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select \n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n \n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as 
dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n \n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.snapshot_get_time" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.907786 - }, - "macro.dbt.build_snapshot_table": { - "unique_id": "macro.dbt.build_snapshot_table", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/helpers.sql", - "original_file_path": "macros/materializations/snapshots/helpers.sql", - "name": "build_snapshot_table", - "macro_sql": "{% macro build_snapshot_table(strategy, sql) -%}\n {{ adapter.dispatch('build_snapshot_table', 'dbt')(strategy, sql) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__build_snapshot_table" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.907994 - }, - "macro.dbt.default__build_snapshot_table": { - "unique_id": "macro.dbt.default__build_snapshot_table", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/helpers.sql", - "original_file_path": "macros/materializations/snapshots/helpers.sql", - "name": 
"default__build_snapshot_table", - "macro_sql": "{% macro default__build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.908271 - }, - "macro.dbt.build_snapshot_staging_table": { - "unique_id": "macro.dbt.build_snapshot_staging_table", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/helpers.sql", - "original_file_path": "macros/materializations/snapshots/helpers.sql", - "name": "build_snapshot_staging_table", - "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set tmp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, tmp_relation, select) }}\n {% endcall %}\n\n {% do return(tmp_relation) %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.make_temp_relation", - "macro.dbt.snapshot_staging_table", - "macro.dbt.statement", - "macro.dbt.create_table_as" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.908737 - }, - "macro.dbt.materialization_snapshot_default": { - "unique_id": "macro.dbt.materialization_snapshot_default", - "package_name": "dbt", - "root_path": 
"/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/snapshots/snapshot.sql", - "original_file_path": "macros/materializations/snapshots/snapshot.sql", - "name": "materialization_snapshot_default", - "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n\n {% if not adapter.check_schema_exists(model.database, model.schema) %}\n {% do create_schema(model.database, model.schema) %}\n {% endif %}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_sql']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | 
rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.create_schema", - "macro.dbt.get_or_create_relation", - "macro.dbt.run_hooks", - "macro.dbt.strategy_dispatch", - "macro.dbt.build_snapshot_table", - "macro.dbt.create_table_as", - "macro.dbt.build_snapshot_staging_table", - "macro.dbt.create_columns", - "macro.dbt.snapshot_merge_sql", - "macro.dbt.statement", - "macro.dbt.persist_docs", - "macro.dbt.create_indexes", - "macro.dbt.post_snapshot" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 
1651204506.915369 - }, - "macro.dbt.materialization_test_default": { - "unique_id": "macro.dbt.materialization_test_default", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/tests/test.sql", - "original_file_path": "macros/materializations/tests/test.sql", - "name": "materialization_test_default", - "macro_sql": "{%- materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type='table') -%} %}\n \n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n \n {% call statement(auto_begin=True) %}\n {{ create_table_as(False, target_relation, sql) }}\n {% endcall %}\n \n {% do relations.append(target_relation) %}\n \n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n \n {{ adapter.commit() }}\n \n {% else %}\n\n {% set main_sql = sql %}\n \n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n \n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.should_store_failures", - "macro.dbt.statement", - "macro.dbt.create_table_as", - "macro.dbt.get_test_sql" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 
1651204506.917732 - }, - "macro.dbt.get_test_sql": { - "unique_id": "macro.dbt.get_test_sql", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/tests/helpers.sql", - "original_file_path": "macros/materializations/tests/helpers.sql", - "name": "get_test_sql", - "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__get_test_sql" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.918294 - }, - "macro.dbt.default__get_test_sql": { - "unique_id": "macro.dbt.default__get_test_sql", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/tests/helpers.sql", - "original_file_path": "macros/materializations/tests/helpers.sql", - "name": "default__get_test_sql", - "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.918626 - }, - "macro.dbt.get_where_subquery": { - "unique_id": "macro.dbt.get_where_subquery", - "package_name": "dbt", - "root_path": 
"/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/tests/where_subquery.sql", - "original_file_path": "macros/materializations/tests/where_subquery.sql", - "name": "get_where_subquery", - "macro_sql": "{% macro get_where_subquery(relation) -%}\n {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__get_where_subquery" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.919103 - }, - "macro.dbt.default__get_where_subquery": { - "unique_id": "macro.dbt.default__get_where_subquery", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/tests/where_subquery.sql", - "original_file_path": "macros/materializations/tests/where_subquery.sql", - "name": "default__get_where_subquery", - "macro_sql": "{% macro default__get_where_subquery(relation) -%}\n {% set where = config.get('where', '') %}\n {% if where %}\n {%- set filtered -%}\n (select * from {{ relation }} where {{ where }}) dbt_subquery\n {%- endset -%}\n {% do return(filtered) %}\n {%- else -%}\n {% do return(relation) %}\n {%- endif -%}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.919512 - }, - "macro.dbt.get_quoted_csv": { - "unique_id": "macro.dbt.get_quoted_csv", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": 
"macros/materializations/models/incremental/column_helpers.sql", - "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", - "name": "get_quoted_csv", - "macro_sql": "{% macro get_quoted_csv(column_names) %}\n \n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.920457 - }, - "macro.dbt.diff_columns": { - "unique_id": "macro.dbt.diff_columns", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/incremental/column_helpers.sql", - "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", - "name": "diff_columns", - "macro_sql": "{% macro diff_columns(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% set source_names = source_columns | map(attribute = 'column') | list %}\n {% set target_names = target_columns | map(attribute = 'column') | list %}\n \n {# --check whether the name attribute exists in the target - this does not perform a data type check #}\n {% for sc in source_columns %}\n {% if sc.name not in target_names %}\n {{ result.append(sc) }}\n {% endif %}\n {% endfor %}\n \n {{ return(result) }}\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.921053 - }, - "macro.dbt.diff_column_data_types": { - "unique_id": "macro.dbt.diff_column_data_types", - "package_name": "dbt", - 
"root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/incremental/column_helpers.sql", - "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", - "name": "diff_column_data_types", - "macro_sql": "{% macro diff_column_data_types(source_columns, target_columns) %}\n \n {% set result = [] %}\n {% for sc in source_columns %}\n {% set tc = target_columns | selectattr(\"name\", \"equalto\", sc.name) | list | first %}\n {% if tc %}\n {% if sc.data_type != tc.data_type %}\n {{ result.append( { 'column_name': tc.name, 'new_type': sc.data_type } ) }} \n {% endif %}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.921717 - }, - "macro.dbt.get_merge_sql": { - "unique_id": "macro.dbt.get_merge_sql", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/incremental/merge.sql", - "original_file_path": "macros/materializations/models/incremental/merge.sql", - "name": "get_merge_sql", - "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, predicates=none) -%}\n {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, predicates) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__get_merge_sql" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9248629 - }, - "macro.dbt.default__get_merge_sql": { - "unique_id": 
"macro.dbt.default__get_merge_sql", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/incremental/merge.sql", - "original_file_path": "macros/materializations/models/incremental/merge.sql", - "name": "default__get_merge_sql", - "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, predicates) -%}\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set update_columns = config.get('merge_update_columns', default = dest_columns | map(attribute=\"quoted\") | list) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{ predicates | join(' and ') }}\n\n {% if unique_key %}\n when matched then update set\n {% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n {% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.get_quoted_csv" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.926126 - }, - "macro.dbt.get_delete_insert_merge_sql": { - "unique_id": "macro.dbt.get_delete_insert_merge_sql", - "package_name": "dbt", - "root_path": 
"/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/incremental/merge.sql", - "original_file_path": "macros/materializations/models/incremental/merge.sql", - "name": "get_delete_insert_merge_sql", - "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__get_delete_insert_merge_sql" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.926379 - }, - "macro.dbt.default__get_delete_insert_merge_sql": { - "unique_id": "macro.dbt.default__get_delete_insert_merge_sql", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/incremental/merge.sql", - "original_file_path": "macros/materializations/models/incremental/merge.sql", - "name": "default__get_delete_insert_merge_sql", - "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key is not none %}\n delete from {{ target }}\n where ({{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n );\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.get_quoted_csv" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - 
"arguments": [], - "created_at": 1651204506.926843 - }, - "macro.dbt.get_insert_overwrite_merge_sql": { - "unique_id": "macro.dbt.get_insert_overwrite_merge_sql", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/incremental/merge.sql", - "original_file_path": "macros/materializations/models/incremental/merge.sql", - "name": "get_insert_overwrite_merge_sql", - "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql', 'dbt')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__get_insert_overwrite_merge_sql" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.927128 - }, - "macro.dbt.default__get_insert_overwrite_merge_sql": { - "unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/incremental/merge.sql", - "original_file_path": "macros/materializations/models/incremental/merge.sql", - "name": "default__get_insert_overwrite_merge_sql", - "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target 
}} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.get_quoted_csv" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.927835 - }, - "macro.dbt.is_incremental": { - "unique_id": "macro.dbt.is_incremental", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/incremental/is_incremental.sql", - "original_file_path": "macros/materializations/models/incremental/is_incremental.sql", - "name": "is_incremental", - "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.should_full_refresh" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.928639 - }, - "macro.dbt.materialization_incremental_default": { - "unique_id": "macro.dbt.materialization_incremental_default", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": 
"macros/materializations/models/incremental/incremental.sql", - "original_file_path": "macros/materializations/models/incremental/incremental.sql", - "name": "materialization_incremental_default", - "macro_sql": "{% materialization incremental, default -%}\n\n {% set unique_key = config.get('unique_key') %}\n\n {% set target_relation = this.incorporate(type='table') %}\n {% set existing_relation = load_relation(this) %}\n {% set tmp_relation = make_temp_relation(target_relation) %}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {% set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') %}\n\n {% set tmp_identifier = model['name'] + '__dbt_tmp' %}\n {% set backup_identifier = model['name'] + \"__dbt_backup\" %}\n\n -- the intermediate_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. 
This has to happen before\n -- BEGIN, in a separate transaction\n {% set preexisting_intermediate_relation = adapter.get_relation(identifier=tmp_identifier, \n schema=schema,\n database=database) %} \n {% set preexisting_backup_relation = adapter.get_relation(identifier=backup_identifier,\n schema=schema,\n database=database) %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {# -- first check whether we want to full refresh for source view or config reasons #}\n {% set trigger_full_refresh = (full_refresh_mode or existing_relation.is_view) %}\n\n {% if existing_relation is none %}\n {% set build_sql = create_table_as(False, target_relation, sql) %}\n{% elif trigger_full_refresh %}\n {#-- Make sure the backup doesn't exist so we don't encounter issues with the rename below #}\n {% set tmp_identifier = model['name'] + '__dbt_tmp' %}\n {% set backup_identifier = model['name'] + '__dbt_backup' %}\n {% set intermediate_relation = existing_relation.incorporate(path={\"identifier\": tmp_identifier}) %}\n {% set backup_relation = existing_relation.incorporate(path={\"identifier\": backup_identifier}) %}\n\n {% set build_sql = create_table_as(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% do to_drop.append(backup_relation) %}\n {% else %}\n {% do run_query(create_table_as(True, tmp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=tmp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. 
Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, tmp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n {% set build_sql = get_delete_insert_merge_sql(target_relation, tmp_relation, unique_key, dest_columns) %}\n \n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %} \n {% do adapter.rename_relation(target_relation, backup_relation) %} \n {% do adapter.rename_relation(intermediate_relation, target_relation) %} \n {% endif %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.load_relation", - "macro.dbt.make_temp_relation", - "macro.dbt.should_full_refresh", - "macro.dbt.incremental_validate_on_schema_change", - "macro.dbt.drop_relation_if_exists", - "macro.dbt.run_hooks", - "macro.dbt.create_table_as", - "macro.dbt.run_query", - "macro.dbt.process_schema_changes", - "macro.dbt.get_delete_insert_merge_sql", - "macro.dbt.statement", - "macro.dbt.persist_docs", - "macro.dbt.create_indexes" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9340842 - }, - "macro.dbt.incremental_validate_on_schema_change": { - "unique_id": "macro.dbt.incremental_validate_on_schema_change", - "package_name": "dbt", - 
"root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/incremental/on_schema_change.sql", - "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", - "name": "incremental_validate_on_schema_change", - "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n \n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n \n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}\n {% do log(log_message) %}\n \n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n \n {% endif %}\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.93955 - }, - "macro.dbt.check_for_schema_changes": { - "unique_id": "macro.dbt.check_for_schema_changes", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/incremental/on_schema_change.sql", - "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", - "name": "check_for_schema_changes", - "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n \n {% set schema_changed = False %}\n \n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n\n {% set 
new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n \n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'source_columns': source_columns,\n 'target_columns': target_columns,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New column types: {{ new_target_types }}\n {% endset %}\n \n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.diff_columns", - "macro.dbt.diff_column_data_types" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9409401 - }, - "macro.dbt.sync_column_schemas": { - "unique_id": "macro.dbt.sync_column_schemas", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/incremental/on_schema_change.sql", - "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", - "name": "sync_column_schemas", - "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n \n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do 
alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n \n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n \n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %} \n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n \n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types changed: {{ new_target_types }}\n {% endset %}\n \n {% do log(schema_change_message) %}\n \n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.alter_relation_add_remove_columns", - "macro.dbt.alter_column_type" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.94226 - }, - "macro.dbt.process_schema_changes": { - "unique_id": "macro.dbt.process_schema_changes", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/incremental/on_schema_change.sql", - "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", - "name": "process_schema_changes", - "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n \n {% if 
on_schema_change == 'ignore' %}\n\n {{ return({}) }}\n\n {% else %}\n \n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n \n {% if schema_changes_dict['schema_changed'] %}\n \n {% if on_schema_change == 'fail' %}\n \n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways: \n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n {% endset %}\n \n {% do exceptions.raise_compiler_error(fail_msg) %}\n \n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n \n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n \n {% endif %}\n \n {% endif %}\n\n {{ return(schema_changes_dict['source_columns']) }}\n \n {% endif %}\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.check_for_schema_changes", - "macro.dbt.sync_column_schemas" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9430559 - }, - "macro.dbt.materialization_table_default": { - "unique_id": "macro.dbt.materialization_table_default", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/table/table.sql", - "original_file_path": "macros/materializations/models/table/table.sql", - "name": "materialization_table_default", - "macro_sql": "{% materialization table, default %}\n {%- set identifier = model['alias'] -%}\n {%- set tmp_identifier = model['name'] + '__dbt_tmp' -%}\n {%- set backup_identifier = model['name'] + '__dbt_backup' 
-%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set target_relation = api.Relation.create(identifier=identifier,\n schema=schema,\n database=database,\n type='table') -%}\n {%- set intermediate_relation = api.Relation.create(identifier=tmp_identifier,\n schema=schema,\n database=database,\n type='table') -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = adapter.get_relation(identifier=tmp_identifier, \n schema=schema,\n database=database) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if old_relation is none else old_relation.type -%}\n {%- set backup_relation = api.Relation.create(identifier=backup_identifier,\n schema=schema,\n database=database,\n type=backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = adapter.get_relation(identifier=backup_identifier,\n schema=schema,\n database=database) -%}\n\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if old_relation is not none %}\n {{ adapter.rename_relation(old_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ 
run_hooks(post_hooks, inside_transaction=True) }}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.drop_relation_if_exists", - "macro.dbt.run_hooks", - "macro.dbt.statement", - "macro.dbt.get_create_table_as_sql", - "macro.dbt.create_indexes", - "macro.dbt.persist_docs" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.947229 - }, - "macro.dbt.get_create_table_as_sql": { - "unique_id": "macro.dbt.get_create_table_as_sql", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/table/create_table_as.sql", - "original_file_path": "macros/materializations/models/table/create_table_as.sql", - "name": "get_create_table_as_sql", - "macro_sql": "{% macro get_create_table_as_sql(temporary, relation, sql) -%}\n {{ adapter.dispatch('get_create_table_as_sql', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__get_create_table_as_sql" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.947777 - }, - "macro.dbt.default__get_create_table_as_sql": { - "unique_id": "macro.dbt.default__get_create_table_as_sql", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", 
- "path": "macros/materializations/models/table/create_table_as.sql", - "original_file_path": "macros/materializations/models/table/create_table_as.sql", - "name": "default__get_create_table_as_sql", - "macro_sql": "{% macro default__get_create_table_as_sql(temporary, relation, sql) -%}\n {{ return(create_table_as(temporary, relation, sql)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.create_table_as" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.947985 - }, - "macro.dbt.create_table_as": { - "unique_id": "macro.dbt.create_table_as", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/table/create_table_as.sql", - "original_file_path": "macros/materializations/models/table/create_table_as.sql", - "name": "create_table_as", - "macro_sql": "{% macro create_table_as(temporary, relation, sql) -%}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__create_table_as" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.948208 - }, - "macro.dbt.default__create_table_as": { - "unique_id": "macro.dbt.default__create_table_as", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/table/create_table_as.sql", - "original_file_path": "macros/materializations/models/table/create_table_as.sql", - "name": "default__create_table_as", - "macro_sql": "{% macro 
default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n \n {{ sql_header if sql_header is not none }}\n \n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n as (\n {{ sql }}\n );\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.94865 - }, - "macro.dbt.materialization_view_default": { - "unique_id": "macro.dbt.materialization_view_default", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/view/view.sql", - "original_file_path": "macros/materializations/models/view/view.sql", - "name": "materialization_view_default", - "macro_sql": "{%- materialization view, default -%}\n\n {%- set identifier = model['alias'] -%}\n {%- set tmp_identifier = model['name'] + '__dbt_tmp' -%}\n {%- set backup_identifier = model['name'] + '__dbt_backup' -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set target_relation = api.Relation.create(identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {%- set intermediate_relation = api.Relation.create(identifier=tmp_identifier,\n schema=schema, database=database, type='view') -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. 
Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = adapter.get_relation(identifier=tmp_identifier, \n schema=schema,\n database=database) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"old_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the old_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the old_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if old_relation is none else old_relation.type -%}\n {%- set backup_relation = api.Relation.create(identifier=backup_identifier,\n schema=schema, database=database,\n type=backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = adapter.get_relation(identifier=backup_identifier,\n schema=schema,\n database=database) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ create_view_as(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if old_relation is not none %}\n {{ adapter.rename_relation(old_relation, backup_relation) 
}}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.run_hooks", - "macro.dbt.drop_relation_if_exists", - "macro.dbt.statement", - "macro.dbt.create_view_as", - "macro.dbt.persist_docs" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.952714 - }, - "macro.dbt.handle_existing_table": { - "unique_id": "macro.dbt.handle_existing_table", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/view/helpers.sql", - "original_file_path": "macros/materializations/models/view/helpers.sql", - "name": "handle_existing_table", - "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__handle_existing_table" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.953145 - }, - "macro.dbt.default__handle_existing_table": { - "unique_id": "macro.dbt.default__handle_existing_table", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/view/helpers.sql", - 
"original_file_path": "macros/materializations/models/view/helpers.sql", - "name": "default__handle_existing_table", - "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.953383 - }, - "macro.dbt.create_or_replace_view": { - "unique_id": "macro.dbt.create_or_replace_view", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/view/create_or_replace_view.sql", - "original_file_path": "macros/materializations/models/view/create_or_replace_view.sql", - "name": "create_or_replace_view", - "macro_sql": "{% macro create_or_replace_view() %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n\n {{ run_hooks(pre_hooks) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. 
This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(target_relation, sql) }}\n {%- endcall %}\n\n {{ run_hooks(post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.run_hooks", - "macro.dbt.handle_existing_table", - "macro.dbt.should_full_refresh", - "macro.dbt.statement", - "macro.dbt.get_create_view_as_sql" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.954754 - }, - "macro.dbt.get_create_view_as_sql": { - "unique_id": "macro.dbt.get_create_view_as_sql", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/view/create_view_as.sql", - "original_file_path": "macros/materializations/models/view/create_view_as.sql", - "name": "get_create_view_as_sql", - "macro_sql": "{% macro get_create_view_as_sql(relation, sql) -%}\n {{ adapter.dispatch('get_create_view_as_sql', 'dbt')(relation, sql) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__get_create_view_as_sql" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.955221 - }, - "macro.dbt.default__get_create_view_as_sql": { - "unique_id": "macro.dbt.default__get_create_view_as_sql", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": 
"macros/materializations/models/view/create_view_as.sql", - "original_file_path": "macros/materializations/models/view/create_view_as.sql", - "name": "default__get_create_view_as_sql", - "macro_sql": "{% macro default__get_create_view_as_sql(relation, sql) -%}\n {{ return(create_view_as(relation, sql)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.create_view_as" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.955395 - }, - "macro.dbt.create_view_as": { - "unique_id": "macro.dbt.create_view_as", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/view/create_view_as.sql", - "original_file_path": "macros/materializations/models/view/create_view_as.sql", - "name": "create_view_as", - "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__create_view_as" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.955586 - }, - "macro.dbt.default__create_view_as": { - "unique_id": "macro.dbt.default__create_view_as", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/models/view/create_view_as.sql", - "original_file_path": "macros/materializations/models/view/create_view_as.sql", - "name": "default__create_view_as", - "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ 
sql_header if sql_header is not none }}\n create view {{ relation }} as (\n {{ sql }}\n );\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.955868 - }, - "macro.dbt.materialization_seed_default": { - "unique_id": "macro.dbt.materialization_seed_default", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/seeds/seed.sql", - "original_file_path": "macros/materializations/seeds/seed.sql", - "name": "materialization_seed_default", - "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set agate_table = load_agate_table() -%}\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set 
sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ create_table_sql }};\n -- dbt seed --\n {{ sql }}\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.should_full_refresh", - "macro.dbt.run_hooks", - "macro.dbt.reset_csv_table", - "macro.dbt.create_csv_table", - "macro.dbt.load_csv_rows", - "macro.dbt.noop_statement", - "macro.dbt.persist_docs", - "macro.dbt.create_indexes" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9590452 - }, - "macro.dbt.create_csv_table": { - "unique_id": "macro.dbt.create_csv_table", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/seeds/helpers.sql", - "original_file_path": "macros/materializations/seeds/helpers.sql", - "name": "create_csv_table", - "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__create_csv_table" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.963721 - }, - 
"macro.dbt.default__create_csv_table": { - "unique_id": "macro.dbt.default__create_csv_table", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/seeds/helpers.sql", - "original_file_path": "macros/materializations/seeds/helpers.sql", - "name": "default__create_csv_table", - "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.statement" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.964705 - }, - "macro.dbt.reset_csv_table": { - "unique_id": "macro.dbt.reset_csv_table", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/seeds/helpers.sql", - "original_file_path": "macros/materializations/seeds/helpers.sql", - "name": "reset_csv_table", - "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, 
old_relation, agate_table) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__reset_csv_table" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.96496 - }, - "macro.dbt.default__reset_csv_table": { - "unique_id": "macro.dbt.default__reset_csv_table", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/seeds/helpers.sql", - "original_file_path": "macros/materializations/seeds/helpers.sql", - "name": "default__reset_csv_table", - "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.create_csv_table" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9654882 - }, - "macro.dbt.get_binding_char": { - "unique_id": "macro.dbt.get_binding_char", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/seeds/helpers.sql", - "original_file_path": "macros/materializations/seeds/helpers.sql", - "name": "get_binding_char", - "macro_sql": "{% macro get_binding_char() -%}\n {{ adapter.dispatch('get_binding_char', 'dbt')() }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": 
[ - "macro.dbt.default__get_binding_char" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.965648 - }, - "macro.dbt.default__get_binding_char": { - "unique_id": "macro.dbt.default__get_binding_char", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/seeds/helpers.sql", - "original_file_path": "macros/materializations/seeds/helpers.sql", - "name": "default__get_binding_char", - "macro_sql": "{% macro default__get_binding_char() %}\n {{ return('%s') }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.965769 - }, - "macro.dbt.get_batch_size": { - "unique_id": "macro.dbt.get_batch_size", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/seeds/helpers.sql", - "original_file_path": "macros/materializations/seeds/helpers.sql", - "name": "get_batch_size", - "macro_sql": "{% macro get_batch_size() -%}\n {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__get_batch_size" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.965937 - }, - "macro.dbt.default__get_batch_size": { - "unique_id": "macro.dbt.default__get_batch_size", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": 
"macros/materializations/seeds/helpers.sql", - "original_file_path": "macros/materializations/seeds/helpers.sql", - "name": "default__get_batch_size", - "macro_sql": "{% macro default__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.966062 - }, - "macro.dbt.get_seed_column_quoted_csv": { - "unique_id": "macro.dbt.get_seed_column_quoted_csv", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/seeds/helpers.sql", - "original_file_path": "macros/materializations/seeds/helpers.sql", - "name": "get_seed_column_quoted_csv", - "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9665868 - }, - "macro.dbt.load_csv_rows": { - "unique_id": "macro.dbt.load_csv_rows", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/seeds/helpers.sql", - "original_file_path": "macros/materializations/seeds/helpers.sql", - "name": "load_csv_rows", - "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ 
adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__load_csv_rows" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.966848 - }, - "macro.dbt.default__load_csv_rows": { - "unique_id": "macro.dbt.default__load_csv_rows", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/materializations/seeds/helpers.sql", - "original_file_path": "macros/materializations/seeds/helpers.sql", - "name": "default__load_csv_rows", - "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n\n {% set batch_size = get_batch_size() %}\n\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.get_batch_size", - "macro.dbt.get_seed_column_quoted_csv", - "macro.dbt.get_binding_char" - ] - }, - "description": "", - "meta": 
{}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.968174 - }, - "macro.dbt.generate_alias_name": { - "unique_id": "macro.dbt.generate_alias_name", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/get_custom_name/get_custom_alias.sql", - "original_file_path": "macros/get_custom_name/get_custom_alias.sql", - "name": "generate_alias_name", - "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__generate_alias_name" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.968678 - }, - "macro.dbt.default__generate_alias_name": { - "unique_id": "macro.dbt.default__generate_alias_name", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/get_custom_name/get_custom_alias.sql", - "original_file_path": "macros/get_custom_name/get_custom_alias.sql", - "name": "default__generate_alias_name", - "macro_sql": "{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name is none -%}\n\n {{ node.name }}\n\n {%- else -%}\n\n {{ custom_alias_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9689212 - }, - "macro.dbt.generate_schema_name": { - "unique_id": "macro.dbt.generate_schema_name", - 
"package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/get_custom_name/get_custom_schema.sql", - "original_file_path": "macros/get_custom_name/get_custom_schema.sql", - "name": "generate_schema_name", - "macro_sql": "{% macro generate_schema_name(custom_schema_name=none, node=none) -%}\n {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__generate_schema_name" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.969553 - }, - "macro.dbt.default__generate_schema_name": { - "unique_id": "macro.dbt.default__generate_schema_name", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/get_custom_name/get_custom_schema.sql", - "original_file_path": "macros/get_custom_name/get_custom_schema.sql", - "name": "default__generate_schema_name", - "macro_sql": "{% macro default__generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.969832 - }, - "macro.dbt.generate_schema_name_for_env": { - "unique_id": "macro.dbt.generate_schema_name_for_env", - "package_name": "dbt", - "root_path": 
"/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/get_custom_name/get_custom_schema.sql", - "original_file_path": "macros/get_custom_name/get_custom_schema.sql", - "name": "generate_schema_name_for_env", - "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.97014 - }, - "macro.dbt.generate_database_name": { - "unique_id": "macro.dbt.generate_database_name", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/get_custom_name/get_custom_database.sql", - "original_file_path": "macros/get_custom_name/get_custom_database.sql", - "name": "generate_database_name", - "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__generate_database_name" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9706528 - }, - "macro.dbt.default__generate_database_name": { - "unique_id": "macro.dbt.default__generate_database_name", - "package_name": "dbt", - "root_path": 
"/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/get_custom_name/get_custom_database.sql", - "original_file_path": "macros/get_custom_name/get_custom_database.sql", - "name": "default__generate_database_name", - "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.970923 - }, - "macro.dbt.default__test_relationships": { - "unique_id": "macro.dbt.default__test_relationships", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/generic_test_sql/relationships.sql", - "original_file_path": "macros/generic_test_sql/relationships.sql", - "name": "default__test_relationships", - "macro_sql": "{% macro default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.971371 - }, - "macro.dbt.default__test_not_null": { - "unique_id": "macro.dbt.default__test_not_null", 
- "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/generic_test_sql/not_null.sql", - "original_file_path": "macros/generic_test_sql/not_null.sql", - "name": "default__test_not_null", - "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\nselect *\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.971654 - }, - "macro.dbt.default__test_unique": { - "unique_id": "macro.dbt.default__test_unique", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/generic_test_sql/unique.sql", - "original_file_path": "macros/generic_test_sql/unique.sql", - "name": "default__test_unique", - "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9720042 - }, - "macro.dbt.default__test_accepted_values": { - "unique_id": "macro.dbt.default__test_accepted_values", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/generic_test_sql/accepted_values.sql", - "original_file_path": "macros/generic_test_sql/accepted_values.sql", - "name": 
"default__test_accepted_values", - "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by {{ column_name }}\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.972692 - }, - "macro.dbt.statement": { - "unique_id": "macro.dbt.statement", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/etc/statement.sql", - "original_file_path": "macros/etc/statement.sql", - "name": "statement", - "macro_sql": "{% macro statement(name=None, fetch_result=False, auto_begin=True) -%}\n {%- if execute: -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- set res, table = adapter.execute(sql, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.973983 - }, - "macro.dbt.noop_statement": { - "unique_id": "macro.dbt.noop_statement", - "package_name": "dbt", - "root_path": 
"/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/etc/statement.sql", - "original_file_path": "macros/etc/statement.sql", - "name": "noop_statement", - "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9746199 - }, - "macro.dbt.run_query": { - "unique_id": "macro.dbt.run_query", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/etc/statement.sql", - "original_file_path": "macros/etc/statement.sql", - "name": "run_query", - "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.statement" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.974944 - }, - "macro.dbt.convert_datetime": { - "unique_id": "macro.dbt.convert_datetime", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - 
"path": "macros/etc/datetime.sql", - "original_file_path": "macros/etc/datetime.sql", - "name": "convert_datetime", - "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9769669 - }, - "macro.dbt.dates_in_range": { - "unique_id": "macro.dbt.dates_in_range", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/etc/datetime.sql", - "original_file_path": "macros/etc/datetime.sql", - "name": "dates_in_range", - "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partiton start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% 
endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.convert_datetime" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.978344 - }, - "macro.dbt.partition_range": { - "unique_id": "macro.dbt.partition_range", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/etc/datetime.sql", - "original_file_path": "macros/etc/datetime.sql", - "name": "partition_range", - "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. 
Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.dates_in_range" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.979158 - }, - "macro.dbt.py_current_timestring": { - "unique_id": "macro.dbt.py_current_timestring", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/etc/datetime.sql", - "original_file_path": "macros/etc/datetime.sql", - "name": "py_current_timestring", - "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.979411 - }, - "macro.dbt.create_schema": { - "unique_id": "macro.dbt.create_schema", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/schema.sql", - "original_file_path": "macros/adapters/schema.sql", - "name": "create_schema", - "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__create_schema" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.979896 - }, - "macro.dbt.default__create_schema": { - "unique_id": 
"macro.dbt.default__create_schema", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/schema.sql", - "original_file_path": "macros/adapters/schema.sql", - "name": "default__create_schema", - "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.statement" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.980093 - }, - "macro.dbt.drop_schema": { - "unique_id": "macro.dbt.drop_schema", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/schema.sql", - "original_file_path": "macros/adapters/schema.sql", - "name": "drop_schema", - "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__drop_schema" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.980265 - }, - "macro.dbt.default__drop_schema": { - "unique_id": "macro.dbt.default__drop_schema", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/schema.sql", - "original_file_path": "macros/adapters/schema.sql", - "name": "default__drop_schema", - "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') 
-%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.statement" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9804611 - }, - "macro.dbt.get_create_index_sql": { - "unique_id": "macro.dbt.get_create_index_sql", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/indexes.sql", - "original_file_path": "macros/adapters/indexes.sql", - "name": "get_create_index_sql", - "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__get_create_index_sql" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.981035 - }, - "macro.dbt.default__get_create_index_sql": { - "unique_id": "macro.dbt.default__get_create_index_sql", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/indexes.sql", - "original_file_path": "macros/adapters/indexes.sql", - "name": "default__get_create_index_sql", - "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.981178 - }, - "macro.dbt.create_indexes": { - "unique_id": 
"macro.dbt.create_indexes", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/indexes.sql", - "original_file_path": "macros/adapters/indexes.sql", - "name": "create_indexes", - "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__create_indexes" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.981345 - }, - "macro.dbt.default__create_indexes": { - "unique_id": "macro.dbt.default__create_indexes", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/indexes.sql", - "original_file_path": "macros/adapters/indexes.sql", - "name": "default__create_indexes", - "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.get_create_index_sql", - "macro.dbt.run_query" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9817758 - }, - "macro.dbt.make_temp_relation": { - "unique_id": "macro.dbt.make_temp_relation", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": 
"macros/adapters/relation.sql", - "original_file_path": "macros/adapters/relation.sql", - "name": "make_temp_relation", - "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix))}}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__make_temp_relation" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9835799 - }, - "macro.dbt.default__make_temp_relation": { - "unique_id": "macro.dbt.default__make_temp_relation", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/relation.sql", - "original_file_path": "macros/adapters/relation.sql", - "name": "default__make_temp_relation", - "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {% set tmp_identifier = base_relation.identifier ~ suffix %}\n {% set tmp_relation = base_relation.incorporate(\n path={\"identifier\": tmp_identifier}) -%}\n\n {% do return(tmp_relation) %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.983905 - }, - "macro.dbt.drop_relation": { - "unique_id": "macro.dbt.drop_relation", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/relation.sql", - "original_file_path": "macros/adapters/relation.sql", - "name": "drop_relation", - "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}\n{% endmacro %}", - 
"resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__drop_relation" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.984101 - }, - "macro.dbt.default__drop_relation": { - "unique_id": "macro.dbt.default__drop_relation", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/relation.sql", - "original_file_path": "macros/adapters/relation.sql", - "name": "default__drop_relation", - "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n {%- endcall %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.statement" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9843352 - }, - "macro.dbt.truncate_relation": { - "unique_id": "macro.dbt.truncate_relation", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/relation.sql", - "original_file_path": "macros/adapters/relation.sql", - "name": "truncate_relation", - "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__truncate_relation" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.98453 - }, - "macro.dbt.default__truncate_relation": { - "unique_id": 
"macro.dbt.default__truncate_relation", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/relation.sql", - "original_file_path": "macros/adapters/relation.sql", - "name": "default__truncate_relation", - "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.statement" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.984697 - }, - "macro.dbt.rename_relation": { - "unique_id": "macro.dbt.rename_relation", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/relation.sql", - "original_file_path": "macros/adapters/relation.sql", - "name": "rename_relation", - "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__rename_relation" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.984915 - }, - "macro.dbt.default__rename_relation": { - "unique_id": "macro.dbt.default__rename_relation", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/relation.sql", - "original_file_path": "macros/adapters/relation.sql", - "name": "default__rename_relation", - 
"macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.statement" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.985218 - }, - "macro.dbt.get_or_create_relation": { - "unique_id": "macro.dbt.get_or_create_relation", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/relation.sql", - "original_file_path": "macros/adapters/relation.sql", - "name": "get_or_create_relation", - "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) -%}\n {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__get_or_create_relation" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.985485 - }, - "macro.dbt.default__get_or_create_relation": { - "unique_id": "macro.dbt.default__get_or_create_relation", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/relation.sql", - "original_file_path": "macros/adapters/relation.sql", - "name": "default__get_or_create_relation", - "macro_sql": "{% macro default__get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = 
adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.986151 - }, - "macro.dbt.load_relation": { - "unique_id": "macro.dbt.load_relation", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/relation.sql", - "original_file_path": "macros/adapters/relation.sql", - "name": "load_relation", - "macro_sql": "{% macro load_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9864 - }, - "macro.dbt.drop_relation_if_exists": { - "unique_id": "macro.dbt.drop_relation_if_exists", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/relation.sql", - "original_file_path": "macros/adapters/relation.sql", - "name": "drop_relation_if_exists", - "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": 
[] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.986603 - }, - "macro.dbt.current_timestamp": { - "unique_id": "macro.dbt.current_timestamp", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/freshness.sql", - "original_file_path": "macros/adapters/freshness.sql", - "name": "current_timestamp", - "macro_sql": "{% macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__current_timestamp" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.987173 - }, - "macro.dbt.default__current_timestamp": { - "unique_id": "macro.dbt.default__current_timestamp", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/freshness.sql", - "original_file_path": "macros/adapters/freshness.sql", - "name": "default__current_timestamp", - "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter '+adapter.type()) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9873269 - }, - "macro.dbt.collect_freshness": { - "unique_id": "macro.dbt.collect_freshness", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - 
"path": "macros/adapters/freshness.sql", - "original_file_path": "macros/adapters/freshness.sql", - "name": "collect_freshness", - "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__collect_freshness" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.987576 - }, - "macro.dbt.default__collect_freshness": { - "unique_id": "macro.dbt.default__collect_freshness", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/freshness.sql", - "original_file_path": "macros/adapters/freshness.sql", - "name": "default__collect_freshness", - "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness').table) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.statement", - "macro.dbt.current_timestamp" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.98803 - }, - "macro.dbt.alter_column_comment": { - "unique_id": "macro.dbt.alter_column_comment", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": 
"macros/adapters/persist_docs.sql", - "original_file_path": "macros/adapters/persist_docs.sql", - "name": "alter_column_comment", - "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__alter_column_comment" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9888341 - }, - "macro.dbt.default__alter_column_comment": { - "unique_id": "macro.dbt.default__alter_column_comment", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/persist_docs.sql", - "original_file_path": "macros/adapters/persist_docs.sql", - "name": "default__alter_column_comment", - "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.989012 - }, - "macro.dbt.alter_relation_comment": { - "unique_id": "macro.dbt.alter_relation_comment", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/persist_docs.sql", - "original_file_path": "macros/adapters/persist_docs.sql", - "name": "alter_relation_comment", - "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 
'dbt')(relation, relation_comment)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__alter_relation_comment" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9892302 - }, - "macro.dbt.default__alter_relation_comment": { - "unique_id": "macro.dbt.default__alter_relation_comment", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/persist_docs.sql", - "original_file_path": "macros/adapters/persist_docs.sql", - "name": "default__alter_relation_comment", - "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.989405 - }, - "macro.dbt.persist_docs": { - "unique_id": "macro.dbt.persist_docs", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/persist_docs.sql", - "original_file_path": "macros/adapters/persist_docs.sql", - "name": "persist_docs", - "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__persist_docs" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - 
"patch_path": null, - "arguments": [], - "created_at": 1651204506.989704 - }, - "macro.dbt.default__persist_docs": { - "unique_id": "macro.dbt.default__persist_docs", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/persist_docs.sql", - "original_file_path": "macros/adapters/persist_docs.sql", - "name": "default__persist_docs", - "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.run_query", - "macro.dbt.alter_relation_comment", - "macro.dbt.alter_column_comment" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.990229 - }, - "macro.dbt.get_catalog": { - "unique_id": "macro.dbt.get_catalog", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/metadata.sql", - "original_file_path": "macros/adapters/metadata.sql", - "name": "get_catalog", - "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__get_catalog" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": 
[], - "created_at": 1651204506.991852 - }, - "macro.dbt.default__get_catalog": { - "unique_id": "macro.dbt.default__get_catalog", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/metadata.sql", - "original_file_path": "macros/adapters/metadata.sql", - "name": "default__get_catalog", - "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.992126 - }, - "macro.dbt.information_schema_name": { - "unique_id": "macro.dbt.information_schema_name", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/metadata.sql", - "original_file_path": "macros/adapters/metadata.sql", - "name": "information_schema_name", - "macro_sql": "{% macro information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__information_schema_name" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.992324 - }, - "macro.dbt.default__information_schema_name": { - "unique_id": "macro.dbt.default__information_schema_name", - "package_name": "dbt", - "root_path": 
"/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/metadata.sql", - "original_file_path": "macros/adapters/metadata.sql", - "name": "default__information_schema_name", - "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.992484 - }, - "macro.dbt.list_schemas": { - "unique_id": "macro.dbt.list_schemas", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/metadata.sql", - "original_file_path": "macros/adapters/metadata.sql", - "name": "list_schemas", - "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__list_schemas" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.992687 - }, - "macro.dbt.default__list_schemas": { - "unique_id": "macro.dbt.default__list_schemas", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/metadata.sql", - "original_file_path": "macros/adapters/metadata.sql", - "name": "default__list_schemas", - "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) 
}}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.information_schema_name", - "macro.dbt.run_query" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.99295 - }, - "macro.dbt.check_schema_exists": { - "unique_id": "macro.dbt.check_schema_exists", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/metadata.sql", - "original_file_path": "macros/adapters/metadata.sql", - "name": "check_schema_exists", - "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__check_schema_exists" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9931688 - }, - "macro.dbt.default__check_schema_exists": { - "unique_id": "macro.dbt.default__check_schema_exists", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/metadata.sql", - "original_file_path": "macros/adapters/metadata.sql", - "name": "default__check_schema_exists", - "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) 
}}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.run_query" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.993557 - }, - "macro.dbt.list_relations_without_caching": { - "unique_id": "macro.dbt.list_relations_without_caching", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/metadata.sql", - "original_file_path": "macros/adapters/metadata.sql", - "name": "list_relations_without_caching", - "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__list_relations_without_caching" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9937558 - }, - "macro.dbt.default__list_relations_without_caching": { - "unique_id": "macro.dbt.default__list_relations_without_caching", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/metadata.sql", - "original_file_path": "macros/adapters/metadata.sql", - "name": "default__list_relations_without_caching", - "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": 
null, - "arguments": [], - "created_at": 1651204506.9939241 - }, - "macro.dbt.get_columns_in_relation": { - "unique_id": "macro.dbt.get_columns_in_relation", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/columns.sql", - "original_file_path": "macros/adapters/columns.sql", - "name": "get_columns_in_relation", - "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__get_columns_in_relation" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.995882 - }, - "macro.dbt.default__get_columns_in_relation": { - "unique_id": "macro.dbt.default__get_columns_in_relation", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/columns.sql", - "original_file_path": "macros/adapters/columns.sql", - "name": "default__get_columns_in_relation", - "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.996049 - }, - "macro.dbt.sql_convert_columns_in_relation": { - "unique_id": "macro.dbt.sql_convert_columns_in_relation", - "package_name": "dbt", - "root_path": 
"/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/columns.sql", - "original_file_path": "macros/adapters/columns.sql", - "name": "sql_convert_columns_in_relation", - "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.996382 - }, - "macro.dbt.get_columns_in_query": { - "unique_id": "macro.dbt.get_columns_in_query", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/columns.sql", - "original_file_path": "macros/adapters/columns.sql", - "name": "get_columns_in_query", - "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__get_columns_in_query" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.9965818 - }, - "macro.dbt.default__get_columns_in_query": { - "unique_id": "macro.dbt.default__get_columns_in_query", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/columns.sql", - "original_file_path": "macros/adapters/columns.sql", - "name": "default__get_columns_in_query", - "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% 
call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n {% endcall %}\n\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.statement" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.996951 - }, - "macro.dbt.alter_column_type": { - "unique_id": "macro.dbt.alter_column_type", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/columns.sql", - "original_file_path": "macros/adapters/columns.sql", - "name": "alter_column_type", - "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__alter_column_type" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.997199 - }, - "macro.dbt.default__alter_column_type": { - "unique_id": "macro.dbt.default__alter_column_type", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/columns.sql", - "original_file_path": "macros/adapters/columns.sql", - "name": "default__alter_column_type", - "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. 
Drop the existing column (cascade!)\n 4. Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.statement" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.997854 - }, - "macro.dbt.alter_relation_add_remove_columns": { - "unique_id": "macro.dbt.alter_relation_add_remove_columns", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/columns.sql", - "original_file_path": "macros/adapters/columns.sql", - "name": "alter_relation_add_remove_columns", - "macro_sql": "{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt_snowflake.snowflake__alter_relation_add_remove_columns" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.998132 - }, - "macro.dbt.default__alter_relation_add_remove_columns": { - "unique_id": "macro.dbt.default__alter_relation_add_remove_columns", - "package_name": "dbt", - 
"root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "macros/adapters/columns.sql", - "original_file_path": "macros/adapters/columns.sql", - "name": "default__alter_relation_add_remove_columns", - "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n \n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n \n {% set sql -%}\n \n alter {{ relation.type }} {{ relation }}\n \n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n \n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n \n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.run_query" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.998991 - }, - "macro.dbt.test_unique": { - "unique_id": "macro.dbt.test_unique", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "tests/generic/builtin.sql", - "original_file_path": "tests/generic/builtin.sql", - "name": "test_unique", - "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__test_unique" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.999637 
- }, - "macro.dbt.test_not_null": { - "unique_id": "macro.dbt.test_not_null", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "tests/generic/builtin.sql", - "original_file_path": "tests/generic/builtin.sql", - "name": "test_not_null", - "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__test_not_null" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204506.999885 - }, - "macro.dbt.test_accepted_values": { - "unique_id": "macro.dbt.test_accepted_values", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "tests/generic/builtin.sql", - "original_file_path": "tests/generic/builtin.sql", - "name": "test_accepted_values", - "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__test_accepted_values" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204507.000203 - }, - "macro.dbt.test_relationships": { - "unique_id": "macro.dbt.test_relationships", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "tests/generic/builtin.sql", - "original_file_path": "tests/generic/builtin.sql", - 
"name": "test_relationships", - "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", - "resource_type": "macro", - "tags": [], - "depends_on": { - "macros": [ - "macro.dbt.default__test_relationships" - ] - }, - "description": "", - "meta": {}, - "docs": { - "show": true - }, - "patch_path": null, - "arguments": [], - "created_at": 1651204507.000502 - } - }, - "docs": { - "dbt.__overview__": { - "unique_id": "dbt.__overview__", - "package_name": "dbt", - "root_path": "/Users/ananthpackkildurai/.pyenv/versions/3.9.9/envs/dbt_learn/lib/python3.9/site-packages/dbt/include/global_project", - "path": "overview.md", - "original_file_path": "docs/overview.md", - "name": "__overview__", - "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. 
By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the\nmodels in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/introduction)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion" - } - }, - "exposures": {}, - "metrics": {}, - "selectors": {}, - "disabled": {}, - "parent_map": { - "model.dbtlearn.dim_listings_w_hosts": [ - "model.dbtlearn.dim_hosts_cleansed", - "model.dbtlearn.dim_listings_cleansed_changed" - ], - "model.dbtlearn.dim_listings_cleansed_changed": [ - "model.dbtlearn.src_listings" - ], - "model.dbtlearn.dim_hosts_cleansed": [ - "model.dbtlearn.src_hosts" - ], - "model.dbtlearn.fct_reviews": [ - "model.dbtlearn.src_reviews" - ], - "model.dbtlearn.src_listings": [], - "model.dbtlearn.src_hosts": [], - "model.dbtlearn.src_reviews": [] - }, - "child_map": { - "model.dbtlearn.dim_listings_w_hosts": [], - "model.dbtlearn.dim_listings_cleansed_changed": [ - "model.dbtlearn.dim_listings_w_hosts" - ], - "model.dbtlearn.dim_hosts_cleansed": [ - "model.dbtlearn.dim_listings_w_hosts" - ], - "model.dbtlearn.fct_reviews": [], - "model.dbtlearn.src_listings": [ - "model.dbtlearn.dim_listings_cleansed_changed" - ], - "model.dbtlearn.src_hosts": [ - "model.dbtlearn.dim_hosts_cleansed" - ], - "model.dbtlearn.src_reviews": [ - "model.dbtlearn.fct_reviews" - ] - } -} \ No newline at end of file diff --git 
a/src/test/resources/schema/entities.proto b/src/test/resources/schema/entities.proto deleted file mode 100644 index f02f9c7..0000000 --- a/src/test/resources/schema/entities.proto +++ /dev/null @@ -1,35 +0,0 @@ -syntax = "proto3"; - -package org.entities; - -import "google/protobuf/descriptor.proto"; -import "protocol.proto"; - -option java_package = "org.entities.proto"; -option java_outer_classname = "EntityBuilder"; - -message Department { - option(org.schemata.schema.type) = ENTITY; - int32 id = 1; - string name = 2; -} - -message Person { - option(org.schemata.schema.message_core).description = "This is the description of the users table"; - option(org.schemata.schema.message_core).comment = "The comment added after thought"; - option(org.schemata.schema.owner) = "Growth"; - option(org.schemata.schema.domain) = "Core"; - option(org.schemata.schema.type) = ENTITY; - - string name = 1 - [(org.schemata.schema.field_core).description = "person name"]; - - int32 id = 2 - [(org.schemata.schema.field_core).description = "unique identifier for a given person", (org.schemata.schema.is_primary_key) = true]; - - string email = 3 - [(org.schemata.schema.field_core).description = "official email address", (org.schemata.schema.is_classified) = true, (org.schemata.schema.classification_level) = "HIGH", (org.schemata.schema.product_type) = "email"]; - - Department dept = 4 - [(org.schemata.schema.field_core).description = "department name of the person"] ; -} diff --git a/user/build-user-trigger.yaml b/user/build-user-trigger.yaml new file mode 100644 index 0000000..84d3f43 --- /dev/null +++ b/user/build-user-trigger.yaml @@ -0,0 +1,35 @@ +steps: +- id: "tests" + name: gcr.io/cloud-builders/git + entrypoint: /bin/bash + args: + - -c + - | + echo "here we run all our unit tests" + waitFor: ['-'] +- id: "replace tag" + name: gcr.io/cloud-builders/git + entrypoint: /bin/bash + args: + - -c + - | + sed -e "s/tag/"$SHORT_SHA"/g" ./user/k8s_user.yaml > ./user/k8s_user_latest.yaml 
+ waitFor: ['tests'] +- id: "build-user" + name: "gcr.io/cloud-builders/docker" + args: ["build", "-f", "Dockerfile.user", "-t", "us-central1-docker.pkg.dev/prodenv1/schematalabs-user-app/user:$SHORT_SHA", "."] + waitFor: ["replace tag"] +- id: "push-user" + name: "gcr.io/cloud-builders/docker" + args: ["push", "us-central1-docker.pkg.dev/prodenv1/schematalabs-user-app/user:$SHORT_SHA"] + waitFor: ["build-user"] + # deploy container image to GKE. +- id: "deploy-user" + name: "gcr.io/cloud-builders/gke-deploy" + args: + - run + - --filename=./user/k8s_user_latest.yaml + - --image=us-central1-docker.pkg.dev/prodenv1/schematalabs-user-app/user:$SHORT_SHA + - --location=us-central1 + - --cluster=schematalabs + waitFor: ["push-user"] diff --git a/user/k8s_user.yaml b/user/k8s_user.yaml new file mode 100644 index 0000000..7b2bd74 --- /dev/null +++ b/user/k8s_user.yaml @@ -0,0 +1,104 @@ +# Namespace +apiVersion: v1 +kind: Namespace +metadata: + name: schematalabs +--- +#secrets +apiVersion: v1 +kind: Secret +metadata: + name: user-app-secret + namespace: schematalabs +type: Opaque +data: + POSTGRES_USER: cG9zdGdyZXM= + POSTGRES_PASS: T3BzQmVAY2gxQDMk + SMTP_USER_NAME: c2NoZW1hdGFsYWJzQGdtYWlsLmNvbQ== + SMTP_PASSWORD: cGl2cWh6c2J6b2hyb3V4cA== +--- +# Deployment.yaml +apiVersion: apps/v1 +kind: Deployment +metadata: + name: user-app-deployment + namespace: schematalabs +spec: + replicas: 1 + selector: + matchLabels: + app: dev + template: + metadata: + labels: + app: dev + spec: + containers: + - name: user-app + image: us-central1-docker.pkg.dev/prodenv1/schematalabs-user-app/user:tag + ports: + - containerPort: 7080 + resources: + limits: + cpu: 500m + memory: 2Gi + requests: + cpu: 500m + memory: 2Gi + env: + - name: POSTGRES_USER + valueFrom: + secretKeyRef: + name: user-app-secret + key: POSTGRES_USER + - name: POSTGRES_PASS + valueFrom: + secretKeyRef: + name: user-app-secret + key: POSTGRES_PASS + - name: SMTP_USER_NAME + valueFrom: + secretKeyRef: + name: 
user-app-secret + key: SMTP_USER_NAME + - name: SMTP_PASSWORD + valueFrom: + secretKeyRef: + name: user-app-secret + key: SMTP_PASSWORD +--- +# BackendConfig to do health check so above deployment will become ready +apiVersion: cloud.google.com/v1 +kind: BackendConfig +metadata: + name: health-check-config-user + namespace: schematalabs +spec: + healthCheck: + checkIntervalSec: 30 + port: 7080 + type: HTTP #case-sensitive + requestPath: /user/actuator/health +--- +# Service.yaml +apiVersion: v1 +kind: Service +metadata: + name: user-app-service + namespace: schematalabs + annotations: + cloud.google.com/backend-config: '{"default": "health-check-config-user"}' +spec: + type: ClusterIP + selector: + app: dev + ports: + - name: http + port: 7080 + targetPort: 7080 + protocol: TCP + - name: https + port: 443 + targetPort: 7080 + protocol: TCP +--- \ No newline at end of file diff --git a/user/pom.xml b/user/pom.xml new file mode 100644 index 0000000..01b927a --- /dev/null +++ b/user/pom.xml @@ -0,0 +1,175 @@ + + + + opsbeach + com.opsbeach + 1.0-SNAPSHOT + + 4.0.0 + user + user + + + org.springframework.boot + spring-boot-starter-actuator + + + org.postgresql + postgresql + + + org.springframework.boot + spring-boot-starter-data-jpa + + + org.springframework.boot + spring-boot-starter-json + + + org.springframework.boot + spring-boot-starter-web-services + + + org.springframework.boot + spring-boot-starter-web + + + org.springframework.boot + spring-boot-starter-data-rest + + + org.springframework.boot + spring-boot-starter-test + test + + + org.junit.vintage + junit-vintage-engine + + + + + + javax.validation + validation-api + + + + io.springfox + springfox-swagger2 + + + io.springfox + springfox-swagger-ui + + + io.springfox + springfox-data-rest + + + + org.flywaydb + flyway-core + + + + com.opsbeach + shared-lib + + + + org.projectlombok + lombok + + + io.micrometer + micrometer-registry-jmx + + + + org.junit.jupiter + junit-jupiter-api + + + + + + + 
org.apache.maven.plugins + maven-compiler-plugin + + + + org.apache.maven.plugins + maven-surefire-plugin + + + + org.jacoco + jacoco-maven-plugin + ${jacoco.version} + + + + prepare-agent + + + + report + prepare-package + + report + + + + + + + com/opsbeach/user/mapper/** + + com/opsbeach/user/entity/** + + com/opsbeach/user/dto/** + + com/opsbeach/user/task/** + + com/opsbeach/user/repository/** + + com/opsbeach/user/common/** + + + + + org.springframework.boot + spring-boot-maven-plugin + ${org.springframework.boot} + + + + repackage + + + + + + + + + + org.jacoco + jacoco-maven-plugin + ${jacoco.version} + + + + + report + + + + + + + \ No newline at end of file diff --git a/user/src/main/java/com/opsbeach/user/UserApplication.java b/user/src/main/java/com/opsbeach/user/UserApplication.java new file mode 100644 index 0000000..d5f6ccc --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/UserApplication.java @@ -0,0 +1,23 @@ +package com.opsbeach.user; + +import com.opsbeach.user.base.BaseRepositoryImpl; +import lombok.extern.slf4j.Slf4j; +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.data.jpa.repository.config.EnableJpaRepositories; + +/** + *

+ * User Service main class. + *

+ */ +@Slf4j +@SpringBootApplication(scanBasePackages = "com.opsbeach") +@EnableJpaRepositories(repositoryBaseClass = BaseRepositoryImpl.class) +public class UserApplication { + + public static void main(String[] args) { + SpringApplication.run(UserApplication.class, args); + log.info("User Service has been started."); + } +} diff --git a/user/src/main/java/com/opsbeach/user/base/App2AppConfig.java b/user/src/main/java/com/opsbeach/user/base/App2AppConfig.java new file mode 100644 index 0000000..d961946 --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/base/App2AppConfig.java @@ -0,0 +1,36 @@ +package com.opsbeach.user.base; + +import com.opsbeach.sharedlib.utils.YamlPropertySourceFactory; +import lombok.Getter; +import lombok.Setter; +import org.springframework.boot.context.properties.ConfigurationProperties; +import org.springframework.boot.context.properties.EnableConfigurationProperties; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.PropertySource; + +import java.util.HashMap; +import java.util.Map; + +/** + *

+ * Fetches values from the application-url.yml file. + *

+ */ +@Configuration +@EnableConfigurationProperties +@Getter +@Setter +@ConfigurationProperties(prefix = "application") +@PropertySource(value = "classpath:application-url.yml", factory = YamlPropertySourceFactory.class) +public class App2AppConfig { + private Map connect = new HashMap<>(); + private Map analytics = new HashMap<>(); + + public String getIntegrationBaseUrl() { + return this.getConnect().get(Constants.BASE_URL); + } + + public String getTransactionBaseUrl() { + return this.getAnalytics().get(Constants.BASE_URL); + } +} \ No newline at end of file diff --git a/user/src/main/java/com/opsbeach/user/base/BaseDto.java b/user/src/main/java/com/opsbeach/user/base/BaseDto.java new file mode 100644 index 0000000..d89fc5a --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/base/BaseDto.java @@ -0,0 +1,34 @@ +package com.opsbeach.user.base; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; +import lombok.experimental.SuperBuilder; + +import java.time.LocalDateTime; + +/** + *

+ * Base fields common to every table; every DTO extends this class. + *

+ */ +@Getter +@Setter +@SuperBuilder +@NoArgsConstructor +@AllArgsConstructor +public abstract class BaseDto { + private Long id; + @JsonIgnore + private LocalDateTime createdAt; + @JsonIgnore + private LocalDateTime updatedAt; + @JsonIgnore + private Boolean isDeleted; + @JsonIgnore + private Long createdBy; + @JsonIgnore + private Long updatedBy; +} \ No newline at end of file diff --git a/user/src/main/java/com/opsbeach/user/base/BaseMapper.java b/user/src/main/java/com/opsbeach/user/base/BaseMapper.java new file mode 100644 index 0000000..4376947 --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/base/BaseMapper.java @@ -0,0 +1,15 @@ +package com.opsbeach.user.base; + +import org.springframework.stereotype.Component; + +/** + *

+ * BaseMapper for converting a domain model to a DTO and vice versa. + *

+ */ +@Component +public interface BaseMapper { + D domainToDto(M baseModel); + + M dtoToDomain(D baseDto); +} \ No newline at end of file diff --git a/user/src/main/java/com/opsbeach/user/base/BaseModel.java b/user/src/main/java/com/opsbeach/user/base/BaseModel.java new file mode 100644 index 0000000..cf71282 --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/base/BaseModel.java @@ -0,0 +1,53 @@ +package com.opsbeach.user.base; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import jakarta.persistence.Column; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.MappedSuperclass; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; +import lombok.experimental.SuperBuilder; +import org.hibernate.annotations.CreationTimestamp; +import org.hibernate.annotations.UpdateTimestamp; + +import java.time.LocalDateTime; + +/** + *

+ * Base fields common to every table; every entity extends this class. + *

+ */ +@MappedSuperclass +@Getter +@Setter +@SuperBuilder +@NoArgsConstructor +@AllArgsConstructor +public abstract class BaseModel { + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + @Column(updatable = false, nullable = false) + @JsonIgnore + private Long id; + @JsonIgnore + @Column(name = "is_deleted") + private Boolean isDeleted = Boolean.FALSE; + @JsonIgnore + @CreationTimestamp + @Column(name = "created_at", updatable = false, nullable = false) + private LocalDateTime createdAt; + @JsonIgnore + @UpdateTimestamp + @Column(name = "updated_at") + private LocalDateTime updatedAt; + @JsonIgnore + @Column(name = "created_by") + private long createdBy; + @JsonIgnore + @Column(name = "updated_by") + private long updatedBy; +} \ No newline at end of file diff --git a/user/src/main/java/com/opsbeach/user/base/BaseRepository.java b/user/src/main/java/com/opsbeach/user/base/BaseRepository.java new file mode 100644 index 0000000..babba00 --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/base/BaseRepository.java @@ -0,0 +1,19 @@ +package com.opsbeach.user.base; + +import org.springframework.data.jpa.repository.JpaRepository; +import org.springframework.data.jpa.repository.JpaSpecificationExecutor; +import org.springframework.data.repository.NoRepositoryBean; +import org.springframework.data.rest.core.annotation.RepositoryRestResource; +import org.springframework.transaction.annotation.Transactional; + +/** + *

+ * Base repository which every table's repository extends. + *

+ */ +@Transactional +@NoRepositoryBean +@RepositoryRestResource(exported = false) +public interface BaseRepository extends JpaRepository, JpaSpecificationExecutor { + void refresh(T t); +} \ No newline at end of file diff --git a/user/src/main/java/com/opsbeach/user/base/BaseRepositoryImpl.java b/user/src/main/java/com/opsbeach/user/base/BaseRepositoryImpl.java new file mode 100644 index 0000000..a9bc72b --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/base/BaseRepositoryImpl.java @@ -0,0 +1,65 @@ +package com.opsbeach.user.base; + +import jakarta.persistence.EntityManager; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; +import org.springframework.data.jpa.domain.Specification; +import org.springframework.data.jpa.repository.support.JpaEntityInformation; +import org.springframework.data.jpa.repository.support.SimpleJpaRepository; +import org.springframework.lang.NonNull; +import org.springframework.transaction.annotation.Transactional; + +import java.util.Objects; +import java.util.Optional; + +/** + *

+ * Base repository implementation which every table's repository extends. + *

+ */ +public class BaseRepositoryImpl extends SimpleJpaRepository implements BaseRepository { + + private final EntityManager entityManager; + private final Specification notDeleted = (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(Constants.IS_DELETED), Boolean.FALSE); + + public BaseRepositoryImpl(JpaEntityInformation entityInformation, EntityManager entityManager) { + super(entityInformation, entityManager); + this.entityManager = entityManager; + } + + @Override + @NonNull + public Optional findOne(Specification specification) { + Specification baseSpec = Objects.requireNonNull(specification).and(notDeleted); + return super.findOne(baseSpec); + } + + @Override + @NonNull + public Page findAll(@NonNull Pageable pageable) { + return super.findAll(notDeleted, pageable); + } + + @Override + @NonNull + public Page findAll(Specification specification, @NonNull Pageable pageable) { + Specification baseSpec = notDeleted.and(specification); + return super.findAll(baseSpec, Objects.requireNonNull(pageable)); + } + + @Override + public long count() { + return super.count(notDeleted); + } + + @Override + public long count(Specification spec) { + return super.count(notDeleted.and(spec)); + } + + @Override + @Transactional + public void refresh(T t) { + entityManager.refresh(t); + } +} \ No newline at end of file diff --git a/user/src/main/java/com/opsbeach/user/base/BaseService.java b/user/src/main/java/com/opsbeach/user/base/BaseService.java new file mode 100644 index 0000000..e6b3dfc --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/base/BaseService.java @@ -0,0 +1,141 @@ +package com.opsbeach.user.base; + +import com.opsbeach.sharedlib.exception.ErrorCode; +import com.opsbeach.sharedlib.exception.RecordNotFoundException; +import com.opsbeach.sharedlib.response.ResponseMessage; +import com.opsbeach.user.base.specification.IdSpecifications; +import org.springframework.beans.factory.annotation.Autowired; +import 
org.springframework.data.jpa.domain.Specification; +import org.springframework.http.HttpStatus; +import org.springframework.web.server.ResponseStatusException; + +import java.lang.reflect.ParameterizedType; +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; + +/** + *

+ * Base service which every application service extends. + *

+ */ +public abstract class BaseService { + + BaseRepository baseRepository; + IdSpecifications idSpecifications; + BaseMapper baseMapper; + Class modelType; + @Autowired + private ResponseMessage responseMessage; + + protected BaseService(BaseRepository baseRepository, BaseMapper baseMapper, + IdSpecifications idSpecifications) { + this.baseRepository = baseRepository; + this.idSpecifications = idSpecifications; + this.baseMapper = baseMapper; + this.modelType = (Class) ((ParameterizedType) getClass().getGenericSuperclass()).getActualTypeArguments()[0]; + } + + public void validateAdd(D incomingDto) { + } + + public final D add(D incomingDto) { + validateAdd(incomingDto); + var incomingModel = baseMapper.dtoToDomain(incomingDto); + var savedModel = addModel(incomingModel); + return baseMapper.domainToDto(savedModel); + } + + public final M addModel(M incomingModel) { + return baseRepository.save(incomingModel); + } + + /** + * The extending services shall implement their logic to patch the + * `toUpdateModel` with the `incomingModel` + *

+ * This method should be abstract in which case all extending service classes + * must provide an implementation. + * + * @param incomingModel - Model with updated values. + * @param toUpdateModel - Model returned by Database which needs to be updated. + */ + public void doPatch(M incomingModel, M toUpdateModel) { + throw new ResponseStatusException(HttpStatus.NOT_IMPLEMENTED); + } + + public void validatePatch(D incomingDto) { + } + + public void patch(D incomingDto) { + Optional toUpdate = baseRepository.findOne(idSpecifications.findById(incomingDto.getId())); + if (toUpdate.isEmpty()) { + throw new RecordNotFoundException(ErrorCode.RECORD_NOT_FOUND, responseMessage.getErrorMessage(ErrorCode.RECORD_NOT_FOUND)); + } + validatePatch(incomingDto); + var incomingModel = baseMapper.dtoToDomain(incomingDto); + var toUpdateModel = toUpdate.get(); + patchModel(incomingModel, toUpdateModel); + } + + public void patchModel(M incomingModel, M toUpdateModel) { + doPatch(incomingModel, toUpdateModel); + baseRepository.saveAndFlush(toUpdateModel); + } + + public D findById(Long id) { + Optional entity = findModelById(id); + if (entity.isEmpty()) { + throw new RecordNotFoundException(ErrorCode.RECORD_NOT_FOUND, + responseMessage.getErrorMessage(ErrorCode.RECORD_NOT_FOUND, + String.format("There is no %s with id %d", modelType.getSimpleName(), id))); + } + return baseMapper.domainToDto(entity.get()); + } + + public D findOne(Specification specs) { + Optional entity = findOneModel(specs); + if (entity.isEmpty()) { + throw new RecordNotFoundException(ErrorCode.RECORD_NOT_FOUND, responseMessage.getErrorMessage(ErrorCode.RECORD_NOT_FOUND)); + } + return baseMapper.domainToDto(entity.get()); + } + + public Optional findOneModel(Specification specs) { + return baseRepository.findOne(specs); + } + + + public Optional findModelById(Long id) { + return baseRepository.findById(id); + } + + public D findByClientName(String clientName) { + Specification baseSpecification = 
idSpecifications.findByName(clientName); + Optional entity = baseRepository.findOne(baseSpecification); + if (entity.isEmpty()) { + throw new RecordNotFoundException(ErrorCode.RECORD_NOT_FOUND, + responseMessage.getErrorMessage(ErrorCode.RECORD_NOT_FOUND, + String.format(". There is no %s with %s", modelType.getSimpleName(), clientName))); + } + return baseMapper.domainToDto(entity.get()); + } + + public List findAll() { + Specification mSpecification = idSpecifications.notDeleted(); + List models = findAllModels(mSpecification); + return models.stream().map(m -> baseMapper.domainToDto(m)).collect(Collectors.toList()); + } + + public List findAllModels(Specification specifications) { + return baseRepository.findAll(specifications); + } + + public Optional findOneOrReturnEmpty(Specification specs) { + Optional entity = findOneModel(specs); + if (entity.isEmpty()) { + return Optional.empty(); + } + return Optional.of(baseMapper.domainToDto(entity.get())); + } +} diff --git a/user/src/main/java/com/opsbeach/user/base/Constants.java b/user/src/main/java/com/opsbeach/user/base/Constants.java new file mode 100644 index 0000000..f45bf36 --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/base/Constants.java @@ -0,0 +1,43 @@ +package com.opsbeach.user.base; + +import lombok.AccessLevel; +import lombok.NoArgsConstructor; + +/** + *

+ * Application constant values. + *

+ */ +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public class Constants { + // Common + public static final String ID = "id"; + public static final String WEEK = "WEEK"; + public static final String MONTH = "MONTH"; + public static final String MOBILE = "mobile"; + public static final String TRUE_VALUE = "true"; + public static final String BASE_URL = "baseURL"; + public static final String TIME_ZONE = "timeZone"; + public static final String TENANT_ID = "tenantId"; + public static final String IS_DELETED = "isDeleted"; + public static final String SESSION_ID = "sessionId"; + public static final String ACCOUNT_ID = "accountId"; + public static final String LOGIN_MODE = "loginMode"; + public static final String IS_LOGGED_IN = "isLoggedIn"; + public static final String CUSTOMER_VUA = "customerVua"; + public static final String END_OF_DAY = "T23:59:59.999Z"; + public static final String START_OF_DAY = "T00:00:00.000Z"; + public static final String AUTHORIZATION = "Authorization"; + public static final String DEFAULT_CLIENT_SCHEMA = "user"; + public static final String CONSENT_HANDLE = "consentHandle"; + public static final String CURRENT_DEVICE = "currentDevice"; + public static final String USER_SESSION_ID = "userSessionId"; + public static final String IS_DATA_CATEGORIZATION_SUBSCRIBED = "isDataCategorizationSubscribed"; + public static final String ACCESS_TOKEN = "accessToken"; + public static final String REFRESH_TOKEN = "refreshToken"; + + //Table + public static final String TABLE_USER = "\"user\""; + public static final String ADMIN_USER_NAME = "schematalabs@gmail.com"; + public static final String ADMIN_CLIENT_NAME = "demo client"; +} diff --git a/user/src/main/java/com/opsbeach/user/base/hibernate/HibernateConfig.java b/user/src/main/java/com/opsbeach/user/base/hibernate/HibernateConfig.java new file mode 100644 index 0000000..42cc16c --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/base/hibernate/HibernateConfig.java @@ -0,0 +1,41 @@ +package 
com.opsbeach.user.base.hibernate; + +import org.hibernate.cfg.AvailableSettings; +import org.springframework.boot.autoconfigure.orm.jpa.JpaProperties; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.orm.jpa.JpaVendorAdapter; +import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean; +import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter; +import org.springframework.transaction.annotation.EnableTransactionManagement; + +import javax.sql.DataSource; +import java.util.HashMap; +import java.util.Map; + +@Configuration +@EnableTransactionManagement +public class HibernateConfig { + private final JpaProperties jpaProperties; + + public HibernateConfig(JpaProperties jpaProperties) { + this.jpaProperties = jpaProperties; + } + + @Bean + JpaVendorAdapter jpaVendorAdapter() { + return new HibernateJpaVendorAdapter(); + } + + @Bean + LocalContainerEntityManagerFactoryBean entityManagerFactory(DataSource dataSource) { + Map jpaPropertiesMap = new HashMap<>(jpaProperties.getProperties()); + jpaPropertiesMap.put(AvailableSettings.FORMAT_SQL, Boolean.TRUE); + var localContainerEntityManagerFactoryBean = new LocalContainerEntityManagerFactoryBean(); + localContainerEntityManagerFactoryBean.setDataSource(dataSource); + localContainerEntityManagerFactoryBean.setPackagesToScan("com.opsbeach*"); + localContainerEntityManagerFactoryBean.setJpaVendorAdapter(this.jpaVendorAdapter()); + localContainerEntityManagerFactoryBean.setJpaPropertyMap(jpaPropertiesMap); + return localContainerEntityManagerFactoryBean; + } +} \ No newline at end of file diff --git a/user/src/main/java/com/opsbeach/user/base/specification/IdSpecifications.java b/user/src/main/java/com/opsbeach/user/base/specification/IdSpecifications.java new file mode 100644 index 0000000..1bbc49a --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/base/specification/IdSpecifications.java @@ -0,0 +1,97 
@@ +package com.opsbeach.user.base.specification; + +import com.opsbeach.sharedlib.utils.Status; +import com.opsbeach.user.base.Constants; +import org.springframework.data.jpa.domain.Specification; +import org.springframework.stereotype.Component; + +/** + *

+ * Specifications for querying the database. + *

+ */ +@Component +public class IdSpecifications { + + private static final String CLIENT_ID = "client_id"; + private static final String NAME = "name"; + private static final String TYPE = "type"; + private static final String ROLE_ID = "roleId"; + private static final String EMAIL_ID = "emailId"; + private static final String IS_RECENT = "isRecent"; + private static final String LOCATION = "locationName"; + private static final String IS_BATCH_USER = "isBatchUser"; + private static final String IS_SUBSCRIBED = "isSubscribed"; + private static final String USER_ACTIVITY = "userActivity"; + private static final String USER_ID = "userId"; + private static final String USERNAME = "username"; + + public Specification findById(long id) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(Constants.ID), id); + } + + public Specification findByMobileNumber(String mobileNumber) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(Constants.MOBILE), mobileNumber.toLowerCase()); + } + + public Specification notDeleted() { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(Constants.IS_DELETED), Boolean.FALSE); + } + + public Specification getPermissionsByRoleId(long roleId) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(ROLE_ID), roleId); + } + + public Specification findByUserId(long userId) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(USER_ID), userId); + } + + public Specification findByUsername(String username) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(USERNAME), username); + } + + public Specification findByRecentAddress(long userId) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.and(criteriaBuilder.equal(root.get(USER_ID), userId), criteriaBuilder.equal(root.get(IS_RECENT), Boolean.TRUE)); + } + + public Specification findByName(String 
name) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(NAME), name); + } + + public Specification findAddressByAccountId(long accountId) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.and(criteriaBuilder.equal(root.get(Constants.ACCOUNT_ID), accountId)); + } + + public Specification getLocationDetails(String city) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(LOCATION), city); + } + + public Specification findByEmail(String emailId) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(EMAIL_ID), emailId.toLowerCase()); + } + + public Specification findByBatchUser() { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(IS_BATCH_USER), Boolean.TRUE); + } + + public Specification findByAccessToken(String accessToken) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(Constants.ACCESS_TOKEN), accessToken); + } + + public Specification findByRefreshToken(String refreshToken) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(Constants.REFRESH_TOKEN), refreshToken); + } + + public Specification getWidgetDetailsWithType(String type) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.and(criteriaBuilder.equal(root.get(IS_SUBSCRIBED), Boolean.TRUE), criteriaBuilder.equal(root.get(TYPE), type)); + } + + public Specification getWidgetDetails() { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(IS_SUBSCRIBED), Boolean.TRUE); + } + + public Specification findByOpenSession(Long userId) { + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.and(criteriaBuilder.equal(root.get(USER_ID), userId), criteriaBuilder.equal(root.get(USER_ACTIVITY), Status.ACTIVE)); + } + +} diff --git a/user/src/main/java/com/opsbeach/user/controller/AuthenticationController.java 
b/user/src/main/java/com/opsbeach/user/controller/AuthenticationController.java new file mode 100644 index 0000000..1fcd8c0 --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/controller/AuthenticationController.java @@ -0,0 +1,122 @@ +package com.opsbeach.user.controller; + +import com.opsbeach.sharedlib.dto.AuthenticationResponseDto; +import com.opsbeach.sharedlib.dto.GenericResponseDto; +import com.opsbeach.sharedlib.dto.JwtDto; +import com.opsbeach.sharedlib.dto.LoginDto; +import com.opsbeach.sharedlib.dto.RefreshTokenDto; +import com.opsbeach.sharedlib.dto.RegisterClientDto; +import com.opsbeach.sharedlib.dto.RegistrationDto; +import com.opsbeach.sharedlib.dto.SessionDto; +import com.opsbeach.sharedlib.dto.UserDto; +import com.opsbeach.sharedlib.response.SuccessResponse; +import com.opsbeach.sharedlib.utils.Constants; +import com.opsbeach.user.dto.ClientDto; +import com.opsbeach.user.service.AuthenticationService; +import io.swagger.annotations.ApiOperation; +import jakarta.servlet.http.HttpServletRequest; +import org.springframework.transaction.annotation.Transactional; +import org.springframework.web.bind.annotation.DeleteMapping; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PatchMapping; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +import javax.validation.Valid; + +@RestController +@RequestMapping("v1/auth") +public class AuthenticationController { + + private final AuthenticationService authenticationService; + + + public AuthenticationController(AuthenticationService authenticationService) { + this.authenticationService = authenticationService; + } + + @PostMapping("/register/client") + public SuccessResponse 
registerClient(@RequestBody @Valid RegisterClientDto registerClientDto, HttpServletRequest httpServletRequest) { + return SuccessResponse.statusCreated(authenticationService.registerClient(registerClientDto)); + } + + @PostMapping("/add/client") + public SuccessResponse registerClient(@RequestParam("workEmail") String workEmail) { + return SuccessResponse.statusCreated(authenticationService.registerClient(workEmail)); + } + + @PostMapping("/register/user") + public SuccessResponse registerUser(@RequestBody @Valid RegistrationDto registrationDto) { + return SuccessResponse.statusCreated(authenticationService.registerUser(registrationDto)); + } + + @PostMapping("/otp") + public SuccessResponse getOtp(@RequestBody @Valid LoginDto.SendOTP sendOTP) { + return SuccessResponse.statusOk(authenticationService.sendOtp(sendOTP.getUsername())); + } + + @PostMapping("/login") + public SuccessResponse login(@RequestBody @Valid LoginDto loginDto) { + return SuccessResponse.statusCreated(authenticationService.login(loginDto)); + } + + @GetMapping("/github-token") + public SuccessResponse githubAuthentication() { + return SuccessResponse.statusCreated(authenticationService.githubAuthentication()); + } + + @PatchMapping("/refresh") + public SuccessResponse refresh(@RequestBody RefreshTokenDto refreshTokenDto) { + return SuccessResponse.statusOk(authenticationService.refresh(refreshTokenDto)); + } + + @PostMapping("/logout") + public SuccessResponse logout(HttpServletRequest httpServletRequest) { + return SuccessResponse.statusCreated(authenticationService.logout(httpServletRequest.getHeader(Constants.AUTHORIZATION_HEADER))); + } + + @Transactional + @GetMapping("/user") + @ApiOperation(value = "Get user by username") + public SuccessResponse findByUsername(@RequestParam("username") String username) { + return SuccessResponse.statusOk(authenticationService.findByUsername(username)); + } + + @Transactional + @PostMapping("/jwt") + @ApiOperation(value = "Add Jwt Token") + public 
SuccessResponse add(@RequestBody @Valid JwtDto jwtDto) { + return SuccessResponse.statusCreated(authenticationService.addJwt(jwtDto)); + } + + @Transactional + @PostMapping(path = "/access") + @ApiOperation(value = "Get Jwt Token by access token") + public SuccessResponse getByAccessToken(@RequestBody @Valid JwtDto jwtDto) { + return SuccessResponse.statusCreated(authenticationService.getByAccessToken(jwtDto.getAccessToken())); + } + + @Transactional + @PostMapping(path = "/refresh") + @ApiOperation(value = "Get Jwt Token by refresh token") + public SuccessResponse getByRefreshToken(@RequestBody @Valid JwtDto jwtDto) { + return SuccessResponse.statusCreated(authenticationService.getByRefreshToken(jwtDto.getRefreshToken())); + } + + @Transactional + @DeleteMapping("/jwt") + @ApiOperation(value = "Delete Jwt Token by access") + public SuccessResponse deleteToken(@RequestBody @Valid JwtDto jwtDto) { + return SuccessResponse.statusOk(authenticationService.deleteByAccessToken(jwtDto.getAccessToken())); + } + + @Transactional + @PostMapping("/audit") + @ApiOperation(value = "Add Jwt Token") + public SuccessResponse addSession(@RequestBody @Valid SessionDto sessionDto) { + return SuccessResponse.statusCreated(authenticationService.addSession(sessionDto)); + } +} diff --git a/user/src/main/java/com/opsbeach/user/controller/ClientController.java b/user/src/main/java/com/opsbeach/user/controller/ClientController.java new file mode 100644 index 0000000..47524c6 --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/controller/ClientController.java @@ -0,0 +1,63 @@ +package com.opsbeach.user.controller; + +import com.opsbeach.sharedlib.response.SuccessResponse; +import com.opsbeach.user.dto.ClientDto; +import com.opsbeach.user.service.ClientService; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiOperation; +import org.springframework.transaction.annotation.Transactional; +import org.springframework.web.bind.annotation.GetMapping; +import 
org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.PutMapping; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +import javax.validation.Valid; + +/** + *

+ * Client Controller + *

+ */ +@Api(tags = "Client Controller") +@RestController +@RequestMapping("v1/client") +public class ClientController { + + private final ClientService clientService; + + public ClientController(ClientService clientService) { + this.clientService = clientService; + } + + @Transactional + @PostMapping("/register") + @ApiOperation(value = "Creates a new client and their tables once a partner sign up") + public SuccessResponse add(@RequestBody @Valid ClientDto clientDto) { + return SuccessResponse.statusCreated(clientService.add(clientDto)); + } + + @Transactional + @GetMapping(path = "/{id}") + @ApiOperation(value = "Returns details of client detail by Id") + public SuccessResponse getClientById(@PathVariable long id) { + return SuccessResponse.statusOk(clientService.findById(id)); + } + + @Transactional + @GetMapping + @ApiOperation(value = "Returns details of client detail by name") + public SuccessResponse getClient(@RequestParam("name") String name) { + return SuccessResponse.statusOk(clientService.getClient(name)); + } + + @Transactional + @PutMapping(path = "/{id}") + @ApiOperation(value = "update onboarded status of client by Id") + public SuccessResponse updateOnBoardedStatus(@RequestParam("isOnboarded") boolean isOnboarded, @PathVariable long id) { + return SuccessResponse.statusOk(clientService.updateOnBoardedStatus(id, isOnboarded)); + } +} diff --git a/user/src/main/java/com/opsbeach/user/controller/JwtController.java b/user/src/main/java/com/opsbeach/user/controller/JwtController.java new file mode 100644 index 0000000..096e562 --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/controller/JwtController.java @@ -0,0 +1,63 @@ +package com.opsbeach.user.controller; + +import com.opsbeach.sharedlib.dto.JwtDto; +import com.opsbeach.sharedlib.response.SuccessResponse; +import com.opsbeach.user.service.JwtService; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiOperation; +import 
org.springframework.transaction.annotation.Transactional; +import org.springframework.web.bind.annotation.DeleteMapping; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; + +import javax.validation.Valid; + + +@Api(tags = "Jwt Controller") +@RestController +@RequestMapping("v1/jwt") +public class JwtController { + + private final JwtService jwtService; + + public JwtController(JwtService jwtService) { + this.jwtService = jwtService; + } + + @Transactional + @PostMapping() + @ApiOperation(value = "Add Jwt Token") + public SuccessResponse add(@RequestBody @Valid JwtDto jwtDto) { + return SuccessResponse.statusCreated(jwtService.add(jwtDto)); + } + + @GetMapping("/check") + public SuccessResponse checkTokenValid() { + return SuccessResponse.statusOk(Boolean.TRUE); + } + + @Transactional + @GetMapping(path = "/{authenticationToken}/access") + @ApiOperation(value = "Get Jwt Token by access token") + public SuccessResponse getByAccessToken(@PathVariable String authenticationToken) { + return SuccessResponse.statusOk(jwtService.getByAccessToken(authenticationToken)); + } + + @Transactional + @GetMapping(path = "/{refreshToken}/refresh") + @ApiOperation(value = "Get Jwt Token by refresh token") + public SuccessResponse getByRefreshToken(@PathVariable String refreshToken) { + return SuccessResponse.statusOk(jwtService.getByRefreshToken(refreshToken)); + } + + @Transactional + @DeleteMapping("/{accessToken}") + @ApiOperation(value = "Delete Jwt Token by access") + public SuccessResponse delete(@PathVariable(name = "accessToken") String accessToken) { + return SuccessResponse.statusOk(jwtService.delete(accessToken)); + } +} diff --git 
a/user/src/main/java/com/opsbeach/user/controller/SessionController.java b/user/src/main/java/com/opsbeach/user/controller/SessionController.java new file mode 100644 index 0000000..1c1c3c7 --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/controller/SessionController.java @@ -0,0 +1,35 @@ +package com.opsbeach.user.controller; + +import com.opsbeach.sharedlib.dto.SessionDto; +import com.opsbeach.sharedlib.response.SuccessResponse; +import com.opsbeach.user.service.SessionService; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiOperation; +import org.springframework.transaction.annotation.Transactional; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; + +import javax.validation.Valid; + + +@Api(tags = "Session Controller") +@RestController +@RequestMapping("v1/session") +public class SessionController { + + private final SessionService sessionService; + + public SessionController(SessionService sessionService) { + this.sessionService = sessionService; + } + + @Transactional + @PostMapping() + @ApiOperation(value = "Add Jwt Token") + public SuccessResponse add(@RequestBody @Valid SessionDto sessionDto) { + return SuccessResponse.statusCreated(sessionService.add(sessionDto)); + } + +} diff --git a/user/src/main/java/com/opsbeach/user/controller/UserController.java b/user/src/main/java/com/opsbeach/user/controller/UserController.java new file mode 100644 index 0000000..4c6bcae --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/controller/UserController.java @@ -0,0 +1,57 @@ +package com.opsbeach.user.controller; + +import com.opsbeach.sharedlib.dto.GenericResponseDto; +import com.opsbeach.sharedlib.response.SuccessResponse; +import com.opsbeach.user.dto.UserDetailDto; +import com.opsbeach.user.service.UserService; +import 
io.swagger.annotations.Api; +import io.swagger.annotations.ApiOperation; +import org.springframework.transaction.annotation.Transactional; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PatchMapping; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +import javax.validation.Valid; + +/** + *

+ * User registration and management endpoints. + *

+ */ +@Api(tags = "User Controller") +@RestController +@RequestMapping("v1/user") +public class UserController { + + private final UserService userService; + + + public UserController(UserService userService) { + this.userService = userService; + } + + @Transactional + @PostMapping + @ApiOperation(value = "User Register") + public SuccessResponse registration(@RequestBody @Valid UserDetailDto userDto) { + return SuccessResponse.statusCreated(userService.add(userDto)); + } + + @Transactional + @GetMapping + @ApiOperation(value = "Get All Users") + public SuccessResponse getUser() { + return SuccessResponse.statusOk(userService.findAll()); + } + + @Transactional + @PatchMapping + @ApiOperation(value = "Update company name") + public SuccessResponse registerCompany(@RequestParam("companyname") String companyName) { + return SuccessResponse.statusOk(userService.registerCompany(companyName)); + } +} diff --git a/user/src/main/java/com/opsbeach/user/dto/ClientDto.java b/user/src/main/java/com/opsbeach/user/dto/ClientDto.java new file mode 100644 index 0000000..9f34d34 --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/dto/ClientDto.java @@ -0,0 +1,24 @@ +package com.opsbeach.user.dto; + +import com.opsbeach.user.base.BaseDto; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; +import lombok.experimental.SuperBuilder; + +/** + *

+ * Holds details of a Client (tenant). + *

+ */ +@Getter +@Setter +@SuperBuilder +@NoArgsConstructor +@AllArgsConstructor +public class ClientDto extends BaseDto { + private String name; + private String description; + private boolean isOnboarded; +} \ No newline at end of file diff --git a/user/src/main/java/com/opsbeach/user/dto/UserDetailDto.java b/user/src/main/java/com/opsbeach/user/dto/UserDetailDto.java new file mode 100644 index 0000000..04fe59c --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/dto/UserDetailDto.java @@ -0,0 +1,47 @@ +package com.opsbeach.user.dto; + +import com.opsbeach.sharedlib.utils.OnboardStatus; +import com.opsbeach.user.base.BaseDto; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; +import lombok.experimental.SuperBuilder; + +import java.time.LocalDate; +import java.time.LocalDateTime; + +/** + *

+ * Basic details of a User. + *

+ */ +@Getter +@Setter +@SuperBuilder +@NoArgsConstructor +@AllArgsConstructor +public class UserDetailDto extends BaseDto { + private Long clientId; + private LocalDate dob; + private String emailId; + private String mobile; + private String username; + private String password; + private String firstName; + private String middleName; + private String lastName; + private String forgetPasswordKey; + private String gender; + private String type; + private String timeZone; + private OnboardStatus onboardStatus; + private Integer failureAttempts; + private LocalDateTime lockTime; + private Boolean accountLocked; + private LocalDateTime verificationTokenSentTime; + private LocalDateTime passwordChangedTime; + private String oldPassword; + private String otp; + private LocalDateTime otpSentTime; +} \ No newline at end of file diff --git a/user/src/main/java/com/opsbeach/user/dto/UserDto.java b/user/src/main/java/com/opsbeach/user/dto/UserDto.java new file mode 100644 index 0000000..dd927bd --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/dto/UserDto.java @@ -0,0 +1,31 @@ +package com.opsbeach.user.dto; + +import com.opsbeach.sharedlib.dto.RoleDto; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; +import org.springframework.security.core.GrantedAuthority; + +import java.util.Collection; +import java.util.List; + +@Getter +@Setter +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class UserDto { + private long id; + private String userType; + private String email; + private String username; + private String mobile; + private Collection authorities; + private Boolean isDeleted; + private transient List roles; + private long clientId; + private String onboardStatus; + private String timeZone; +} diff --git a/user/src/main/java/com/opsbeach/user/entity/Client.java b/user/src/main/java/com/opsbeach/user/entity/Client.java new file mode 100644 index 0000000..8c7f74f --- 
/dev/null +++ b/user/src/main/java/com/opsbeach/user/entity/Client.java @@ -0,0 +1,31 @@ +package com.opsbeach.user.entity; + +import com.opsbeach.user.base.BaseModel; + +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.Table; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; +import lombok.experimental.SuperBuilder; + +/** + *

+ * Holds details of a Client (tenant). + *

+ */ +@Entity +@Table +@Getter +@Setter +@SuperBuilder +@NoArgsConstructor +@AllArgsConstructor +public class Client extends BaseModel { + private String name; + private String description; + @Column(name = "is_onboarded") + private boolean isOnboarded; +} \ No newline at end of file diff --git a/user/src/main/java/com/opsbeach/user/entity/Jwt.java b/user/src/main/java/com/opsbeach/user/entity/Jwt.java new file mode 100644 index 0000000..848d1a5 --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/entity/Jwt.java @@ -0,0 +1,33 @@ +package com.opsbeach.user.entity; + +import com.opsbeach.user.base.BaseModel; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.Table; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; +import lombok.experimental.SuperBuilder; + +import java.time.LocalDateTime; + +@Entity +@Table +@Getter +@Setter +@SuperBuilder +@NoArgsConstructor +@AllArgsConstructor +public class Jwt extends BaseModel { + @Column(name = "public_key") + private String publicKey; + @Column(name = "private_key") + private String privateKey; + @Column(name = "access_token") + private String accessToken; + @Column(name = "refresh_token") + private String refreshToken; + @Column(name = "expiry_at") + private LocalDateTime expiryAt; +} diff --git a/user/src/main/java/com/opsbeach/user/entity/Permission.java b/user/src/main/java/com/opsbeach/user/entity/Permission.java new file mode 100644 index 0000000..d55f93f --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/entity/Permission.java @@ -0,0 +1,26 @@ +package com.opsbeach.user.entity; + +import com.opsbeach.user.base.BaseModel; +import jakarta.persistence.Entity; +import jakarta.persistence.Table; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; +import lombok.experimental.SuperBuilder; + +/** + *

+ * Holds details of a Permission (operation). + *

+ */ +@Entity +@Table +@Getter +@Setter +@SuperBuilder +@NoArgsConstructor +@AllArgsConstructor +public class Permission extends BaseModel { + private String operation; +} \ No newline at end of file diff --git a/user/src/main/java/com/opsbeach/user/entity/Role.java b/user/src/main/java/com/opsbeach/user/entity/Role.java new file mode 100644 index 0000000..ecbfc81 --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/entity/Role.java @@ -0,0 +1,27 @@ +package com.opsbeach.user.entity; + +import com.opsbeach.user.base.BaseModel; +import jakarta.persistence.Entity; +import jakarta.persistence.Table; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; +import lombok.experimental.SuperBuilder; + +/** + *

+ * Holds a Role of a user. + *

+ */ +@Entity +@Table +@Getter +@Setter +@SuperBuilder +@NoArgsConstructor +@AllArgsConstructor +public class Role extends BaseModel { + private String name; + private String description; +} \ No newline at end of file diff --git a/user/src/main/java/com/opsbeach/user/entity/RolePermission.java b/user/src/main/java/com/opsbeach/user/entity/RolePermission.java new file mode 100644 index 0000000..7e4c02a --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/entity/RolePermission.java @@ -0,0 +1,30 @@ +package com.opsbeach.user.entity; + +import com.opsbeach.user.base.BaseModel; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.Table; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; +import lombok.experimental.SuperBuilder; + +/** + *

+ * Join table for Role and Permission. + *

+ */ +@Entity +@Table(name = "role_permission") +@Getter +@Setter +@SuperBuilder +@NoArgsConstructor +@AllArgsConstructor +public class RolePermission extends BaseModel { + @Column(name = "role_id") + private long roleId; + @Column(name = "permission_id") + private long permissionId; +} \ No newline at end of file diff --git a/user/src/main/java/com/opsbeach/user/entity/Session.java b/user/src/main/java/com/opsbeach/user/entity/Session.java new file mode 100644 index 0000000..3d54151 --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/entity/Session.java @@ -0,0 +1,36 @@ +package com.opsbeach.user.entity; + +import com.opsbeach.user.base.BaseModel; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.Table; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; +import lombok.experimental.SuperBuilder; + +/** + *

+ * Holds session details of a user. + *

+ */ +@Entity +@Table +@Getter +@Setter +@SuperBuilder +@NoArgsConstructor +@AllArgsConstructor +public class Session extends BaseModel { + @Column(name = "user_id") + private long userId; + private String uri; + private String type; + private String action; + private String module; + @Column(name = "ip_address") + private String ipAddress; + @Column(name = "success_login") + private Boolean successLogin; +} diff --git a/user/src/main/java/com/opsbeach/user/entity/User.java b/user/src/main/java/com/opsbeach/user/entity/User.java new file mode 100644 index 0000000..6bb9f72 --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/entity/User.java @@ -0,0 +1,79 @@ +package com.opsbeach.user.entity; + +import com.opsbeach.sharedlib.utils.OnboardStatus; +import com.opsbeach.user.base.BaseModel; +import com.opsbeach.user.utils.Constants; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.EnumType; +import jakarta.persistence.Enumerated; +import jakarta.persistence.Table; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; +import lombok.experimental.SuperBuilder; +import org.hibernate.annotations.Filter; +import org.hibernate.annotations.FilterDef; +import org.hibernate.annotations.ParamDef; + +import javax.validation.constraints.NotNull; +import java.time.LocalDate; +import java.time.LocalDateTime; + +/** + *

+ * Holds details of a user. + *

+ */ +@Entity +@Table(name = Constants.TABLE_USER) +@Getter +@Setter +@SuperBuilder +@NoArgsConstructor +@AllArgsConstructor +@FilterDef(name = "clientFilter", parameters = {@ParamDef(name = "clientId", type = Long.class)}) +@Filter(name = "clientFilter", condition = ":clientId = client_id") +public class User extends BaseModel { + @Column(name = "client_id") + private Long clientId; + private LocalDate dob; + @Column(name = "email_id") + private String emailId; + private String username; + @Column(unique = true) + private String mobile; + @Column(name = "first_name") + private String firstName; + @Column(name = "middle_name") + private String middleName; + @Column(name = "last_name") + private String lastName; + @NotNull + private String password; + @Column(name = "forget_password_key") + private String forgetPasswordKey; + private String gender; + private String type; + @Column(name = "time_zone") + private String timeZone; + @Enumerated(EnumType.STRING) + @Column(name = "onboard_status") + private OnboardStatus onboardStatus; + @Column(name = "failure_attempts") + private Integer failureAttempts; + @Column(name = "lock_time") + private LocalDateTime lockTime; + @Column(name = "account_locked") + private Boolean accountLocked; + @Column(name = "verification_token_sent_time") + private LocalDateTime verificationTokenSentTime; + @Column(name = "password_changed_time") + private LocalDateTime passwordChangedTime; + @Column(name = "old_password") + private String oldPassword; + private String otp; + @Column(name = "otp_sent_time") + private LocalDateTime otpSentTime; +} diff --git a/user/src/main/java/com/opsbeach/user/entity/UserRole.java b/user/src/main/java/com/opsbeach/user/entity/UserRole.java new file mode 100644 index 0000000..0138dc7 --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/entity/UserRole.java @@ -0,0 +1,30 @@ +package com.opsbeach.user.entity; + +import com.opsbeach.user.base.BaseModel; +import jakarta.persistence.Column; +import 
jakarta.persistence.Entity; +import jakarta.persistence.Table; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; +import lombok.experimental.SuperBuilder; + +/** + *

+ * Join table for User and Role. + *

+ */ +@Entity +@Table(name = "user_role") +@Getter +@Setter +@SuperBuilder +@NoArgsConstructor +@AllArgsConstructor +public class UserRole extends BaseModel { + @Column(name = "user_id") + private long userId; + @Column(name = "role_id") + private long roleId; +} \ No newline at end of file diff --git a/user/src/main/java/com/opsbeach/user/mapper/ClientMapper.java b/user/src/main/java/com/opsbeach/user/mapper/ClientMapper.java new file mode 100644 index 0000000..164cad1 --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/mapper/ClientMapper.java @@ -0,0 +1,35 @@ +package com.opsbeach.user.mapper; + +import com.opsbeach.user.base.BaseMapper; +import com.opsbeach.user.dto.ClientDto; +import com.opsbeach.user.entity.Client; +import org.springframework.stereotype.Component; + + +/** + *

+ * Converts ClientDto to Client entity and vice versa. + *

+ */ +@Component +public class ClientMapper implements BaseMapper { + + public ClientDto domainToDto(Client tenant) { + return ClientDto.builder().name(tenant.getName()) + .description(tenant.getDescription()) + .id(tenant.getId()) + .isOnboarded(tenant.isOnboarded()) + .isDeleted(tenant.getIsDeleted()) + .createdAt(tenant.getCreatedAt()) + .updatedAt(tenant.getUpdatedAt()) + .createdBy(tenant.getCreatedBy()) + .updatedBy(tenant.getUpdatedBy()).build(); + } + + public Client dtoToDomain(ClientDto clientDto) { + return Client.builder().name(clientDto.getName()) + .description(clientDto.getDescription()) + .isOnboarded(clientDto.isOnboarded()) + .isDeleted(clientDto.getIsDeleted()).build(); + } +} diff --git a/user/src/main/java/com/opsbeach/user/mapper/JwtMapper.java b/user/src/main/java/com/opsbeach/user/mapper/JwtMapper.java new file mode 100644 index 0000000..b8a06c9 --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/mapper/JwtMapper.java @@ -0,0 +1,30 @@ +package com.opsbeach.user.mapper; + +import com.opsbeach.sharedlib.dto.JwtDto; +import com.opsbeach.user.entity.Jwt; +import org.springframework.stereotype.Component; + +@Component +public class JwtMapper { + + public JwtDto domainToDto(Jwt jwt) { + return JwtDto.builder().accessToken(jwt.getAccessToken()) + .refreshToken(jwt.getRefreshToken()) + .privateKey(jwt.getPrivateKey()) + .publicKey(jwt.getPublicKey()) + .id(jwt.getId()) + .isDeleted(jwt.getIsDeleted()) + .expireAt(jwt.getExpiryAt()) + .build(); + } + + public Jwt dtoToDomain(JwtDto jwtDto) { + return Jwt.builder().accessToken(jwtDto.getAccessToken()) + .refreshToken(jwtDto.getRefreshToken()) + .privateKey(jwtDto.getPrivateKey()) + .publicKey(jwtDto.getPublicKey()) + .isDeleted(jwtDto.getIsDeleted()) + .expiryAt(jwtDto.getExpireAt()) + .build(); + } +} diff --git a/user/src/main/java/com/opsbeach/user/mapper/SessionMapper.java b/user/src/main/java/com/opsbeach/user/mapper/SessionMapper.java new file mode 100644 index 0000000..d76c4c4 --- 
/dev/null +++ b/user/src/main/java/com/opsbeach/user/mapper/SessionMapper.java @@ -0,0 +1,32 @@ +package com.opsbeach.user.mapper; + +import com.opsbeach.sharedlib.dto.SessionDto; +import com.opsbeach.user.entity.Session; +import org.springframework.stereotype.Component; + +@Component +public class SessionMapper { + + public SessionDto domainToDto(Session session) { + return SessionDto.builder().userId(session.getUserId()) + .action(session.getAction()) + .ipAddress(session.getIpAddress()) + .module(session.getModule()) + .type(session.getType()) + .uri(session.getUri()) + .id(session.getId()) + .successLogin(session.getSuccessLogin()) + .build(); + } + + public Session dtoToDomain(SessionDto sessionDto) { + return Session.builder().userId(sessionDto.getUserId()) + .action(sessionDto.getAction()) + .ipAddress(sessionDto.getIpAddress()) + .module(sessionDto.getModule()) + .type(sessionDto.getType()) + .uri(sessionDto.getUri()) + .successLogin(sessionDto.getSuccessLogin()) + .build(); + } +} diff --git a/user/src/main/java/com/opsbeach/user/mapper/UserMapper.java b/user/src/main/java/com/opsbeach/user/mapper/UserMapper.java new file mode 100644 index 0000000..1a6cd6c --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/mapper/UserMapper.java @@ -0,0 +1,75 @@ +package com.opsbeach.user.mapper; + +import com.opsbeach.sharedlib.dto.RegistrationDto; +import com.opsbeach.user.base.BaseMapper; +import com.opsbeach.user.dto.UserDetailDto; +import com.opsbeach.user.entity.User; +import org.springframework.stereotype.Component; + +/** + *

+ * Converts UserDetailDto to User entity and vice versa. + *

+ */ +@Component +public class UserMapper implements BaseMapper { + + @Override + public UserDetailDto domainToDto(User user) { + return UserDetailDto.builder().clientId(user.getClientId()) + .isDeleted(user.getIsDeleted()) + .firstName(user.getFirstName()) + .onboardStatus(user.getOnboardStatus()) + .username(user.getUsername()) + .emailId(user.getEmailId()) + .accountLocked(user.getAccountLocked()) + .lockTime(user.getLockTime()) + .mobile(user.getMobile()) + .createdAt(user.getCreatedAt()) + .updatedAt(user.getUpdatedAt()) + .password(user.getPassword()) + .id(user.getId()) + .createdBy(user.getCreatedBy()) + .updatedBy(user.getUpdatedBy()) + .otp(user.getOtp()) + .otpSentTime(user.getOtpSentTime()) + .failureAttempts(user.getFailureAttempts()) + .build(); + } + + @Override + public User dtoToDomain(UserDetailDto userDetailDto) { + return User.builder().clientId(userDetailDto.getClientId()) + .isDeleted(Boolean.FALSE) + .firstName(userDetailDto.getFirstName()) + .lastName(userDetailDto.getLastName()) + .onboardStatus(userDetailDto.getOnboardStatus()) + .username(userDetailDto.getUsername()) + .emailId(userDetailDto.getEmailId()) + .password(userDetailDto.getPassword()) + .accountLocked(userDetailDto.getAccountLocked()) + .lockTime(userDetailDto.getLockTime()) + .mobile(userDetailDto.getMobile()) + .otp(userDetailDto.getOtp()) + .otpSentTime(userDetailDto.getOtpSentTime()) + .failureAttempts(userDetailDto.getFailureAttempts()) + .build(); + } + + public UserDetailDto registerToDto(RegistrationDto registrationDto) { + return UserDetailDto.builder() + .clientId(registrationDto.getClientId()) + .isDeleted(Boolean.FALSE) + .accountLocked(Boolean.FALSE) + .firstName(registrationDto.getFirstName()) + .lastName(registrationDto.getLastName()) + .onboardStatus(registrationDto.getOnboardStatus()) + .username(registrationDto.getUsername()) + .emailId(registrationDto.getUsername()) + .password(registrationDto.getPassword()) + .mobile(registrationDto.getMobile()) + 
.oldPassword(registrationDto.getOldPassword()) + .build(); + } + +} diff --git a/user/src/main/java/com/opsbeach/user/repository/ClientRepository.java b/user/src/main/java/com/opsbeach/user/repository/ClientRepository.java new file mode 100644 index 0000000..98d1fc9 --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/repository/ClientRepository.java @@ -0,0 +1,16 @@ +package com.opsbeach.user.repository; + +import com.opsbeach.user.base.BaseRepository; +import com.opsbeach.user.entity.Client; +import org.springframework.stereotype.Repository; + +/** + *

+ * Repository for Client Entity. + *

+ */ +@Repository +public interface ClientRepository extends BaseRepository { + /*@Query(value = "SELECT s.schema_name FROM information_schema.schemata s WHERE s.schema_name = :tenant", nativeQuery = true) + String checkTenant(@Param(value = "tenant") String tenant);*/ +} \ No newline at end of file diff --git a/user/src/main/java/com/opsbeach/user/repository/JwtRespository.java b/user/src/main/java/com/opsbeach/user/repository/JwtRespository.java new file mode 100644 index 0000000..ed31dd9 --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/repository/JwtRespository.java @@ -0,0 +1,10 @@ +package com.opsbeach.user.repository; + +import com.opsbeach.user.base.BaseRepository; +import com.opsbeach.user.entity.Jwt; +import org.springframework.stereotype.Repository; + +@Repository +public interface JwtRespository extends BaseRepository { + +} diff --git a/user/src/main/java/com/opsbeach/user/repository/PermissionRepository.java b/user/src/main/java/com/opsbeach/user/repository/PermissionRepository.java new file mode 100644 index 0000000..3a5f1bd --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/repository/PermissionRepository.java @@ -0,0 +1,14 @@ +package com.opsbeach.user.repository; + +import com.opsbeach.user.base.BaseRepository; +import com.opsbeach.user.entity.Permission; +import org.springframework.stereotype.Repository; + +/** + *

+ * Repository for Permission Entity. + *

+ */ +@Repository +public interface PermissionRepository extends BaseRepository { +} \ No newline at end of file diff --git a/user/src/main/java/com/opsbeach/user/repository/RolePermissionRepository.java b/user/src/main/java/com/opsbeach/user/repository/RolePermissionRepository.java new file mode 100644 index 0000000..d01862c --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/repository/RolePermissionRepository.java @@ -0,0 +1,14 @@ +package com.opsbeach.user.repository; + +import com.opsbeach.user.base.BaseRepository; +import com.opsbeach.user.entity.RolePermission; +import org.springframework.stereotype.Repository; + +/** + *

+ * Repository for RolePermission Entity. + *

+ */ +@Repository +public interface RolePermissionRepository extends BaseRepository { +} \ No newline at end of file diff --git a/user/src/main/java/com/opsbeach/user/repository/RoleRepository.java b/user/src/main/java/com/opsbeach/user/repository/RoleRepository.java new file mode 100644 index 0000000..79518e3 --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/repository/RoleRepository.java @@ -0,0 +1,14 @@ +package com.opsbeach.user.repository; + +import com.opsbeach.user.base.BaseRepository; +import com.opsbeach.user.entity.Role; +import org.springframework.stereotype.Repository; + +/** + *

+ * Repository for Role Entity. + *

+ */ +@Repository +public interface RoleRepository extends BaseRepository { +} \ No newline at end of file diff --git a/user/src/main/java/com/opsbeach/user/repository/SessionRepository.java b/user/src/main/java/com/opsbeach/user/repository/SessionRepository.java new file mode 100644 index 0000000..265550f --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/repository/SessionRepository.java @@ -0,0 +1,10 @@ +package com.opsbeach.user.repository; + +import com.opsbeach.user.base.BaseRepository; +import com.opsbeach.user.entity.Session; +import org.springframework.stereotype.Repository; + +@Repository +public interface SessionRepository extends BaseRepository { + +} diff --git a/user/src/main/java/com/opsbeach/user/repository/UserRepository.java b/user/src/main/java/com/opsbeach/user/repository/UserRepository.java new file mode 100644 index 0000000..4753f71 --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/repository/UserRepository.java @@ -0,0 +1,15 @@ +package com.opsbeach.user.repository; + +import com.opsbeach.user.base.BaseRepository; +import com.opsbeach.user.entity.User; +import org.springframework.stereotype.Repository; + +/** + *

+ * Repository for User Entity. + *

+ */ +@Repository +public interface UserRepository extends BaseRepository { + +} diff --git a/user/src/main/java/com/opsbeach/user/repository/UserRoleRepository.java b/user/src/main/java/com/opsbeach/user/repository/UserRoleRepository.java new file mode 100644 index 0000000..670043e --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/repository/UserRoleRepository.java @@ -0,0 +1,14 @@ +package com.opsbeach.user.repository; + +import com.opsbeach.user.base.BaseRepository; +import com.opsbeach.user.entity.UserRole; +import org.springframework.stereotype.Repository; + +/** + *

+ * Repository for UserRole Entity. + *

+ */ +@Repository +public interface UserRoleRepository extends BaseRepository { +} \ No newline at end of file diff --git a/user/src/main/java/com/opsbeach/user/service/AuthenticationService.java b/user/src/main/java/com/opsbeach/user/service/AuthenticationService.java new file mode 100644 index 0000000..69da23f --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/service/AuthenticationService.java @@ -0,0 +1,299 @@ +package com.opsbeach.user.service; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.nimbusds.jose.JOSEException; +import com.opsbeach.sharedlib.dto.AuthenticationResponseDto; +import com.opsbeach.sharedlib.dto.GenericResponseDto; +import com.opsbeach.sharedlib.dto.JweDto; +import com.opsbeach.sharedlib.dto.JwtDto; +import com.opsbeach.sharedlib.dto.KeyStoreDto; +import com.opsbeach.sharedlib.dto.LoginDto; +import com.opsbeach.sharedlib.dto.RefreshTokenDto; +import com.opsbeach.sharedlib.dto.RegisterClientDto; +import com.opsbeach.sharedlib.dto.RegistrationDto; +import com.opsbeach.sharedlib.dto.RoleDto; +import com.opsbeach.sharedlib.dto.SessionDto; +import com.opsbeach.sharedlib.exception.ErrorCode; +import com.opsbeach.sharedlib.exception.InvalidDataException; +import com.opsbeach.sharedlib.exception.UnAuthorizedException; +import com.opsbeach.sharedlib.response.ResponseMessage; +import com.opsbeach.sharedlib.security.HMac; +import com.opsbeach.sharedlib.security.JweLibrary; +import com.opsbeach.sharedlib.security.RSAKeyGen; +import com.opsbeach.sharedlib.security.RSAMechanism; +import com.opsbeach.sharedlib.utils.Constants; +import com.opsbeach.sharedlib.utils.DateUtil; +import com.opsbeach.sharedlib.utils.OnboardStatus; +import com.opsbeach.sharedlib.utils.StringUtil; +import com.opsbeach.user.dto.ClientDto; +import com.opsbeach.user.dto.UserDetailDto; +import com.opsbeach.user.dto.UserDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.stereotype.Service; + +import java.security.interfaces.RSAPublicKey; 
import java.text.ParseException;
import java.util.Base64;
import java.util.List;
import java.util.Objects;

/**
 * Authentication facade for the user service: client/user registration, OTP
 * login, JWE access/refresh token issuing, token refresh and logout.
 * <p>
 * Each issued token pair uses a fresh RSA key pair ({@code RSAKeyGen}); the
 * access and refresh tokens are JWEs encrypted with the public key, and the
 * Base64-encoded key pair is persisted with the token record via
 * {@link JwtService} so the tokens can be decrypted on later requests.
 */
@Slf4j
@Service
public class AuthenticationService {

    private final HMac hMac; // NOTE(review): injected but never referenced in this class — confirm it is still needed
    private final JweLibrary jweLibrary;
    private final ClientService clientService;
    private final UserService userService;
    private final JwtService jwtService;
    private final RSAMechanism rsaMechanism; // only used by the commented-out decryptClient below
    private final ResponseMessage responseMessage;
    private final RoleService roleService;
    private final SessionService sessionService;

    public AuthenticationService(RSAMechanism rsaMechanism,
                                 RoleService roleService, ResponseMessage responseMessage, HMac hMac,
                                 JweLibrary jweLibrary, JwtService jwtService, UserService userService,
                                 ClientService clientService, SessionService sessionService) {
        this.hMac = hMac;
        this.jweLibrary = jweLibrary;
        this.rsaMechanism = rsaMechanism;
        this.responseMessage = responseMessage;
        this.roleService = roleService;
        this.jwtService = jwtService;
        this.userService = userService;
        this.clientService = clientService;
        this.sessionService = sessionService;
    }

    // NOTE(review): dead code kept commented out — prefer deleting; version control remembers it.
    /*public JweDto decryptToken(String authToken) {
        log.info("Decrypting the token");
        try {
            //var jwtTokenDto = (JwtDto) cacheService.get(authToken, Constants.TOKEN);
            var jwtTokenDto = jwtService.getByAccessToken(authToken);
            return jweLibrary.decrypt(authToken, RSAKeyGen.getStringToKeys(jwtTokenDto.getPublicKey(), jwtTokenDto.getPrivateKey()));
        } catch (JOSEException | JsonProcessingException | ParseException jsonProcessingException) {
            throw new InvalidDataException(ErrorCode.INVALID_JSON_PARSE, responseMessage.getErrorMessage(ErrorCode.INVALID_JSON_PARSE));
        }
    }

    public String decryptClient(String encryptedClient) {
        log.info("Decrypting the tenant");
        try {
            return rsaMechanism.decrypt(encryptedClient);
        } catch (JOSEException | ParseException e) {
            log.error("Error in decrypting the Tenant - {}", e.getMessage());
            return Constants.EMPTY;
        }
    }*/

    /*private TenantDto validateTenant(String tenant) {
        log.info("Validating the incoming Tenant");
        if (StringUtil.isBlank(tenant) || StringUtil.isEmpty(tenant)) {
            throw new TenantEmptyException(ErrorCode.EMPTY_TENANT, responseMessage.getErrorMessage(ErrorCode.EMPTY_TENANT));
        }
        String tenantDetailsUrl = applicationConfig.getUserService().get(Constants.BASE_URL) + TENANT;
        return app2AppService.httpGet(tenantDetailsUrl, app2AppService.setHeaders(App2AppService.formTenantHeader(tenant), null), TenantDto.class);
    }*/

    /** Fetches the user record by username; delegates to {@link UserService}. */
    private UserDetailDto getUser(String username) {
        log.info("Fetching the Details of the User [{}]", username);

        return userService.findByUsername(username);
        /*String getUserDetailsUrl = StringUtil.constructStringEmptySeparator(getUserServiceBaseUrl(), applicationConfig.getUser().get(USER), "?username=", username);
        return app2AppService.httpGet(getUserDetailsUrl, app2AppService.setHeaders(null, null), UserDto.class);*/
    }

    /** Fetches the client (tenant) record by its primary key. */
    private ClientDto getClient(long clientId) {
        log.info("Fetching the Details of the Client [{}]", clientId);
        return clientService.findById(clientId);
        /*String getUserDetailsUrl = StringUtil.constructStringEmptySeparator(getUserServiceBaseUrl(), applicationConfig.getUser().get(CLIENT), "/", String.valueOf(clientId));
        return app2AppService.httpGet(getUserDetailsUrl, app2AppService.setHeaders(new HashMap<>(), null), ClientDto.class);*/
    }

    /** Registers a client from an explicit name/description payload. */
    public ClientDto registerClient(RegisterClientDto registerClientDto) {
        log.info("Client Registration name: [{}] ", registerClientDto.getName());
        var clientDto = ClientDto.builder().name(registerClientDto.getName()).description(registerClientDto.getDescription()).build();
        return clientService.add(clientDto);
    }

    /**
     * Derives a client name from a work email's domain, e.g. "user@acme.com" -> "acme".
     * NOTE(review): assumes the email contains '@' followed later by a '.'; inputs like
     * "user@localhost" or "first.last@io" (last '.' before the '@') throw
     * StringIndexOutOfBoundsException — confirm the email is validated upstream.
     */
    public ClientDto registerClient(String workEmail) {
        var clientName = workEmail.substring(workEmail.indexOf("@")+1, workEmail.lastIndexOf("."));
        log.info("Client Registration name: [{}] ", clientName);
        return clientService.add(clientName);
    }

    /** Registers a user under an existing client with the fixed "USER" role. */
    public GenericResponseDto registerUser(RegistrationDto registrationDto, String client) {
        log.info("User registration username: [{}] ", registrationDto.getUsername());
        return userService.registration(registrationDto, client, "USER");
        /*return app2AppService.httpPost(StringUtil.constructStringEmptySeparator(getUserServiceBaseUrl(),
                applicationConfig.getUser().get(USER_REGISTER)), app2AppService.setHeaders(App2AppService.formClientHeader(client), registrationDto), Object.class);*/
    }

    /**
     * Self-service registration: creates (or reuses) the company as a client, then
     * registers the user under it. Demo users are funneled into the admin client.
     */
    public GenericResponseDto registerUser(RegistrationDto registrationDto) {
        if (registrationDto.getOnboardStatus().equals(OnboardStatus.DEMO_USER)) {
            registrationDto.setCompanyName(com.opsbeach.user.base.Constants.ADMIN_CLIENT_NAME);
        }
        clientService.add(registrationDto.getCompanyName());
        log.info("Client Registration name: [{}] ", registrationDto.getCompanyName());
        return registerUser(registrationDto, registrationDto.getCompanyName());
    }

    /** Generates and emails a login OTP for the given address; returns a status message. */
    public String sendOtp(String email) {
        log.info("Creating OTP for user [{}]", email);
        return userService.sendOtp(email);
    }

    /** OTP login: validates credentials via UserService, then issues a fresh token pair. */
    public AuthenticationResponseDto login(LoginDto loginDto) {
        log.info("Customer [{}] log in with OTP", loginDto.getUsername());
        /*var userDto = app2AppService.httpPost(StringUtil.constructStringEmptySeparator(getUserServiceBaseUrl(),
                applicationConfig.getUser().get(USER_LOGIN)), app2AppService.setHeaders(new HashMap<>(), loginDto), UserDto.class);*/
        var userDetailDto = userService.login(loginDto);
        try {
            return authenticationResponse(userDetailDto, Boolean.FALSE);
        } catch (JOSEException | JsonProcessingException jsonProcessingException) {
            throw new InvalidDataException(ErrorCode.INVALID_JSON_PARSE, responseMessage.getErrorMessage(ErrorCode.INVALID_JSON_PARSE));
        }
    }

    /**
     * Issues a token pair for the fixed admin user (GitHub integration path).
     * NOTE(review): no caller credential check happens here — confirm the endpoint
     * invoking this is itself protected.
     */
    public AuthenticationResponseDto githubAuthentication() {
        var userDetailDto = getUser(com.opsbeach.user.base.Constants.ADMIN_USER_NAME);
        try {
            return authenticationResponse(userDetailDto, Boolean.FALSE);
        } catch (JOSEException | JsonProcessingException jsonProcessingException) {
            throw new InvalidDataException(ErrorCode.INVALID_JSON_PARSE, responseMessage.getErrorMessage(ErrorCode.INVALID_JSON_PARSE));
        }
    }

    /**
     * Builds the token pair for a user: fresh RSA key pair, JWE access + refresh
     * tokens, and (for non-refresh flows) persists the token record with its keys.
     *
     * @param userDto        authenticated user.
     * @param isRefreshToken TRUE when called from the refresh flow — skips persisting
     *                       a new token record (the caller updates the existing one).
     */
    public AuthenticationResponseDto authenticationResponse(UserDetailDto userDto, Boolean isRefreshToken) throws JOSEException, JsonProcessingException {
        log.info("Get Authentication response for the User [{}]", userDto.getUsername());
        var clientDto = getClient(userDto.getClientId());
        var jweDto = JweDto.builder().userId(userDto.getId()).isRefresh(Boolean.FALSE).client(clientDto.getName()).username(userDto.getUsername()).build();
        var keyStoreDto = RSAKeyGen.getPublicAndPrivateKey();
        String accessToken = jweLibrary.encrypt(jweDto, keyStoreDto.getPublicKey());
        // expiresIn 2592000 s = 30 days, matching the 30-day expireAt persisted in formJwtTokenDto.
        var token = AuthenticationResponseDto.Token.builder().accessToken(accessToken).refreshToken(refreshResponse(userDto, clientDto, keyStoreDto.getPublicKey())).expiresIn(2592000).tokenType(Constants.BEARER).build();
        if (isRefreshToken.equals(Boolean.FALSE)) {
            var jwtTokenDto = formJwtTokenDto(token, keyStoreDto);
            jwtService.add(jwtTokenDto);
            //cacheService.save(token.getAccessToken(), Constants.TOKEN, jwtTokenDto);
        }
        return AuthenticationResponseDto.builder().token(token).isOnboarded(clientDto.isOnboarded()).build();
    }

    /** Builds the refresh token: same claims as the access token but isRefresh=TRUE. */
    private String refreshResponse(UserDetailDto userDto, ClientDto clientDto, RSAPublicKey publicKey) throws JsonProcessingException, JOSEException {
        log.info("Get Authentication response for the User [{}]", userDto.getUsername());
        //ClientDto clientDto = getClient(userDto.getClientId());
        var jweDto = JweDto.builder().userId(userDto.getId()).isRefresh(Boolean.TRUE).client(clientDto.getName()).username(userDto.getUsername()).build();
        return jweLibrary.encrypt(jweDto, publicKey);
    }

    /** Packages the token pair and its Base64-encoded RSA keys for persistence; record expires in 30 days. */
    private JwtDto formJwtTokenDto(AuthenticationResponseDto.Token token, KeyStoreDto keyStoreDto) {
        Base64.Encoder encoder = Base64.getEncoder();
        return JwtDto.builder().accessToken(token.getAccessToken()).refreshToken(token.getRefreshToken())
                .publicKey(encoder.encodeToString(keyStoreDto.getPublicKey().getEncoded()))
                .privateKey(encoder.encodeToString(keyStoreDto.getPrivateKey().getEncoded()))
                .expireAt(DateUtil.plusDays(DateUtil.currentDateTime(), 30))
                .build();
    }

    /**
     * Validates a refresh request end-to-end (token flag, user existence/deletion,
     * access/refresh username match, stored-token match, age window) and issues a
     * new access token while keeping the same refresh token.
     */
    public AuthenticationResponseDto verifyRefreshToken(RefreshTokenDto refreshTokenDto) throws ParseException, JOSEException, JsonProcessingException {
        //var jwtDto = (JwtDto) cacheService.get(SecurityUtil.getHashKey(), Constants.TOKEN);
        var jwtDto = jwtService.getByRefreshToken(refreshTokenDto.getRefreshToken());

        var refreshJweDto = jweLibrary.decrypt(refreshTokenDto.getRefreshToken(), RSAKeyGen.getStringToKeys(jwtDto.getPublicKey(), jwtDto.getPrivateKey()));
        // The presented token must actually be a refresh token, not a replayed access token.
        if (refreshJweDto.getIsRefresh().equals(Boolean.FALSE)) {
            throw new UnAuthorizedException(ErrorCode.INVALID_REFRESH_TOKEN, responseMessage.getErrorMessage(ErrorCode.INVALID_REFRESH_TOKEN));
        }
        var userDto = getUser(refreshJweDto.getUsername());
        if (Objects.isNull(userDto)) {
            throw new UnAuthorizedException(ErrorCode.INVALID_USER, responseMessage.getErrorMessage(ErrorCode.INVALID_USER));
        }
        // NOTE(review): semantics depend on DateUtil.hoursBetweenTime(expireAt, now); presumably
        // rejects refreshes more than 24h past/around the stored expiry — confirm its sign convention.
        var hoursBetweenTwoTime = DateUtil.hoursBetweenTime(jwtDto.getExpireAt(), DateUtil.currentDateTime());
        if (hoursBetweenTwoTime > 24) {
            throw new UnAuthorizedException(ErrorCode.INVALID_REFRESH_TOKEN, responseMessage.getErrorMessage(ErrorCode.INVALID_REFRESH_TOKEN));
        }
        if (userDto.getIsDeleted().equals(Boolean.TRUE)) {
            throw new UnAuthorizedException(ErrorCode.INVALID_USER, responseMessage.getErrorMessage(ErrorCode.INVALID_USER));
        }
        // The old access token must belong to the same user as the refresh token.
        var accessJweDto = jweLibrary.decrypt(refreshTokenDto.getAccessToken(), RSAKeyGen.getStringToKeys(jwtDto.getPublicKey(), jwtDto.getPrivateKey()));
        if (!accessJweDto.getUsername().equals(refreshJweDto.getUsername())) {
            throw new UnAuthorizedException(ErrorCode.INVALID_USER, responseMessage.getErrorMessage(ErrorCode.INVALID_USER));
        }
        if (!jwtDto.getRefreshToken().equals(refreshTokenDto.getRefreshToken())) {
            throw new UnAuthorizedException(ErrorCode.INVALID_REFRESH_TOKEN, responseMessage.getErrorMessage(ErrorCode.INVALID_REFRESH_TOKEN));
        }
        //userDto.setSecret(CacheUtil.getSecret());
        var authenticationResponseDto = authenticationResponse(userDto, Boolean.TRUE);
        // Keep the original refresh token; only the access token rotates.
        authenticationResponseDto.getToken().setRefreshToken(refreshTokenDto.getRefreshToken());
        jwtDto.setAccessToken(authenticationResponseDto.getToken().getAccessToken());
        //cacheService.save(userDto.getSecret(), Constants.TOKEN, jwtDto);
        jwtService.add(jwtDto);
        return authenticationResponseDto;
    }

    /** Public refresh entry point; collapses all verification failures into INVALID_REFRESH_TOKEN. */
    public AuthenticationResponseDto refresh(RefreshTokenDto refreshTokenDto) {
        log.info("Generating refresh token");
        try {
            return verifyRefreshToken(refreshTokenDto);
        } catch (ParseException | JOSEException | JsonProcessingException exception) {
            throw new UnAuthorizedException(ErrorCode.INVALID_REFRESH_TOKEN, responseMessage.getErrorMessage(ErrorCode.INVALID_REFRESH_TOKEN));
        }
    }


    /**
     * Logs the user out by deleting the stored token record.
     *
     * @param authToken raw Authorization header value; must start with the bearer prefix.
     */
    public Boolean logout(String authToken) {
        log.info("Logging out the user from the system");
        if (!authToken.startsWith(Constants.PREFIX) || StringUtil.isEmpty(authToken).equals(Boolean.TRUE)) {
            throw new UnAuthorizedException(ErrorCode.UNABLE_VERIFY_ACCESS_TOKEN, responseMessage.getErrorMessage(ErrorCode.UNABLE_VERIFY_ACCESS_TOKEN));
        }
        var accessToken = authToken.substring(Constants.PREFIX.length());
        //var jweDto = decryptToken(accessToken);
        //var hashKey = SecurityUtil.getHashKey();
        /*if (CacheUtil.checkAlreadyLoggedIn(hashKey).equals(Boolean.TRUE)) {
            //cacheService.save(hashKey, Constants.IS_ALREADY_LOGGED_IN, Boolean.FALSE);

            throw new LoggedOutException(ErrorCode.ALREADY_LOGGED_OUT_ERROR, responseMessage.getErrorMessage(ErrorCode.ALREADY_LOGGED_OUT_ERROR));
        }*/
        return jwtService.delete(accessToken);
    }

    /** Returns the user as a transport DTO with its roles resolved. */
    public UserDto findByUsername(String username) {
        var userDetailDto = userService.findByUsername(username);
        List<RoleDto> roleDtos = roleService.getRoleByUserId(userDetailDto.getId());
        UserDto userDto = mapperUser(userDetailDto);
        userDto.setRoles(roleDtos);
        return userDto;
    }

    /** Maps the internal user record to the outward-facing DTO (no password/OTP fields). */
    private UserDto mapperUser(UserDetailDto userDetailDto) {
        return UserDto.builder()
                .id(userDetailDto.getId())
                .userType(userDetailDto.getType())
                .clientId(userDetailDto.getClientId())
                .email(userDetailDto.getEmailId())
                .mobile(userDetailDto.getMobile())
                .isDeleted(userDetailDto.getIsDeleted())
                .onboardStatus(userDetailDto.getOnboardStatus().name())
                .username(userDetailDto.getUsername())
                .timeZone(userDetailDto.getTimeZone())
                .build();
    }

    // Thin pass-throughs to JwtService/SessionService for controllers that only see this facade.

    public JwtDto addJwt(JwtDto jwtDto) {
        return jwtService.add(jwtDto);
    }

    public JwtDto getByAccessToken(String authenticationToken) {
        return jwtService.getByAccessToken(authenticationToken);
    }

    public JwtDto getByRefreshToken(String refreshToken) {
        return jwtService.getByRefreshToken(refreshToken);
    }

    public Boolean deleteByAccessToken(String accessToken) {
        return jwtService.delete(accessToken);
    }

    public SessionDto addSession(SessionDto sessionDto) {
        return sessionService.add(sessionDto);
    }
}
com.opsbeach.user.repository.ClientRepository; +import lombok.extern.slf4j.Slf4j; + +import java.util.Map; +import java.util.Objects; + +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Service; + + +/** + *

+ * Implements the CRUD operation for the Tenant of an user. + *

+ */ +@Slf4j +@Service +public class ClientService extends BaseService { + + private final App2AppService app2AppService; + + protected ClientService(ClientRepository clientRepository, BaseMapper baseMapper, + IdSpecifications tenantIdSpecifications, App2AppService app2AppService) { + super(clientRepository, baseMapper, tenantIdSpecifications); + this.app2AppService = app2AppService; + } + + @Value("${application.connect.create-organization-node}") + private String createOrganizationUrl; + + @Override + public void validateAdd(ClientDto incomingDto) { + if (incomingDto.getIsDeleted() == null) { + incomingDto.setIsDeleted(Boolean.FALSE); + } + } + + @Override + public void doPatch(Client incomingTenant, Client toUpdateTenant) { + if (Objects.nonNull(incomingTenant.isOnboarded())) { + toUpdateTenant.setOnboarded(incomingTenant.isOnboarded()); + } + if (Objects.nonNull(incomingTenant.getDescription())) { + toUpdateTenant.setDescription(incomingTenant.getDescription()); + } + } + + /* + * Returns details of Tenant. + * + * @return TenantDto - Details of Tenant. + */ + public ClientDto getClient(String name) { + log.info("Fetching Tenant Details"); + return findByClientName(name.toUpperCase()); + } + + public ClientDto add(String clientName) { + try { + // if client is present already then return. + var clientDto = getClient(clientName); + return clientDto; + } catch (Exception e) { + // if client is not present the exception is thrown. then create new client and add. 
+ var clientDto = ClientDto.builder().name(clientName.toUpperCase()).build(); + clientDto = add(clientDto); + createOrganizationNode(clientDto); + return clientDto; + } + } + + private void createOrganizationNode(ClientDto clientDto) { + var url = createOrganizationUrl.replace("{clientName}", clientDto.getName()); + var entity = app2AppService.setHeaders(Map.of(Constants.CLIENT_ID_HEADER, clientDto.getId().toString()), null); + app2AppService.httpPost(url, entity, JsonNode.class); + } + + public ClientDto updateOnBoardedStatus(Long id, boolean isOnboarded) { + var clientDto = findById(id); + clientDto.setOnboarded(isOnboarded); + patch(clientDto); + return clientDto; + } +} diff --git a/user/src/main/java/com/opsbeach/user/service/JwtService.java b/user/src/main/java/com/opsbeach/user/service/JwtService.java new file mode 100644 index 0000000..fe190e7 --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/service/JwtService.java @@ -0,0 +1,73 @@ +package com.opsbeach.user.service; + +import com.opsbeach.sharedlib.dto.JwtDto; +import com.opsbeach.sharedlib.exception.ErrorCode; +import com.opsbeach.sharedlib.exception.RecordNotFoundException; +import com.opsbeach.sharedlib.response.ResponseMessage; +import com.opsbeach.user.base.specification.IdSpecifications; +import com.opsbeach.user.entity.Jwt; +import com.opsbeach.user.mapper.JwtMapper; +import com.opsbeach.user.repository.JwtRespository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.data.jpa.domain.Specification; +import org.springframework.stereotype.Service; + +import java.util.Optional; + +@Slf4j +@Service +public class JwtService { + + private final JwtRespository jwtRespository; + private final JwtMapper jwtMapper; + private final IdSpecifications jwtIdSpecifications; + private final ResponseMessage responseMessage; + + public JwtService(JwtRespository jwtRespository, JwtMapper jwtMapper, IdSpecifications jwtIdSpecifications, + ResponseMessage responseMessage) { + this.jwtRespository = 
jwtRespository; + this.jwtMapper = jwtMapper; + this.jwtIdSpecifications = jwtIdSpecifications; + this.responseMessage = responseMessage; + } + + public JwtDto add(JwtDto jwtDto) { + Jwt jwt = jwtMapper.dtoToDomain(jwtDto); + jwt.setIsDeleted(Boolean.FALSE); + return jwtMapper.domainToDto(addModel(jwt)); + } + + public Jwt addModel(Jwt jwt) { + return jwtRespository.save(jwt); + } + + public Jwt getByAccessTokenModel(String authenticationToken) { + Specification baseSpecification = jwtIdSpecifications.findByAccessToken(authenticationToken); + Optional jwtOptional = jwtRespository.findOne(baseSpecification); + if (jwtOptional.isEmpty()) { + throw new RecordNotFoundException(ErrorCode.RECORD_NOT_FOUND, + responseMessage.getErrorMessage(ErrorCode.RECORD_NOT_FOUND, "There access token")); + } + return jwtOptional.get(); + } + + public JwtDto getByAccessToken(String authenticationToken) { + return jwtMapper.domainToDto(getByAccessTokenModel(authenticationToken)); + } + + public JwtDto getByRefreshToken(String authenticationToken) { + Specification baseSpecification = jwtIdSpecifications.findByRefreshToken(authenticationToken); + Optional jwtOptional = jwtRespository.findOne(baseSpecification); + if (jwtOptional.isEmpty()) { + throw new RecordNotFoundException(ErrorCode.RECORD_NOT_FOUND, + responseMessage.getErrorMessage(ErrorCode.RECORD_NOT_FOUND, "There access token")); + } + return jwtMapper.domainToDto(jwtOptional.get()); + } + + public Boolean delete(String accessToken) { + var jwt = getByAccessTokenModel(accessToken); + jwtRespository.delete(jwt); + return Boolean.TRUE; + } +} diff --git a/user/src/main/java/com/opsbeach/user/service/RolePermissionService.java b/user/src/main/java/com/opsbeach/user/service/RolePermissionService.java new file mode 100644 index 0000000..bcdbf46 --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/service/RolePermissionService.java @@ -0,0 +1,74 @@ +package com.opsbeach.user.service; + +import 
import com.opsbeach.sharedlib.dto.PermissionDto;
import com.opsbeach.sharedlib.utils.FutureUtil;
import com.opsbeach.user.base.specification.IdSpecifications;
import com.opsbeach.user.entity.Permission;
import com.opsbeach.user.entity.RolePermission;
import com.opsbeach.user.repository.PermissionRepository;
import com.opsbeach.user.repository.RolePermissionRepository;
import lombok.extern.slf4j.Slf4j;
import org.springframework.data.jpa.domain.Specification;
import org.springframework.stereotype.Service;

import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import java.util.stream.Collectors;

/**
 * Resolves the Permissions attached to a Role. Each permission row is loaded on
 * a shared executor and joined via {@code FutureUtil}.
 */
@Slf4j
@Service
public class RolePermissionService {

    private final FutureUtil futureUtil;
    // NOTE(review): bean name suggests this pool was reused from a FHIR integration — confirm it is the intended executor.
    private final Executor fhirExecutor;
    private final PermissionRepository permissionRepository;
    private final RolePermissionRepository rolePermissionRepository;
    private final IdSpecifications permissionIdSpecifications;
    private final IdSpecifications rolePermissionIdSpecifications;

    public RolePermissionService(FutureUtil futureUtil, Executor fhirExecutor, RolePermissionRepository rolePermissionRepository, IdSpecifications rolePermissionIdSpecifications, IdSpecifications permissionIdSpecifications, PermissionRepository permissionRepository) {
        this.futureUtil = futureUtil;
        this.fhirExecutor = fhirExecutor;
        this.permissionRepository = permissionRepository;
        this.rolePermissionRepository = rolePermissionRepository;
        this.permissionIdSpecifications = permissionIdSpecifications;
        this.rolePermissionIdSpecifications = rolePermissionIdSpecifications;
    }

    /**
     * Returns the list of permissions of a user based on the Role.
     * <p>
     * NOTE(review): this issues one repository query per role-permission row
     * (an N+1 pattern, parallelized on fhirExecutor) — a single join/IN query
     * would avoid the fan-out; confirm before changing, as callers rely on
     * deleted permissions being filtered out.
     *
     * @param roleId - Role ID of a user.
     * @return permissions - List of Permissions.
     */
    public List getPermissionsByRole(long roleId) {
        log.info("Fetching Permissions based for the roleId - {}", roleId);
        Specification roleSpecification = rolePermissionIdSpecifications.getPermissionsByRoleId(roleId).and(rolePermissionIdSpecifications.notDeleted());
        List rolePermissions = rolePermissionRepository.findAll(roleSpecification);
        List<CompletableFuture<PermissionDto>> permissionFutures = rolePermissions.stream().map(this::getRolePermissionCompletableFuture).collect(Collectors.toList());
        List permissions;
        try {
            // safeGet swallows per-future failures into null; nulls (failed or missing lookups) are dropped.
            permissions = permissionFutures.stream().map(futureUtil::safeGet).filter(Objects::nonNull).collect(Collectors.toList());
        } finally {
            // Ensure no future outlives the request, even if collection above throws.
            permissionFutures.forEach(futureUtil::tryCancelFuture);
        }
        return permissions;
    }

    /** Loads one (non-deleted) Permission asynchronously; yields null when it no longer exists. */
    private CompletableFuture<PermissionDto> getRolePermissionCompletableFuture(RolePermission rolePermission) {
        return CompletableFuture.supplyAsync(() -> {
            Specification permissionSpecification = permissionIdSpecifications.findById(rolePermission.getPermissionId()).and(permissionIdSpecifications.notDeleted());
            Optional<Permission> permission = permissionRepository.findOne(permissionSpecification);
            return permission.map(value -> PermissionDto.builder().id(value.getId())
                    .operation(value.getOperation()).isDeleted(value.getIsDeleted()).build()).orElse(null);
        }, fhirExecutor);
    }
}
com.opsbeach.user.entity.Role; +import com.opsbeach.user.entity.UserRole; +import com.opsbeach.user.repository.RoleRepository; +import com.opsbeach.user.repository.UserRoleRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.data.jpa.domain.Specification; +import org.springframework.stereotype.Service; + +import java.util.List; +import java.util.Objects; +import java.util.Optional; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.Executor; +import java.util.stream.Collectors; + +/** + *

+ * Operations on Roles of an user. + *

+ */ +@Slf4j +@Service +public class RoleService { + + private final FutureUtil futureUtil; + private final Executor fhirExecutor; + private final RoleRepository roleRepository; + private final UserRoleRepository userRoleRepository; + private final RolePermissionService rolePermissionService; + private final IdSpecifications userRoleIdSpecifications; + private final IdSpecifications roleIdSpecifications; + private final ResponseMessage responseMessage; + + public RoleService(FutureUtil futureUtil, Executor fhirExecutor, RoleRepository roleRepository, IdSpecifications userRoleIdSpecifications, + UserRoleRepository userRoleRepository, RolePermissionService rolePermissionService, IdSpecifications roleIdSpecifications, + ResponseMessage responseMessage) { + this.futureUtil = futureUtil; + this.fhirExecutor = fhirExecutor; + this.roleRepository = roleRepository; + this.userRoleRepository = userRoleRepository; + this.rolePermissionService = rolePermissionService; + this.userRoleIdSpecifications = userRoleIdSpecifications; + this.roleIdSpecifications = roleIdSpecifications; + this.responseMessage = responseMessage; + } + + /** + * Return the role of a user by role id. + * + * @param roleId - Role ID of a user. + * @return Optional Role. + */ + public Optional getRoleById(long roleId) { + return roleRepository.findById(roleId); + } + + /** + * To get the roles by user ID. + * + * @param userId - User Id + * @return roleDto - Roles and Permissions of a user. 
+ */ + public List getRoleByUserId(long userId) { + log.info("Fetching Roles by user Id - {}", userId); + Specification userRoleSpecification = userRoleIdSpecifications.findByUserId(userId); + List userRoles = userRoleRepository.findAll(userRoleSpecification); + List> roleFutures = userRoles.stream().map(this::getRoleCompletableFuture).collect(Collectors.toList()); + List roleDtos; + try { + roleDtos = roleFutures.stream().map(futureUtil::safeGet).filter(Objects::nonNull).collect(Collectors.toList()); + } finally { + roleFutures.forEach(futureUtil::tryCancelFuture); + } + return roleDtos; + } + + private CompletableFuture getRoleCompletableFuture(UserRole userRole) { + return CompletableFuture.supplyAsync(() -> { + if (Objects.nonNull(userRole)) { + Optional roleOptional = getRoleById(userRole.getRoleId()); + if (roleOptional.isEmpty()) { + throw new RecordNotFoundException(ErrorCode.RECORD_NOT_FOUND_ID, + responseMessage.getErrorMessage(ErrorCode.RECORD_NOT_FOUND_ID, "Role Id : " + userRole.getRoleId())); + } + Role role = roleOptional.get(); + return RoleDto.builder() + .id(role.getId()).name(role.getName()) + .description(role.getDescription()).isDeleted(role.getIsDeleted()) + .permissions(rolePermissionService.getPermissionsByRole(role.getId())).build(); + } else { + return null; + } + }, fhirExecutor); + } + + private CompletableFuture getRoleCompletableFuture(Role role) { + return CompletableFuture.supplyAsync(() -> { + if (Objects.nonNull(role)) { + return RoleDto.builder() + .id(role.getId()).name(role.getName()) + .description(role.getDescription()).isDeleted(role.getIsDeleted()) + .permissions(rolePermissionService.getPermissionsByRole(role.getId())).build(); + } else { + return null; + } + }, fhirExecutor); + } + + public List getRoleByName(String name) { + log.info("Fetching Roles by user Id - {}", name); + Specification roleSpecification = roleIdSpecifications.findByName(name).and(roleIdSpecifications.notDeleted()); + List roles = 
roleRepository.findAll(roleSpecification); + List> roleFutures = roles.stream().map(this::getRoleCompletableFuture).collect(Collectors.toList()); + List roleDtos; + try { + roleDtos = roleFutures.stream().map(futureUtil::safeGet).filter(Objects::nonNull).collect(Collectors.toList()); + } finally { + roleFutures.forEach(futureUtil::tryCancelFuture); + } + return roleDtos; + } +} \ No newline at end of file diff --git a/user/src/main/java/com/opsbeach/user/service/SessionService.java b/user/src/main/java/com/opsbeach/user/service/SessionService.java new file mode 100644 index 0000000..03ca458 --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/service/SessionService.java @@ -0,0 +1,35 @@ +package com.opsbeach.user.service; + +import com.opsbeach.sharedlib.dto.SessionDto; +import com.opsbeach.user.entity.Session; +import com.opsbeach.user.mapper.SessionMapper; +import com.opsbeach.user.repository.SessionRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.stereotype.Service; + +/** + *

+ * CRUD implementation for UserSession. + *

+ */ +@Slf4j +@Service +public class SessionService { + private final SessionRepository sessionRepository; + private final SessionMapper sessionMapper; + + public SessionService(SessionRepository sessionRepository, SessionMapper sessionMapper) { + this.sessionRepository = sessionRepository; + this.sessionMapper = sessionMapper; + } + + public SessionDto add(SessionDto sessionDto) { + Session session = sessionMapper.dtoToDomain(sessionDto); + return sessionMapper.domainToDto(addModel(session)); + } + + public Session addModel(Session session) { + return (Session)sessionRepository.save(session); + } + +} \ No newline at end of file diff --git a/user/src/main/java/com/opsbeach/user/service/UserRoleService.java b/user/src/main/java/com/opsbeach/user/service/UserRoleService.java new file mode 100644 index 0000000..50d1680 --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/service/UserRoleService.java @@ -0,0 +1,19 @@ +package com.opsbeach.user.service; + +import com.opsbeach.user.entity.UserRole; +import com.opsbeach.user.repository.UserRoleRepository; +import org.springframework.stereotype.Service; + +@Service +public class UserRoleService { + + private final UserRoleRepository userRoleRepository; + + public UserRoleService(UserRoleRepository userRoleRepository) { + this.userRoleRepository = userRoleRepository; + } + + public UserRole save(UserRole userRole) { + return userRoleRepository.save(userRole); + } +} diff --git a/user/src/main/java/com/opsbeach/user/service/UserService.java b/user/src/main/java/com/opsbeach/user/service/UserService.java new file mode 100644 index 0000000..88fd829 --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/service/UserService.java @@ -0,0 +1,362 @@ +package com.opsbeach.user.service; + +import com.opsbeach.sharedlib.dto.GenericResponseDto; +import com.opsbeach.sharedlib.dto.LoginDto; +import com.opsbeach.sharedlib.dto.RegistrationDto; +import com.opsbeach.sharedlib.dto.RoleDto; +import 
com.opsbeach.sharedlib.exception.ErrorCode; +import com.opsbeach.sharedlib.exception.RecordNotFoundException; +import com.opsbeach.sharedlib.exception.UnAuthorizedException; +import com.opsbeach.sharedlib.exception.UserExistException; +import com.opsbeach.sharedlib.response.ResponseMessage; +import com.opsbeach.sharedlib.response.SuccessCode; +import com.opsbeach.sharedlib.security.SecurityUtil; +import com.opsbeach.sharedlib.service.EmailService; +import com.opsbeach.sharedlib.utils.Constants; +import com.opsbeach.sharedlib.utils.DateUtil; +import com.opsbeach.sharedlib.utils.FutureUtil; +import com.opsbeach.sharedlib.utils.OnboardStatus; +import com.opsbeach.sharedlib.utils.StringUtil; +import com.opsbeach.user.base.BaseService; +import com.opsbeach.user.base.specification.IdSpecifications; +import com.opsbeach.user.dto.ClientDto; +import com.opsbeach.user.dto.UserDetailDto; +import com.opsbeach.user.entity.User; +import com.opsbeach.user.entity.UserRole; +import com.opsbeach.user.mapper.UserMapper; +import com.opsbeach.user.repository.UserRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.data.jpa.domain.Specification; +import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder; +import org.springframework.security.crypto.password.PasswordEncoder; +import org.springframework.stereotype.Service; + +import java.util.List; +import java.util.Objects; +import java.util.Optional; +import java.util.Random; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.Executor; +import java.util.stream.Collectors; + +/** + *

+ * Implements the CRUD operation of a user. + *

+ */ +@Slf4j +@Service +public class UserService extends BaseService { + + private static final String LOGIN_FAILED_MESSAGE = "Login failed. Invalid username or password"; + private static final String PASSWORD_SHOULD_NOT_BE_EMPTY = "Password should not be empty"; + private static final String ACCOUNT_LOCKED_MESSAGE = "Your Account has been locked due to 3 failed attempts. It will be unlocked after 24 hours."; + private final FutureUtil futureUtil; + private final Executor fhirExecutor; + private final RoleService roleService; + private final ClientService clientService; + private final UserRepository userRepository; + private final ResponseMessage responseMessage; + private final UserRoleService userRoleService; + private final IdSpecifications userIdSpecifications; + private final UserMapper userMapper; + private static final PasswordEncoder passwordEncoder = new BCryptPasswordEncoder(); + private final EmailService emailService; + + public UserService(UserRepository userRepository, UserMapper userMapper, FutureUtil futureUtil, Executor fhirExecutor, IdSpecifications userIdSpecifications, RoleService roleService, ResponseMessage responseMessage, + ClientService clientService, UserRoleService userRoleService, EmailService emailService) { + super(userRepository, userMapper, userIdSpecifications); + this.futureUtil = futureUtil; + this.roleService = roleService; + this.fhirExecutor = fhirExecutor; + this.clientService = clientService; + this.userRepository = userRepository; + this.responseMessage = responseMessage; + this.userRoleService = userRoleService; + this.userIdSpecifications = userIdSpecifications; + this.userMapper = userMapper; + this.emailService = emailService; + } + + @Override + public void doPatch(User incomingUser, User toUpdateUser) { + if (Objects.nonNull(incomingUser.getOnboardStatus())) { + toUpdateUser.setOnboardStatus(incomingUser.getOnboardStatus()); + } + if (Objects.nonNull(incomingUser.getFirstName())) { + 
toUpdateUser.setFirstName(incomingUser.getFirstName()); + } + if (Objects.nonNull(incomingUser.getDob())) { + toUpdateUser.setDob(incomingUser.getDob()); + } + if (Objects.nonNull(incomingUser.getEmailId())) { + toUpdateUser.setEmailId(incomingUser.getEmailId()); + } + + if (Objects.nonNull(incomingUser.getType())) { + toUpdateUser.setType(incomingUser.getType()); + } + + if (Objects.nonNull(incomingUser.getClientId())) { + toUpdateUser.setClientId(incomingUser.getClientId()); + } + + continuePatch(incomingUser, toUpdateUser); + toUpdateUser.setForgetPasswordKey(incomingUser.getForgetPasswordKey()); + toUpdateUser.setCreatedBy(incomingUser.getCreatedBy()); + toUpdateUser.setUpdatedBy(incomingUser.getUpdatedBy()); + } + + private void continuePatch(User incomingUser, User toUpdateUser) { + if (Objects.nonNull(incomingUser.getPassword())) { + toUpdateUser.setPassword(incomingUser.getPassword()); + } + if (Objects.nonNull(incomingUser.getVerificationTokenSentTime())) { + toUpdateUser.setVerificationTokenSentTime(incomingUser.getVerificationTokenSentTime()); + } + if (Objects.nonNull(incomingUser.getAccountLocked())) { + toUpdateUser.setAccountLocked(incomingUser.getAccountLocked()); + } + if (Objects.nonNull(incomingUser.getFailureAttempts())) { + toUpdateUser.setFailureAttempts(incomingUser.getFailureAttempts()); + } + if (Objects.nonNull(incomingUser.getLockTime())) { + toUpdateUser.setLockTime(incomingUser.getLockTime()); + } + if (Objects.nonNull(incomingUser.getPasswordChangedTime())) { + toUpdateUser.setPasswordChangedTime(incomingUser.getPasswordChangedTime()); + } + if (Objects.nonNull(incomingUser.getOldPassword())) { + toUpdateUser.setOldPassword(incomingUser.getOldPassword()); + } + if (Objects.nonNull(incomingUser.getOtp())) { + toUpdateUser.setOtp(incomingUser.getOtp()); + } + if (Objects.nonNull(incomingUser.getOtpSentTime())) { + toUpdateUser.setOtpSentTime(incomingUser.getOtpSentTime()); + } + } + + @Override + public void validateAdd(UserDetailDto 
userDetailDto) { + userDetailDto.setIsDeleted(Boolean.FALSE); + } + + @Override + public void validatePatch(UserDetailDto incomingDto) { + incomingDto.setCreatedBy(incomingDto.getId()); + incomingDto.setUpdatedBy(incomingDto.getId()); + } + + public static String getEncodedPassword(String password) { + return passwordEncoder.encode(password); + } + + public static Boolean isPasswordEqual(String password1, String password2) { + return passwordEncoder.matches(password1, password2); + } + + public GenericResponseDto registration(RegistrationDto registrationDto, String clientName, String role) { + checkUserExists(registrationDto.getUsername()); + ClientDto clientDto = clientService.getClient(clientName); + registrationDto.setClientId(clientDto.getId()); + // var encodedPassword = getEncodedPassword(registrationDto.getPassword()); + // registrationDto.setPassword(encodedPassword); + // registrationDto.setOldPassword(encodedPassword); + UserDetailDto userDetailDto = userMapper.registerToDto(registrationDto); + userDetailDto.setAccountLocked(Boolean.FALSE); + userDetailDto.setFailureAttempts(0); + saveUserRole(add(userDetailDto), role); + return GenericResponseDto.builder().status(Constants.SUCCESS).build(); + } + + public GenericResponseDto registerCompany(String companyName) { + var clientDto = clientService.add(companyName); + var userDetailDto = findById(SecurityUtil.getLoggedInUserDetail().getId()); + userDetailDto.setClientId(clientDto.getId()); + userDetailDto.setOnboardStatus(OnboardStatus.ONBOARDED); + patch(userDetailDto); + return GenericResponseDto.builder().status(Constants.SUCCESS).build(); + } + + public String sendOtp(String email) { + var otp = Integer.toString(new Random().nextInt(100000, 1000000)); // generate 6 digit otp. 
+ var userDataDto = findByUsername(email); + userDataDto.setOtp(otp); + emailService.sendMail(email, "Schemata Labs login OTP", + StringUtil.constructStringEmptySeparator("Your OTP for login is ", otp, " Valid for 15 minutes")); + userDataDto.setOtpSentTime(DateUtil.currentDateTime()); + patch(userDataDto); + return responseMessage.getSuccessMessage(SuccessCode.OTP_SENT_SUCCESSFULLY); + } + + public UserDetailDto login(LoginDto loginDto) { + log.info("User login with username"); + UserDetailDto userDataDto = findByUsername(loginDto.getUsername()); + + if (userDataDto.getAccountLocked().equals(Boolean.TRUE)) { + if (DateUtil.currentDateTime().isAfter(DateUtil.plusDays(userDataDto.getLockTime(), 1))) { + userDataDto.setFailureAttempts(0); + userDataDto.setAccountLocked(Boolean.FALSE); + userDataDto.setLockTime(DateUtil.currentDateTime()); + patch(userDataDto); + } else { + throw new UnAuthorizedException(ErrorCode.INVALID_PASSWORD, ACCOUNT_LOCKED_MESSAGE); + } + } + + if (Objects.isNull(loginDto.getPassword())) { + throw new UnAuthorizedException(ErrorCode.INVALID_PASSWORD, PASSWORD_SHOULD_NOT_BE_EMPTY); + } + + // if (isPasswordEqual(loginDto.getPassword(), userDataDto.getPassword()).equals(Boolean.FALSE)) { + if (Objects.isNull(userDataDto.getOtp()) || Boolean.FALSE.equals(loginDto.getPassword().equals(userDataDto.getOtp()))) { + userDataDto.setFailureAttempts(userDataDto.getFailureAttempts() + 1); + if (userDataDto.getFailureAttempts() >= 3) { + userDataDto.setAccountLocked(Boolean.TRUE); + userDataDto.setLockTime(DateUtil.currentDateTime()); + patch(userDataDto); + throw new UnAuthorizedException(ErrorCode.INVALID_PASSWORD, ACCOUNT_LOCKED_MESSAGE); + } + patch(userDataDto); + throw new UnAuthorizedException(ErrorCode.INVALID_PASSWORD, LOGIN_FAILED_MESSAGE); + } + + if (DateUtil.currentDateTime().isAfter(DateUtil.plusMinutes(userDataDto.getOtpSentTime(), 15))) { + throw new UnAuthorizedException(ErrorCode.OTP_EXPIRED, 
responseMessage.getErrorMessage(ErrorCode.OTP_EXPIRED)); + } + + userDataDto.setFailureAttempts(0); + userDataDto.setAccountLocked(Boolean.FALSE); + patch(userDataDto); + return userDataDto; + } + + public void saveUserRole(UserDetailDto userDetailDto, String role) { + List roles = roleService.getRoleByName(role); + List> userRoleServiceFutures = + roles.stream().map(roleDto -> getUserRoleServiceFuture(userDetailDto, roleDto.getId())).collect(Collectors.toList()); + try { + userRoleServiceFutures.forEach(futureUtil::safeGet); + } finally { + userRoleServiceFutures.forEach(futureUtil::tryCancelFuture); + } + log.info("User Role has been saved in the table"); + } + + private CompletableFuture getUserRoleServiceFuture(UserDetailDto userDetailDto, Long roleId) { + return CompletableFuture.runAsync(() -> { + var userRole = UserRole.builder().userId(userDetailDto.getId()).roleId(roleId).isDeleted(Boolean.FALSE).createdBy(userDetailDto.getId()).updatedBy(userDetailDto.getId()).build(); + userRoleService.save(userRole); + }, fhirExecutor); + } + + public UserDetailDto findByUsername(String username) { + Specification baseSpecification = userIdSpecifications.findByUsername(username); + Optional entity = userRepository.findOne(baseSpecification); + if (entity.isEmpty()) { + throw new RecordNotFoundException(ErrorCode.RECORD_NOT_FOUND, + responseMessage.getErrorMessage(ErrorCode.RECORD_NOT_FOUND, String.format("Username: %s ", username))); + } + return userMapper.domainToDto(entity.get()); + } + + /** + * Prepares UserDto based on UserDataDto for the Authorization. + * + * @return userDto - Details required for Authorization. 
+ */ + public UserDetailDto getUserDto(UserDetailDto userDetailDto) { + + /*log.info("Get User Details and save values in Cache"); + var userSessionDto = userSessionService.findByOpenSession(userDataDto.getId(), httpServletRequest, userDataDto.getUserType()); + var hashKey = userSessionDto.getSecret(); + var isAlreadyLoggedIn = cacheService.delete(hashKey); + if (StringUtils.isEmpty(sessionId).equals(Boolean.FALSE)) { + cacheService.save(hashKey, Constants.SESSION_ID, sessionId); + } + saveCacheValues(userDataDto, userSessionDto, hashKey, isAlreadyLoggedIn); + return userService.getUserDto(userDataDto, Boolean.FALSE, userSessionDto);*/ + + + return userDetailDto; + } + + /** + * Updates User Details of a User with the details received from OneMoney. + * + * @param userId - The respective User ID. + * @param holders - Details of the User received from OneMoney. + * @return successResponseDto - Success message if the user details has been updated properly. + */ + /*public GenericResponseDto updateUserDetails(long userId, List holders) { + log.info("Updating the User table"); + var holder = holders.get(0).getHolder(); + var userDto = findById(userId); + userDto.setDob(holder.getDob()); + userDto.setGender("MALE"); // change once original value received from OneMoney. As of now Gender is not received from OneMoney. + userDto.setPan(holder.getPan()); + if (Objects.nonNull(holder.getDob())) { + userDto.setAge(Period.between(holder.getDob(), DateUtil.currentTimeZoneDate()).getYears()); + } + userDto.setEmail(holder.getEmail()); + if (Objects.nonNull(holder.getCkycCompliance())) { + userDto.setIsKycVerified(holder.getCkycCompliance().equalsIgnoreCase(Constants.TRUE_VALUE)); + } + userDto.setType(holders.get(0).getType()); + patch(userDto); + log.info("User Details has been updated in the Database."); + return GenericResponseDto.builder().status(SUCCESS).build(); + }*/ + + /** + * Returns details of a user. + * + * @return Object - Details of user. 
+ */ + /*public UserDetailDto getUserDetails() { + log.info("Fetching User Details"); + return findById(CacheUtil.getUserId()); + }*/ + + /** + * Returns details of a user by mobile number. + * + * @param searchValue - Mobile number of a user. + * @return Object - Details of user. + */ + public UserDetailDto getUserDetails(String searchValue, String type) { + log.info("Fetching User Details"); + Specification baseSpecification = userIdSpecifications.findByBatchUser(); + var userDto = findOne(baseSpecification); + return getUserDto(userDto); + } + + public Optional userExists(String searchValue) { + Specification baseSpecification = userIdSpecifications.findByEmail(searchValue); + return userRepository.findOne(baseSpecification); + } + + /** + * Check whether the user already exists in database during registration. + * + * @param searchValue - Mobile number or email of user. + * @return Boolean - True (or) false. + */ + public Object checkUserExists(String searchValue) { + log.info("Validating whether the user is already registered"); + Optional user = userExists(searchValue); + if (user.isPresent()) { + throw new UserExistException(ErrorCode.USER_ALREADY_EXISTS, responseMessage.getErrorMessage(ErrorCode.USER_ALREADY_EXISTS)); + } + return Boolean.FALSE; + } + + /*public GenericResponseDto updateUserOnboardStatus(int status) { + log.info("Updating the User On-board status into table"); + var userDataDto = findById(CacheUtil.getUserId()); + userDataDto.setOnboardStatus(OnboardStatus.valueOf(status)); + patch(userDataDto); + return GenericResponseDto.builder().status(Constants.SUCCESS).build(); + }*/ +} diff --git a/user/src/main/java/com/opsbeach/user/utils/Constants.java b/user/src/main/java/com/opsbeach/user/utils/Constants.java new file mode 100644 index 0000000..d360c35 --- /dev/null +++ b/user/src/main/java/com/opsbeach/user/utils/Constants.java @@ -0,0 +1,16 @@ +package com.opsbeach.user.utils; + +import lombok.AccessLevel; +import lombok.NoArgsConstructor; + 
+/**
+ * <p>
+ * Application constant values.
+ * </p>
+ */ +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public final class Constants { + + //Table + public static final String TABLE_USER = "\"user\""; +} diff --git a/user/src/main/resources/application-url.yml b/user/src/main/resources/application-url.yml new file mode 100644 index 0000000..452f071 --- /dev/null +++ b/user/src/main/resources/application-url.yml @@ -0,0 +1,6 @@ +application: + connect: + baseURL: https://api.aldefi.io/connect/v1/ + create-organization-node: https://api.aldefi.io/connect/v1/schema/organization?clientName={clientName} + analytics: + baseURL: https://api.aldefi.io/analytics/v1/ \ No newline at end of file diff --git a/user/src/main/resources/application.yml b/user/src/main/resources/application.yml new file mode 100644 index 0000000..23b6199 --- /dev/null +++ b/user/src/main/resources/application.yml @@ -0,0 +1,38 @@ +spring: + datasource: + url: jdbc:postgresql://34.27.156.232:5432/opsbeach?serverTimeZone=UTC&characterEncoding=UTF-8 + driver-class-name: org.postgresql.Driver + username: ${POSTGRES_USER} + password: ${POSTGRES_PASS} + hikari: + schema: ops_user + jpa: + hibernate: + ddl-auto: validate + properties: + hibernate: + default_schema: ops_user + dialect: org.hibernate.dialect.PostgreSQLDialect + ddl-auto: none + #format_sql: true + #show_sql: true + application: + name: user + smtp: + username: ${SMTP_USER_NAME} + password: ${SMTP_PASSWORD} +server: + port: 7080 + servlet: + contextPath: /user +flyway: + url: jdbc:postgresql://34.27.156.232:5432/opsbeach + schemas: PUBLIC + user: ${POSTGRES_USER} + password: ${POSTGRES_PASS} + +management: + endpoints: + web: + exposure: + include: health diff --git a/user/src/main/resources/db/migration/V1__Init.sql b/user/src/main/resources/db/migration/V1__Init.sql new file mode 100644 index 0000000..4f1ea30 --- /dev/null +++ b/user/src/main/resources/db/migration/V1__Init.sql @@ -0,0 +1,149 @@ +create schema if not exists ops_user; + +CREATE TABLE client ( + id bigserial PRIMARY KEY, 
+ created_at timestamp, + updated_at timestamp, + created_by bigint, + updated_by bigint, + is_deleted boolean DEFAULT false, + name varchar(512) NOT NULL UNIQUE, + description varchar(512) NOT NULL +); + +-- INSERT INTO client(created_at, updated_at, created_by, updated_by, name, description) +-- VALUES ('2022-03-19 00:00:00', '2022-03-19 00:00:00', 1, 1, 'OPSBEACH', 'opsBeach'); + +CREATE TABLE "user" ( + id bigserial PRIMARY KEY, + created_at timestamp, + updated_at timestamp, + created_by bigint, + updated_by bigint, + is_deleted boolean DEFAULT false, + client_id bigserial, + dob date, + email_id character varying(255), + mobile character varying(255), + first_name character varying(255), + middle_name character varying(255), + last_name character varying(255), + username character varying(255), + password character varying(255), + forget_password_key character varying(255), + gender character varying(50), + type character varying(255), + time_zone character varying(255), + onboard_status character varying(255), + failure_attempts integer, + lock_time timestamp, + account_locked boolean, + verification_token_sent_time timestamp, + password_changed_time timestamp, + old_password character varying(500), + otp character varying(6), + otp_sent_time timestamp +); + +-- INSERT INTO "user"( +-- email_id, username, mobile, first_name, client_id, onboard_status, created_at, updated_at, created_by, updated_by, forget_password_key, password, time_zone, failure_attempts, account_locked, password_changed_time) +-- VALUES ('schematalabs@gmail.com', 'schematalabs@gmail.com', '1234567891', 'Admin', 1, 'COMPLETED', '2022-03-17 00:00:00', '2022-03-19 00:00:00', 1, 1, null, '$2a$10$zo7hGv0u1MNBcpDLWhfageHpHMj.JW.chuU05xy8VdhSOaGc7iNKa', 'Asia/Kolkata', 0, FALSE, null); + +CREATE TABLE role ( + id bigserial PRIMARY KEY, + name varchar(512) NOT NULL, + description varchar(512) NOT NULL, + is_deleted boolean, + created_at timestamp, + updated_at timestamp, + created_by bigint, + 
updated_by bigint +); + +CREATE TABLE permission ( + id bigserial PRIMARY KEY, + operation varchar(256) NOT NULL, + is_deleted boolean, + created_at timestamp, + updated_at timestamp, + created_by bigint, + updated_by bigint +); + +CREATE TABLE role_permission ( + id bigserial PRIMARY KEY, + role_id bigint NOT NULL, + permission_id bigint NOT NULL, + is_deleted boolean, + created_at timestamp, + updated_at timestamp, + created_by bigint, + updated_by bigint +); + +CREATE TABLE user_role ( + id bigserial PRIMARY KEY, + role_id bigint NOT NULL, + user_id bigint NOT NULL, + is_deleted boolean, + created_at timestamp, + updated_at timestamp, + created_by bigint, + updated_by bigint +); + +INSERT INTO role (name, description, created_at, updated_at, created_by, updated_by, is_deleted) + VALUES ('ORG_ADMIN', 'Admin Role', '2022-04-02 00:00:00', '2021-04-02 00:00:00', 0, 0, FALSE), + ('SERVICE_ADMIN', 'Client Admin Role', '2022-04-02 00:00:00', '2021-04-02 00:00:00', 0, 0, FALSE), + ('ACCOUNT_ADMIN', 'Client User Role', '2022-06-04 00:00:00', '2021-04-02 00:00:00', 0, 0, FALSE), + ('USER', 'Client User Role', '2022-06-04 00:00:00', '2021-04-02 00:00:00', 0, 0, FALSE); + +-- INSERT INTO user_role (role_id, user_id, created_at, updated_at, created_by, updated_by, is_deleted) +-- VALUES ((SELECT id FROM "ops_user"."role" WHERE name = 'USER'), (SELECT id FROM "ops_user"."user" WHERE username = 'schematalabs@gmail.com'), '2022-04-02 00:00:00', '2021-04-02 00:00:00', 0, 0, FALSE); + +CREATE TABLE system_configuration ( + id bigserial PRIMARY KEY, + key varchar(256) NOT NULL, + value varchar(256) NOT NULL, + data_type varchar(50) NOT NULL, + description varchar(256) NOT NULL, + client_id bigserial, + is_deleted boolean, + created_at timestamp, + updated_at timestamp, + created_by bigint, + updated_by bigint +); + +CREATE TABLE jwt ( + id bigserial PRIMARY KEY, + public_key text NOT NULL, + private_key text NOT NULL, + access_token text NOT NULL, + refresh_token text NOT NULL, + 
is_deleted boolean, + expiry_at timestamp, + created_at timestamp, + updated_at timestamp, + created_by bigint, + updated_by bigint +); + +CREATE TABLE session ( + id bigserial PRIMARY KEY, + user_id bigserial, + uri varchar(512) NOT NULL, + type varchar(512) NOT NULL, + action varchar(512) NOT NULL, + module varchar(512) NOT NULL, + ip_address varchar(512) NOT NULL, + success_login boolean, + is_deleted boolean, + created_at timestamp, + updated_at timestamp, + created_by bigint, + updated_by bigint +); + +ALTER TABLE client ADD is_onboarded boolean DEFAULT false; +ALTER TABLE client ALTER COLUMN description DROP NOT NULL; \ No newline at end of file diff --git a/user/test-user-trigger.yaml b/user/test-user-trigger.yaml new file mode 100644 index 0000000..6635509 --- /dev/null +++ b/user/test-user-trigger.yaml @@ -0,0 +1,10 @@ +steps: +# https://stackoverflow.com/questions/52518141/deploying-on-merge-not-on-pull-request-with-google-cloud-build +- id: "tests" + name: gcr.io/cloud-builders/git + entrypoint: /bin/bash + args: + - -c + - | + echo "here we run all our unit tests" + waitFor: ['-'] \ No newline at end of file diff --git a/validate.sh b/validate.sh deleted file mode 100755 index 6c41adb..0000000 --- a/validate.sh +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/bash -java -jar target/schemata-1.0.jar validate -s=src/test/resources/descriptors/entities.desc -p=PROTOBUF