Commit: docs

callicles committed Feb 23, 2025
1 parent 00a666a commit e1d66d5
Showing 4 changed files with 109 additions and 10 deletions.
52 changes: 52 additions & 0 deletions apps/framework-cli/src/cli/local_webserver.rs
@@ -1298,6 +1298,15 @@ impl IntegrationError {
}
}

/// Validates the admin authentication by checking the provided bearer token against the admin API key.
///
/// # Arguments
/// * `auth_header` - Optional HeaderValue containing the Authorization header
/// * `admin_api_key` - Optional String containing the configured admin API key
///
/// # Returns
/// * `Ok(())` if authentication is successful
/// * `Err(IntegrationError)` if authentication fails or admin API key is not configured
async fn validate_admin_auth(
auth_header: Option<&HeaderValue>,
admin_api_key: &Option<String>,
@@ -1324,6 +1333,16 @@ async fn validate_admin_auth(
}
}
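
For illustration, here is a small, self-contained sketch of the bearer-token check this doc comment describes. The function name, error type, and exact header parsing are stand-ins, not the crate's actual implementation: the idea is that a request is accepted only when an admin key is configured and the header is exactly `Bearer <key>`.

// Standalone sketch of the admin bearer-token check; `AuthError` is an
// illustrative stand-in for the crate's IntegrationError.
#[derive(Debug)]
enum AuthError {
    Unauthorized(&'static str),
}

fn check_admin_auth(auth_header: Option<&str>, admin_api_key: &Option<String>) -> Result<(), AuthError> {
    // Fail closed when no admin API key is configured.
    let configured = admin_api_key
        .as_deref()
        .ok_or(AuthError::Unauthorized("admin API key is not configured"))?;

    // Accept only an exact `Bearer <configured key>` match.
    match auth_header.and_then(|v| v.strip_prefix("Bearer ")) {
        Some(token) if token == configured => Ok(()),
        _ => Err(AuthError::Unauthorized("invalid or missing bearer token")),
    }
}

fn main() {
    let key = Some("secret".to_string());
    assert!(check_admin_auth(Some("Bearer secret"), &key).is_ok());
    assert!(check_admin_auth(Some("Bearer wrong"), &key).is_err());
    assert!(check_admin_auth(None, &None).is_err());
}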

/// Searches for a table definition in the provided discrepancies based on the table name.
/// This function looks for the table in unmapped tables, added tables, updated tables, and removed tables.
///
/// # Arguments
/// * `table_name` - Name of the table to find
/// * `discrepancies` - InfraDiscrepancies containing the differences between reality and infrastructure map
///
/// # Returns
/// * `Some(Table)` if the table definition is found
/// * `None` if the table is not found or is marked for removal
fn find_table_definition(table_name: &str, discrepancies: &InfraDiscrepancies) -> Option<Table> {
debug!("Looking for table definition: {}", table_name);

@@ -1380,6 +1399,16 @@ fn find_table_definition(table_name: &str, discrepancies: &InfraDiscrepancies) -
}
}
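
A simplified sketch of the lookup the doc comment describes is below. `Table` and `Discrepancies` are stand-ins for the crate's types, and the real function also walks the added, updated, and removed table changes; only the basic "search the buckets by name" shape is shown.

// Simplified sketch: look a table up by name across the discrepancy buckets.
#[derive(Debug, Clone)]
struct Table {
    name: String,
}

struct Discrepancies {
    unmapped_tables: Vec<Table>,   // exist in reality but not in the map
    mismatched_tables: Vec<Table>, // exist in both but differ structurally
}

fn find_table_definition(table_name: &str, d: &Discrepancies) -> Option<Table> {
    d.unmapped_tables
        .iter()
        .chain(d.mismatched_tables.iter())
        .find(|t| t.name == table_name)
        .cloned()
}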

/// Updates the infrastructure map with the provided tables based on the discrepancies.
/// This function handles adding new tables, updating existing ones, and removing tables as needed.
///
/// # Arguments
/// * `tables_to_update` - Vector of table names to update
/// * `discrepancies` - InfraDiscrepancies containing the differences between reality and infrastructure map
/// * `infra_map` - Mutable reference to the infrastructure map to update
///
/// # Returns
/// * Vector of strings containing the names of tables that were successfully updated
async fn update_inframap_tables(
tables_to_update: Vec<String>,
discrepancies: &InfraDiscrepancies,
@@ -1432,6 +1461,16 @@ async fn update_inframap_tables(
updated_tables
}
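
The following self-contained sketch mirrors the add/update/remove behavior described above using simplified types; `RequestedChange` and the String-valued maps are illustrative stand-ins, not the crate's real representation.

// Sketch of the add/update/remove behavior described above, with simplified types.
use std::collections::HashMap;

enum RequestedChange {
    AddOrUpdate(String), // simplified table definition
    Remove,
}

fn apply_table_updates(
    tables_to_update: Vec<String>,
    changes: &HashMap<String, RequestedChange>,
    infra_map: &mut HashMap<String, String>,
) -> Vec<String> {
    let mut updated = Vec::new();
    for name in tables_to_update {
        match changes.get(&name) {
            Some(RequestedChange::AddOrUpdate(def)) => {
                infra_map.insert(name.clone(), def.clone());
                updated.push(name);
            }
            Some(RequestedChange::Remove) => {
                infra_map.remove(&name);
                updated.push(name);
            }
            // Unknown table: skip it and do not report it as updated.
            None => {}
        }
    }
    updated
}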

/// Stores the updated infrastructure map in both Redis and ClickHouse.
///
/// # Arguments
/// * `infra_map` - Reference to the infrastructure map to store
/// * `redis_guard` - Reference to the Redis client
/// * `project` - Reference to the project configuration
///
/// # Returns
/// * `Ok(())` if storage is successful
/// * `Err(IntegrationError)` if storage fails in either Redis or ClickHouse
async fn store_updated_inframap(
infra_map: &InfrastructureMap,
redis_guard: &RedisClient,
@@ -1475,6 +1514,19 @@ async fn store_updated_inframap(
Ok(())
}
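
As a rough sketch of the dual-write pattern the doc comment describes, the snippet below persists the serialized map to both stores and surfaces whichever write fails first. The traits and error type are illustrative stand-ins, not the crate's actual Redis or ClickHouse clients.

// Illustrative dual-write: persist the map to both stores, fail if either write fails.
trait KeyValueStore {
    fn set(&self, key: &str, value: &[u8]) -> Result<(), String>;
}

trait OlapStore {
    fn upsert_infra_map(&self, value: &[u8]) -> Result<(), String>;
}

fn store_updated_inframap<K: KeyValueStore, O: OlapStore>(
    serialized_map: &[u8],
    redis: &K,
    clickhouse: &O,
) -> Result<(), String> {
    redis
        .set("infrastructure_map", serialized_map)
        .map_err(|e| format!("failed to store infra map in Redis: {e}"))?;
    clickhouse
        .upsert_infra_map(serialized_map)
        .map_err(|e| format!("failed to store infra map in ClickHouse: {e}"))?;
    Ok(())
}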

/// Handles the admin integration changes route, which allows administrators to integrate
/// infrastructure changes into the system. This route validates authentication, processes
/// the requested table changes, and updates both the in-memory infrastructure map and
/// persisted storage (Redis and ClickHouse).
///
/// # Arguments
/// * `req` - The incoming HTTP request
/// * `admin_api_key` - Optional admin API key for authentication
/// * `project` - Reference to the project configuration
/// * `redis_client` - Reference to the Redis client wrapped in Arc<Mutex>
///
/// # Returns
/// * Result containing the HTTP response with either success or error information
async fn admin_integrate_changes_route(
req: Request<hyper::body::Incoming>,
admin_api_key: &Option<String>,
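The body of this handler is truncated in the diff above. As a control-flow sketch of what the doc comment describes, the snippet below authenticates, resolves the requested tables against the known discrepancies, and reports what was integrated; all HTTP, Redis, and ClickHouse specifics are stripped away, and every type here is a stand-in.

// Control-flow sketch only; types and names are illustrative.
struct IntegrateRequest {
    bearer_token: Option<String>,
    tables: Vec<String>,
}

struct IntegrateResponse {
    updated_tables: Vec<String>,
    skipped_tables: Vec<String>,
}

fn handle_integrate_changes(
    req: IntegrateRequest,
    admin_api_key: &Option<String>,
    known_discrepancies: &[String], // names of tables the reality check flagged
) -> Result<IntegrateResponse, String> {
    // 1. Authenticate: exact match of the bearer token against the configured key.
    match (req.bearer_token.as_deref(), admin_api_key.as_deref()) {
        (Some(token), Some(key)) if token == key => {}
        _ => return Err("unauthorized".to_string()),
    }

    // 2. Split the requested tables into ones that appear in the discrepancy report
    //    and ones that do not. The real handler would then update the infrastructure
    //    map and persist it to Redis and ClickHouse before responding.
    let (updated_tables, skipped_tables): (Vec<String>, Vec<String>) = req
        .tables
        .into_iter()
        .partition(|t| known_discrepancies.contains(t));

    Ok(IntegrateResponse {
        updated_tables,
        skipped_tables,
    })
}
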
24 changes: 22 additions & 2 deletions apps/framework-cli/src/cli/routines.rs
@@ -356,7 +356,17 @@ async fn leadership_tasks(
Ok(())
}

// Starts the file watcher and the webserver
/// Starts the application in development mode.
/// This mode is optimized for development workflows and includes additional debugging features.
///
/// # Arguments
/// * `project` - Arc wrapped Project instance containing configuration
/// * `metrics` - Arc wrapped Metrics instance for monitoring
/// * `redis_client` - Arc and Mutex wrapped RedisClient for caching
/// * `settings` - Reference to application Settings
///
/// # Returns
/// * `anyhow::Result<()>` - Success or error result
pub async fn start_development_mode(
project: Arc<Project>,
metrics: Arc<Metrics>,
@@ -461,7 +471,17 @@ pub async fn start_development_mode(
Ok(())
}
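
The older comment summarizes this as "starts the file watcher and the webserver". A minimal sketch of that startup shape, assuming a tokio runtime and the anyhow error type already used by this function, would run both as concurrent tasks and exit if either stops; the task bodies below are placeholders, not the crate's code.

// Minimal dev-mode startup sketch: watcher and webserver run concurrently.
async fn start_development_mode_sketch() -> anyhow::Result<()> {
    let watcher = tokio::spawn(async {
        // watch project files and rebuild infrastructure on change (placeholder)
    });
    let webserver = tokio::spawn(async {
        // serve the local HTTP API (placeholder)
    });

    // If either task finishes or panics, surface that instead of hanging forever.
    tokio::select! {
        res = watcher => res?,
        res = webserver => res?,
    }
    Ok(())
}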

// Starts the webserver in production mode
/// Starts the application in production mode.
/// This mode is optimized for production use with appropriate security and performance settings.
///
/// # Arguments
/// * `settings` - Reference to application Settings
/// * `project` - Arc wrapped Project instance containing configuration
/// * `metrics` - Arc wrapped Metrics instance for monitoring
/// * `redis_client` - Arc and Mutex wrapped RedisClient for caching
///
/// # Returns
/// * `anyhow::Result<()>` - Success or error result
pub async fn start_production_mode(
settings: &Settings,
project: Arc<Project>,
16 changes: 14 additions & 2 deletions apps/framework-cli/src/framework/core/infra_reality_checker.rs
@@ -8,18 +8,27 @@ use log::debug;
use std::collections::HashMap;
use thiserror::Error;

/// Represents errors that can occur during infrastructure reality checking.
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum RealityCheckError {
/// Error occurred while checking OLAP infrastructure
#[error("Failed to check OLAP infrastructure: {0}")]
OlapCheck(#[from] OlapChangesError),

/// Error occurred while loading the infrastructure map
#[error("Failed to load infrastructure map: {0}")]
InfraMapLoad(#[from] std::io::Error),

/// Error occurred during database operations
#[error("Database error: {0}")]
DatabaseError(String),
}
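
To make the role of the `#[from]` attributes concrete: lower-level errors convert into this enum automatically, so callers can bubble them up with `?`. The helper below is hypothetical and assumes the enum above is in scope; it is not part of the crate.

// Hypothetical caller: std::io::Error becomes RealityCheckError::InfraMapLoad via #[from].
fn load_map_from_disk(path: &str) -> Result<Vec<u8>, RealityCheckError> {
    Ok(std::fs::read(path)?)
}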

/// Represents discrepancies found between actual infrastructure and documented map
/// Represents discrepancies found between actual infrastructure and documented map.
/// This struct holds information about tables that exist in reality but not in the map,
/// tables that are in the map but don't exist in reality, and tables that exist in both
/// but have structural differences.
#[derive(Debug)]
pub struct InfraDiscrepancies {
/// Tables that exist in reality but are not in the map
@@ -31,14 +40,17 @@ pub struct InfraDiscrepancies {
}

impl InfraDiscrepancies {
/// Returns true if there are no discrepancies between reality and the infrastructure map
pub fn is_empty(&self) -> bool {
self.unmapped_tables.is_empty()
&& self.missing_tables.is_empty()
&& self.mismatched_tables.is_empty()
}
}
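
A small usage sketch of the struct above, assuming its three fields are Vec-like collections exposing `len()`; the reporting function itself is illustrative, not part of the crate.

// Illustrative consumer of the discrepancy report (assumes the fields are Vec-like).
fn report(discrepancies: &InfraDiscrepancies) {
    if discrepancies.is_empty() {
        println!("infrastructure matches the map");
        return;
    }
    println!("unmapped tables:   {}", discrepancies.unmapped_tables.len());
    println!("missing tables:    {}", discrepancies.missing_tables.len());
    println!("mismatched tables: {}", discrepancies.mismatched_tables.len());
}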

/// The Infrastructure Reality Checker compares actual infrastructure state with the infrastructure map
/// The Infrastructure Reality Checker compares actual infrastructure state with the infrastructure map.
/// It uses an OLAP client to query the actual state of the infrastructure and compares it with
/// the documented state in the infrastructure map.
pub struct InfraRealityChecker<T: OlapOperations> {
olap_client: T,
}
27 changes: 21 additions & 6 deletions apps/framework-cli/src/framework/core/infrastructure_map.rs
@@ -233,6 +233,15 @@ pub struct InfrastructureMap {
}

impl InfrastructureMap {
/// Compare tables between two infrastructure maps and compute the differences.
/// This function identifies added, removed, and updated tables by comparing
/// the source and target table maps. Changes are collected in the provided
/// changes vector.
///
/// # Arguments
/// * `self_tables` - HashMap of the current tables to compare from
/// * `target_tables` - HashMap of target tables to compare against
/// * `changes` - Mutable vector to collect the identified changes
pub fn diff_tables(
self_tables: &HashMap<String, Table>,
target_tables: &HashMap<String, Table>,
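
The rest of this function is not shown in the diff. A self-contained sketch of the table-diff walk the doc comment describes is below: tables present only in the target are additions, tables present in both with different definitions are updates, and tables present only in the source are removals. `String` stands in for the crate's Table type and `SketchChange` for its change representation.

// Self-contained sketch of the table-diff walk; all types are stand-ins.
use std::collections::HashMap;

#[derive(Debug, PartialEq)]
enum SketchChange {
    Added(String),
    Removed(String),
    Updated { before: String, after: String },
}

fn diff_tables_sketch(
    self_tables: &HashMap<String, String>,
    target_tables: &HashMap<String, String>,
    changes: &mut Vec<SketchChange>,
) {
    // Additions and updates: walk the target map and compare against the source.
    for (name, target) in target_tables {
        match self_tables.get(name) {
            None => changes.push(SketchChange::Added(target.clone())),
            Some(current) if current != target => changes.push(SketchChange::Updated {
                before: current.clone(),
                after: target.clone(),
            }),
            Some(_) => {} // unchanged
        }
    }
    // Removals: anything in the source that no longer appears in the target.
    for (name, current) in self_tables {
        if !target_tables.contains_key(name) {
            changes.push(SketchChange::Removed(current.clone()));
        }
    }
}
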
@@ -395,9 +404,13 @@ impl InfrastructureMap {
let mut function_processes = HashMap::new();
let mut initial_data_loads = HashMap::new();

let mut data_models_that_have_not_changed_with_new_version = Vec::new();
// Process data models that have changes in their latest version
// This ensures we create new infrastructure for updated data models first
let mut data_models_that_have_not_changed_with_new_version = vec![];

// Iterate through data models and process those that have changes
for data_model in primitive_map.data_models_iter() {
// Check if the data model has changed compared to its previous version
if primitive_map
.datamodels
.has_data_model_changed_with_previous_version(
Expand All @@ -408,6 +421,7 @@ impl InfrastructureMap {
let topic = Topic::from_data_model(data_model);
let api_endpoint = ApiEndpoint::from_data_model(data_model, &topic);

// If storage is enabled for this data model, create necessary infrastructure
if data_model.config.storage.enabled {
let table = data_model.to_table();
let topic_to_table_sync_process = TopicToTableSyncProcess::new(&topic, &table);
Expand All @@ -419,20 +433,21 @@ impl InfrastructureMap {
);
}

// If streaming engine is enabled, create topics and API endpoints
if project.features.streaming_engine {
topics.insert(topic.id(), topic);
api_endpoints.insert(api_endpoint.id(), api_endpoint);
}
} else {
// We wait to have processed all the datamodels to process the ones that don't have changes
// That way we can refer to infrastructure that was created by those older versions.
// Store unchanged data models for later processing
// This allows us to reference infrastructure created by older versions
data_models_that_have_not_changed_with_new_version.push(data_model);
}
}

// We process the data models that have not changed with their registered versions.
// For the ones that require storage, we have views that points to the oldest table that has the data
// with the same schema. We also reused the same topic that was created for the previous version.
// Process data models that haven't changed with their registered versions
// For those requiring storage, we create views pointing to the oldest table
// that has the data with the same schema. We also reuse existing topics.
for data_model in data_models_that_have_not_changed_with_new_version {
match primitive_map
.datamodels