diff --git a/bun.lockb b/bun.lockb
index 5311eff..d9daa39 100755
Binary files a/bun.lockb and b/bun.lockb differ
diff --git a/docs/content/docs/reference/config-reference.mdx b/docs/content/docs/reference/config-reference.mdx
index e4aa01e..976805b 100644
--- a/docs/content/docs/reference/config-reference.mdx
+++ b/docs/content/docs/reference/config-reference.mdx
@@ -81,3 +81,66 @@ This allows you to run:
```bash
apx add @animate-ui/fade-in
```
+
+### `[tool.apx.dev]`
+
+Development server configuration options.
+
+- **log_config_file**: Path to an external Python logging configuration file, resolved relative to the project root (the directory containing `pyproject.toml`). Mutually exclusive with `[tool.apx.dev.logging]`.
+
+Example:
+
+```toml
+[tool.apx.dev]
+log_config_file = "logging_config.py"
+```
+
+### `[tool.apx.dev.logging]`
+
+Inline Python logging configuration using the standard [logging.dictConfig](https://docs.python.org/3/library/logging.config.html#logging.config.dictConfig) format. This configuration is **merged** with the default uvicorn logging setup, allowing you to add custom loggers or override specific settings while preserving the standard uvicorn logging behavior.
+
+When you specify loggers, formatters, or handlers, they are merged with the defaults:
+
+- New entries are added
+- Existing entries with the same name are overridden
+
+Example with inline tables. Following the merge rules above, the `uvicorn` entry overrides the default `uvicorn` logger, while `myapp` is added alongside the defaults:
+
+```toml
+[tool.apx.dev.logging]
+version = 1
+disable_existing_loggers = false
+
+[tool.apx.dev.logging.formatters]
+default = { format = "%(levelname)s %(name)s %(message)s" }
+
+[tool.apx.dev.logging.handlers]
+console = { class = "logging.StreamHandler", formatter = "default", stream = "ext://sys.stdout" }
+
+[tool.apx.dev.logging.loggers]
+"uvicorn" = { level = "DEBUG", handlers = ["console"], propagate = false }
+"myapp" = { level = "DEBUG", handlers = ["console"], propagate = false }
+```
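+
+The same configuration can also be written with standard TOML sub-tables instead of inline tables; this block is equivalent to the `myapp` entry above:
+
+```toml
+[tool.apx.dev.logging.loggers.myapp]
+level = "DEBUG"
+handlers = ["console"]
+propagate = false
+```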
+
+**Default loggers provided by apx:**
+
+| Logger | Level | Description |
+| ---------------- | ----- | ------------------------- |
+| `uvicorn` | INFO | Main uvicorn logger |
+| `uvicorn.error` | INFO | Uvicorn error logger |
+| `uvicorn.access` | INFO | HTTP access logs |
+| `{app_slug}` | DEBUG | Your application's logger |
+
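+Because your configuration is merged with these defaults, a small addition is enough. For example, to also write your application's logs to a file (a sketch: `myapp` stands in for your app slug, and `dev.log` is an arbitrary file name):
+
+```toml
+[tool.apx.dev.logging]
+version = 1
+
+[tool.apx.dev.logging.handlers]
+file = { class = "logging.FileHandler", filename = "dev.log" }
+
+[tool.apx.dev.logging.loggers]
+myapp = { level = "DEBUG", handlers = ["file"] }
+```
+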
+**Configuration options:**
+
+- **version**: Must be `1` (required by Python's dictConfig)
+- **disable_existing_loggers**: Whether to disable existing loggers (default: `false`)
+- **formatters**: Log message formatters
+- **handlers**: Output handlers (console, file, etc.)
+- **loggers**: Logger configurations by name
+- **root**: Root logger configuration (see the example below)
+
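+For example, to override the default root logger (assuming the `console` handler defined in the example above):
+
+```toml
+[tool.apx.dev.logging.root]
+level = "WARNING"
+handlers = ["console"]
+```
+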
+You cannot use both `log_config_file` and `[tool.apx.dev.logging]` at the same time.
+
diff --git a/package.json b/package.json
index 374a044..8eff92b 100644
--- a/package.json
+++ b/package.json
@@ -14,18 +14,18 @@
"@opentelemetry/exporter-logs-otlp-http": "^0.211.0",
"@opentelemetry/resources": "^2.5.0",
"@opentelemetry/sdk-logs": "^0.211.0",
- "@tailwindcss/vite": "^4.1.15",
+ "@tailwindcss/vite": "^4.1.18",
"@tanstack/react-query": "^5.90.20",
- "@tanstack/react-router": "^1.157.16",
- "@tanstack/router-plugin": "^1.133.21",
+ "@tanstack/react-router": "^1.157.18",
+ "@tanstack/router-plugin": "^1.157.18",
"@types/bun": "latest",
- "@types/node": "^24.7.2",
- "@vitejs/plugin-react": "^5.0.4",
- "axios": "^1.13.1",
- "react": "^19.2.0",
- "smol-toml": "^1.4.2",
+ "@types/node": "^24.10.9",
+ "@vitejs/plugin-react": "^5.1.3",
+ "axios": "^1.13.4",
+ "react": "^19.2.4",
+ "smol-toml": "^1.6.0",
"typescript": "^5.9.3",
- "vite": "^7.1.9"
+ "vite": "^7.3.1"
},
"peerDependencies": {
"typescript": "^5.9.3"
diff --git a/src/common.rs b/src/common.rs
index 25d794d..9bc35f9 100644
--- a/src/common.rs
+++ b/src/common.rs
@@ -9,6 +9,7 @@ use tokio::process::Command;
use crate::bun_binary_path;
use crate::generate_openapi;
+use crate::python_logging::{DevConfig, parse_dev_config};
/// Dev dependencies required by apx frontend entrypoint.ts
/// These must be installed before running any frontend command
@@ -201,6 +202,7 @@ pub struct ProjectMetadata {
pub metadata_path: PathBuf,
pub ui_root: PathBuf,
pub ui_registries: HashMap,
+ pub dev_config: DevConfig,
}
impl ProjectMetadata {
@@ -261,6 +263,9 @@ pub fn read_project_metadata(project_root: &Path) -> Result<ProjectMetadata, String>
dotenv_vars: Arc<Mutex<HashMap<String, String>>>,
+ dev_config: DevConfig,
}
impl ProcessManager {
@@ -90,6 +92,7 @@ impl ProcessManager {
let dotenv_vars = Arc::new(Mutex::new(dotenv.get_vars()));
let app_slug = metadata.app_slug.clone();
let app_entrypoint = metadata.app_entrypoint.clone();
+ let dev_config = metadata.dev_config.clone();
let dev_token = Self::generate_dev_token();
let db_password = Self::generate_dev_token(); // Random password for PGlite
@@ -119,6 +122,7 @@ impl ProcessManager {
app_slug,
app_entrypoint,
dotenv_vars,
+ dev_config,
})
}
@@ -301,8 +305,10 @@ impl ProcessManager {
// 2026-01-28 14:09:02.413 | app | INFO: Uvicorn running...
// ============================================================================
- // Create uvicorn logging config for consistent log format
- let log_config = self.create_uvicorn_log_config(app_dir).await?;
+ // Resolve uvicorn logging config (inline TOML, external Python file, or default)
+ let log_config_result =
+ resolve_log_config(&self.dev_config, &self.app_slug, app_dir).await?;
+ let log_config = log_config_result.to_string_path();
// Run uvicorn via uv to ensure correct Python environment
let mut cmd = UvCommand::new("uvicorn").tokio_command();
@@ -377,72 +383,6 @@ impl ProcessManager {
Ok(())
}
- /// Create a uvicorn logging config file (JSON format, no pyyaml dependency).
- /// Always overwrites the existing config to ensure format updates are applied.
- async fn create_uvicorn_log_config(&self, app_dir: &Path) -> Result<String, String> {
- let config_dir = app_dir.join(".apx");
- tokio::fs::create_dir_all(&config_dir)
- .await
- .map_err(|e| format!("Failed to create .apx directory: {e}"))?;
-
- let config_path = config_dir.join("uvicorn_logging.json");
- // APX adds: timestamp | source | channel |
- // So we only need: location | message
- //
- // IMPORTANT: Uvicorn's access logger passes values as positional args, not named fields.
- // Use %(message)s to get the pre-formatted message, not %(client_addr)s etc.
- let config_content = r#"{
- "version": 1,
- "disable_existing_loggers": false,
- "formatters": {
- "default": {
- "format": "%(module)s.%(funcName)s | %(message)s"
- },
- "access": {
- "format": "%(message)s"
- }
- },
- "handlers": {
- "default": {
- "class": "logging.StreamHandler",
- "stream": "ext://sys.stderr",
- "formatter": "default"
- },
- "access": {
- "class": "logging.StreamHandler",
- "stream": "ext://sys.stdout",
- "formatter": "access"
- }
- },
- "loggers": {
- "uvicorn": {
- "handlers": ["default"],
- "level": "INFO",
- "propagate": false
- },
- "uvicorn.error": {
- "level": "INFO",
- "propagate": true
- },
- "uvicorn.access": {
- "handlers": ["access"],
- "level": "INFO",
- "propagate": false
- }
- },
- "root": {
- "level": "INFO",
- "handlers": ["default"]
- }
-}"#;
-
- tokio::fs::write(&config_path, config_content)
- .await
- .map_err(|e| format!("Failed to write uvicorn logging config: {e}"))?;
-
- Ok(config_path.display().to_string())
- }
-
async fn spawn_pglite(&self, bun: &BunCommand) -> Result<(), String> {
let child = self
.spawn_process(
@@ -581,6 +521,7 @@ impl ProcessManager {
let dev_server_port = self.dev_server_port;
let dev_token = self.dev_token.clone();
let db_password = self.db_password.clone();
+ let dev_config = self.dev_config.clone();
tokio::spawn(async move {
let (tx, mut rx) = tokio::sync::mpsc::channel::(100);
@@ -700,6 +641,7 @@ impl ProcessManager {
&db_password,
&dotenv_vars,
&backend_child,
+ &dev_config,
)
.await
{
@@ -804,6 +746,7 @@ impl ProcessManager {
db_password: &str,
dotenv_vars: &Arc<Mutex<HashMap<String, String>>>,
backend_child: &Arc<Mutex<Option<Child>>>,
+ dev_config: &DevConfig,
) -> Result<(), String> {
// ============================================================================
// Backend logs are captured via stdout/stderr and forwarded to flux.
@@ -811,9 +754,9 @@ impl ProcessManager {
// See spawn_uvicorn() for detailed explanation.
// ============================================================================
- // Reuse the existing log config file (created by spawn_uvicorn)
- let log_config = app_dir.join(".apx").join("uvicorn_logging.json");
- let log_config_str = log_config.display().to_string();
+ // Resolve uvicorn logging config (inline TOML, external Python file, or default)
+ let log_config_result = resolve_log_config(dev_config, app_slug, app_dir).await?;
+ let log_config_str = log_config_result.to_string_path();
// Run uvicorn via uv to ensure correct Python environment
let mut cmd = UvCommand::new("uvicorn").tokio_command();
diff --git a/src/lib.rs b/src/lib.rs
index cc78587..62ba6c2 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -29,6 +29,7 @@ mod flux;
mod interop;
mod mcp;
mod openapi;
+mod python_logging;
mod registry;
mod search;
mod sources;
diff --git a/src/openapi/mod.rs b/src/openapi/mod.rs
index 35917f5..4a88c45 100644
--- a/src/openapi/mod.rs
+++ b/src/openapi/mod.rs
@@ -2096,7 +2096,8 @@ mod tests {
"name": "apx-ts-typecheck",
"private": true,
"dependencies": {
- "@tanstack/react-query": "^5"
+ "@tanstack/react-query": "^5",
+ "typescript": "^5"
}
}
"#;
@@ -2152,7 +2153,7 @@ mod tests {
// Run tsc from the test environment directory with explicit compiler options
- // Using `bun x` which is equivalent to `bunx`
+ // Using `bun run` to invoke the locally installed tsc from node_modules
let output = Command::new("bun")
- .arg("x")
+ .arg("run")
.args([
"tsc",
"--noEmit",
diff --git a/src/python_logging.rs b/src/python_logging.rs
new file mode 100644
index 0000000..a1e789a
--- /dev/null
+++ b/src/python_logging.rs
@@ -0,0 +1,792 @@
+//! Python logging configuration for uvicorn.
+//!
+//! Supports two configuration modes:
+//! 1. Inline TOML config in [tool.apx.dev.logging]
+//! 2. External Python file via log_config_file setting
+//!
+//! When neither is specified, generates a default logging configuration.
+
+use serde::{Deserialize, Serialize};
+use std::collections::HashMap;
+use std::path::{Path, PathBuf};
+
+/// Dev configuration from [tool.apx.dev]
+#[derive(Debug, Clone, Default)]
+pub struct DevConfig {
+ /// Inline TOML logging config
+ pub logging: Option<LoggingConfig>,
+ /// External Python file for logging config
+ pub log_config_file: Option<PathBuf>,
+}
+
+/// Python logging.dictConfig format
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct LoggingConfig {
+ pub version: i32,
+ #[serde(default)]
+ pub disable_existing_loggers: bool,
+ #[serde(default)]
+ pub formatters: HashMap<String, FormatterConfig>,
+ #[serde(default)]
+ pub handlers: HashMap<String, HandlerConfig>,
+ #[serde(default)]
+ pub loggers: HashMap<String, LoggerConfig>,
+ #[serde(default)]
+ pub root: Option<RootLoggerConfig>,
+}
+
+/// Formatter configuration
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct FormatterConfig {
+ #[serde(default)]
+ pub format: Option<String>,
+ #[serde(default)]
+ pub datefmt: Option<String>,
+ #[serde(default, rename = "class")]
+ pub class_name: Option<String>,
+}
+
+/// Handler configuration
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct HandlerConfig {
+ #[serde(rename = "class")]
+ pub class_name: String,
+ #[serde(default)]
+ pub level: Option<String>,
+ #[serde(default)]
+ pub formatter: Option<String>,
+ #[serde(default)]
+ pub stream: Option<String>,
+ #[serde(default)]
+ pub filename: Option<String>,
+ #[serde(default)]
+ pub filters: Option<Vec<String>>,
+}
+
+/// Logger configuration
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct LoggerConfig {
+ #[serde(default)]
+ pub handlers: Option<Vec<String>>,
+ #[serde(default)]
+ pub level: Option<String>,
+ #[serde(default)]
+ pub propagate: Option<bool>,
+}
+
+/// Root logger configuration
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct RootLoggerConfig {
+ #[serde(default)]
+ pub level: Option<String>,
+ #[serde(default)]
+ pub handlers: Option<Vec<String>>,
+}
+
+/// Result of resolving log configuration
+#[derive(Debug, Clone)]
+pub enum LogConfigResult {
+ /// JSON config file path (.apx/uvicorn_logging.json)
+ JsonConfig(PathBuf),
+ /// External Python file path
+ PythonFile(PathBuf),
+}
+
+impl LogConfigResult {
+ /// Get the path as a string for passing to uvicorn
+ pub fn to_string_path(&self) -> String {
+ match self {
+ LogConfigResult::JsonConfig(p) | LogConfigResult::PythonFile(p) => {
+ p.display().to_string()
+ }
+ }
+ }
+}
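+
+// Usage sketch (orientation only; the real wiring lives in the dev process
+// manager, which resolves the config and hands the resulting path to uvicorn):
+//
+//   let resolved = resolve_log_config(&dev_config, &app_slug, app_dir).await?;
+//   let log_config_arg = resolved.to_string_path();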
+
+/// Parse [tool.apx.dev] section from pyproject.toml
+pub fn parse_dev_config(
+ pyproject_value: &toml::Value,
+ project_root: &Path,
+) -> Result<DevConfig, String> {
+ let dev_section = pyproject_value
+ .get("tool")
+ .and_then(|tool| tool.get("apx"))
+ .and_then(|apx| apx.get("dev"));
+
+ let Some(dev) = dev_section else {
+ return Ok(DevConfig::default());
+ };
+
+ let logging = dev.get("logging").map(parse_logging_config).transpose()?;
+
+ let log_config_file = dev
+ .get("log_config_file")
+ .and_then(|v| v.as_str())
+ .map(|s| project_root.join(s));
+
+ // Validate mutual exclusivity
+ if logging.is_some() && log_config_file.is_some() {
+ return Err(
+ "Cannot specify both [tool.apx.dev.logging] and log_config_file in pyproject.toml"
+ .to_string(),
+ );
+ }
+
+ // Validate external file exists
+ if let Some(ref path) = log_config_file
+ && !path.exists()
+ {
+ return Err(format!("log_config_file not found: {}", path.display()));
+ }
+
+ Ok(DevConfig {
+ logging,
+ log_config_file,
+ })
+}
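+
+// A minimal test sketch for parse_dev_config. The pyproject snippets and the
+// project path are hypothetical and exist only to illustrate the two code
+// paths above (inline config accepted, conflicting settings rejected).
+#[cfg(test)]
+mod dev_config_parsing_sketch {
+    use super::*;
+    use std::path::Path;
+
+    #[test]
+    fn inline_logging_section_is_parsed() {
+        let pyproject: toml::Value = toml::from_str(
+            r#"
+            [tool.apx.dev.logging]
+            version = 1
+
+            [tool.apx.dev.logging.loggers]
+            myapp = { level = "DEBUG", propagate = false }
+            "#,
+        )
+        .expect("valid TOML");
+
+        let dev = parse_dev_config(&pyproject, Path::new("/tmp/example-project"))
+            .expect("inline logging config should parse");
+
+        let logging = dev.logging.expect("logging config present");
+        assert_eq!(logging.version, 1);
+        assert_eq!(
+            logging.loggers.get("myapp").and_then(|l| l.level.as_deref()),
+            Some("DEBUG")
+        );
+        assert!(dev.log_config_file.is_none());
+    }
+
+    #[test]
+    fn inline_logging_and_log_config_file_are_mutually_exclusive() {
+        let pyproject: toml::Value = toml::from_str(
+            r#"
+            [tool.apx.dev]
+            log_config_file = "logging_config.py"
+
+            [tool.apx.dev.logging]
+            version = 1
+            "#,
+        )
+        .expect("valid TOML");
+
+        let err = parse_dev_config(&pyproject, Path::new("/tmp/example-project"))
+            .expect_err("conflicting settings should be rejected");
+        assert!(err.contains("Cannot specify both"));
+    }
+}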
+
+/// Parse inline logging configuration from TOML value
+fn parse_logging_config(value: &toml::Value) -> Result<LoggingConfig, String> {
+ let version = value
+ .get("version")
+ .and_then(|v| v.as_integer())
+ .unwrap_or(1) as i32;
+
+ let disable_existing_loggers = value
+ .get("disable_existing_loggers")
+ .and_then(|v| v.as_bool())
+ .unwrap_or(false);
+
+ let formatters = parse_formatters(value.get("formatters"))?;
+ let handlers = parse_handlers(value.get("handlers"))?;
+ let loggers = parse_loggers(value.get("loggers"))?;
+ let root = parse_root_logger(value.get("root"))?;
+
+ Ok(LoggingConfig {
+ version,
+ disable_existing_loggers,
+ formatters,
+ handlers,
+ loggers,
+ root,
+ })
+}
+
+fn parse_formatters(
+ value: Option<&toml::Value>,
+) -> Result<HashMap<String, FormatterConfig>, String> {
+ let Some(v) = value else {
+ return Ok(HashMap::new());
+ };
+
+ let table = v.as_table().ok_or("formatters must be a table")?;
+
+ let mut result = HashMap::new();
+ for (name, formatter_value) in table {
+ let formatter_table = formatter_value
+ .as_table()
+ .ok_or_else(|| format!("formatter '{}' must be a table", name))?;
+
+ let format = formatter_table
+ .get("format")
+ .and_then(|v| v.as_str())
+ .map(|s| s.to_string());
+
+ let datefmt = formatter_table
+ .get("datefmt")
+ .and_then(|v| v.as_str())
+ .map(|s| s.to_string());
+
+ let class_name = formatter_table
+ .get("class")
+ .and_then(|v| v.as_str())
+ .map(|s| s.to_string());
+
+ result.insert(
+ name.clone(),
+ FormatterConfig {
+ format,
+ datefmt,
+ class_name,
+ },
+ );
+ }
+
+ Ok(result)
+}
+
+fn parse_handlers(value: Option<&toml::Value>) -> Result<HashMap<String, HandlerConfig>, String> {
+ let Some(v) = value else {
+ return Ok(HashMap::new());
+ };
+
+ let table = v.as_table().ok_or("handlers must be a table")?;
+
+ let mut result = HashMap::new();
+ for (name, handler_value) in table {
+ let handler_table = handler_value
+ .as_table()
+ .ok_or_else(|| format!("handler '{}' must be a table", name))?;
+
+ let class_name = handler_table
+ .get("class")
+ .and_then(|v| v.as_str())
+ .map(|s| s.to_string())
+ .ok_or_else(|| format!("handler '{}' must have a 'class' field", name))?;
+
+ let level = handler_table
+ .get("level")
+ .and_then(|v| v.as_str())
+ .map(|s| s.to_string());
+
+ let formatter = handler_table
+ .get("formatter")
+ .and_then(|v| v.as_str())
+ .map(|s| s.to_string());
+
+ let stream = handler_table
+ .get("stream")
+ .and_then(|v| v.as_str())
+ .map(|s| s.to_string());
+
+ let filename = handler_table
+ .get("filename")
+ .and_then(|v| v.as_str())
+ .map(|s| s.to_string());
+
+ let filters = handler_table.get("filters").and_then(|v| {
+ v.as_array().map(|arr| {
+ arr.iter()
+ .filter_map(|item| item.as_str().map(|s| s.to_string()))
+ .collect()
+ })
+ });
+
+ result.insert(
+ name.clone(),
+ HandlerConfig {
+ class_name,
+ level,
+ formatter,
+ stream,
+ filename,
+ filters,
+ },
+ );
+ }
+
+ Ok(result)
+}
+
+fn parse_loggers(value: Option<&toml::Value>) -> Result<HashMap<String, LoggerConfig>, String> {
+ let Some(v) = value else {
+ return Ok(HashMap::new());
+ };
+
+ let table = v.as_table().ok_or("loggers must be a table")?;
+
+ let mut result = HashMap::new();
+ for (name, logger_value) in table {
+ let logger_table = logger_value
+ .as_table()
+ .ok_or_else(|| format!("logger '{}' must be a table", name))?;
+
+ let handlers = logger_table.get("handlers").and_then(|v| {
+ v.as_array().map(|arr| {
+ arr.iter()
+ .filter_map(|item| item.as_str().map(|s| s.to_string()))
+ .collect()
+ })
+ });
+
+ let level = logger_table
+ .get("level")
+ .and_then(|v| v.as_str())
+ .map(|s| s.to_string());
+
+ let propagate = logger_table.get("propagate").and_then(|v| v.as_bool());
+
+ result.insert(
+ name.clone(),
+ LoggerConfig {
+ handlers,
+ level,
+ propagate,
+ },
+ );
+ }
+
+ Ok(result)
+}
+
+fn parse_root_logger(value: Option<&toml::Value>) -> Result<Option<RootLoggerConfig>, String> {