diff --git a/Cargo.lock b/Cargo.lock index ed781e66..806162aa 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -180,6 +180,7 @@ dependencies = [ "reqwest 0.13.1", "serde", "serde_json", + "serde_yaml", "similar", "tempfile", "tera", @@ -236,6 +237,7 @@ dependencies = [ "serde", "serde_json", "serde_with", + "serde_yaml", "sha2", "sqlx", "swc_atoms", @@ -5067,6 +5069,7 @@ version = "8.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5bcdef0be6fe7f6fa333b1073c949729274b05f123a0ad7efcb8efd878e5c3b1" dependencies = [ + "globset", "sha2", "walkdir", ] diff --git a/Cargo.toml b/Cargo.toml index 1bd49829..1b9c65f1 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -70,7 +70,7 @@ hex = "0.4" rand = "0.8.5" walkdir = "2.5.0" url = "2.5.8" -rust-embed = "8" +rust-embed = { version = "8", features = ["include-exclude"] } zip = "2.2" flate2 = "1.1" sha2 = "0.10" diff --git a/README.md b/README.md index bdadc0ee..f7ed3498 100644 --- a/README.md +++ b/README.md @@ -27,6 +27,58 @@ πŸ’‘ The main idea of `apx` is to provide convenient, fast and AI-friendly development experience. +## Agents + +`apx init --addon agent` adds a complete agent framework β€” tool-calling loop, composition patterns, OBO auth, MCP, and dev UI. 
+ +``` +β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” +β”‚ /_apx/agent (dev UI) POST /invocations β”‚ +β”‚ β”‚ β”‚ β”‚ +β”‚ └──────────┐ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ +β”‚ β–Ό β–Ό β”‚ +β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ +β”‚ β”‚ LlmAgent │────▢│ FMAPI (LLM) β”‚ β”‚ +β”‚ β””β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ +β”‚ β”‚ tool calls β”‚ +β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ +β”‚ β–Ό β–Ό β–Ό β”‚ +β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ +β”‚ β”‚ Local β”‚ β”‚ Genie β”‚ β”‚ Sub-agent β”‚ β”‚ +β”‚ β”‚ tools β”‚ β”‚ Space β”‚ β”‚ /invoke β”‚ β”‚ +β”‚ β””β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”¬β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”˜ β”‚ +β”‚ β”‚ β”‚ β”‚ β”‚ +β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ +β”‚ OBO auth (X-Forwarded-Access-Token) β”‚ +β”‚ forwarded through every call β”‚ +β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ + β”‚ β”‚ + /mcp/sse /.well-known/agent.json + (MCP server) (A2A discovery) +``` + +**What you get out of the box:** + +| Feature | How | +|---|---| +| OBO auth in every tool | `ws: Dependencies.UserClient` β€” token flows automatically | +| MCP server | `/mcp/sse` β€” connect Claude Desktop, Cursor, etc. 
| +| A2A discovery | `/.well-known/agent.json` β€” auto-populated at request time | +| Agent composition | `SequentialAgent`, `ParallelAgent`, `LoopAgent`, `RouterAgent`, `HandoffAgent` | +| Dev UI | `/_apx/agent` β€” chat, tool trace, MCP URL copy | +| Deploy | `apx deploy` β€” one command to production | +| MLflow eval | `app_predict_fn(url)` β†’ `mlflow.genai.evaluate()` | + +**Define tools as plain functions β€” type hints become the schema:** + +```python +def query_genie(question: str, space_id: str, ws: Dependencies.Workspace) -> str: + """Answer a question using a Genie Space.""" + return ws.genie.ask(space_id=space_id, question=question).answer or "" + +agent = Agent(tools=[query_genie]) +``` + ## πŸš€ Quickstart Install `apx`: diff --git a/crates/cli/Cargo.toml b/crates/cli/Cargo.toml index 12a06263..6d5912d0 100644 --- a/crates/cli/Cargo.toml +++ b/crates/cli/Cargo.toml @@ -31,6 +31,7 @@ tokio.workspace = true tracing.workspace = true serde.workspace = true serde_json.workspace = true +serde_yaml.workspace = true chrono.workspace = true reqwest.workspace = true toml.workspace = true diff --git a/crates/cli/src/build.rs b/crates/cli/src/build.rs index 0aa21a81..f08502da 100644 --- a/crates/cli/src/build.rs +++ b/crates/cli/src/build.rs @@ -8,8 +8,10 @@ use crate::common::find_app_dir; use crate::run_cli_async_helper; use apx_core::api_generator::generate_openapi; use apx_core::common::{ - ensure_dir, format_elapsed_ms, run_command_streaming_with_output, run_preflight_checks, spinner, + ensure_dir, format_elapsed_ms, read_project_metadata, run_command_streaming_with_output, + run_preflight_checks, spinner, }; +use apx_core::dotenv::DotenvFile; use apx_core::external::uv::Uv; const DEFAULT_BUILD_DIR: &str = ".build"; @@ -40,39 +42,46 @@ pub async fn run(args: BuildArgs) -> i32 { async fn run_inner(args: BuildArgs) -> Result<(), String> { let app_path = find_app_dir(args.app_path)?; let build_dir = app_path.join(&args.build_path); - println!("Building 
project in {}", app_path.display()); + run_build(&app_path, &build_dir).await?; + println!("Build completed"); + Ok(()) +} +/// Core build logic shared with `apx deploy`. +pub async fn run_build(app_path: &Path, build_dir: &Path) -> Result<(), String> { // Run preflight checks: generate _metadata.py, __dist__, uv sync, version file, bun install if needed debug!("Running preflight checks before build"); - let _preflight = run_preflight_checks(&app_path).await?; + let _preflight = run_preflight_checks(app_path).await?; // Set up build directory if build_dir.exists() { - fs::remove_dir_all(&build_dir) + fs::remove_dir_all(build_dir) .map_err(|err| format!("Failed to remove build directory: {err}"))?; } - ensure_dir(&build_dir)?; + ensure_dir(build_dir)?; fs::write(build_dir.join(".gitignore"), "*\n") .map_err(|err| format!("Failed to write build .gitignore: {err}"))?; - generate_openapi(&app_path).await?; - - if args.skip_ui_build { - println!("Skipping UI build"); - } else { - build_ui(&app_path).await?; + generate_openapi(app_path).await?; + let meta = read_project_metadata(app_path)?; + if meta.has_ui() { + build_ui(app_path).await?; } - build_wheel(&app_path, &args.build_path).await?; - copy_app_config_files(&app_path, &build_dir)?; + // build_path is relative to app_path; find_wheel_file needs the resolved build_dir + let build_path = build_dir + .strip_prefix(app_path) + .map(|p| p.to_path_buf()) + .unwrap_or_else(|_| build_dir.to_path_buf()); + build_wheel(app_path, &build_path).await?; + copy_app_config_files(app_path, build_dir)?; - let wheel_file = find_wheel_file(&build_dir)?; + let wheel_file = find_wheel_file(build_dir)?; let requirements_path = build_dir.join("requirements.txt"); fs::write(&requirements_path, format!("{wheel_file}\n")) .map_err(|err| format!("Failed to write requirements.txt: {err}"))?; - println!("Build completed"); Ok(()) } @@ -91,6 +100,16 @@ async fn build_wheel(app_path: &Path, build_path: &Path) -> Result<(), String> { let mut cmd 
= uv.build_wheel_command(app_path, build_path).into_command(); cmd.env("UV_DYNAMIC_VERSIONING_BYPASS", build_version); + // Forward UV_* vars from the project .env so users can set e.g. UV_OFFLINE=1 + // or UV_NATIVE_TLS=1 once in .env rather than prefixing every command. + if let Ok(dotenv) = DotenvFile::read(&app_path.join(".env")) { + for (key, value) in dotenv.get_vars() { + if key.starts_with("UV_") { + cmd.env(&key, &value); + } + } + } + let result = run_command_streaming_with_output(cmd, &sp, "🐍 Wheel:", "Failed to build Python wheel") .await; diff --git a/crates/cli/src/deploy.rs b/crates/cli/src/deploy.rs new file mode 100644 index 00000000..8d7abec4 --- /dev/null +++ b/crates/cli/src/deploy.rs @@ -0,0 +1,372 @@ +//! `apx deploy` β€” build and deploy the app to Databricks Apps. +//! +//! Flow: +//! 1. Build wheel (skips UI for agent-only apps, respects UV_OFFLINE) +//! 2. Copy pyproject.toml into .build/ (needed for agent config at runtime) +//! 3. Remove .build/.gitignore so DABs syncs all files +//! 4. `databricks bundle deploy --auto-approve` (uploads .build/ to workspace) +//! 5. `databricks apps deploy --source-code-path ` +//! 6. Poll `databricks apps get` until RUNNING + +use clap::Args; +use indicatif::ProgressBar; +use serde::Deserialize; +use std::fs; +use std::path::{Path, PathBuf}; +use std::process::Command; +use std::time::Instant; +use tracing::debug; + +use crate::common::find_app_dir; +use crate::run_cli_async_helper; +use apx_core::common::{format_elapsed_ms, read_project_metadata, spinner}; +use apx_core::dotenv::DotenvFile; + +/// Default build output directory, relative to app path. +const DEFAULT_BUILD_DIR: &str = ".build"; +/// How long to wait between deployment status polls (ms). +const POLL_INTERVAL_MS: u64 = 3_000; +/// How many times to poll before giving up (~3 min total). +const POLL_MAX_ATTEMPTS: u32 = 60; + +#[derive(Args, Debug, Clone)] +pub struct DeployArgs { + #[arg( + value_name = "APP_PATH", + help = "Path to the app. 
Defaults to current working directory" + )] + pub app_path: Option, + + #[arg( + long = "build-path", + default_value = DEFAULT_BUILD_DIR, + help = "Path to the build directory, relative to app path" + )] + pub build_path: PathBuf, + + #[arg( + long = "skip-build", + help = "Skip the build step and deploy whatever is in the build directory" + )] + pub skip_build: bool, + + #[arg( + long = "profile", + help = "Databricks CLI profile to use. Defaults to DATABRICKS_CONFIG_PROFILE from .env" + )] + pub profile: Option, +} + +pub async fn run(args: DeployArgs) -> i32 { + run_cli_async_helper(|| run_inner(args)).await +} + +async fn run_inner(args: DeployArgs) -> Result<(), String> { + let app_path = find_app_dir(args.app_path)?; + let build_dir = app_path.join(&args.build_path); + + // --- 1. Build --- + if !args.skip_build { + crate::build::run_build(&app_path, &build_dir).await?; + } else if !build_dir.exists() { + return Err(format!( + "--skip-build specified but build directory does not exist: {}", + build_dir.display() + )); + } + + // --- 2. Post-build fixups --- + // Copy pyproject.toml so the agent config is available at runtime. + let pyproject_src = app_path.join("pyproject.toml"); + if pyproject_src.exists() { + fs::copy(&pyproject_src, build_dir.join("pyproject.toml")) + .map_err(|e| format!("Failed to copy pyproject.toml to build dir: {e}"))?; + } + // Remove .gitignore β€” it contains "*" which blocks DABs sync. + let gitignore = build_dir.join(".gitignore"); + if gitignore.exists() { + fs::remove_file(&gitignore) + .map_err(|e| format!("Failed to remove build .gitignore: {e}"))?; + } + + // --- 3. 
Read metadata + resolve profile --- + let meta = read_project_metadata(&app_path)?; + let app_slug = &meta.app_name; + + let profile = match args.profile { + Some(p) => p, + None => { + let dotenv = DotenvFile::read(&app_path.join(".env"))?; + let vars = dotenv.get_vars(); + vars.get("DATABRICKS_CONFIG_PROFILE") + .cloned() + .ok_or_else(|| { + "No Databricks profile found. Set DATABRICKS_CONFIG_PROFILE in .env \ + or pass --profile" + .to_string() + })? + } + }; + + debug!("deploying app={app_slug} profile={profile}"); + + let sp = spinner(&format!("πŸš€ Deploying {app_slug}...")); + let deploy_start = Instant::now(); + + // --- 4. Bundle deploy (uploads .build/ to workspace) --- + sp.set_message(format!("πŸ“¦ Uploading {app_slug} to workspace...")); + run_bundle_deploy(&app_path, &profile, &sp)?; + + // --- 5. App deploy (create code deployment from workspace path) --- + sp.set_message(format!("πŸ”— Creating app deployment for {app_slug}...")); + let workspace_path = resolve_workspace_path(&app_path, &profile, &args.build_path)?; + debug!("workspace_path={workspace_path}"); + run_app_deploy(app_slug, &workspace_path, &profile, &sp)?; + + sp.set_message(format!("⏳ Waiting for {app_slug} to reach RUNNING...")); + + // --- 6. 
Poll until RUNNING --- + poll_until_running(app_slug, &profile, &sp).await?; + + sp.finish_and_clear(); + println!( + "βœ… Deployed {} in {}", + app_slug, + format_elapsed_ms(deploy_start) + ); + + Ok(()) +} + +// --------------------------------------------------------------------------- +// Bundle deploy +// --------------------------------------------------------------------------- + +fn run_bundle_deploy(app_path: &Path, profile: &str, sp: &ProgressBar) -> Result<(), String> { + sp.set_message("πŸ“¦ Running bundle deploy...".to_string()); + + let output = Command::new("databricks") + .args([ + "bundle", + "deploy", + "--auto-approve", + "--profile", + profile, + ]) + .current_dir(app_path) + .output() + .map_err(|e| { + format!( + "Failed to run `databricks` CLI: {e}\n\ + Make sure the Databricks CLI is installed: https://docs.databricks.com/dev-tools/cli/install.html" + ) + })?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + let stdout = String::from_utf8_lossy(&output.stdout); + return Err(format!( + "databricks bundle deploy failed:\n{stdout}{stderr}" + )); + } + + Ok(()) +} + +// --------------------------------------------------------------------------- +// Workspace path resolution +// --------------------------------------------------------------------------- + +/// Minimal subset of databricks.yml we need. +#[derive(Deserialize, Debug)] +struct BundleConfig { + bundle: BundleSection, + #[serde(default)] + targets: std::collections::HashMap, +} + +#[derive(Deserialize, Debug)] +struct BundleSection { + name: String, +} + +#[derive(Deserialize, Debug, Default)] +struct TargetSection { + #[serde(default)] + default: bool, +} + +/// Derive the workspace source-code path from `databricks.yml` + auth info. 
+/// +/// Format: `/Workspace/Users//.bundle///files/` +fn resolve_workspace_path( + app_path: &Path, + profile: &str, + build_path: &Path, +) -> Result { + // Read databricks.yml + let bundle_config_path = app_path.join("databricks.yml"); + if !bundle_config_path.exists() { + return Err( + "databricks.yml not found. Run `databricks bundle init` or use --skip-bundle." + .to_string(), + ); + } + let bundle_yaml = fs::read_to_string(&bundle_config_path) + .map_err(|e| format!("Failed to read databricks.yml: {e}"))?; + let bundle_config: BundleConfig = serde_yaml::from_str(&bundle_yaml) + .map_err(|e| format!("Failed to parse databricks.yml: {e}"))?; + + let bundle_name = &bundle_config.bundle.name; + + // Find the default target (or fall back to "dev") + let target = bundle_config + .targets + .iter() + .find(|(_, v)| v.default) + .map(|(k, _)| k.as_str()) + .unwrap_or("dev"); + + // Get current user from auth + let user = get_auth_user(profile)?; + + let build_path_str = build_path.to_string_lossy(); + // Strip leading ./ if present + let build_path_clean = build_path_str.trim_start_matches("./"); + + Ok(format!( + "/Workspace/Users/{user}/.bundle/{bundle_name}/{target}/files/{build_path_clean}" + )) +} + +/// Call `databricks auth describe --profile ` and extract the username. 
+fn get_auth_user(profile: &str) -> Result { + let output = Command::new("databricks") + .args(["auth", "describe", "--profile", profile, "--output", "json"]) + .output() + .map_err(|e| format!("Failed to call `databricks auth describe`: {e}"))?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + return Err(format!("databricks auth describe failed: {stderr}")); + } + + let json: serde_json::Value = serde_json::from_slice(&output.stdout) + .map_err(|e| format!("Failed to parse auth describe output: {e}"))?; + + json.get("user") + .and_then(|u| u.get("userName")) + .and_then(|n| n.as_str()) + .map(str::to_string) + .or_else(|| { + // Fall back to top-level "username" field + json.get("username") + .and_then(|n| n.as_str()) + .map(str::to_string) + }) + .ok_or_else(|| { + format!( + "Could not find username in `databricks auth describe` output: {}", + serde_json::to_string_pretty(&json).unwrap_or_default() + ) + }) +} + +// --------------------------------------------------------------------------- +// App deploy +// --------------------------------------------------------------------------- + +fn run_app_deploy( + app_slug: &str, + workspace_path: &str, + profile: &str, + sp: &ProgressBar, +) -> Result<(), String> { + sp.set_message(format!("πŸ”— Deploying code for {app_slug}...")); + + let output = Command::new("databricks") + .args([ + "apps", + "deploy", + app_slug, + "--source-code-path", + workspace_path, + "--profile", + profile, + ]) + .output() + .map_err(|e| format!("Failed to run `databricks apps deploy`: {e}"))?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + let stdout = String::from_utf8_lossy(&output.stdout); + return Err(format!( + "databricks apps deploy failed:\n{stdout}{stderr}" + )); + } + + Ok(()) +} + +// --------------------------------------------------------------------------- +// Poll until RUNNING +// 
// ---------------------------------------------------------------------------

/// Poll `databricks apps get` until the app reports RUNNING, hits a terminal
/// failure state, or the attempt budget (POLL_MAX_ATTEMPTS × POLL_INTERVAL_MS)
/// is exhausted.
async fn poll_until_running(app_slug: &str, profile: &str, sp: &ProgressBar) -> Result<(), String> {
    for attempt in 1..=POLL_MAX_ATTEMPTS {
        // Sleep first: a deployment is never RUNNING immediately after submit.
        tokio::time::sleep(tokio::time::Duration::from_millis(POLL_INTERVAL_MS)).await;

        let poll = Command::new("databricks")
            .args([
                "apps",
                "get",
                app_slug,
                "--profile",
                profile,
                "--output",
                "json",
            ])
            .output()
            .map_err(|e| format!("Failed to poll deployment status: {e}"))?;

        if !poll.status.success() {
            // Transient CLI failures are retried rather than aborting the deploy.
            debug!("poll attempt {attempt}: status command failed, retrying");
            continue;
        }

        let body = String::from_utf8_lossy(&poll.stdout);
        let state = parse_app_state(&body);
        debug!("poll attempt {attempt}: state={state:?}");

        match state.as_deref() {
            Some("RUNNING") => return Ok(()),
            Some(bad @ ("ERROR" | "CRASHED")) => {
                return Err(format!(
                    "App {app_slug} entered state {bad}: check `databricks apps logs {app_slug} --profile {profile}`"
                ));
            }
            other => {
                sp.set_message(format!(
                    "⏳ [{attempt}/{POLL_MAX_ATTEMPTS}] Waiting for {app_slug} ({})...",
                    other.unwrap_or("unknown")
                ));
            }
        }
    }

    Err(format!(
        "Timed out waiting for {app_slug} to reach RUNNING. \
         Check status with: databricks apps get {app_slug} --profile {profile}"
    ))
}

/// Extract `app_status.state` from `databricks apps get` JSON output, uppercased.
fn parse_app_state(json: &str) -> Option<String> {
    let value: serde_json::Value = serde_json::from_str(json).ok()?;
    let state = value.pointer("/app_status/state")?.as_str()?;
    Some(state.to_uppercase())
}
diff --git a/crates/cli/src/init.rs b/crates/cli/src/init.rs
index c0fefd0c..58ec5c1f 100644
--- a/crates/cli/src/init.rs
+++ b/crates/cli/src/init.rs
@@ -139,6 +139,23 @@ async fn run_inner(mut args: InitArgs) -> Result<(), String> {
     // Eagerly resolve uv (always needed)
     let _uv = apx_core::external::Uv::new().await?;
+    // Support `apx init <addon>` as shorthand for `apx init --addons=<addon>`.
+ // If the first positional is a known addon name (not a file path), treat it + // as a preset so `apx init agent` works alongside `apx init --addons=agent`. + if args.addons.is_none() && !args.no_addons { + if let Some(ref p) = args.app_path { + let s = p.to_string_lossy(); + let is_path_like = s.contains('/') || s.contains('\\') || s.contains('.'); + if !is_path_like { + let known = discover_all_addons(); + if known.iter().any(|(name, _)| name == s.as_ref()) { + args.addons = Some(vec![s.into_owned()]); + args.app_path = None; + } + } + } + } + let (workspace_root, app_path, is_member) = resolve_app_path(&mut args)?; println!("Welcome to apx πŸš€\n"); diff --git a/crates/cli/src/lib.rs b/crates/cli/src/lib.rs index 14e73b3d..07f562c8 100644 --- a/crates/cli/src/lib.rs +++ b/crates/cli/src/lib.rs @@ -7,6 +7,7 @@ pub(crate) mod __generate_openapi; pub(crate) mod build; pub(crate) mod bun; +pub(crate) mod deploy; pub(crate) mod common; pub(crate) mod components; pub(crate) mod dev; @@ -39,6 +40,8 @@ enum Commands { Init(init::InitArgs), /// πŸ”¨ Build the project Build(build::BuildArgs), + /// πŸš€ Deploy the project to Databricks Apps + Deploy(deploy::DeployArgs), /// 🍞 Run a command using bun Bun(bun::BunArgs), /// 🧩 Components commands @@ -183,6 +186,7 @@ async fn run_command(args: Vec) -> i32 { match cli.command { Some(Commands::Init(init_args)) => init::run(init_args).await, Some(Commands::Build(build_args)) => build::run(build_args).await, + Some(Commands::Deploy(deploy_args)) => deploy::run(deploy_args).await, Some(Commands::Bun(bun_args)) => bun::run(bun_args).await, Some(Commands::Components(components_cmd)) => match components_cmd { ComponentsCommands::Add(args) => components::add::run(args).await, diff --git a/crates/core/Cargo.toml b/crates/core/Cargo.toml index 8aa4d7a2..50716f24 100644 --- a/crates/core/Cargo.toml +++ b/crates/core/Cargo.toml @@ -27,6 +27,7 @@ axum.workspace = true reqwest.workspace = true serde.workspace = true serde_json.workspace = 
true +serde_yaml.workspace = true tokio.workspace = true tokio-stream.workspace = true futures-util.workspace = true diff --git a/crates/core/src/common.rs b/crates/core/src/common.rs index 9eab0d80..084d7363 100644 --- a/crates/core/src/common.rs +++ b/crates/core/src/common.rs @@ -10,6 +10,7 @@ use tokio::io::{AsyncBufReadExt, BufReader}; use tokio::process::Command; use crate::api_generator::generate_openapi; +use crate::dotenv::DotenvFile; use crate::external::{Bun, Uv}; use crate::python_logging::{DevConfig, parse_dev_config}; @@ -66,6 +67,11 @@ pub struct ProjectMetadata { pub ui_registries: Option>, /// Dev server configuration parsed from `[tool.apx.dev]`. pub dev_config: DevConfig, + /// Optional `[project.scripts]` entry to use instead of uvicorn. + /// When set (e.g. `"start-app"`), the dev server runs `uv run ` + /// instead of `uv run uvicorn `. The script receives `--port` + /// and `--reload` so it must accept those arguments (MLflow AgentServer does). + pub start_script: Option, } impl ProjectMetadata { @@ -135,6 +141,11 @@ pub fn read_project_metadata(project_root: &Path) -> Result"], + // the dev server uses `uv run