{"version":"1.0","workflow_uuid":"0f113965-1adc-4435-982b-fb613fa4d157","workflow_title":"LiteLLM Proxy — Unified Gateway for 100+ LLM APIs","install_contract":{"version":"1.0","installReady":false,"title":"LiteLLM Proxy — Unified Gateway for 100+ LLM APIs","summary":"LiteLLM Proxy maps 100+ LLM providers (Anthropic, OpenAI, Bedrock, Vertex) to a single OpenAI-compatible endpoint, with authentication, rate limiting, cost tracking, and fallbacks.","assetType":"Workflows","pageUrl":"","sourceUrl":"https://github.com/BerriAI/litellm","intendedFor":[],"firstActions":[],"agentFirstSteps":[],"targetPaths":[],"verification":[],"startingPoints":[],"example":"","successOutcome":"","boundaries":[],"askUserIf":["the current workspace stack cannot be matched to a safe upstream template","the target path is not the project root, or an existing file should be merged instead of overwritten"]}}