{"version":"1.0","workflow_uuid":"d11eb1fe-cfa0-4da0-ac2d-b6a77abc1b8c","workflow_title":"LiteLLM — Unified Proxy for 100+ LLM APIs","install_contract":{"version":"1.0","installReady":false,"title":"LiteLLM — Unified Proxy for 100+ LLM APIs","summary":"Python SDK and proxy server to call 100+ LLM APIs in OpenAI format. Cost tracking, guardrails, load balancing, logging. Supports Bedrock, Azure, Anthropic, Vertex, and more. 42K+ stars.","assetType":"Scripts","pageUrl":"https://tokrepo.com/en/workflows/litellm-unified-proxy-100-llm-apis-d11eb1fe","sourceUrl":"https://github.com/BerriAI/litellm","intendedFor":[],"firstActions":[],"agentFirstSteps":[],"targetPaths":[],"verification":[],"startingPoints":[],"example":"","successOutcome":"","boundaries":[],"askUserIf":["the current workspace stack cannot be matched to a safe upstream template","the target path is not the project root, or an existing file should be merged instead of overwritten"]}}