{"version":"1.0","workflow_uuid":"1b0d1ab2-1edb-49e1-9853-b02807a64140","workflow_title":"DeepSeek-V3 — Open-Weight 671B MoE Model with GPT-4o Quality","install_contract":{"version":"1.0","installReady":false,"title":"DeepSeek-V3 — Open-Weight 671B MoE Model with GPT-4o Quality","summary":"DeepSeek-V3 is a 671B-parameter MoE model (37B active per token) that matches GPT-4o on benchmarks. Weights are MIT-licensed; the hosted API costs $0.27 per 1M input tokens.","assetType":"Knowledge","pageUrl":"","sourceUrl":"https://github.com/deepseek-ai","intendedFor":[],"firstActions":[],"agentFirstSteps":[],"targetPaths":[],"verification":[],"startingPoints":[],"example":"","successOutcome":"","boundaries":[],"askUserIf":["the current workspace stack cannot be matched to a safe upstream template","the target path is not the project root, or an existing file should be merged instead of overwritten"]}}