{"version":"1.0","workflow_uuid":"f07fee9a-45df-11f1-9bc6-00163e2b0d79","workflow_title":"GPT-NeoX — Open-Source Large Language Model Training Library","install_contract":{"version":"1.0","installReady":false,"title":"GPT-NeoX — Open-Source Large Language Model Training Library","summary":"A GPU-optimized library by EleutherAI for training large-scale autoregressive language models. GPT-NeoX powered the training of GPT-NeoX-20B and Pythia, providing the open-source community with tools for billion-parameter model training.","assetType":"Configs","pageUrl":"https://tokrepo.com/en/workflows/asset-f07fee9a","sourceUrl":"https://github.com/EleutherAI/gpt-neox.git","intendedFor":[],"firstActions":[],"agentFirstSteps":[],"targetPaths":[],"verification":[],"startingPoints":[],"example":"","successOutcome":"","boundaries":[],"askUserIf":["the current workspace stack cannot be matched to a safe upstream template","the target path is not the project root, or an existing file should be merged instead of overwritten"]}}