Mirror of https://github.com/ggerganov/llama.cpp.git, synced 2025-01-12 03:31:46 +00:00
convert : add --print-supported-models option (#11172)
* convert : add --print-supported-models option

This commit adds a new option to the convert_hf_to_gguf.py script to print
the supported models.

The motivation for this is that it can be useful to know which models are
supported by the script without having to look at the code.

Example usage:
```console
$ ./convert_hf_to_gguf.py --print-supported-models
Supported models:
- GPTNeoXForCausalLM
- BloomForCausalLM
- BloomModel
- MPTForCausalLM
- OrionForCausalLM
- BaichuanForCausalLM
- BaiChuanForCausalLM
- XverseForCausalLM
- FalconForCausalLM
- RWForCausalLM
- GPTBigCodeForCausalLM
- GPTRefactForCausalLM
- StableLmForCausalLM
- StableLMEpochForCausalLM
- LlavaStableLMEpochForCausalLM
- LLaMAForCausalLM
- LlamaForCausalLM
- MistralForCausalLM
- MixtralForCausalLM
- DeciLMForCausalLM
- BitnetForCausalLM
- GrokForCausalLM
- DbrxForCausalLM
- MiniCPMForCausalLM
- MiniCPM3ForCausalLM
- QWenLMHeadModel
- Qwen2ForCausalLM
- Qwen2VLForConditionalGeneration
- WavTokenizerDec
- Qwen2MoeForCausalLM
- GPT2LMHeadModel
- PhiForCausalLM
- Phi3ForCausalLM
- PhiMoEForCausalLM
- PlamoForCausalLM
- CodeShellForCausalLM
- InternLM2ForCausalLM
- BertModel
- BertForMaskedLM
- CamembertModel
- RobertaModel
- NomicBertModel
- XLMRobertaModel
- XLMRobertaForSequenceClassification
- GemmaForCausalLM
- Gemma2ForCausalLM
- Starcoder2ForCausalLM
- Rwkv6ForCausalLM
- RWKV6Qwen2ForCausalLM
- MambaForCausalLM
- MambaLMHeadModel
- FalconMambaForCausalLM
- CohereForCausalLM
- Cohere2ForCausalLM
- OLMoForCausalLM
- OlmoForCausalLM
- Olmo2ForCausalLM
- OlmoeForCausalLM
- JinaBertModel
- JinaBertForMaskedLM
- OpenELMForCausalLM
- ArcticForCausalLM
- DeepseekForCausalLM
- DeepseekV3ForCausalLM
- DeepseekV2ForCausalLM
- UMT5ForConditionalGeneration
- MT5ForConditionalGeneration
- T5ForConditionalGeneration
- T5WithLMHeadModel
- T5EncoderModel
- JAISLMHeadModel
- ChatGLMModel
- ChatGLMForConditionalGeneration
- NemotronForCausalLM
- ExaoneForCausalLM
- GraniteForCausalLM
- GraniteMoeForCausalLM
- ChameleonForCausalLM
- ChameleonForConditionalGeneration
```

* squash! convert : add --print-supported-models option

Fix flake8 error.
This commit is contained in:
parent c3f9d25706
commit ff3fcabc72
```diff
--- a/convert_hf_to_gguf.py
+++ b/convert_hf_to_gguf.py
@@ -478,6 +478,11 @@ class Model:
             return modelcls
         return func
 
+    @classmethod
+    def print_registered_models(cls):
+        for name in cls._model_classes.keys():
+            logger.error(f"- {name}")
+
     @classmethod
     def from_model_architecture(cls, arch: str) -> type[Model]:
         try:
```
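For context, the new helper only walks the class registry that the script's registration decorator populates. Below is a minimal, self-contained sketch of that pattern; `Model`, `register`, and `LlamaModel` here are simplified stand-ins, not the actual convert_hf_to_gguf.py classes.

```python
# Minimal sketch of the registry pattern that print_registered_models() walks.
# The classes below are simplified stand-ins, not the real convert_hf_to_gguf.py code.
import logging

logger = logging.getLogger(__name__)


class Model:
    _model_classes: dict[str, type] = {}

    @classmethod
    def register(cls, *names: str):
        # Decorator mapping one or more HF architecture names to a subclass.
        def func(modelcls):
            for name in names:
                cls._model_classes[name] = modelcls
            return modelcls
        return func

    @classmethod
    def print_registered_models(cls):
        # One line per registered architecture name, in registration order.
        for name in cls._model_classes.keys():
            logger.error(f"- {name}")


@Model.register("LlamaForCausalLM", "MistralForCausalLM")
class LlamaModel(Model):
    pass


if __name__ == "__main__":
    logger.error("Supported models:")
    Model.print_registered_models()
```

Running the sketch prints one `- <name>` line per registered architecture to stderr, matching the output format shown in the commit message.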
```diff
@@ -4929,6 +4934,7 @@ def parse_args() -> argparse.Namespace:
     parser.add_argument(
         "model", type=Path,
         help="directory containing model file",
+        nargs="?",
     )
     parser.add_argument(
         "--use-temp-file", action="store_true",
@@ -4966,8 +4972,15 @@ def parse_args() -> argparse.Namespace:
         "--metadata", type=Path,
         help="Specify the path for an authorship metadata override file"
     )
+    parser.add_argument(
+        "--print-supported-models", action="store_true",
+        help="Print the supported models"
+    )
 
-    return parser.parse_args()
+    args = parser.parse_args()
+    if not args.print_supported_models and args.model is None:
+        parser.error("the following arguments are required: model")
+    return args
 
 
 def split_str_to_n_bytes(split_str: str) -> int:
```
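The parse_args() change combines `nargs="?"` with an explicit `parser.error()` call, so `model` stays mandatory for a normal conversion but can be omitted when only `--print-supported-models` is requested. A standalone sketch of that pattern, trimmed to the two arguments relevant here rather than the script's full argument list:

```python
# Standalone sketch of a conditionally required positional argument.
import argparse
from pathlib import Path


def parse_args() -> argparse.Namespace:
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "model", type=Path, nargs="?",  # optional at the argparse level
        help="directory containing model file",
    )
    parser.add_argument(
        "--print-supported-models", action="store_true",
        help="Print the supported models",
    )

    args = parser.parse_args()
    # Enforce the positional ourselves, unless the listing flag was given.
    if not args.print_supported_models and args.model is None:
        parser.error("the following arguments are required: model")
    return args


if __name__ == "__main__":
    print(parse_args())
```

With this, `--print-supported-models` parses cleanly with no positional, while invoking the sketch with no arguments at all still fails with the usual "the following arguments are required: model" message and exit code 2.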
```diff
@@ -4991,6 +5004,11 @@ def split_str_to_n_bytes(split_str: str) -> int:
 def main() -> None:
     args = parse_args()
 
+    if args.print_supported_models:
+        logger.error("Supported models:")
+        Model.print_registered_models()
+        sys.exit(0)
+
     if args.verbose:
         logging.basicConfig(level=logging.DEBUG)
     else:
```
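The main() hunk handles the flag before logging is configured, presumably because Python's last-resort logging handler already sends WARNING-and-above records to stderr, so the listing is visible without calling logging.basicConfig() first. A rough, self-contained sketch of that early-exit flow; the argument parsing and the single listed model are placeholders, not the real main():

```python
# Rough sketch of the early-exit flow in main(); argument parsing and the
# model listing are placeholders, not the real convert_hf_to_gguf.py code.
import argparse
import logging
import sys

logger = logging.getLogger(__name__)


def main() -> None:
    parser = argparse.ArgumentParser()
    parser.add_argument("--print-supported-models", action="store_true")
    parser.add_argument("--verbose", action="store_true")
    args = parser.parse_args()

    if args.print_supported_models:
        # No handlers are configured yet, so these records go through
        # logging's last-resort handler straight to stderr.
        logger.error("Supported models:")
        logger.error("- LlamaForCausalLM")  # placeholder for Model.print_registered_models()
        sys.exit(0)

    logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO)
    logger.info("normal conversion path continues here")


if __name__ == "__main__":
    main()
```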