Update with commit dad0e87c79d338f41176166b2e1e0591a87a81a1
See: https://github.com/huggingface/transformers/commit/dad0e87c79d338f41176166b2e1e0591a87a81a1
- frameworks.json +1 -0
- pipeline_tags.json +5 -0
frameworks.json
CHANGED
@@ -269,6 +269,7 @@
 {"model_type":"siglip","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
 {"model_type":"siglip2","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
 {"model_type":"siglip_vision_model","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
+{"model_type":"smollm3","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
 {"model_type":"smolvlm","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
 {"model_type":"speech-encoder-decoder","pytorch":true,"tensorflow":false,"flax":true,"processor":"AutoTokenizer"}
 {"model_type":"speech_to_text","pytorch":true,"tensorflow":true,"flax":false,"processor":"AutoProcessor"}
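The new frameworks.json row registers smollm3 as PyTorch-only, with AutoTokenizer as its processor class. A minimal usage sketch of what that entry implies when loading the model through the auto classes; the checkpoint id below is illustrative and not part of this commit:

# Hypothetical sketch: smollm3 is PyTorch-only and uses AutoTokenizer,
# per the frameworks.json entry added above. The checkpoint id is an assumption.
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "HuggingFaceTB/SmolLM3-3B"  # illustrative checkpoint id
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)  # PyTorch weights only

inputs = tokenizer("Hello, SmolLM3!", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=20)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))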
pipeline_tags.json
CHANGED
@@ -924,6 +924,11 @@
 {"model_class":"SiglipForImageClassification","pipeline_tag":"image-classification","auto_class":"AutoModelForImageClassification"}
 {"model_class":"SiglipModel","pipeline_tag":"zero-shot-image-classification","auto_class":"AutoModelForZeroShotImageClassification"}
 {"model_class":"SiglipVisionModel","pipeline_tag":"image-feature-extraction","auto_class":"AutoModel"}
+{"model_class":"SmolLM3ForCausalLM","pipeline_tag":"text-generation","auto_class":"AutoModelForCausalLM"}
+{"model_class":"SmolLM3ForQuestionAnswering","pipeline_tag":"question-answering","auto_class":"AutoModelForQuestionAnswering"}
+{"model_class":"SmolLM3ForSequenceClassification","pipeline_tag":"text-classification","auto_class":"AutoModelForSequenceClassification"}
+{"model_class":"SmolLM3ForTokenClassification","pipeline_tag":"token-classification","auto_class":"AutoModelForTokenClassification"}
+{"model_class":"SmolLM3Model","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
 {"model_class":"SmolVLMForConditionalGeneration","pipeline_tag":"image-text-to-text","auto_class":"AutoModelForImageTextToText"}
 {"model_class":"SmolVLMModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
 {"model_class":"SmolVLMVisionTransformer","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
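The five added pipeline_tags.json rows map each SmolLM3 head to its pipeline tag and auto class, which is what lets the pipeline factory and auto classes resolve a SmolLM3 checkpoint from a task name. A minimal sketch using the text-generation mapping, assuming the same illustrative checkpoint id as above:

# Hypothetical sketch: SmolLM3ForCausalLM -> text-generation / AutoModelForCausalLM,
# per the mapping added above. The checkpoint id is an assumption.
from transformers import pipeline

generator = pipeline("text-generation", model="HuggingFaceTB/SmolLM3-3B")
print(generator("The capital of France is", max_new_tokens=10)[0]["generated_text"])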