import torch
from transformers.models.clip import modeling_clip


class CompiledCLIPTextModel(modeling_clip.CLIPTextModel):
    """CLIPTextModel whose forward pass is optimized with torch.compile."""

    # torch.compile is lazy: tracing and optimization happen on the first call.
    @torch.compile
    def forward(self, *args, **kwargs):
        return super().forward(*args, **kwargs)


# Rebind the module attribute so later lookups of modeling_clip.CLIPTextModel
# construct the compiled subclass. Note: code that imported CLIPTextModel
# directly before this line still holds a reference to the original class.
modeling_clip.CLIPTextModel = CompiledCLIPTextModel
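

# A minimal smoke test (a sketch, not part of the original Space file):
# it assumes the patch above has already run in this process. The checkpoint
# id and the dummy input are illustrative; any CLIP text checkpoint works.
if __name__ == "__main__":
    text_encoder = modeling_clip.CLIPTextModel.from_pretrained(
        "openai/clip-vit-base-patch32"
    )
    # from_pretrained is a classmethod, so the patched attribute now
    # constructs the compiled subclass.
    assert isinstance(text_encoder, CompiledCLIPTextModel)

    # Dummy token ids of shape (batch, seq_len); the first forward call is
    # what actually triggers compilation.
    dummy_ids = torch.randint(0, text_encoder.config.vocab_size, (1, 8))
    with torch.no_grad():
        out = text_encoder(input_ids=dummy_ids)
    print(out.last_hidden_state.shape)  # (1, 8, hidden_size)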