Column        Type           Range / values
text          stringlengths  31 – 243k
type          stringclasses  1 value
start         int64          36 – 275k
end           int64          286 – 280k
depth         int64          0 – 1
filepath      stringlengths  85 – 188
parent_class  stringclasses  3 values
class_index   int64          0 – 10.8k
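For readers who want to work with rows like the ones below, here is a minimal loading sketch using the `datasets` library. The dataset identifier is a placeholder, not the actual name of this dataset:

```python
# Hypothetical loading sketch: "your-org/transformers-class-chunks" is a
# placeholder identifier, not the real name of this dataset.
from datasets import load_dataset

ds = load_dataset("your-org/transformers-class-chunks", split="train")
row = ds[0]
print(row["filepath"])            # source file the chunk was extracted from
print(row["start"], row["end"])   # character offsets of the chunk in that file
print(row["text"][:200])          # the extracted class definition itself
```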
class RealmTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
3,088
3,257
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,500
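Every row in this first group follows the same dummy-object pattern from `transformers.utils`: a placeholder class whose only job is to raise an informative `ImportError` when the required backend is not installed. A rough, self-contained sketch of the mechanism (the real `DummyObject`/`requires_backends` utilities first check whether the backend is importable; this simplified version always raises, to show the failure path):

```python
# Simplified sketch of the dummy-object mechanism; not the exact upstream code.
def requires_backends(obj, backends):
    name = obj.__name__ if hasattr(obj, "__name__") else obj.__class__.__name__
    raise ImportError(f"{name} requires the {backends} backend(s) to be installed.")


class DummyObject(type):
    # Any non-private attribute access on the class funnels into requires_backends.
    def __getattribute__(cls, key):
        if key.startswith("_"):
            return super().__getattribute__(key)
        return requires_backends(cls, cls._backends)


class RealmTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])


try:
    RealmTokenizerFast()  # raises: the "tokenizers" backend is reported as missing
except ImportError as e:
    print(e)
```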
class RetriBertTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
3,260
3,433
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,501
class DistilBertTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
3,436
3,610
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,502
class DPRContextEncoderTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
3,613
3,794
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,503
class DPRQuestionEncoderTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
3,797
3,979
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,504
class DPRReaderTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
3,982
4,155
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,505
class ElectraTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
4,158
4,329
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,506
class FNetTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
4,332
4,500
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,507
class FunnelTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
4,503
4,673
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,508
class GemmaTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
4,676
4,845
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,509
class GPT2TokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
4,848
5,016
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,510
class GPTNeoXTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
5,019
5,190
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,511
class GPTNeoXJapaneseTokenizer(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
5,193
5,368
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,512
class HerbertTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
5,371
5,542
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,513
class LayoutLMTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
5,545
5,717
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,514
class LayoutLMv2TokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
5,720
5,894
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,515
class LayoutLMv3TokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
5,897
6,071
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,516
class LayoutXLMTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
6,074
6,247
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,517
class LEDTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
6,250
6,417
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,518
class LlamaTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
6,420
6,589
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,519
class LongformerTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
6,592
6,766
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,520
class LxmertTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
6,769
6,939
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,521
class MarkupLMTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
6,942
7,114
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,522
class MBartTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
7,117
7,286
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,523
class MBart50TokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
7,289
7,460
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,524
class MobileBertTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
7,463
7,637
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,525
class MPNetTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
7,640
7,809
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,526
class MT5TokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
7,812
7,979
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,527
class MvpTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
7,982
8,149
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,528
class NllbTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
8,152
8,320
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,529
class NougatTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
8,323
8,493
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,530
class OpenAIGPTTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
8,496
8,669
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,531
class PegasusTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
8,672
8,843
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,532
class Qwen2TokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
8,846
9,015
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,533
class ReformerTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
9,018
9,190
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,534
class RemBertTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
9,193
9,364
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,535
class RobertaTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
9,367
9,538
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,536
class RoFormerTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
9,541
9,713
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,537
class SeamlessM4TTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
9,716
9,891
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,538
class SplinterTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
9,894
10,066
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,539
class SqueezeBertTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
10,069
10,244
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,540
class T5TokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
10,247
10,413
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,541
class UdopTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
10,416
10,584
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,542
class WhisperTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
10,587
10,758
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,543
class XGLMTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
10,761
10,929
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,544
class XLMRobertaTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
10,932
11,106
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,545
class XLNetTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
11,109
11,278
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,546
class PreTrainedTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class_definition
11,281
11,455
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py
null
2,547
class AlbertTokenizer(metaclass=DummyObject):
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class_definition
129
301
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_sentencepiece_objects.py
null
2,548
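The rows that follow switch to the `sentencepiece` backend but keep the identical shape, since these dummy files are generated mechanically from the list of backend-gated names (the transformers repo maintains them with a script, `utils/check_dummies.py`). A hedged sketch of how such a file can be produced from a name list; the template and helper here are illustrative, not the repo's actual script:

```python
# Illustrative generator for a dummy-objects file; names and template are
# assumptions, not the exact upstream check_dummies.py implementation.
DUMMY_CLASS_TEMPLATE = '''class {name}(metaclass=DummyObject):
    _backends = ["{backend}"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["{backend}"])
'''


def make_dummy_file(names, backend):
    return "\n\n".join(DUMMY_CLASS_TEMPLATE.format(name=n, backend=backend) for n in names)


print(make_dummy_file(["AlbertTokenizer", "BarthezTokenizer"], "sentencepiece"))
```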
class BarthezTokenizer(metaclass=DummyObject):
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class_definition
304
477
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_sentencepiece_objects.py
null
2,549
class BartphoTokenizer(metaclass=DummyObject):
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class_definition
480
653
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_sentencepiece_objects.py
null
2,550
class BertGenerationTokenizer(metaclass=DummyObject):
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class_definition
656
836
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_sentencepiece_objects.py
null
2,551
class BigBirdTokenizer(metaclass=DummyObject):
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class_definition
839
1,012
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_sentencepiece_objects.py
null
2,552
class CamembertTokenizer(metaclass=DummyObject):
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class_definition
1,015
1,190
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_sentencepiece_objects.py
null
2,553
class CodeLlamaTokenizer(metaclass=DummyObject):
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class_definition
1,193
1,368
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_sentencepiece_objects.py
null
2,554
class CpmTokenizer(metaclass=DummyObject):
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class_definition
1,371
1,540
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_sentencepiece_objects.py
null
2,555
class DebertaV2Tokenizer(metaclass=DummyObject):
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class_definition
1,543
1,718
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_sentencepiece_objects.py
null
2,556
class ErnieMTokenizer(metaclass=DummyObject):
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class_definition
1,721
1,893
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_sentencepiece_objects.py
null
2,557
class XLMProphetNetTokenizer(metaclass=DummyObject):
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class_definition
1,896
2,075
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_sentencepiece_objects.py
null
2,558
class FNetTokenizer(metaclass=DummyObject):
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class_definition
2,078
2,248
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_sentencepiece_objects.py
null
2,559
class GemmaTokenizer(metaclass=DummyObject):
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class_definition
2,251
2,422
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_sentencepiece_objects.py
null
2,560
class GPTSw3Tokenizer(metaclass=DummyObject):
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class_definition
2,425
2,597
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_sentencepiece_objects.py
null
2,561
class LayoutXLMTokenizer(metaclass=DummyObject):
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class_definition
2,600
2,775
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_sentencepiece_objects.py
null
2,562
class LlamaTokenizer(metaclass=DummyObject):
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class_definition
2,778
2,949
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_sentencepiece_objects.py
null
2,563
class M2M100Tokenizer(metaclass=DummyObject):
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class_definition
2,952
3,124
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_sentencepiece_objects.py
null
2,564
class MarianTokenizer(metaclass=DummyObject):
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class_definition
3,127
3,299
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_sentencepiece_objects.py
null
2,565
class MBartTokenizer(metaclass=DummyObject):
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class_definition
3,302
3,473
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_sentencepiece_objects.py
null
2,566
class MBart50Tokenizer(metaclass=DummyObject):
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class_definition
3,476
3,649
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_sentencepiece_objects.py
null
2,567
class MLukeTokenizer(metaclass=DummyObject):
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class_definition
3,652
3,823
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_sentencepiece_objects.py
null
2,568
class MT5Tokenizer(metaclass=DummyObject):
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class_definition
3,826
3,995
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_sentencepiece_objects.py
null
2,569
class NllbTokenizer(metaclass=DummyObject):
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class_definition
3,998
4,168
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_sentencepiece_objects.py
null
2,570
class PegasusTokenizer(metaclass=DummyObject):
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class_definition
4,171
4,344
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_sentencepiece_objects.py
null
2,571
class PLBartTokenizer(metaclass=DummyObject):
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class_definition
4,347
4,519
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_sentencepiece_objects.py
null
2,572
class ReformerTokenizer(metaclass=DummyObject):
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class_definition
4,522
4,696
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_sentencepiece_objects.py
null
2,573
class RemBertTokenizer(metaclass=DummyObject):
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class_definition
4,699
4,872
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_sentencepiece_objects.py
null
2,574
class SeamlessM4TTokenizer(metaclass=DummyObject):
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class_definition
4,875
5,052
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_sentencepiece_objects.py
null
2,575
class SiglipTokenizer(metaclass=DummyObject):
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class_definition
5,055
5,227
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_sentencepiece_objects.py
null
2,576
class Speech2TextTokenizer(metaclass=DummyObject):
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class_definition
5,230
5,407
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_sentencepiece_objects.py
null
2,577
class SpeechT5Tokenizer(metaclass=DummyObject):
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class_definition
5,410
5,584
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_sentencepiece_objects.py
null
2,578
class T5Tokenizer(metaclass=DummyObject):
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class_definition
5,587
5,755
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_sentencepiece_objects.py
null
2,579
class UdopTokenizer(metaclass=DummyObject):
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class_definition
5,758
5,928
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_sentencepiece_objects.py
null
2,580
class XGLMTokenizer(metaclass=DummyObject):
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class_definition
5,931
6,101
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_sentencepiece_objects.py
null
2,581
class XLMRobertaTokenizer(metaclass=DummyObject):
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class_definition
6,104
6,280
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_sentencepiece_objects.py
null
2,582
class XLNetTokenizer(metaclass=DummyObject):
    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["sentencepiece"])
class_definition
6,283
6,454
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_sentencepiece_objects.py
null
2,583
class TFBertTokenizer(metaclass=DummyObject):
    _backends = ["tensorflow_text"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tensorflow_text"])
class_definition
129
305
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tensorflow_text_objects.py
null
2,584
class Pop2PianoFeatureExtractor(metaclass=DummyObject):
    _backends = ["music"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["music"])
class_definition
129
295
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_music_objects.py
null
2,585
class Pop2PianoTokenizer(metaclass=DummyObject):
    _backends = ["music"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["music"])
class_definition
298
457
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_music_objects.py
null
2,586
class HFProxy(Proxy):
    """
    Proxy that uses metadata to handle data-dependent control-flow.
    """

    def install_metadata(self, metadata):
        self._metadata = metadata

    @property
    def shape(self):
        return self.tracer.create_proxy("call_method", "size", (self,), {})

    @property
    def device(self):
        # Hack so we can track when devices are used. During meta-tensor propagation,
        # replace these values with a constant 'meta'
        return MetaDeviceAttribute(self, "device")

    def __len__(self):
        if hasattr(self, "_metadata") and self._metadata is not None:
            return len(self._metadata)
        return super().__len__()

    def __bool__(self):
        if hasattr(self, "_metadata") and self._metadata is not None:
            return self._metadata
        return super().__bool__()

    def __getattr__(self, k):
        if k == "_metadata":
            return self.__getattribute__(k)
        # note: not added to the graph yet, if this is a method call
        # we peephole optimize to the method invocation
        return HFAttribute(self, k)

    def __setitem__(self, indices, values):
        return self.tracer.create_proxy("call_function", operator.setitem, (self, indices, values), {})

    def __contains__(self, key):
        if hasattr(self, "_metadata") and self._metadata is not None:
            return key in self._metadata
        return super().__contains__(key)
class_definition
19,690
21,141
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/fx.py
null
2,587
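The point of the installed metadata is that data-dependent Python (`len`, `bool`, `in`, shape comparisons) can resolve during tracing. A minimal illustration of the problem the metadata solves: with vanilla `torch.fx`, control flow on a traced value raises, whereas `HFProxy` answers it from the meta tensor installed on the placeholder. The snippet below only shows the vanilla failure mode; the module `M` is a made-up example:

```python
import torch
import torch.fx


class M(torch.nn.Module):
    def forward(self, x):
        # Data-dependent control flow: plain torch.fx cannot evaluate the
        # condition because the traced value is a symbolic Proxy. HFTracer
        # resolves it through the meta-tensor metadata carried by HFProxy.
        if x.shape[1] > 1:
            return x + 1
        return x


try:
    torch.fx.symbolic_trace(M())
except Exception as e:  # TraceError with vanilla FX
    print(type(e).__name__, ":", e)
```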
class HFAttribute(HFProxy):
    def __init__(self, root, attr: str):
        self.root = root
        self.attr = attr
        self.tracer = root.tracer
        self._node = None

        if hasattr(self.root, "_metadata"):
            self.install_metadata(getattr(self.root._metadata, attr))

    @property
    def node(self):
        # the node for attributes is added lazily, since most will just be method calls
        # which do not rely on the getitem call
        if self._node is None:
            self._node = self.tracer.create_proxy("call_function", builtins.getattr, (self.root, self.attr), {}).node
        return self._node

    def __call__(self, *args, **kwargs):
        return self.tracer.create_proxy("call_method", self.attr, (self.root,) + args, kwargs)
class_definition
21,144
21,920
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/fx.py
null
2,588
class MetaDeviceAttribute(HFAttribute):
    pass
class_definition
21,923
21,971
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/fx.py
null
2,589
class HFCacheProxy(HFProxy):
    """
    Proxy that represents an instance of `transformers.cache_utils.Cache`.
    """

    def install_orig_cache_cls(self, orig_cache_cls: Type[Cache]):
        self._orig_cache_cls = orig_cache_cls

    @property
    def __class__(self):
        if not hasattr(self, "_orig_cache_cls"):
            raise RuntimeError("The original Cache class must be installed to the HFCacheProxy.")
        return self.tracer._CLASSES_TO_PATCH[self._orig_cache_cls]
class_definition
21,974
22,461
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/fx.py
null
2,590
class HFProxyableClassMeta(type):
    """
    Metaclass that creates a class with its main methods wrapped to be proxyable.
    """

    def __new__(
        cls,
        name: str,
        bases: Tuple[Type, ...],
        attrs: Dict[str, Any],
        proxy_factory_fn: Optional[Callable[[Node], Proxy]] = None,
    ):
        cls = super().__new__(cls, name, bases, attrs)
        for attr_name in dir(cls):
            attr = getattr(cls, attr_name, None)
            if attr is None:
                continue
            if attr_name == "__init__":
                op_type = "call_function"
            elif attr_name.startswith("__"):
                op_type = None
            elif inspect.ismethod(attr):
                op_type = "call_function"
            elif inspect.isfunction(attr):
                op_type = "call_method"
            else:
                op_type = None
            if op_type is not None:
                setattr(cls, attr_name, create_wrapper(attr, op_type, proxy_factory_fn=proxy_factory_fn))
        return cls
class_definition
23,668
24,715
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/fx.py
null
2,591
class HFTracer(Tracer):
    """
    Tracer that is able to symbolically trace models from the library. To do that, it uses the HFProxy instead of the
    regular PyTorch torch.fx.Proxy.
    """

    # Feature flag for proxying accesses to buffer values
    proxy_buffer_attributes: bool = True
    allow_insert_stateless_mods: bool = True
    _TORCH_METHODS_TO_PATCH = [
        "arange",
        "zeros",
        "ones",
        "full",
        "full_like",
        "eye",
        "empty",
        "tensor",
        "clamp",
        "finfo",
        "tril",
    ]
    _CLASSES_TO_PATCH = {
        Cache: ProxyableCache,
        DynamicCache: ProxyableDynamicCache,
        SinkCache: ProxyableSinkCache,
        StaticCache: ProxyableStaticCache,
    }

    supported_archs = (PreTrainedModel,) if not is_peft_available() else (PreTrainedModel, PeftModel)

    def __init__(self, autowrap_modules=(math,), autowrap_functions=()):
        super().__init__(autowrap_modules=autowrap_modules, autowrap_functions=autowrap_functions)

        if not is_torch_fx_available():
            raise ImportError(
                f"Found an incompatible version of torch. Found version {get_torch_version()}, but only version "
                f"{TORCH_FX_REQUIRED_VERSION} is supported."
            )

    def _generate_dummy_input(
        self, model: "PreTrainedModel", input_name: str, shape: List[int], input_names: List[str]
    ) -> Dict[str, torch.Tensor]:
        """Generates dummy input for model inference recording."""
        # Retrieving the model class, either from the "class_for_deserialization" attribute if the model was restored
        # from pickle, or from the "__class__" attribute in the general case.
        model_class_name = getattr(model, "class_for_deserialization", model.__class__).__name__
        device = model.device
        inputs_dict = {}

        # When tracing a model with a KV cache, we simply need to ensure that the KV cache length is larger than one
        # to rightfully pass certain control flows (Example: https://github.com/huggingface/transformers/blob/5c8d941d66734811d2ef6f57f15b44f7fb7a98c4/src/transformers/modeling_attn_mask_utils.py#L162).
        # After tracing, the model can then still be used with arbitrary lengths different than the one used during
        # tracing.
        kv_cache_length = 5

        if input_name in ["labels", "start_positions", "end_positions"]:
            batch_size = shape[0]
            if model_class_name in [
                *get_values(MODEL_FOR_NEXT_SENTENCE_PREDICTION_MAPPING_NAMES),
                *get_values(MODEL_FOR_MULTIPLE_CHOICE_MAPPING_NAMES),
                *get_values(MODEL_FOR_IMAGE_CLASSIFICATION_MAPPING_NAMES),
                *get_values(MODEL_FOR_BACKBONE_MAPPING_NAMES),
                *get_values(MODEL_FOR_AUDIO_CLASSIFICATION_MAPPING_NAMES),
            ]:
                inputs_dict["labels"] = torch.zeros(batch_size, dtype=torch.long, device=device)
            elif model_class_name in [
                *get_values(MODEL_FOR_QUESTION_ANSWERING_MAPPING_NAMES),
                *get_values(MODEL_FOR_DOCUMENT_QUESTION_ANSWERING_MAPPING_NAMES),
                "XLNetForQuestionAnswering",
            ]:
                inputs_dict["start_positions"] = torch.zeros(batch_size, dtype=torch.long, device=device)
                inputs_dict["end_positions"] = torch.zeros(batch_size, dtype=torch.long, device=device)
            elif model_class_name in get_values(MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING_NAMES):
                if not hasattr(model.config, "problem_type") or model.config.problem_type is None:
                    raise ValueError(
                        "Could not retrieve the problem type for the sequence classification task, please set "
                        'model.config.problem_type to one of the following values: "regression", '
                        '"single_label_classification", or "multi_label_classification".'
                    )

                if model.config.problem_type == "regression":
                    labels_shape = (batch_size, model.config.num_labels)
                    labels_dtype = torch.float32
                elif model.config.problem_type == "single_label_classification":
                    labels_shape = (batch_size,)
                    labels_dtype = torch.long
                elif model.config.problem_type == "multi_label_classification":
                    labels_shape = (batch_size, model.config.num_labels)
                    labels_dtype = torch.float32
                else:
                    raise ValueError(
                        'Expected model.config.problem_type to be either: "regression", "single_label_classification"'
                        f', or "multi_label_classification", but "{model.config.problem_type}" was provided.'
                    )
                inputs_dict["labels"] = torch.zeros(*labels_shape, dtype=labels_dtype, device=device)

            elif model_class_name in [
                *get_values(MODEL_FOR_PRETRAINING_MAPPING_NAMES),
                *get_values(MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING_NAMES),
                *get_values(MODEL_FOR_CAUSAL_LM_MAPPING_NAMES),
                *get_values(MODEL_FOR_MASKED_LM_MAPPING_NAMES),
                *get_values(MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING_NAMES),
                *get_values(MODEL_FOR_SEMANTIC_SEGMENTATION_MAPPING_NAMES),
                "GPT2DoubleHeadsModel",
                "PeftModelForCausalLM",
                "PeftModelForSeq2SeqLM",
            ]:
                inputs_dict["labels"] = torch.zeros(shape, dtype=torch.long, device=device)
            elif model_class_name in [*get_values(MODEL_FOR_CTC_MAPPING_NAMES)]:
                inputs_dict["labels"] = torch.zeros(shape, dtype=torch.float32, device=device)
            else:
                raise NotImplementedError(
                    f"Generating the dummy input named {input_name} for {model_class_name} is not supported yet."
                )
        elif "pixel_values" in input_name:
            batch_size = shape[0]
            image_size = getattr(model.config, "image_size", None)
            if image_size is None:
                if hasattr(model.config, "vision_config"):
                    image_size = model.config.vision_config.image_size
                elif hasattr(model.config, "encoder"):
                    image_size = model.config.encoder.image_size
                else:
                    image_size = (_generate_random_int(), _generate_random_int())

            # If no num_channels is in the config, use some arbitrary value.
            num_channels = getattr(model.config, "num_channels", 3)
            if not isinstance(image_size, collections.abc.Iterable):
                image_size = (image_size, image_size)
            height, width = image_size
            inputs_dict[input_name] = torch.zeros(
                batch_size, num_channels, height, width, dtype=torch.float32, device=device
            )
        elif "bbox" in input_name:
            inputs_dict[input_name] = torch.zeros(*shape, 4, dtype=torch.float, device=device)
        elif "input_features" in input_name:
            inputs_dict[input_name] = torch.zeros(
                *shape, model.config.input_feat_per_channel, dtype=torch.float, device=device
            )
        elif "inputs_embeds" in input_name:
            batch_size = shape[0]

            if (
                getattr(model.config, "embedding_size", None) is not None
                and model.config.model_type != "megatron-bert"
            ):
                embedding_size = model.config.embedding_size
            else:
                embedding_size = model.config.hidden_size

            if len(shape) == 3:
                # (batch_size, num_choices, sequence_length, embedding_size)
                embedding_shape = (batch_size, shape[1], shape[2], embedding_size)
            else:
                # (batch_size, sequence_length, embedding_size)
                embedding_shape = (batch_size, shape[1], embedding_size)

            inputs_dict[input_name] = torch.zeros(embedding_shape, dtype=torch.float, device=device)
        elif "visual_feats" in input_name:
            inputs_dict[input_name] = torch.zeros(
                shape
                + [
                    model.config.visual_feat_dim,
                ],
                dtype=torch.float,
                device=device,
            )
        elif "visual_pos" in input_name:
            inputs_dict[input_name] = torch.zeros(
                shape
                + [
                    model.config.visual_pos_dim,
                ],
                dtype=torch.float,
                device=device,
            )
        elif "inputs" in input_name:
            inputs_dict[input_name] = torch.zeros(*shape, dtype=torch.float, device=device)
        elif "input_values" in input_name:
            batch_size, _ = shape
            # Generating big sequence length for audio inputs.
            seq_length = _generate_random_int(low=10000, high=20000)
            inputs_dict[input_name] = torch.zeros(batch_size, seq_length, dtype=torch.float, device=device)
        elif "mask" in input_name:
            if "past_key_values" in input_names:
                mask_shape = [shape[0], shape[1] + kv_cache_length]
            else:
                mask_shape = shape

            inputs_dict[input_name] = torch.zeros(mask_shape, dtype=torch.long, device=device)
        elif "ids" in input_name:
            inputs_dict[input_name] = torch.zeros(shape, dtype=torch.long, device=device)
        elif "past_key_values" in input_name:
            if model.config.model_type not in _FX_SUPPORTED_MODELS_WITH_KV_CACHE:
                raise NotImplementedError(
                    f"Symbolic trace with past_key_values input is not supported yet for the model"
                    f" {model.config.model_type}. Please open an issue or a PR in Transformers repository if you"
                    " would like to see the support added."
                )
            num_heads = model.config.num_attention_heads
            head_dim = model.config.hidden_size // model.config.num_attention_heads

            cache_shape = (shape[0], num_heads, kv_cache_length, head_dim)
            pkv = tuple(
                (
                    torch.rand(cache_shape, dtype=torch.float, device=device),
                    torch.rand(cache_shape, dtype=torch.float, device=device),
                )
                for i in range(model.config.num_hidden_layers)
            )
            inputs_dict[input_name] = pkv
        else:
            shape_with_hidden_size = shape + [model.config.hidden_size]
            inputs_dict[input_name] = torch.zeros(shape_with_hidden_size, dtype=torch.float, device=device)

        return inputs_dict

    def create_proxy(self, kind, target, args, kwargs, name=None, type_expr=None, proxy_factory_fn=None):
        rv = super().create_proxy(kind, target, args, kwargs, name, type_expr, proxy_factory_fn)

        if kind == "placeholder" and target in self.meta_args:
            rv.install_metadata(self.meta_args[target])
            return rv

        if target in self.orig_fns:
            # NOTE: tensor constructors in PyTorch define the `device` argument as
            # *kwargs-only*. That is why this works. If you add methods to
            # _TORCH_METHODS_TO_PATCH that do not define `device` as kwarg-only,
            # this will break and you will likely see issues where we cannot infer
            # the size of the output.
            if "device" in kwargs:
                kwargs["device"] = "meta"

        try:
            args_metas = torch.fx.node.map_aggregate(args, _proxies_to_metas)
            kwargs_metas = torch.fx.node.map_aggregate(kwargs, _proxies_to_metas)

            should_install_metadata = True

            self._disable_module_getattr = True
            self._disable_call_module = True

            if kind == "call_function":
                meta_target = _MANUAL_META_OVERRIDES.get(target, target)
                meta_out = meta_target(*args_metas, **kwargs_metas)
                if isinstance(meta_out, torch.Tensor):
                    meta_out = meta_out.to(device="meta")
            elif kind == "call_method":
                method = getattr(args_metas[0].__class__, target)
                meta_target = _MANUAL_META_OVERRIDES.get(method, method)
                meta_out = meta_target(*args_metas, **kwargs_metas)
            elif kind == "call_module":
                if not hasattr(self, "orig_forward"):
                    raise AttributeError(f"{self} does not have an attribute called orig_forward")
                mod = self.root.get_submodule(target)
                mod_type = type(mod)
                if mod_type in _MANUAL_META_OVERRIDES:
                    meta_out = _MANUAL_META_OVERRIDES[mod_type](mod, *args_metas, **kwargs_metas)
                else:
                    meta_out = self.orig_forward(*args_metas, **kwargs_metas)
            elif kind == "get_attr":
                attr_itr = self.root
                atoms = target.split(".")
                for atom in atoms:
                    attr_itr = getattr(attr_itr, atom)
                if isinstance(attr_itr, torch.Tensor):
                    meta_out = attr_itr.to(device="meta")
                else:
                    meta_out = attr_itr
            else:
                should_install_metadata = False

            if should_install_metadata:
                if not isinstance(rv, Proxy):
                    raise ValueError("Don't support composite output yet")
                rv.install_metadata(meta_out)

        except Exception as e:
            if _IS_IN_DEBUG_MODE:
                warnings.warn(f"Could not compute metadata for {kind} target {target}: {e}")

        self._disable_module_getattr = False
        self._disable_call_module = False

        return rv

    # Replaced by .getattr from PyTorch 1.13
    def _module_getattr(self, attr, attr_val, parameter_proxy_cache):
        if getattr(self, "_disable_module_getattr", False):
            return attr_val
        else:

            def maybe_get_proxy_for_attr(attr_val, collection_to_search, parameter_proxy_cache):
                for n, p in collection_to_search:
                    if attr_val is p:
                        if n not in parameter_proxy_cache:
                            kwargs = {}
                            if "proxy_factory_fn" in inspect.signature(self.create_proxy).parameters:
                                kwargs["proxy_factory_fn"] = (
                                    None
                                    if not self.param_shapes_constant
                                    else lambda node: ParameterProxy(self, node, n, attr_val)
                                )
                            val_proxy = self.create_proxy("get_attr", n, (), {}, **kwargs)  # type: ignore[arg-type]
                            parameter_proxy_cache[n] = val_proxy
                        return parameter_proxy_cache[n]
                return None

            if isinstance(attr_val, torch.nn.Parameter):
                maybe_parameter_proxy = maybe_get_proxy_for_attr(
                    attr_val, self.root.named_parameters(), parameter_proxy_cache
                )
                if maybe_parameter_proxy is not None:
                    return maybe_parameter_proxy

            if self.proxy_buffer_attributes and isinstance(attr_val, torch.Tensor):
                maybe_buffer_proxy = maybe_get_proxy_for_attr(
                    attr_val, self.root.named_buffers(), parameter_proxy_cache
                )
                if maybe_buffer_proxy is not None:
                    return maybe_buffer_proxy

            return attr_val

    # Needed for PyTorch 1.13+
    def getattr(self, attr: str, attr_val: Any, parameter_proxy_cache: Dict[str, Any]):
        return self._module_getattr(attr, attr_val, parameter_proxy_cache)

    def call_module(self, m, forward, args, kwargs):
        if getattr(self, "_disable_call_module", False):
            return forward(*args, **kwargs)
        self.orig_forward = forward
        return super().call_module(m, forward, args, kwargs)

    def proxy(self, node):
        return HFProxy(node, self)

    @contextlib.contextmanager
    def patch_for_tracing(self, root: Union[torch.nn.Module, Callable[..., Any]]):
        # Patching torch functions
        self.patched_torch_methods = {
            target: gen_constructor_wrapper(getattr(torch, target)) for target in self._TORCH_METHODS_TO_PATCH
        }
        self.orig_fns = set()

        for name, (wrapper, orig) in self.patched_torch_methods.items():
            setattr(torch, name, wrapper)
            self.orig_fns.add(orig)

        # Patching classes
        patched = []
        module_of_model = inspect.getmodule(root)
        for name, mod in sys.modules.items():
            if module_of_model is not None and mod is not module_of_model:
                continue
            if not name.startswith("transformers"):
                continue
            for orig_cls, patched_cls in self._CLASSES_TO_PATCH.items():
                for attr_name, attr in mod.__dict__.items():
                    if attr is orig_cls:
                        patched.append((mod, attr_name, orig_cls))
                        setattr(mod, attr_name, patched_cls)

        yield

        # Restoring patched functions and classes.
        for name, (_, orig) in self.patched_torch_methods.items():
            setattr(torch, name, orig)
        self.patched_torch_methods = {}
        self.orig_fns = set()

        for mod, attr_name, orig_cls in patched:
            setattr(mod, attr_name, orig_cls)

    def trace(
        self,
        root: Union[torch.nn.Module, Callable[..., Any]],
        concrete_args: Optional[Dict[str, Any]] = None,
        dummy_inputs: Optional[Dict[str, Any]] = None,
        complete_concrete_args_with_inputs_not_in_dummy_inputs: bool = True,
    ) -> Graph:
        """
        Traces `root` and returns the corresponding FX `torch.fx.Graph` representation. `root` can either be a
        `torch.nn.Module` instance or a Python callable. Note that after this call, `self.root` may be different from
        the `root` passed in here. For example, when a free function is passed to `trace()`, we will create a
        `torch.nn.Module` instance to use as the root and add embedded constants to.

        Args:
            root (`torch.nn.Module` or `Callable`):
                Either a `torch.nn.Module` or a function to be traced through. If root is not a
                [`~transformers.PreTrainedModel`], then `dummy_inputs` must be passed, otherwise tracing will fail.
            concrete_args (`Dict[str, Any]`, *optional*):
                Concrete arguments that should not be treated as Proxies.
            dummy_inputs (`Dict[str, Any]`, *optional*):
                The dummy inputs needed to handle data-dependent control-flow if `root` is not a
                [`~transformers.PreTrainedModel`]. It can also be used when `root` is a
                [`~transformers.PreTrainedModel`] to specify custom dummy inputs for a subset or all the model inputs.
            complete_concrete_args_with_inputs_not_in_dummy_inputs (`bool`, *optional*, defaults to `True`):
                If `True`, and `dummy_inputs` is specified, every argument that `root` can take that is not in
                `dummy_inputs` and not in `concrete_args` will be added to `concrete_args`, otherwise does nothing.

        Returns:
            `torch.fx.Graph`:
                A FX `torch.fx.Graph` representing the semantics of the passed-in `root`.
        """
        sig = inspect.signature(root.forward if isinstance(root, torch.nn.Module) else root)

        if concrete_args is None:
            concrete_args = {}

        if dummy_inputs is not None and complete_concrete_args_with_inputs_not_in_dummy_inputs:
            for param in sig.parameters.values():
                if param.name in dummy_inputs:
                    continue
                if param.default is inspect.Parameter.empty:
                    raise ValueError(f"You need to specify a default value for the parameter {param.name}.")
            concrete_args.update(
                {
                    p.name: p.default
                    for p in sig.parameters.values()
                    if (p.name not in dummy_inputs and p.name not in concrete_args)
                }
            )

        input_names = sig.parameters.keys() - concrete_args.keys()

        # Creating a random input shape to generate dummy inputs.
        batch_size = _generate_random_int()
        sequence_length = _generate_random_int()
        shape = [batch_size, sequence_length]

        if root.__class__.__name__ in get_values(MODEL_FOR_MULTIPLE_CHOICE_MAPPING_NAMES):
            num_choices = _generate_random_int(low=2, high=5)
            shape.insert(1, num_choices)

        inputs = dict(dummy_inputs) if dummy_inputs is not None else {}
        for input_name in input_names:
            if input_name in inputs:
                continue
            # We enforce that root must either be a PreTrainedModel or deserialized from a serialized traced model to
            # be able to use HFTracer._generate_dummy_input.
            if isinstance(root, self.supported_archs) or type(root).__qualname__.startswith(
                ("_deserialize_graph_module", "_CodeOnlyModule")
            ):
                inputs.update(self._generate_dummy_input(root, input_name, shape, input_names=input_names))
            else:
                raise RuntimeError(
                    f"Could not generate input named {input_name} because root is not a"
                    " transformers.PreTrainedModel."
                )

        def to_meta(value):
            if isinstance(value, torch.Tensor):
                return value.to("meta")
            return value

        concrete_metas = pytree.tree_map(to_meta, inputs)

        for param in sig.parameters.values():
            if param.kind == inspect.Parameter.VAR_KEYWORD and param.name not in input_names:
                concrete_metas[f"**{param.name}"] = {}
        self.meta_args = concrete_metas

        global _CURRENT_TRACER
        _CURRENT_TRACER = self
        with self.patch_for_tracing(root):
            try:
                self.graph = super().trace(root, concrete_args=concrete_args)
            finally:
                _CURRENT_TRACER = None

        # This is necessary because concrete args are added as input to the traced module since
        # https://github.com/pytorch/pytorch/pull/55888.
        for node in self.graph.nodes:
            if node.op == "placeholder":
                # Removing default values for inputs as the forward pass will fail with them.
                if node.target in input_names:
                    node.args = ()
                    # Without this, torch.jit.script fails because the inputs type is Optional[torch.Tensor].
                    # It cannot infer on the attributes and methods the input should have, and fails.
                    node.type = torch.Tensor
                # It is a concrete arg so it is not used and should be removed.
                else:
                    to_visit = [node]
                    to_delete = collections.OrderedDict()
                    while to_visit:
                        n = to_visit.pop(0)
                        to_delete[n] = None
                        to_visit += list(n.users.keys())

                    for user in reversed(to_delete.keys()):
                        self.graph.erase_node(user)

            # TODO: solves GraphModule creation.
            # Without this, return type annotation "Tuple" is causing code execution failure.
            if node.op == "output":
                node.type = None

        return self.graph

    def _stateless_mod_instanciation_depends_on_proxies(self, mod: nn.Module) -> bool:
        """
        Whether the module was instantiated with Proxies. If that is the case, such module cannot be a leaf module
        because its attributes are input-dependent.
        """
        return any(isinstance(attr, Proxy) for attr in mod.__dict__.values())

    def _insert_module_as_submodule(self, mod: nn.Module) -> str:
        """
        Helper method which tries to insert a module that was not declared as submodule.
        """
        # If one of the module attributes is a Proxy, it means that its instantiation is input-dependent.
        # It is not possible to insert such modules, those should be traced through.
        if self._stateless_mod_instanciation_depends_on_proxies(mod):
            return ""
        idx = 0
        mod_name = mod.__class__.__name__.lower()
        path = f"{mod_name}_{idx}"
        already_inserted = False
        while hasattr(self.root, path):
            if getattr(self.root, path) is mod:
                already_inserted = True
                break
            # Increment before recomputing the path, otherwise the same candidate name repeats forever.
            idx += 1
            path = f"{mod_name}_{idx}"

        # No need to add multiple instances of the same module.
        if not already_inserted:
            self.root.add_module(path, mod)
        return path

    def path_of_module(self, mod: nn.Module) -> str:
        """
        Helper method to find the qualified name of `mod` in the Module hierarchy of `root`. For example, if `root`
        has a submodule named `foo`, which has a submodule named `bar`, passing `bar` into this function will return
        the string "foo.bar".

        Args:
            mod (str): The `Module` to retrieve the qualified name for.
        """
        try:
            return super().path_of_module(mod)
        except NameError as e:
            if self.allow_insert_stateless_mods and len(list(mod.parameters())) == 0 and len(list(mod.buffers())) == 0:
                path = self._insert_module_as_submodule(mod)
                return path
            raise e

    def is_leaf_module(self, m: torch.nn.Module, module_qualified_name: str) -> bool:
        return (not self._stateless_mod_instanciation_depends_on_proxies(m)) and super().is_leaf_module(
            m, module_qualified_name
        )

    @compatibility(is_backward_compatible=True)
    def keys(self, obj: "Proxy") -> Any:
        """Called when a proxy object has the keys() method called.
        This is what happens when ** is called on a proxy. This should return an iterator if ** is supposed to work in
        your custom tracer.
        """
        attribute = HFAttribute(obj, "keys")()
        if obj.node.target.startswith("**"):
            return attribute._metadata
        return attribute
class_definition
26,952
53,738
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/fx.py
null
2,592
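The tracer above is what powers `transformers.utils.fx.symbolic_trace`, the documented entry point for FX-tracing library models. A short usage example; the checkpoint name is illustrative, and any FX-supported architecture works:

```python
from transformers import AutoModelForSequenceClassification
from transformers.utils.fx import symbolic_trace

# "bert-base-uncased" is just an illustrative checkpoint.
model = AutoModelForSequenceClassification.from_pretrained("bert-base-uncased")
traced = symbolic_trace(model, input_names=["input_ids", "attention_mask", "token_type_ids"])
print(traced.graph)  # the torch.fx Graph produced by HFTracer.trace
```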
class NotebookProgressBar:
    """
    A progress bar for display in a notebook.

    Class attributes (overridden by derived classes)

        - **warmup** (`int`) -- The number of iterations to do at the beginning while ignoring `update_every`.
        - **update_every** (`float`) -- Since calling the time takes some time, we only do it every presumed
          `update_every` seconds. The progress bar uses the average time passed up until now to guess the next value
          for which it will call the update.

    Args:
        total (`int`):
            The total number of iterations to reach.
        prefix (`str`, *optional*):
            A prefix to add before the progress bar.
        leave (`bool`, *optional*, defaults to `True`):
            Whether or not to leave the progress bar once it's completed. You can always call the
            [`~utils.notebook.NotebookProgressBar.close`] method to make the bar disappear.
        parent ([`~notebook.NotebookTrainingTracker`], *optional*):
            A parent object (like [`~utils.notebook.NotebookTrainingTracker`]) that spawns progress bars and handles
            their display. If set, the object passed must have a `display()` method.
        width (`int`, *optional*, defaults to 300):
            The width (in pixels) that the bar will take.

    Example:

    ```python
    import time

    pbar = NotebookProgressBar(100)
    for val in range(100):
        pbar.update(val)
        time.sleep(0.07)
    pbar.update(100)
    ```"""

    warmup = 5
    update_every = 0.2

    def __init__(
        self,
        total: int,
        prefix: Optional[str] = None,
        leave: bool = True,
        parent: Optional["NotebookTrainingTracker"] = None,
        width: int = 300,
    ):
        self.total = total
        self.prefix = "" if prefix is None else prefix
        self.leave = leave
        self.parent = parent
        self.width = width
        self.last_value = None
        self.comment = None
        self.output = None
        self.value = None
        self.label = None
        if "VSCODE_PID" in os.environ:
            self.update_every = 0.5  # Adjusted for smooth updates, as HTML rendering is slow on VS Code.
            # This is the only adjustment required to optimize training HTML rendering.

    def update(self, value: int, force_update: bool = False, comment: str = None):
        """
        The main method to update the progress bar to `value`.

        Args:
            value (`int`):
                The value to use. Must be between 0 and `total`.
            force_update (`bool`, *optional*, defaults to `False`):
                Whether or not to force an update of the internal state and display (by default, the bar will wait for
                `value` to reach the value it predicted corresponds to a time of more than the `update_every` attribute
                since the last update to avoid adding boilerplate).
            comment (`str`, *optional*):
                A comment to add on the left of the progress bar.
        """
        self.value = value
        if comment is not None:
            self.comment = comment
        if self.last_value is None:
            self.start_time = self.last_time = time.time()
            self.start_value = self.last_value = value
            self.elapsed_time = self.predicted_remaining = None
            self.first_calls = self.warmup
            self.wait_for = 1
            self.update_bar(value)
        elif value <= self.last_value and not force_update:
            return
        elif force_update or self.first_calls > 0 or value >= min(self.last_value + self.wait_for, self.total):
            if self.first_calls > 0:
                self.first_calls -= 1
            current_time = time.time()
            self.elapsed_time = current_time - self.start_time
            # We could have value = self.start_value if the update is called twice with the same start value.
            if value > self.start_value:
                self.average_time_per_item = self.elapsed_time / (value - self.start_value)
            else:
                self.average_time_per_item = None
            if value >= self.total:
                value = self.total
                self.predicted_remaining = None
                if not self.leave:
                    self.close()
            elif self.average_time_per_item is not None:
                self.predicted_remaining = self.average_time_per_item * (self.total - value)
            self.update_bar(value)
            self.last_value = value
            self.last_time = current_time
            if (self.average_time_per_item is None) or (self.average_time_per_item == 0):
                self.wait_for = 1
            else:
                self.wait_for = max(int(self.update_every / self.average_time_per_item), 1)

    def update_bar(self, value, comment=None):
        spaced_value = " " * (len(str(self.total)) - len(str(value))) + str(value)
        if self.elapsed_time is None:
            self.label = f"[{spaced_value}/{self.total} : < :"
        elif self.predicted_remaining is None:
            self.label = f"[{spaced_value}/{self.total} {format_time(self.elapsed_time)}"
        else:
            self.label = (
                f"[{spaced_value}/{self.total} {format_time(self.elapsed_time)} <"
                f" {format_time(self.predicted_remaining)}"
            )
            if self.average_time_per_item == 0:
                self.label += ", +inf it/s"
            else:
                self.label += f", {1/self.average_time_per_item:.2f} it/s"

        self.label += "]" if self.comment is None or len(self.comment) == 0 else f", {self.comment}]"
        self.display()

    def display(self):
        self.html_code = html_progress_bar(self.value, self.total, self.prefix, self.label, self.width)
        if self.parent is not None:
            # If this is a child bar, the parent will take care of the display.
            self.parent.display()
            return
        if self.output is None:
            self.output = disp.display(disp.HTML(self.html_code), display_id=True)
        else:
            self.output.update(disp.HTML(self.html_code))

    def close(self):
        "Closes the progress bar."
        if self.parent is None and self.output is not None:
            self.output.update(disp.HTML(""))
class_definition
1,908
8,262
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/notebook.py
null
2,593
class NotebookTrainingTracker(NotebookProgressBar):
    """
    An object tracking the updates of an ongoing training with progress bars and a nice table reporting metrics.

    Args:
        num_steps (`int`): The number of steps during training.
        column_names (`List[str]`, *optional*):
            The list of column names for the metrics table (will be inferred from the first call to
            [`~utils.notebook.NotebookTrainingTracker.write_line`] if not set).
    """

    def __init__(self, num_steps, column_names=None):
        super().__init__(num_steps)
        self.inner_table = None if column_names is None else [column_names]
        self.child_bar = None

    def display(self):
        self.html_code = html_progress_bar(self.value, self.total, self.prefix, self.label, self.width)
        if self.inner_table is not None:
            self.html_code += text_to_html_table(self.inner_table)
        if self.child_bar is not None:
            self.html_code += self.child_bar.html_code
        if self.output is None:
            self.output = disp.display(disp.HTML(self.html_code), display_id=True)
        else:
            self.output.update(disp.HTML(self.html_code))

    def write_line(self, values):
        """
        Write the values in the inner table.

        Args:
            values (`Dict[str, float]`): The values to display.
        """
        if self.inner_table is None:
            self.inner_table = [list(values.keys()), list(values.values())]
        else:
            columns = self.inner_table[0]
            for key in values.keys():
                if key not in columns:
                    columns.append(key)
            self.inner_table[0] = columns
            if len(self.inner_table) > 1:
                last_values = self.inner_table[-1]
                first_column = self.inner_table[0][0]
                if last_values[0] != values[first_column]:
                    # write new line
                    self.inner_table.append([values[c] if c in values else "No Log" for c in columns])
                else:
                    # update last line
                    new_values = values
                    for c in columns:
                        if c not in new_values.keys():
                            new_values[c] = last_values[columns.index(c)]
                    self.inner_table[-1] = [new_values[c] for c in columns]
            else:
                self.inner_table.append([values[c] for c in columns])

    def add_child(self, total, prefix=None, width=300):
        """
        Add a child progress bar displayed under the table of metrics. The child progress bar is returned (so it can
        be easily updated).

        Args:
            total (`int`): The number of iterations for the child progress bar.
            prefix (`str`, *optional*): A prefix to write on the left of the progress bar.
            width (`int`, *optional*, defaults to 300): The width (in pixels) of the progress bar.
        """
        self.child_bar = NotebookProgressBar(total, prefix=prefix, parent=self, width=width)
        return self.child_bar

    def remove_child(self):
        """
        Closes the child progress bar.
        """
        self.child_bar = None
        self.display()
class_definition
8,265
11,522
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/notebook.py
null
2,594
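A short sketch of driving the tracker by hand, again assuming a Jupyter environment; step counts and loss values are made up. It shows the three pieces the class combines: the main bar, the metrics table, and a child bar rendered under the table.

from transformers.utils.notebook import NotebookTrainingTracker

tracker = NotebookTrainingTracker(num_steps=300, column_names=["Step", "Training Loss"])
for step in range(1, 301):
    tracker.update(step)
    if step % 100 == 0:
        # write_line appends a row, or updates the last row when the value in
        # the first column has not changed since the previous call.
        tracker.write_line({"Step": step, "Training Loss": 1.0 / step})

# A child bar under the table, e.g. for an evaluation loop:
eval_bar = tracker.add_child(50, prefix="Eval")
for i in range(1, 51):
    eval_bar.update(i)
tracker.remove_child()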
class NotebookProgressCallback(TrainerCallback):
    """
    A [`TrainerCallback`] that displays the progress of training or evaluation, optimized for Jupyter Notebooks or
    Google colab.
    """

    def __init__(self):
        self.training_tracker = None
        self.prediction_bar = None
        self._force_next_update = False

    def on_train_begin(self, args, state, control, **kwargs):
        self.first_column = "Epoch" if args.eval_strategy == IntervalStrategy.EPOCH else "Step"
        self.training_loss = 0
        self.last_log = 0
        column_names = [self.first_column] + ["Training Loss"]
        if args.eval_strategy != IntervalStrategy.NO:
            column_names.append("Validation Loss")
        self.training_tracker = NotebookTrainingTracker(state.max_steps, column_names)

    def on_step_end(self, args, state, control, **kwargs):
        epoch = int(state.epoch) if int(state.epoch) == state.epoch else f"{state.epoch:.2f}"
        self.training_tracker.update(
            state.global_step + 1,
            comment=f"Epoch {epoch}/{state.num_train_epochs}",
            force_update=self._force_next_update,
        )
        self._force_next_update = False

    def on_prediction_step(self, args, state, control, eval_dataloader=None, **kwargs):
        if not has_length(eval_dataloader):
            return
        if self.prediction_bar is None:
            if self.training_tracker is not None:
                self.prediction_bar = self.training_tracker.add_child(len(eval_dataloader))
            else:
                self.prediction_bar = NotebookProgressBar(len(eval_dataloader))
            self.prediction_bar.update(1)
        else:
            self.prediction_bar.update(self.prediction_bar.value + 1)

    def on_predict(self, args, state, control, **kwargs):
        if self.prediction_bar is not None:
            self.prediction_bar.close()
        self.prediction_bar = None

    def on_log(self, args, state, control, logs=None, **kwargs):
        # Only for when there is no evaluation
        if args.eval_strategy == IntervalStrategy.NO and "loss" in logs:
            values = {"Training Loss": logs["loss"]}
            # First column is necessarily Step since we're not in epoch eval strategy
            values["Step"] = state.global_step
            self.training_tracker.write_line(values)

    def on_evaluate(self, args, state, control, metrics=None, **kwargs):
        if self.training_tracker is not None:
            values = {"Training Loss": "No log", "Validation Loss": "No log"}
            for log in reversed(state.log_history):
                if "loss" in log:
                    values["Training Loss"] = log["loss"]
                    break

            if self.first_column == "Epoch":
                values["Epoch"] = int(state.epoch)
            else:
                values["Step"] = state.global_step
            metric_key_prefix = "eval"
            for k in metrics:
                if k.endswith("_loss"):
                    metric_key_prefix = re.sub(r"\_loss$", "", k)
            _ = metrics.pop("total_flos", None)
            _ = metrics.pop("epoch", None)
            _ = metrics.pop(f"{metric_key_prefix}_runtime", None)
            _ = metrics.pop(f"{metric_key_prefix}_samples_per_second", None)
            _ = metrics.pop(f"{metric_key_prefix}_steps_per_second", None)
            _ = metrics.pop(f"{metric_key_prefix}_jit_compilation_time", None)
            for k, v in metrics.items():
                splits = k.split("_")
                name = " ".join([part.capitalize() for part in splits[1:]])
                if name == "Loss":
                    # Single dataset
                    name = "Validation Loss"
                values[name] = v
            self.training_tracker.write_line(values)
            self.training_tracker.remove_child()
            self.prediction_bar = None
            # Evaluation takes a long time so we should force the next update.
            self._force_next_update = True

    def on_train_end(self, args, state, control, **kwargs):
        self.training_tracker.update(
            state.global_step,
            comment=f"Epoch {int(state.epoch)}/{state.num_train_epochs}",
            force_update=True,
        )
        self.training_tracker = None
class_definition
11,525
15,827
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/notebook.py
null
2,595
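A hedged sketch of wiring this callback into a `Trainer` explicitly. In practice transformers selects `NotebookProgressCallback` automatically when it detects a notebook environment, so the explicit `callbacks=` argument is shown only for illustration; `model`, `train_dataset`, and `eval_dataset` are assumed to be defined elsewhere.

from transformers import Trainer, TrainingArguments
from transformers.utils.notebook import NotebookProgressCallback

trainer = Trainer(
    model=model,                  # assumed to be defined elsewhere
    args=TrainingArguments(output_dir="out", eval_strategy="epoch"),
    train_dataset=train_dataset,  # assumed to be defined elsewhere
    eval_dataset=eval_dataset,    # assumed to be defined elsewhere
    callbacks=[NotebookProgressCallback()],
)
trainer.train()  # renders the bar + metrics table instead of the console tqdm bar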
class DummyObject(type):
    """
    Metaclass for the dummy objects. Any class inheriting from it will return the ImportError generated by
    `requires_backends` each time a user tries to access any method of that class.
    """

    def __getattribute__(cls, key):
        if key.startswith("_") and key != "_from_config":
            return super().__getattribute__(key)
        requires_backends(cls, cls._backends)
class_definition
58,802
59,221
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/import_utils.py
null
2,596
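A self-contained toy reproduction of the pattern. `ToyDummyMeta` and `FakeTokenizer` are illustrative names, not the real implementation; the real code delegates the error message to `requires_backends`, which formats a proper installation hint.

class ToyDummyMeta(type):
    def __getattribute__(cls, key):
        # Private/dunder attributes still resolve normally; everything else errors.
        if key.startswith("_"):
            return super().__getattribute__(key)
        raise ImportError(f"{cls.__name__} requires the backends: {cls._backends}")


class FakeTokenizer(metaclass=ToyDummyMeta):
    _backends = ["tokenizers"]


try:
    FakeTokenizer.from_pretrained  # any public attribute access triggers the error
except ImportError as e:
    print(e)  # FakeTokenizer requires the backends: ['tokenizers']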
class _LazyModule(ModuleType):
    """
    Module class that surfaces all objects but only performs associated imports when the objects are requested.
    """

    # Very heavily inspired by optuna.integration._IntegrationModule
    # https://github.com/optuna/optuna/blob/master/optuna/integration/__init__.py
    def __init__(
        self,
        name: str,
        module_file: str,
        import_structure: IMPORT_STRUCTURE_T,
        module_spec: importlib.machinery.ModuleSpec = None,
        extra_objects: Dict[str, object] = None,
    ):
        super().__init__(name)

        self._object_missing_backend = {}
        if any(isinstance(key, frozenset) for key in import_structure.keys()):
            self._modules = set()
            self._class_to_module = {}
            self.__all__ = []

            _import_structure = {}

            for backends, module in import_structure.items():
                missing_backends = []
                for backend in backends:
                    if backend not in BACKENDS_MAPPING:
                        raise ValueError(
                            f"Error: the following backend: '{backend}' was specified around object {module} but isn't specified in the backends mapping."
                        )
                    callable, error = BACKENDS_MAPPING[backend]
                    if not callable():
                        missing_backends.append(backend)
                self._modules = self._modules.union(set(module.keys()))

                for key, values in module.items():
                    if len(missing_backends):
                        self._object_missing_backend[key] = missing_backends

                    for value in values:
                        self._class_to_module[value] = key
                        if len(missing_backends):
                            self._object_missing_backend[value] = missing_backends
                    _import_structure.setdefault(key, []).extend(values)

                # Needed for autocompletion in an IDE
                self.__all__.extend(list(module.keys()) + list(chain(*module.values())))

            self.__file__ = module_file
            self.__spec__ = module_spec
            self.__path__ = [os.path.dirname(module_file)]
            self._objects = {} if extra_objects is None else extra_objects
            self._name = name
            self._import_structure = _import_structure

        # This can be removed once every exportable object has a `export()` export.
        else:
            self._modules = set(import_structure.keys())
            self._class_to_module = {}
            for key, values in import_structure.items():
                for value in values:
                    self._class_to_module[value] = key
            # Needed for autocompletion in an IDE
            self.__all__ = list(import_structure.keys()) + list(chain(*import_structure.values()))
            self.__file__ = module_file
            self.__spec__ = module_spec
            self.__path__ = [os.path.dirname(module_file)]
            self._objects = {} if extra_objects is None else extra_objects
            self._name = name
            self._import_structure = import_structure

    # Needed for autocompletion in an IDE
    def __dir__(self):
        result = super().__dir__()
        # The elements of self.__all__ that are submodules may or may not be in the dir already, depending on whether
        # they have been accessed or not. So we only add the elements of self.__all__ that are not already in the dir.
        for attr in self.__all__:
            if attr not in result:
                result.append(attr)
        return result

    def __getattr__(self, name: str) -> Any:
        if name in self._objects:
            return self._objects[name]
        if name in self._object_missing_backend.keys():
            missing_backends = self._object_missing_backend[name]

            class Placeholder(metaclass=DummyObject):
                _backends = missing_backends

                def __init__(self, *args, **kwargs):
                    requires_backends(self, missing_backends)

            Placeholder.__name__ = name
            Placeholder.__module__ = self.__spec__

            value = Placeholder
        elif name in self._class_to_module.keys():
            module = self._get_module(self._class_to_module[name])
            value = getattr(module, name)
        elif name in self._modules:
            value = self._get_module(name)
        else:
            raise AttributeError(f"module {self.__name__} has no attribute {name}")

        setattr(self, name, value)
        return value

    def _get_module(self, module_name: str):
        try:
            return importlib.import_module("." + module_name, self.__name__)
        except Exception as e:
            raise RuntimeError(
                f"Failed to import {self.__name__}.{module_name} because of the following error (look up to see its"
                f" traceback):\n{e}"
            ) from e

    def __reduce__(self):
        return (self.__class__, (self._name, self.__file__, self._import_structure))
class_definition
59,460
64,598
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/import_utils.py
null
2,597
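A hedged sketch of the plain (non-frozenset) usage pattern as it appears in the package `__init__.py` files of transformers; the code below is meant to live in an `__init__.py` (so `__name__`, `__file__`, and `__spec__` are defined), and the submodule and class names are placeholders.

import sys

from transformers.utils import _LazyModule

_import_structure = {
    "configuration_foo": ["FooConfig"],  # submodule -> public names it defines
    "modeling_foo": ["FooModel"],
}

sys.modules[__name__] = _LazyModule(
    __name__,
    globals()["__file__"],
    _import_structure,
    module_spec=__spec__,
)
# After this, `from my_package import FooModel` imports my_package.modeling_foo
# only at access time, via the __getattr__ defined above.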
class Placeholder(metaclass=DummyObject):
    _backends = missing_backends

    def __init__(self, *args, **kwargs):
        requires_backends(self, missing_backends)
class_definition
63,391
63,593
1
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/import_utils.py
_LazyModule
2,598
class OptionalDependencyNotAvailable(BaseException):
    """Internally used error class for signalling that an optional dependency was not found."""
class_definition
64,601
64,744
0
/Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/import_utils.py
null
2,599
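Because it derives from `BaseException` rather than `Exception`, a generic `except Exception` handler will not swallow it. Below is a hedged sketch of the import-guard pattern built around this class in transformers' `__init__` files, using the `tokenizers` backend as the example; outside an `__init__.py` the absolute imports shown here stand in for the relative ones the library actually uses.

from transformers.utils import OptionalDependencyNotAvailable, is_tokenizers_available

try:
    if not is_tokenizers_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    # Fall back to the DummyObject placeholders so the name still exists;
    # any use of it then raises a helpful ImportError.
    from transformers.utils.dummy_tokenizers_objects import BertTokenizerFast
else:
    from transformers import BertTokenizerFast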