code
stringlengths
86
54.5k
code_codestyle
int64
0
371
style_context
stringlengths
87
49.2k
style_context_codestyle
int64
0
349
label
int64
0
1
from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tokenizers_available, is_torch_available _UpperCamelCase = { "configuration_roc_bert": ["ROC_BERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "RoCBertConfig"], "tokenization_roc_bert": ["RoCBertTok...
351
from multiprocessing import Lock, Pipe, Process # lock used to ensure that two processes do not access a pipe at the same time _UpperCamelCase = Lock() def UpperCamelCase_( snake_case__: Optional[Any] , snake_case__: Optional[int] , snake_case__: Tuple , snake_case__: Tuple ...
335
0
from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_flax_available, is_torch_available _UpperCamelCase = {'''configuration_speech_encoder_decoder''': ['''SpeechEncoderDecoderConfig''']} try: if not is_torch_available(): raise OptionalDe...
352
import copy import os import cva import numpy as np from matplotlib import pyplot as plt class lowercase : '''simple docstring''' def __init__(self ) -> str: """simple docstring""" UpperCAmelCase__ = '' UpperCAmelCase__ = ...
335
0
from .data_collator import ( DataCollatorForLanguageModeling, DataCollatorForPermutationLanguageModeling, DataCollatorForSeqaSeq, DataCollatorForSOP, DataCollatorForTokenClassification, DataCollatorForWholeWordMask, DataCollatorWithPadding, DefaultDataCollator, default_d...
353
import collections import inspect import unittest from transformers import SwinvaConfig from transformers.testing_utils import require_torch, require_vision, slow, torch_device from transformers.utils import cached_property, is_torch_available, is_vision_available from ...test_configuration_common import ...
335
0
import sys from collections.abc import Mapping from typing import TYPE_CHECKING, Dict, Optional import numpy as np import pyarrow as pa from .. import config from ..utils.logging import get_logger from ..utils.py_utils import map_nested from .formatting import TensorFormatter if TYPE_CHECKING: im...
354
from collections import deque def UpperCamelCase_( snake_case__: Tuple ) -> Tuple: UpperCAmelCase__ = len(snake_case__ ) UpperCAmelCase__ = deque() UpperCAmelCase__ = [False for _ in range(snake_case__ )] UpperCAmelCase__ = [-1...
335
0
_UpperCamelCase = 9.8_0_6_6_5 def UpperCamelCase_( snake_case__: Any , snake_case__: Union[str, Any] , snake_case__: Optional[int] = g ) -> List[str]: if fluid_density <= 0: raise ValueError('Impossible fluid density' ) if volume < 0: raise ValueError(...
355
from ...configuration_utils import PretrainedConfig _UpperCamelCase = { '''google/tapas-base-finetuned-sqa''': ( '''https://huggingface.co/google/tapas-base-finetuned-sqa/resolve/main/config.json''' ), '''google/tapas-base-finetuned-wtq''': ( '''https://huggingface.c...
335
0
from . import ( albert, align, altclip, audio_spectrogram_transformer, auto, autoformer, bark, bart, barthez, bartpho, beit, bert, bert_generation, bert_japanese, bertweet, big_bird, bigbird_pegasus, biogpt, bit, ...
356
from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tokenizers_available, is_torch_available _UpperCamelCase = { '''configuration_squeezebert''': [ '''SQUEEZEBERT_PRETRAINED_CONFIG_ARCHIVE_MAP''', '''SqueezeBertConfig''', ...
335
0
import itertools import random import unittest import numpy as np from transformers import is_speech_available from transformers.testing_utils import require_torch, require_torchaudio from ...test_sequence_feature_extraction_common import SequenceFeatureExtractionTestMixin if is_speech_available():...
357
import argparse import gdown import numpy as np import torch from huggingface_hub import hf_hub_download from transformers import ( CLIPTokenizer, CLIPTokenizerFast, VideoMAEImageProcessor, XCLIPConfig, XCLIPModel, XCLIPProcessor, XCLIPTextConfig, XCLIPVisionConfig, ...
335
0
import argparse import json import os import fairseq import torch from fairseq.data import Dictionary from transformers import ( HubertConfig, HubertForCTC, HubertModel, WavaVecaCTCTokenizer, WavaVecaFeatureExtractor, WavaVecaProcessor, logging, ) logging.set_verbo...
358
import argparse import OmegaConf import torch from diffusers import DDIMScheduler, LDMPipeline, UNetLDMModel, VQModel def UpperCamelCase_( snake_case__: Optional[int] , snake_case__: List[Any] , snake_case__: Union[str, Any] ) -> Tuple: UpperCAmelCase__ = OmegaConf....
335
0
import numpy as np import datasets _UpperCamelCase = "\nCompute the Mahalanobis Distance\n\nMahalonobis distance is the distance between a point and a distribution.\nAnd not between two distinct points. It is effectively a multivariate equivalent of the Euclidean distance.\nIt was introduced by P...
359
# flake8: noqa # Lint as: python3 _UpperCamelCase = [ '''VerificationMode''', '''Version''', '''disable_progress_bar''', '''enable_progress_bar''', '''is_progress_bar_enabled''', '''experimental''', ] from .info_utils import VerificationMode from .logging import disab...
335
0
from argparse import ArgumentParser, Namespace from ..utils import logging from . import BaseTransformersCLICommand def UpperCamelCase_( snake_case__: Optional[int] ) -> Any: return ConvertCommand( args.model_type , args.tf_checkpoint , args.pytorch_dump_output , args.confi...
360
import functools import operator from ...configuration_utils import PretrainedConfig from ...utils import logging _UpperCamelCase = logging.get_logger(__name__) _UpperCamelCase = { '''asapp/sew-d-tiny-100k''': '''https://huggingface.co/asapp/sew-d-tiny-100k/resolve/main/confi...
335
0
import torch from diffusers import EulerDiscreteScheduler from diffusers.utils import torch_device from .test_schedulers import SchedulerCommonTest class lowercase ( lowerCamelCase__ ): '''simple docstring''' __SCREAMING_SNAKE_CASE : str = (EulerDiscreteScheduler,) ...
361
import argparse import os from pathlib import Path from typing import Dict import tensorflow as tf import torch from tqdm import tqdm from transformers import PegasusConfig, PegasusForConditionalGeneration, PegasusTokenizer from transformers.models.pegasus.configuration_pegasus import DEFAULTS, task_spe...
335
0
"""simple docstring""" from typing import TYPE_CHECKING from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_tf_available, is_tokenizers_available, is_torch_available, ) _UpperCamelCase = { '''configuration_funnel''': ['''FUNNEL_PRETRAINED...
362
from __future__ import annotations import os import tempfile import unittest from transformers import ConvBertConfig, is_tf_available from transformers.testing_utils import require_tf, slow from ...test_configuration_common import ConfigTester from ...test_modeling_tf_common import TFModelTesterMixin, ...
335
0
def UpperCamelCase_( snake_case__: str = 10_00 ) -> int: UpperCAmelCase__ = -1 UpperCAmelCase__ = 0 for a in range(1 , n // 3 ): # Solving the two equations a**2+b**2=c**2 and a+b+c=N eliminating c UpperCAmelCase__ = (n * n - 2 * a * n) //...
363
from collections import defaultdict from typing import Optional from ..image_utils import load_image from ..utils import ( add_end_docstrings, is_torch_available, logging, requires_backends, ) from .base import PIPELINE_INIT_ARGS, ChunkPipeline if is_torch_available(): import torch...
335
0
import inspect import unittest from transformers import DPTConfig from transformers.file_utils import is_torch_available, is_vision_available from transformers.models.auto import get_values from transformers.testing_utils import require_torch, require_vision, slow, torch_device from ...test_configuration_...
364
from dataclasses import dataclass, field from typing import Optional @dataclass class lowercase : '''simple docstring''' __SCREAMING_SNAKE_CASE = field( default="""codeparrot/codeparrot""" , metadata={"""help""": """Model name or path of model to be trained."""} )...
335
0
import numpy as np import torch from imwatermark import WatermarkEncoder # Copied from https://github.com/Stability-AI/generative-models/blob/613af104c6b85184091d42d374fef420eddb356d/scripts/demo/streamlit_helpers.py#L66 _UpperCamelCase = 0B101_100_111_110_110_010_010_000_011_110_111_011_000_110_...
365
import unittest import numpy as np from transformers import RobertaConfig, is_flax_available from transformers.testing_utils import require_flax, slow from ...test_modeling_flax_common import FlaxModelTesterMixin, floats_tensor, ids_tensor, random_attention_mask if is_flax_available(): from transfo...
335
0
def UpperCamelCase_( snake_case__: int = 1_00 ) -> int: """simple docstring""" UpperCAmelCase__ = set() UpperCAmelCase__ = 0 UpperCAmelCase__ = n + 1 # maximum limit for a in range(2 , lowerCAmelCase__ ): for b ...
366
import warnings from ...utils import logging from .image_processing_mobilevit import MobileViTImageProcessor _UpperCamelCase = logging.get_logger(__name__) class lowercase ( _UpperCamelCase ): '''simple docstring''' def __init__(self , *__a , **__a ) -> None: ...
335
0
import unittest from transformers import is_vision_available from transformers.pipelines import pipeline from transformers.testing_utils import ( is_pipeline_test, nested_simplify, require_tf, require_torch, require_vision, slow, ) from .test_pipelines_common import ANY i...
367
from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available _UpperCamelCase = { '''configuration_pegasus_x''': ['''PEGASUS_X_PRETRAINED_CONFIG_ARCHIVE_MAP''', '''PegasusXConfig'''], } try: if not is_torch_available(): raise O...
335
0
import numpy as np import torch from torch.utils.data import Dataset, IterableDataset from ..utils.generic import ModelOutput class lowercase ( UpperCamelCase__ ): '''simple docstring''' def __init__(self , __a , __a , __a ) -> Optional[int]: """simple docstring"...
368
import os import tempfile import unittest from pathlib import Path from transformers import AutoConfig, is_tf_available from transformers.testing_utils import require_tf if is_tf_available(): import tensorflow as tf from transformers import TensorFlowBenchmark, TensorFlowBenchmarkArguments ...
335
0
from binascii import hexlify from hashlib import shaaaa from os import urandom # RFC 3526 - More Modular Exponential (MODP) Diffie-Hellman groups for # Internet Key Exchange (IKE) https://tools.ietf.org/html/rfc3526 _UpperCamelCase = { # 1536-bit 5: { '''prime''': int( ...
369
from .constants import ( MODEL_NAME, OPTIMIZER_NAME, RNG_STATE_NAME, SAFE_WEIGHTS_INDEX_NAME, SAFE_WEIGHTS_NAME, SCALER_NAME, SCHEDULER_NAME, TORCH_LAUNCH_PARAMS, WEIGHTS_INDEX_NAME, WEIGHTS_NAME, ) from .dataclasses import ( BnbQuantizationConfig, Compu...
335
0
def UpperCamelCase_( snake_case__: Optional[int] , snake_case__: Tuple ) -> int: return int((input_a, input_a).count(0 ) != 0 ) def UpperCamelCase_( ) -> None: assert nand_gate(0 , 0 ) == 1 assert nand_gate(0 , 1 ) == 1 assert nand_gate(1 ...
370
import torch from torch import nn from ...configuration_utils import ConfigMixin, register_to_config from ...models import ModelMixin class lowercase ( _UpperCamelCase , _UpperCamelCase ): '''simple docstring''' @register_to_config def __init__(self , *, __a = 4 , __a ...
335
0
_UpperCamelCase = 0 # The first color of the flag. _UpperCamelCase = 1 # The second color of the flag. _UpperCamelCase = 2 # The third color of the flag. _UpperCamelCase = (red, white, blue) def UpperCamelCase_( snake_case__: list ) -> List[Any]...
371
import json import os import unittest from transformers.models.biogpt.tokenization_biogpt import VOCAB_FILES_NAMES, BioGptTokenizer from transformers.testing_utils import slow from ...test_tokenization_common import TokenizerTesterMixin class lowercase ( _UpperCamelCase , unittest.TestC...
335
0
import os from pathlib import Path def UpperCamelCase_( ) -> List[Any]: from torch.utils.cpp_extension import load UpperCAmelCase__ = Path(snake_case__ ).resolve().parent.parent.parent / 'kernels' / 'deformable_detr' UpperCAmelCase__ = [ root / filename ...
350
class lowercase : # Public class to implement a graph '''simple docstring''' def __init__(self , __a , __a , __a ) -> None: """simple docstring""" UpperCAmelCase__ = row UpperCAmelCase__ = col UpperCAmelCase__ = graph ...
335
0
import json import os import unittest from transformers import OpenAIGPTTokenizer, OpenAIGPTTokenizerFast from transformers.models.openai.tokenization_openai import VOCAB_FILES_NAMES from transformers.testing_utils import require_ftfy, require_spacy, require_tokenizers from ...test_tokenization_common imp...
351
from multiprocessing import Lock, Pipe, Process # lock used to ensure that two processes do not access a pipe at the same time _UpperCamelCase = Lock() def UpperCamelCase_( snake_case__: Optional[Any] , snake_case__: Optional[int] , snake_case__: Tuple , snake_case__: Tuple ...
335
0
import subprocess import sys from transformers import BertConfig, BertModel, BertTokenizer, pipeline from transformers.testing_utils import TestCasePlus, require_torch class lowercase ( _UpperCamelCase ): '''simple docstring''' @require_torch def UpperCamelCase__ (self ...
352
import copy import os import cva import numpy as np from matplotlib import pyplot as plt class lowercase : '''simple docstring''' def __init__(self ) -> str: """simple docstring""" UpperCAmelCase__ = '' UpperCAmelCase__ = ...
335
0
import argparse import requests import torch from PIL import Image from transformers import SwinConfig, SwinForMaskedImageModeling, ViTImageProcessor def UpperCamelCase_( snake_case__: str ) -> str: UpperCAmelCase__ = SwinConfig(image_size=1_92 ) if "base" in mod...
353
import collections import inspect import unittest from transformers import SwinvaConfig from transformers.testing_utils import require_torch, require_vision, slow, torch_device from transformers.utils import cached_property, is_torch_available, is_vision_available from ...test_configuration_common import ...
335
0
from __future__ import annotations import math class lowercase : '''simple docstring''' def __init__(self , __a ) -> None: """simple docstring""" UpperCAmelCase__ = size # approximate the overall size of segment tree with given value ...
354
from collections import deque def UpperCamelCase_( snake_case__: Tuple ) -> Tuple: UpperCAmelCase__ = len(snake_case__ ) UpperCAmelCase__ = deque() UpperCAmelCase__ = [False for _ in range(snake_case__ )] UpperCAmelCase__ = [-1...
335
0
from typing import Callable, Optional from .. import Features from ..packaged_modules.generator.generator import Generator from .abc import AbstractDatasetInputStream class lowercase ( _UpperCamelCase ): '''simple docstring''' def __init__(self , __a , __a = None , __a = None , __a...
355
from ...configuration_utils import PretrainedConfig _UpperCamelCase = { '''google/tapas-base-finetuned-sqa''': ( '''https://huggingface.co/google/tapas-base-finetuned-sqa/resolve/main/config.json''' ), '''google/tapas-base-finetuned-wtq''': ( '''https://huggingface.c...
335
0
import argparse import os import re # All paths are set with the intent you should run this script from the root of the repo with the command # python utils/check_dummies.py _UpperCamelCase = '''src/diffusers''' # Matches is_xxx_available() _UpperCamelCase = re.compile(R'''is\...
356
from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tokenizers_available, is_torch_available _UpperCamelCase = { '''configuration_squeezebert''': [ '''SQUEEZEBERT_PRETRAINED_CONFIG_ARCHIVE_MAP''', '''SqueezeBertConfig''', ...
335
0
import json import pathlib import unittest import numpy as np from transformers.testing_utils import require_torch, require_vision, slow from transformers.utils import is_torch_available, is_vision_available from ...test_image_processing_common import ImageProcessingSavingTestMixin, prepare_image_inputs...
357
import argparse import gdown import numpy as np import torch from huggingface_hub import hf_hub_download from transformers import ( CLIPTokenizer, CLIPTokenizerFast, VideoMAEImageProcessor, XCLIPConfig, XCLIPModel, XCLIPProcessor, XCLIPTextConfig, XCLIPVisionConfig, ...
335
0
import logging import random import ray from transformers import RagConfig, RagRetriever, RagTokenizer from transformers.models.rag.retrieval_rag import CustomHFIndex _UpperCamelCase = logging.getLogger(__name__) class lowercase : '''simple docstring''' def __ini...
358
import argparse import OmegaConf import torch from diffusers import DDIMScheduler, LDMPipeline, UNetLDMModel, VQModel def UpperCamelCase_( snake_case__: Optional[int] , snake_case__: List[Any] , snake_case__: Union[str, Any] ) -> Tuple: UpperCAmelCase__ = OmegaConf....
335
0
from typing import Optional, Union import torch from torch import nn from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss from ...activations import ACTaFN from ...modeling_outputs import BaseModelOutputWithPoolingAndNoAttention, ImageClassifierOutputWithNoAttention from ...modeling_utils imp...
359
# flake8: noqa # Lint as: python3 _UpperCamelCase = [ '''VerificationMode''', '''Version''', '''disable_progress_bar''', '''enable_progress_bar''', '''is_progress_bar_enabled''', '''experimental''', ] from .info_utils import VerificationMode from .logging import disab...
335
0
from ...configuration_utils import PretrainedConfig _UpperCamelCase = { '''google/tapas-base-finetuned-sqa''': ( '''https://huggingface.co/google/tapas-base-finetuned-sqa/resolve/main/config.json''' ), '''google/tapas-base-finetuned-wtq''': ( '''https://huggingface.c...
360
import functools import operator from ...configuration_utils import PretrainedConfig from ...utils import logging _UpperCamelCase = logging.get_logger(__name__) _UpperCamelCase = { '''asapp/sew-d-tiny-100k''': '''https://huggingface.co/asapp/sew-d-tiny-100k/resolve/main/confi...
335
0
import collections import inspect import unittest from transformers import SwinvaConfig from transformers.testing_utils import require_torch, require_vision, slow, torch_device from transformers.utils import cached_property, is_torch_available, is_vision_available from ...test_configuration_common import ...
361
import argparse import os from pathlib import Path from typing import Dict import tensorflow as tf import torch from tqdm import tqdm from transformers import PegasusConfig, PegasusForConditionalGeneration, PegasusTokenizer from transformers.models.pegasus.configuration_pegasus import DEFAULTS, task_spe...
335
0
"""simple docstring""" def UpperCamelCase_( snake_case__: int , snake_case__: int ) -> str: if number < 0 or shift_amount < 0: raise ValueError('both inputs must be positive integers' ) UpperCAmelCase__ = str(bin(snake_case__ ) ) binary_numbe...
362
from __future__ import annotations import os import tempfile import unittest from transformers import ConvBertConfig, is_tf_available from transformers.testing_utils import require_tf, slow from ...test_configuration_common import ConfigTester from ...test_modeling_tf_common import TFModelTesterMixin, ...
335
0
import sacrebleu as scb from packaging import version from sacrebleu import CHRF import datasets _UpperCamelCase = '''\ @inproceedings{popovic-2015-chrf, title = "chr{F}: character n-gram {F}-score for automatic {MT} evaluation", author = "Popovi{\'c}, Maja", booktitle = "Proceeding...
363
from collections import defaultdict from typing import Optional from ..image_utils import load_image from ..utils import ( add_end_docstrings, is_torch_available, logging, requires_backends, ) from .base import PIPELINE_INIT_ARGS, ChunkPipeline if is_torch_available(): import torch...
335
0
from ...configuration_utils import PretrainedConfig from ...utils import logging _UpperCamelCase = logging.get_logger(__name__) _UpperCamelCase = { '''microsoft/swinv2-tiny-patch4-window8-256''': ( '''https://huggingface.co/microsoft/swinv2-tiny-patch4-window8-256/resolv...
364
from dataclasses import dataclass, field from typing import Optional @dataclass class lowercase : '''simple docstring''' __SCREAMING_SNAKE_CASE = field( default="""codeparrot/codeparrot""" , metadata={"""help""": """Model name or path of model to be trained."""} )...
335
0
from collections import OrderedDict from typing import Mapping from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfig from ...utils import logging _UpperCamelCase = logging.get_logger(__name__) _UpperCamelCase = { '''facebook/xmod-base''': '''http...
365
import unittest import numpy as np from transformers import RobertaConfig, is_flax_available from transformers.testing_utils import require_flax, slow from ...test_modeling_flax_common import FlaxModelTesterMixin, floats_tensor, ids_tensor, random_attention_mask if is_flax_available(): from transfo...
335
0
import json import os import torch from diffusers import UNetaDModel os.makedirs('''hub/hopper-medium-v2/unet/hor32''', exist_ok=True) os.makedirs('''hub/hopper-medium-v2/unet/hor128''', exist_ok=True) os.makedirs('''hub/hopper-medium-v2/value_function''', exist_ok=True) def UpperCamelCase_( ...
366
import warnings from ...utils import logging from .image_processing_mobilevit import MobileViTImageProcessor _UpperCamelCase = logging.get_logger(__name__) class lowercase ( _UpperCamelCase ): '''simple docstring''' def __init__(self , *__a , **__a ) -> None: ...
335
0
def UpperCamelCase_( snake_case__: int , snake_case__: int ) -> int: while b: UpperCAmelCase__ , UpperCAmelCase__ = b, a % b return a def UpperCamelCase_( snake_case__: int , snake_case__: int ) -> int: return a if b == 0 else euclidean_gcd_...
367
from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available _UpperCamelCase = { '''configuration_pegasus_x''': ['''PEGASUS_X_PRETRAINED_CONFIG_ARCHIVE_MAP''', '''PegasusXConfig'''], } try: if not is_torch_available(): raise O...
335
0
import inspect import unittest from transformers import BitConfig from transformers.testing_utils import require_torch, require_vision, slow, torch_device from transformers.utils import cached_property, is_torch_available, is_vision_available from ...test_backbone_common import BackboneTesterMixin from .....
368
import os import tempfile import unittest from pathlib import Path from transformers import AutoConfig, is_tf_available from transformers.testing_utils import require_tf if is_tf_available(): import tensorflow as tf from transformers import TensorFlowBenchmark, TensorFlowBenchmarkArguments ...
335
0
import os import zipfile import requests from get_ci_error_statistics import download_artifact, get_artifacts_links def UpperCamelCase_( snake_case__: List[str] , snake_case__: List[Any]=7 ) -> Union[str, Any]: UpperCAmelCase__ = None if token is not None: UpperCAmel...
369
from .constants import ( MODEL_NAME, OPTIMIZER_NAME, RNG_STATE_NAME, SAFE_WEIGHTS_INDEX_NAME, SAFE_WEIGHTS_NAME, SCALER_NAME, SCHEDULER_NAME, TORCH_LAUNCH_PARAMS, WEIGHTS_INDEX_NAME, WEIGHTS_NAME, ) from .dataclasses import ( BnbQuantizationConfig, Compu...
335
0
class lowercase : '''simple docstring''' def __init__(self , __a ) -> Tuple: """simple docstring""" UpperCAmelCase__ = val UpperCAmelCase__ = None UpperCAmelCase__ = None def UpperCamelCase__ (self , __...
370
import torch from torch import nn from ...configuration_utils import ConfigMixin, register_to_config from ...models import ModelMixin class lowercase ( _UpperCamelCase , _UpperCamelCase ): '''simple docstring''' @register_to_config def __init__(self , *, __a = 4 , __a ...
335
0
import inspect import unittest from transformers import YolosConfig from transformers.testing_utils import require_torch, require_vision, slow, torch_device from transformers.utils import cached_property, is_torch_available, is_vision_available from ...test_configuration_common import ConfigTester from .....
371
import json import os import unittest from transformers.models.biogpt.tokenization_biogpt import VOCAB_FILES_NAMES, BioGptTokenizer from transformers.testing_utils import slow from ...test_tokenization_common import TokenizerTesterMixin class lowercase ( _UpperCamelCase , unittest.TestC...
335
0
import math import torch from torch import nn from ..configuration_utils import ConfigMixin, register_to_config from .attention_processor import Attention from .embeddings import get_timestep_embedding from .modeling_utils import ModelMixin class lowercase ( _UpperCamelCase , _UpperCam...
350
class lowercase : # Public class to implement a graph '''simple docstring''' def __init__(self , __a , __a , __a ) -> None: """simple docstring""" UpperCAmelCase__ = row UpperCAmelCase__ = col UpperCAmelCase__ = graph ...
335
0
import requests from bsa import BeautifulSoup def UpperCamelCase_( snake_case__: str = "AAPL" ) -> str: UpperCAmelCase__ = f"https://in.finance.yahoo.com/quote/{symbol}?s={symbol}" UpperCAmelCase__ = BeautifulSoup(requests.get(snake_case__ ).text , 'html.pa...
351
from multiprocessing import Lock, Pipe, Process # lock used to ensure that two processes do not access a pipe at the same time _UpperCamelCase = Lock() def UpperCamelCase_( snake_case__: Optional[Any] , snake_case__: Optional[int] , snake_case__: Tuple , snake_case__: Tuple ...
335
0
import copy import os from typing import TYPE_CHECKING, List, Union if TYPE_CHECKING: pass from ...configuration_utils import PretrainedConfig from ...utils import logging _UpperCamelCase = logging.get_logger(__name__) _UpperCamelCase = { '''kakaobrain/align-base''':...
352
import copy import os import cva import numpy as np from matplotlib import pyplot as plt class lowercase : '''simple docstring''' def __init__(self ) -> str: """simple docstring""" UpperCAmelCase__ = '' UpperCAmelCase__ = ...
335
0
import argparse import torch # Step 1. clone https://github.com/microsoft/unilm # Step 2. git checkout to https://github.com/microsoft/unilm/commit/b94ec76c36f02fb2b0bf0dcb0b8554a2185173cd # Step 3. cd unilm # Step 4. ln -s $(realpath wavlm/modules.py) ./ # create simlink # import classes from unilm.wav...
353
import collections import inspect import unittest from transformers import SwinvaConfig from transformers.testing_utils import require_torch, require_vision, slow, torch_device from transformers.utils import cached_property, is_torch_available, is_vision_available from ...test_configuration_common import ...
335
0
# Copyright 2023 The HuggingFace Inc. team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless requ...
354
from collections import deque def UpperCamelCase_( snake_case__: Tuple ) -> Tuple: UpperCAmelCase__ = len(snake_case__ ) UpperCAmelCase__ = deque() UpperCAmelCase__ = [False for _ in range(snake_case__ )] UpperCAmelCase__ = [-1...
335
0
# flake8: noqa # Lint as: python3 _UpperCamelCase = [ '''VerificationMode''', '''Version''', '''disable_progress_bar''', '''enable_progress_bar''', '''is_progress_bar_enabled''', '''experimental''', ] from .info_utils import VerificationMode from .logging import disab...
355
from ...configuration_utils import PretrainedConfig _UpperCamelCase = { '''google/tapas-base-finetuned-sqa''': ( '''https://huggingface.co/google/tapas-base-finetuned-sqa/resolve/main/config.json''' ), '''google/tapas-base-finetuned-wtq''': ( '''https://huggingface.c...
335
0
import unittest import numpy as np from transformers import RoFormerConfig, is_flax_available from transformers.testing_utils import require_flax, slow from ...test_modeling_flax_common import FlaxModelTesterMixin, ids_tensor, random_attention_mask if is_flax_available(): import jax.numpy as jn...
356
from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tokenizers_available, is_torch_available _UpperCamelCase = { '''configuration_squeezebert''': [ '''SQUEEZEBERT_PRETRAINED_CONFIG_ARCHIVE_MAP''', '''SqueezeBertConfig''', ...
335
0
from math import sqrt def UpperCamelCase_( snake_case__: int ) -> bool: assert isinstance(snake_case__ , snake_case__ ) and ( number >= 0 ), "'number' must been an int and positive" UpperCAmelCase__ = True # 0 and 1 are none primes. if number <= 1: Up...
357
import argparse import gdown import numpy as np import torch from huggingface_hub import hf_hub_download from transformers import ( CLIPTokenizer, CLIPTokenizerFast, VideoMAEImageProcessor, XCLIPConfig, XCLIPModel, XCLIPProcessor, XCLIPTextConfig, XCLIPVisionConfig, ...
335
0
from math import pi def UpperCamelCase_( snake_case__: int , snake_case__: int ) -> float: return 2 * pi * radius * (angle / 3_60) if __name__ == "__main__": print(arc_length(90, 10))
358
import argparse import OmegaConf import torch from diffusers import DDIMScheduler, LDMPipeline, UNetLDMModel, VQModel def UpperCamelCase_( snake_case__: Optional[int] , snake_case__: List[Any] , snake_case__: Union[str, Any] ) -> Tuple: UpperCAmelCase__ = OmegaConf....
335
0
from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available _UpperCamelCase = { '''configuration_pegasus_x''': ['''PEGASUS_X_PRETRAINED_CONFIG_ARCHIVE_MAP''', '''PegasusXConfig'''], } try: if not is_torch_available(): raise O...
359
# flake8: noqa # Lint as: python3 _UpperCamelCase = [ '''VerificationMode''', '''Version''', '''disable_progress_bar''', '''enable_progress_bar''', '''is_progress_bar_enabled''', '''experimental''', ] from .info_utils import VerificationMode from .logging import disab...
335
0
import os def UpperCamelCase_( snake_case__: str = "matrix.txt" ) -> int: with open(os.path.join(os.path.dirname(snake_case__ ) , snake_case__ ) ) as in_file: UpperCAmelCase__ = in_file.read() UpperCAmelCase__ = [[int(snake_case__ ) fo...
360
import functools import operator from ...configuration_utils import PretrainedConfig from ...utils import logging _UpperCamelCase = logging.get_logger(__name__) _UpperCamelCase = { '''asapp/sew-d-tiny-100k''': '''https://huggingface.co/asapp/sew-d-tiny-100k/resolve/main/confi...
335
0
import copy import os import cva import numpy as np from matplotlib import pyplot as plt class lowercase : '''simple docstring''' def __init__(self ) -> str: """simple docstring""" UpperCAmelCase__ = '' UpperCAmelCase__ = ...
361
import argparse import os from pathlib import Path from typing import Dict import tensorflow as tf import torch from tqdm import tqdm from transformers import PegasusConfig, PegasusForConditionalGeneration, PegasusTokenizer from transformers.models.pegasus.configuration_pegasus import DEFAULTS, task_spe...
335
0
"""simple docstring""" import collections import json import os import re from typing import TYPE_CHECKING, List, Optional, Tuple import numpy as np from ...tokenization_utils_fast import PreTrainedTokenizer from ...utils import logging if TYPE_CHECKING: from transformers.pipelines.conver...
362
from __future__ import annotations import os import tempfile import unittest from transformers import ConvBertConfig, is_tf_available from transformers.testing_utils import require_tf, slow from ...test_configuration_common import ConfigTester from ...test_modeling_tf_common import TFModelTesterMixin, ...
335
0
from typing import Mapping from ...configuration_utils import PretrainedConfig from ...onnx import OnnxSeqaSeqConfigWithPast from ...utils import logging _UpperCamelCase = logging.get_logger(__name__) _UpperCamelCase = { '''google/umt5-small''': '''https://huggingface.co/goog...
363
from collections import defaultdict from typing import Optional from ..image_utils import load_image from ..utils import ( add_end_docstrings, is_torch_available, logging, requires_backends, ) from .base import PIPELINE_INIT_ARGS, ChunkPipeline if is_torch_available(): import torch...
335
0
import unittest import numpy as np from transformers import RobertaConfig, is_flax_available from transformers.testing_utils import require_flax, slow from ...test_modeling_flax_common import FlaxModelTesterMixin, floats_tensor, ids_tensor, random_attention_mask if is_flax_available(): from trans...
364
from dataclasses import dataclass, field from typing import Optional @dataclass class lowercase : '''simple docstring''' __SCREAMING_SNAKE_CASE = field( default="""codeparrot/codeparrot""" , metadata={"""help""": """Model name or path of model to be trained."""} )...
335
0
from transformers import HfArgumentParser, TensorFlowBenchmark, TensorFlowBenchmarkArguments def UpperCamelCase_( ) -> Tuple: UpperCAmelCase__ = HfArgumentParser(snake_case__ ) UpperCAmelCase__ = parser.parse_args_into_dataclasses()[0] UpperCAmelCase__ = ...
365
import unittest import numpy as np from transformers import RobertaConfig, is_flax_available from transformers.testing_utils import require_flax, slow from ...test_modeling_flax_common import FlaxModelTesterMixin, floats_tensor, ids_tensor, random_attention_mask if is_flax_available(): from transfo...
335
0
import unittest import torch from diffusers import DDIMScheduler, DDPMScheduler, UNetaDModel from diffusers.training_utils import set_seed from diffusers.utils.testing_utils import slow _UpperCamelCase = False class lowercase ( unittest.TestCase ): '''simple docstring'...
366
import warnings from ...utils import logging from .image_processing_mobilevit import MobileViTImageProcessor _UpperCamelCase = logging.get_logger(__name__) class lowercase ( _UpperCamelCase ): '''simple docstring''' def __init__(self , *__a , **__a ) -> None: ...
335
0
_UpperCamelCase = tuple[float, float, float] _UpperCamelCase = tuple[float, float, float] def UpperCamelCase_( snake_case__: Pointad , snake_case__: Pointad ) -> Vectorad: UpperCAmelCase__ = end_pointa[0] - end_pointa[0] UpperCAmelCase__ = ...
367
from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available _UpperCamelCase = { '''configuration_pegasus_x''': ['''PEGASUS_X_PRETRAINED_CONFIG_ARCHIVE_MAP''', '''PegasusXConfig'''], } try: if not is_torch_available(): raise O...
335
0
import warnings from ...utils import logging from .image_processing_mobilevit import MobileViTImageProcessor _UpperCamelCase = logging.get_logger(__name__) class lowercase ( _UpperCamelCase ): '''simple docstring''' def __init__(self , *__a , **__a ) -> None: ...
368
import os import tempfile import unittest from pathlib import Path from transformers import AutoConfig, is_tf_available from transformers.testing_utils import require_tf if is_tf_available(): import tensorflow as tf from transformers import TensorFlowBenchmark, TensorFlowBenchmarkArguments ...
335
0
import heapq def UpperCamelCase_( snake_case__: dict ) -> set[int]: UpperCAmelCase__ = [] # for each node and his adjacency list add them and the rank of the node to queue # using heapq module the queue will be filled like a Priority Queue # heapq works with a min priority...
369
from .constants import ( MODEL_NAME, OPTIMIZER_NAME, RNG_STATE_NAME, SAFE_WEIGHTS_INDEX_NAME, SAFE_WEIGHTS_NAME, SCALER_NAME, SCHEDULER_NAME, TORCH_LAUNCH_PARAMS, WEIGHTS_INDEX_NAME, WEIGHTS_NAME, ) from .dataclasses import ( BnbQuantizationConfig, Compu...
335
0
import pytest from datasets.parallel import ParallelBackendConfig, parallel_backend from datasets.utils.py_utils import map_nested from .utils import require_dill_gt_0_3_2, require_joblibspark, require_not_windows def UpperCamelCase_( snake_case__: int ) -> List[str]: # picklable for mu...
370
import torch from torch import nn from ...configuration_utils import ConfigMixin, register_to_config from ...models import ModelMixin class lowercase ( _UpperCamelCase , _UpperCamelCase ): '''simple docstring''' @register_to_config def __init__(self , *, __a = 4 , __a ...
335
0
def UpperCamelCase_( snake_case__: str , snake_case__: str ) -> int: if len(snake_case__ ) != len(snake_case__ ): raise ValueError('String lengths must match!' ) UpperCAmelCase__ = 0 for chara, chara in zip(snake_case__ , snak...
371
import json import os import unittest from transformers.models.biogpt.tokenization_biogpt import VOCAB_FILES_NAMES, BioGptTokenizer from transformers.testing_utils import slow from ...test_tokenization_common import TokenizerTesterMixin class lowercase ( _UpperCamelCase , unittest.TestC...
335
0
from typing import List, Optional, Union from ...configuration_utils import PretrainedConfig from ...utils import logging _UpperCamelCase = logging.get_logger(__name__) _UpperCamelCase = { '''huggingface/informer-tourism-monthly''': ( '''https://huggingface.co/hugging...
350
class lowercase : # Public class to implement a graph '''simple docstring''' def __init__(self , __a , __a , __a ) -> None: """simple docstring""" UpperCAmelCase__ = row UpperCAmelCase__ = col UpperCAmelCase__ = graph ...
335
0
# Copyright 2023 The HuggingFace Inc. team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless requ...
351
from multiprocessing import Lock, Pipe, Process # lock used to ensure that two processes do not access a pipe at the same time _UpperCamelCase = Lock() def UpperCamelCase_( snake_case__: Optional[Any] , snake_case__: Optional[int] , snake_case__: Tuple , snake_case__: Tuple ...
335
0
def UpperCamelCase_( snake_case__: list ) -> float: UpperCAmelCase__ = 0 while len(snake_case__ ) > 1: UpperCAmelCase__ = 0 # Consider two files with minimum cost to be merged for _ in range(2 ): UpperCAmelCase__ = files.index(min(...
352
import copy import os import cva import numpy as np from matplotlib import pyplot as plt class lowercase : '''simple docstring''' def __init__(self ) -> str: """simple docstring""" UpperCAmelCase__ = '' UpperCAmelCase__ = ...
335
0
import torch from torch import nn class lowercase ( nn.Module ): '''simple docstring''' def __init__(self , __a , __a , __a , __a , __a=1 , __a=False ) -> Optional[Any]: """simple docstring""" super().__init__() UpperCAmelCase__ = n_toke...
353
import collections import inspect import unittest from transformers import SwinvaConfig from transformers.testing_utils import require_torch, require_vision, slow, torch_device from transformers.utils import cached_property, is_torch_available, is_vision_available from ...test_configuration_common import ...
335
0
from packaging import version from .import_utils import is_accelerate_available if is_accelerate_available(): import accelerate def UpperCamelCase_( snake_case__: Optional[int] ) -> Dict: if not is_accelerate_available(): return method UpperCAmelCase__ = version.par...
354
from collections import deque def UpperCamelCase_( snake_case__: Tuple ) -> Tuple: UpperCAmelCase__ = len(snake_case__ ) UpperCAmelCase__ = deque() UpperCAmelCase__ = [False for _ in range(snake_case__ )] UpperCAmelCase__ = [-1...
335
0
def UpperCamelCase_( snake_case__: int = 2_00 ) -> int: UpperCAmelCase__ = [1, 2, 5, 10, 20, 50, 1_00, 2_00] UpperCAmelCase__ = [0] * (pence + 1) UpperCAmelCase__ = 1 # base case: 1 way to make 0 pence for coin in coins: for i in range(snake_case...
355
from ...configuration_utils import PretrainedConfig _UpperCamelCase = { '''google/tapas-base-finetuned-sqa''': ( '''https://huggingface.co/google/tapas-base-finetuned-sqa/resolve/main/config.json''' ), '''google/tapas-base-finetuned-wtq''': ( '''https://huggingface.c...
335
0
import argparse import glob import logging import os import time from argparse import Namespace import numpy as np import torch from lightning_base import BaseTransformer, add_generic_args, generic_train from torch.utils.data import DataLoader, TensorDataset from transformers import glue_compute_m...
356
from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tokenizers_available, is_torch_available _UpperCamelCase = { '''configuration_squeezebert''': [ '''SQUEEZEBERT_PRETRAINED_CONFIG_ARCHIVE_MAP''', '''SqueezeBertConfig''', ...
335
0
import unittest from transformers import GPTSwaTokenizer from transformers.testing_utils import get_tests_dir, require_sentencepiece, require_tokenizers, slow from ...test_tokenization_common import TokenizerTesterMixin _UpperCamelCase = get_tests_dir('''fixtures/test_sentencepiece_with_bytef...
357
import argparse import gdown import numpy as np import torch from huggingface_hub import hf_hub_download from transformers import ( CLIPTokenizer, CLIPTokenizerFast, VideoMAEImageProcessor, XCLIPConfig, XCLIPModel, XCLIPProcessor, XCLIPTextConfig, XCLIPVisionConfig, ...
335
0
import argparse import json import os import sys import tempfile import unittest from argparse import Namespace from dataclasses import dataclass, field from enum import Enum from pathlib import Path from typing import List, Literal, Optional import yaml from transformers import HfArgumentParser, Tr...
358
import argparse import OmegaConf import torch from diffusers import DDIMScheduler, LDMPipeline, UNetLDMModel, VQModel def UpperCamelCase_( snake_case__: Optional[int] , snake_case__: List[Any] , snake_case__: Union[str, Any] ) -> Tuple: UpperCAmelCase__ = OmegaConf....
335
0
import argparse import torch from transformers import LxmertConfig, LxmertForPreTraining, load_tf_weights_in_lxmert from transformers.utils import logging logging.set_verbosity_info() def UpperCamelCase_( snake_case__: Union[str, Any] , snake_case__: int , snake_case__: Dict )...
359
# flake8: noqa # Lint as: python3 _UpperCamelCase = [ '''VerificationMode''', '''Version''', '''disable_progress_bar''', '''enable_progress_bar''', '''is_progress_bar_enabled''', '''experimental''', ] from .info_utils import VerificationMode from .logging import disab...
335
0
import argparse import torch from transformers import BlenderbotConfig, BlenderbotForConditionalGeneration from transformers.utils import logging logging.set_verbosity_info() _UpperCamelCase = logging.get_logger(__name__) _UpperCamelCase = [ ['''attention''', '''attn'''], ...
360
import functools import operator from ...configuration_utils import PretrainedConfig from ...utils import logging _UpperCamelCase = logging.get_logger(__name__) _UpperCamelCase = { '''asapp/sew-d-tiny-100k''': '''https://huggingface.co/asapp/sew-d-tiny-100k/resolve/main/confi...
335
0
def UpperCamelCase_( snake_case__: list[list[int]] , snake_case__: int , snake_case__: int , snake_case__: set ) -> int: UpperCAmelCase__ , UpperCAmelCase__ = len(snake_case__ ), len(grid[0] ) if ( min(snake_case__ , snake_case__ ) < 0 ...
361
import argparse import os from pathlib import Path from typing import Dict import tensorflow as tf import torch from tqdm import tqdm from transformers import PegasusConfig, PegasusForConditionalGeneration, PegasusTokenizer from transformers.models.pegasus.configuration_pegasus import DEFAULTS, task_spe...
335
0
"""simple docstring""" import argparse from pathlib import Path from transformers import AutoConfig, AutoTokenizer, RagConfig, RagSequenceForGeneration, RagTokenForGeneration def UpperCamelCase_( snake_case__: str , snake_case__: str , snake_case__: str , snake_case__: Path , ...
362
from __future__ import annotations import os import tempfile import unittest from transformers import ConvBertConfig, is_tf_available from transformers.testing_utils import require_tf, slow from ...test_configuration_common import ConfigTester from ...test_modeling_tf_common import TFModelTesterMixin, ...
335
0
import os import random import sys from . import cryptomath_module as cryptomath from . import rabin_miller _UpperCamelCase = 3 def UpperCamelCase_( snake_case__: int ) -> int: print('Generating primitive root of p' ) while True: UpperCAmelCase__ = r...
363
from collections import defaultdict from typing import Optional from ..image_utils import load_image from ..utils import ( add_end_docstrings, is_torch_available, logging, requires_backends, ) from .base import PIPELINE_INIT_ARGS, ChunkPipeline if is_torch_available(): import torch...
335
0
from __future__ import annotations from fractions import Fraction def UpperCamelCase_( snake_case__: int , snake_case__: int ) -> bool: return ( num != den and num % 10 == den // 10 and (num // 10) / (den % 10) == num / den ) def UpperCamelCase_( snake_case__: int ...
364
from dataclasses import dataclass, field from typing import Optional @dataclass class lowercase : '''simple docstring''' __SCREAMING_SNAKE_CASE = field( default="""codeparrot/codeparrot""" , metadata={"""help""": """Model name or path of model to be trained."""} )...
335
0
import math import os import unittest from transformers import MegatronBertConfig, is_torch_available from transformers.models.auto import get_values from transformers.testing_utils import require_sentencepiece, require_tokenizers, require_torch, slow, torch_device from ...test_configuration_common import...
365
import unittest import numpy as np from transformers import RobertaConfig, is_flax_available from transformers.testing_utils import require_flax, slow from ...test_modeling_flax_common import FlaxModelTesterMixin, floats_tensor, ids_tensor, random_attention_mask if is_flax_available(): from transfo...
335
0
def UpperCamelCase_( snake_case__: int ) -> int: """simple docstring""" UpperCAmelCase__ = 1 for i in range(1 , num + 1 ): fact *= i return fact def UpperCamelCase_( snake_case__: int ) -> int: """simple docst...
366
import warnings from ...utils import logging from .image_processing_mobilevit import MobileViTImageProcessor _UpperCamelCase = logging.get_logger(__name__) class lowercase ( _UpperCamelCase ): '''simple docstring''' def __init__(self , *__a , **__a ) -> None: ...
335
0
import unittest import numpy as np from transformers.testing_utils import require_torch, require_vision from transformers.utils import is_torch_available, is_vision_available from ...test_image_processing_common import ImageProcessingSavingTestMixin, prepare_image_inputs if is_torch_available(): im...
367
from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available _UpperCamelCase = { '''configuration_pegasus_x''': ['''PEGASUS_X_PRETRAINED_CONFIG_ARCHIVE_MAP''', '''PegasusXConfig'''], } try: if not is_torch_available(): raise O...
335
0
from datetime import datetime import matplotlib.pyplot as plt import torch def UpperCamelCase_( snake_case__: Optional[int] ) -> Dict: for param in module.parameters(): UpperCAmelCase__ = False def UpperCamelCase_( ) -> List[str]: UpperCAmelCase__ =...
368
import os import tempfile import unittest from pathlib import Path from transformers import AutoConfig, is_tf_available from transformers.testing_utils import require_tf if is_tf_available(): import tensorflow as tf from transformers import TensorFlowBenchmark, TensorFlowBenchmarkArguments ...
335
0
import math import time from typing import Dict, List, Optional from torch.utils.data import Dataset from transformers import SeqaSeqTrainer, is_torch_tpu_available from transformers.trainer_utils import PredictionOutput, speed_metrics if is_torch_tpu_available(check_device=False): import torch_xla....
369
from .constants import ( MODEL_NAME, OPTIMIZER_NAME, RNG_STATE_NAME, SAFE_WEIGHTS_INDEX_NAME, SAFE_WEIGHTS_NAME, SCALER_NAME, SCHEDULER_NAME, TORCH_LAUNCH_PARAMS, WEIGHTS_INDEX_NAME, WEIGHTS_NAME, ) from .dataclasses import ( BnbQuantizationConfig, Compu...
335
0
from typing import TYPE_CHECKING from ...file_utils import _LazyModule, is_tokenizers_available, is_torch_available, is_vision_available from ...utils import OptionalDependencyNotAvailable _UpperCamelCase = {'''configuration_dpt''': ['''DPT_PRETRAINED_CONFIG_ARCHIVE_MAP''', '''DPTConfig''']} try: ...
370
import torch from torch import nn from ...configuration_utils import ConfigMixin, register_to_config from ...models import ModelMixin class lowercase ( _UpperCamelCase , _UpperCamelCase ): '''simple docstring''' @register_to_config def __init__(self , *, __a = 4 , __a ...
335
0
from collections import deque def UpperCamelCase_( snake_case__: Tuple ) -> Tuple: UpperCAmelCase__ = len(snake_case__ ) UpperCAmelCase__ = deque() UpperCAmelCase__ = [False for _ in range(snake_case__ )] UpperCAmelCase__ ...
371
import json import os import unittest from transformers.models.biogpt.tokenization_biogpt import VOCAB_FILES_NAMES, BioGptTokenizer from transformers.testing_utils import slow from ...test_tokenization_common import TokenizerTesterMixin class lowercase ( _UpperCamelCase , unittest.TestC...
335
0
import numpy as np import datasets _UpperCamelCase = ''' Compute the Mahalanobis Distance Mahalonobis distance is the distance between a point and a distribution. And not between two distinct points. It is effectively a multivariate equivalent of the Euclidean distance. It was introduced by Prof...
350
class lowercase : # Public class to implement a graph '''simple docstring''' def __init__(self , __a , __a , __a ) -> None: """simple docstring""" UpperCAmelCase__ = row UpperCAmelCase__ = col UpperCAmelCase__ = graph ...
335
0
# Usage: # ./gen-card-allenai-wmt16.py import os from pathlib import Path def UpperCamelCase_( snake_case__: List[Any] , snake_case__: List[str] , snake_case__: Dict , snake_case__: Optional[int] ) -> Optional[int]: UpperCAmelCase__ = { 'en': 'Machine learni...
351
from multiprocessing import Lock, Pipe, Process # lock used to ensure that two processes do not access a pipe at the same time _UpperCamelCase = Lock() def UpperCamelCase_( snake_case__: Optional[Any] , snake_case__: Optional[int] , snake_case__: Tuple , snake_case__: Tuple ...
335
0
from __future__ import annotations import queue class lowercase : '''simple docstring''' def __init__(self , __a ) -> int: """simple docstring""" UpperCAmelCase__ = data UpperCAmelCase__ = None UpperCAmelCase__ = ...
352
import copy import os import cva import numpy as np from matplotlib import pyplot as plt class lowercase : '''simple docstring''' def __init__(self ) -> str: """simple docstring""" UpperCAmelCase__ = '' UpperCAmelCase__ = ...
335
0
import torch from diffusers import DiffusionPipeline class lowercase ( _UpperCamelCase ): '''simple docstring''' def __init__(self , __a , __a ) -> List[Any]: """simple docstring""" super().__init__() self.register_modules(unet=__a , scheduler=__a...
353
import collections import inspect import unittest from transformers import SwinvaConfig from transformers.testing_utils import require_torch, require_vision, slow, torch_device from transformers.utils import cached_property, is_torch_available, is_vision_available from ...test_configuration_common import ...
335
0
from __future__ import annotations _UpperCamelCase = [] def UpperCamelCase_( snake_case__: list[list[int]] , snake_case__: int , snake_case__: int ) -> bool: for i in range(len(snake_case__ ) ): if board[row][i] == 1: return False for i in range(len(...
354
from collections import deque def UpperCamelCase_( snake_case__: Tuple ) -> Tuple: UpperCAmelCase__ = len(snake_case__ ) UpperCAmelCase__ = deque() UpperCAmelCase__ = [False for _ in range(snake_case__ )] UpperCAmelCase__ = [-1...
335
0
from __future__ import annotations import pandas as pd def UpperCamelCase_( snake_case__: list[int] , snake_case__: list[int] , snake_case__: int ) -> list[int]: UpperCAmelCase__ = [0] * no_of_processes UpperCAmelCase__ = [0] * no_of_processes # Copy the...
355
from ...configuration_utils import PretrainedConfig _UpperCamelCase = { '''google/tapas-base-finetuned-sqa''': ( '''https://huggingface.co/google/tapas-base-finetuned-sqa/resolve/main/config.json''' ), '''google/tapas-base-finetuned-wtq''': ( '''https://huggingface.c...
335
0
import fire from transformers import AutoConfig, AutoModelForSeqaSeqLM, AutoTokenizer def UpperCamelCase_( snake_case__: str , snake_case__: str , **snake_case__: Tuple ) -> Optional[Any]: UpperCAmelCase__ = AutoConfig.from_pretrained(snake_case__ , **snake_case_...
356
from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tokenizers_available, is_torch_available _UpperCamelCase = { '''configuration_squeezebert''': [ '''SQUEEZEBERT_PRETRAINED_CONFIG_ARCHIVE_MAP''', '''SqueezeBertConfig''', ...
335
0
# Author: OMKAR PATHAK, Nwachukwu Chidiebere # Use a Python dictionary to construct the graph. from __future__ import annotations from pprint import pformat from typing import Generic, TypeVar _UpperCamelCase = TypeVar('''T''') class lowercase ( Generic[T] ): '''simple d...
357
import argparse import gdown import numpy as np import torch from huggingface_hub import hf_hub_download from transformers import ( CLIPTokenizer, CLIPTokenizerFast, VideoMAEImageProcessor, XCLIPConfig, XCLIPModel, XCLIPProcessor, XCLIPTextConfig, XCLIPVisionConfig, ...
335
0
import unittest from datasets import load_dataset from transformers import BloomTokenizerFast from transformers.testing_utils import require_tokenizers from ...test_tokenization_common import TokenizerTesterMixin @require_tokenizers class lowercase ( _UpperCamelCase , unittest.TestCas...
358
import argparse import OmegaConf import torch from diffusers import DDIMScheduler, LDMPipeline, UNetLDMModel, VQModel def UpperCamelCase_( snake_case__: Optional[int] , snake_case__: List[Any] , snake_case__: Union[str, Any] ) -> Tuple: UpperCAmelCase__ = OmegaConf....
335
0
import os import numpy import onnx def UpperCamelCase_( snake_case__: str , snake_case__: Dict ) -> str: UpperCAmelCase__ = a.name UpperCAmelCase__ = b.name UpperCAmelCase__ = '' UpperCAmelCase__ = '' UpperCAmelCase__ ...
359
# flake8: noqa # Lint as: python3 _UpperCamelCase = [ '''VerificationMode''', '''Version''', '''disable_progress_bar''', '''enable_progress_bar''', '''is_progress_bar_enabled''', '''experimental''', ] from .info_utils import VerificationMode from .logging import disab...
335
0
import os _UpperCamelCase = {'''I''': 1, '''V''': 5, '''X''': 10, '''L''': 50, '''C''': 100, '''D''': 500, '''M''': 1000} def UpperCamelCase_( snake_case__: str ) -> int: UpperCAmelCase__ = 0 UpperCAmelCase__ = 0 while index < len(snake_case__ ...
360
import functools import operator from ...configuration_utils import PretrainedConfig from ...utils import logging _UpperCamelCase = logging.get_logger(__name__) _UpperCamelCase = { '''asapp/sew-d-tiny-100k''': '''https://huggingface.co/asapp/sew-d-tiny-100k/resolve/main/confi...
335
0
import gc import random import unittest import numpy as np import torch from PIL import Image from transformers import CLIPTextConfig, CLIPTextModel, CLIPTokenizer from diffusers import AutoencoderKL, DDIMScheduler, DDPMScheduler, StableDiffusionUpscalePipeline, UNetaDConditionModel from diffusers.utils...
361
import argparse import os from pathlib import Path from typing import Dict import tensorflow as tf import torch from tqdm import tqdm from transformers import PegasusConfig, PegasusForConditionalGeneration, PegasusTokenizer from transformers.models.pegasus.configuration_pegasus import DEFAULTS, task_spe...
335
0
"""simple docstring""" import importlib.metadata from typing import Union from packaging.version import Version, parse from .constants import STR_OPERATION_TO_FUNC _UpperCamelCase = parse(importlib.metadata.version('''torch''')) def UpperCamelCase_( snake_case__: Union[s...
362
from __future__ import annotations import os import tempfile import unittest from transformers import ConvBertConfig, is_tf_available from transformers.testing_utils import require_tf, slow from ...test_configuration_common import ConfigTester from ...test_modeling_tf_common import TFModelTesterMixin, ...
335
0