Dataset columns:

| Column | Type | Range |
| --- | --- | --- |
| `code` | string | lengths 86 – 54.5k |
| `code_codestyle` | int64 | 0 – 371 |
| `style_context` | string | lengths 87 – 49.2k |
| `style_context_codestyle` | int64 | 0 – 349 |
| `label` | int64 | 0 – 1 |
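As a quick illustration, here is a minimal sketch of loading and inspecting rows with this schema using the `datasets` library. It assumes the records sit in a local JSONL file with the five columns above; the file name `train.jsonl` is hypothetical and not part of the original card.

```python
# Minimal sketch, assuming the five-column schema above is stored as
# local JSONL. "train.jsonl" is a hypothetical file name.
from datasets import load_dataset

ds = load_dataset("json", data_files={"train": "train.jsonl"})["train"]

row = ds[0]
# Each record pairs a code string with a style-context string, two
# integer style ids, and a binary label.
print(row["code_codestyle"], row["style_context_codestyle"], row["label"])
print(row["code"][:120])  # "code" lengths range from 86 to ~54.5k characters
```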
Preview rows. Every row shown in the preview has `code_codestyle` = 347, `style_context_codestyle` = 347, and `label` = 1; the `code` and `style_context` strings are truncated by the viewer. Three representative rows:

Row 1
- `code`: from __future__ import annotations from fractions import Fraction from math import gcd, sqrt def __A ( __lowerCamelCase ) -> bool: a = int(number**0.5 ) return number == sq * sq def __A ( __lowerCamelCase , __lowerCamelCase , ...
- `style_context`: import argparse import json import os import tensorstore as ts import torch from flax import serialization from flax.traverse_util import flatten_dict, unflatten_dict from tensorflow.io import gfile from transformers.modeling_utils import dtype_byte_size from transformers.models.switch_transforme...

Row 2
- `code`: __UpperCamelCase : str = tuple[float, float, float] __UpperCamelCase : Union[str, Any] = tuple[float, float, float] def __A ( __lowerCamelCase , __lowerCamelCase ) -> Vectorad: a = end_pointa[0] - end_pointa[0] a = end_pointa[1] ...
- `style_context`: import glob import os import random from string import ascii_lowercase, digits import cva import numpy as np # Parrameters __UpperCamelCase : Union[str, Any] = (720, 1_280) # Height, Width __UpperCamelCase : Any = (0.4, 0.6) # if height or width lower than this scale, drop it. __Upp...

Row 3
- `code`: import unittest import numpy as np from diffusers import LMSDiscreteScheduler, OnnxStableDiffusionInpaintPipeline from diffusers.utils.testing_utils import ( is_onnx_available, load_image, nightly, require_onnxruntime, require_torch_gpu, ) from ..test_pipelines_onnx_common ...
- `style_context`: from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available, is_vision_available __UpperCamelCase : Optional[Any] = { "configuration_mobilenet_v2": [ "MOBILENET_V2_PRETRAINED_CONFIG_ARCHIVE_MAP", "MobileNetV2Config"...

The preview continues with 97 further rows of the same shape (`code_codestyle` = 347, `style_context_codestyle` = 347, `label` = 1), drawn from a pool of viewer-truncated `transformers`, `diffusers`, `datasets`, and algorithm-exercise snippets; many snippets repeat verbatim across rows.