code
stringlengths
86
54.5k
code_codestyle
int64
0
371
style_context
stringlengths
87
49.2k
style_context_codestyle
int64
0
349
label
int64
0
1
import pytest import requests from datasets.utils.file_utils import http_head from .utils import OfflineSimulationMode, RequestWouldHangIndefinitelyError, offline @pytest.mark.integration def lowerCamelCase__ (): with offline(OfflineSimulationMode.CONNECTION_TIMES_OUT): with pytest.raises(_...
361
from __future__ import annotations import unittest from transformers import is_tf_available from transformers.testing_utils import require_sentencepiece, require_tf, require_tokenizers, slow if is_tf_available(): import numpy as np import tensorflow as tf from transformers import TFCamembertMode...
327
0
def lowerCamelCase__ (_UpperCAmelCase , _UpperCAmelCase): return (pointa[0] - pointa[0]) ** 2 + (pointa[1] - pointa[1]) ** 2 def lowerCamelCase__ (_UpperCAmelCase , _UpperCAmelCase=0): return sorted(_UpperCAmelCase , key=lambda _UpperCAmelCase: x[column]) de...
362
from scipy.stats import pearsonr import datasets a_ : Optional[int] = '\nPearson correlation coefficient and p-value for testing non-correlation.\nThe Pearson correlation coefficient measures the linear relationship between two datasets. The calculation of the p-value relies on the assumption t...
327
0
# Lint as: python3 import dataclasses import re from dataclasses import dataclass from functools import total_ordering from typing import Optional, Union a_ : Tuple = re.compile(R'^(?P<major>\d+)' R'\.(?P<minor>\d+)' R'\.(?P<patch>\d+)$') @total_ordering @dataclass class _snake_case :...
363
import unittest from transformers import ( MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING, TF_MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING, TextaTextGenerationPipeline, pipeline, ) from transformers.testing_utils import is_pipeline_test, require_tf, require_torch from transformers.utils import is_torch_availa...
327
0
import argparse import math import os from copy import deepcopy import torch from audio_diffusion.models import DiffusionAttnUnetaD from diffusion import sampling from torch import nn from diffusers import DanceDiffusionPipeline, IPNDMScheduler, UNetaDModel a_ : Tuple = { 'gwf-440k': { ...
364
import os import tempfile import unittest import numpy as np from diffusers.utils import is_flax_available from diffusers.utils.testing_utils import require_flax, slow if is_flax_available(): import jax import jax.numpy as jnp from flax.jax_utils import replicate from flax.training.common_uti...
327
0
from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available a_ : List[str] = { 'configuration_time_series_transformer': [ 'TIME_SERIES_TRANSFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP', 'TimeSeriesTransformerConfig', ], ...
365
import absl # noqa: F401 # Here to have a nice missing dependency error message early on import nltk # noqa: F401 # Here to have a nice missing dependency error message early on import numpy # noqa: F401 # Here to have a nice missing dependency error message early on import six # noqa: F401 # Here to have a ...
327
0
from __future__ import annotations class _snake_case : def __init__( self , a) -> None: SCREAMING_SNAKE_CASE = data SCREAMING_SNAKE_CASE = None SCREAMING_SNAKE_CASE = None def lowerCamelCase__ (_UpperC...
366
import gc import importlib.metadata import tempfile import unittest from packaging import version from transformers import ( AutoModel, AutoModelForCausalLM, AutoModelForSeqaSeqLM, AutoModelForSequenceClassification, AutoTokenizer, BitsAndBytesConfig, pipeline, ) from transformers.t...
327
0
import inspect import unittest class _snake_case ( unittest.TestCase ): def SCREAMING_SNAKE_CASE__ ( self) -> int: try: import diffusers # noqa: F401 except ImportError: assert False def SCREAMING_SNAKE_CASE__ ( self) -...
367
# Copyright 2023 The HuggingFace Team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by ap...
327
0
from __future__ import annotations from math import pi, sqrt def lowerCamelCase__ (_UpperCAmelCase , _UpperCAmelCase): if inductance <= 0: raise ValueError('Inductance cannot be 0 or negative') elif capacitance <= 0: raise ValueError('Capacitance cannot be 0 or negative')...
368
import ast import os import re import shutil import tempfile import unittest from unittest import mock import torch from accelerate.test_utils.examples import compare_against_test from accelerate.test_utils.testing import TempDirTestCase, require_trackers, run_command, slow from accelerate.utils import write_b...
327
0
import math def lowerCamelCase__ (_UpperCAmelCase , _UpperCAmelCase): return math.pow(_UpperCAmelCase , 2) - a def lowerCamelCase__ (_UpperCAmelCase): return 2 * x def lowerCamelCase__ (_UpperCAmelCase): SCREAMING_SNAKE_CASE = 2.0 while star...
369
from __future__ import annotations import inspect import unittest import numpy as np from transformers import ResNetConfig from transformers.testing_utils import require_tf, require_vision, slow from transformers.utils import cached_property, is_tf_available, is_vision_available from ...test_configuration_co...
327
0
def lowerCamelCase__ (_UpperCAmelCase , _UpperCAmelCase , _UpperCAmelCase): if len(_UpperCAmelCase) != len(_UpperCAmelCase): raise ValueError('The length of profit and weight must be same.') if max_weight <= 0: raise ValueError('max_weight must greater than zero.') if ...
370
from math import isqrt def lowerCamelCase__ (_UpperCAmelCase): SCREAMING_SNAKE_CASE = [True] * max_number for i in range(2 , isqrt(max_number - 1) + 1): if is_prime[i]: for j in range(i**2 , _UpperCAmelCase , _UpperCAmelCase): SCREAMI...
327
0
from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_flax_available, is_torch_available a_ : Optional[int] = { 'configuration_longt5': ['LONGT5_PRETRAINED_CONFIG_ARCHIVE_MAP', 'LongT5Config', 'LongT5OnnxConfig'], } try: if not is_torch_a...
371
import baseaa def lowerCamelCase__ (_UpperCAmelCase): return baseaa.aaaencode(string.encode('utf-8')) def lowerCamelCase__ (_UpperCAmelCase): return baseaa.aaadecode(_UpperCAmelCase).decode('utf-8') if __name__ == "__main__": import doctest doctest.testmod()
327
0
import argparse import json import logging import os import sys from unittest.mock import patch from transformers.testing_utils import TestCasePlus, get_gpu_count, slow _UpperCAmelCase = [ os.path.join(os.path.dirname(__file__), dirname) for dirname in [ 'text-classification', 'la...
328
from sklearn.metrics import fa_score, matthews_corrcoef import datasets from .record_evaluation import evaluate as evaluate_record _UpperCAmelCase = '\\n@article{wang2019superglue,\n title={SuperGLUE: A Stickier Benchmark for General-Purpose Language Understanding Systems},\n author={Wang, Alex and Pr...
328
1
def lowerCAmelCase_ ( UpperCamelCase_ = 1000000 ) -> int: UpperCamelCase_ = set(range(3 , UpperCamelCase_ , 2 ) ) primes.add(2 ) for p in range(3 , UpperCamelCase_ , 2 ): if p not in primes: ...
328
from ...configuration_utils import PretrainedConfig from ...utils import logging _UpperCAmelCase = logging.get_logger(__name__) _UpperCAmelCase = { 'alibaba-damo/mgp-str-base': 'https://huggingface.co/alibaba-damo/mgp-str-base/resolve/main/config.json', } class _UpperCamelCase ( lo...
328
1
# Copyright 2023 The HuggingFace Inc. team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required b...
328
import logging import os import sys from dataclasses import dataclass, field from typing import Optional from seqaseq_trainer import SeqaSeqTrainer from seqaseq_training_args import SeqaSeqTrainingArguments import transformers from transformers import ( AutoConfig, AutoModelForSeqaSeqLM, AutoTokeniz...
328
1
import unittest from pathlib import Path from tempfile import TemporaryDirectory from transformers import AutoConfig, TFAutoModel, is_tensorflow_text_available, is_tf_available from transformers.models.bert.tokenization_bert import BertTokenizer from transformers.testing_utils import require_tensorflow_text, req...
328
def lowerCAmelCase_ ( UpperCamelCase_ ) -> list: UpperCamelCase_ = int(UpperCamelCase_ ) if n_element < 1: UpperCamelCase_ = ValueError("a should be a positive number" ) raise my_error UpperCamelCase_ = ...
328
1
from collections import OrderedDict from typing import TYPE_CHECKING, Any, Mapping, Optional, Union from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfig, OnnxSeqaSeqConfigWithPast from ...utils import logging if TYPE_CHECKING: from ...feature_extraction_utils import FeatureEx...
328
import random import unittest import torch from diffusers import IFImgaImgSuperResolutionPipeline from diffusers.utils import floats_tensor from diffusers.utils.import_utils import is_xformers_available from diffusers.utils.testing_utils import skip_mps, torch_device from ..pipeline_params import TEXT_GUIDED_I...
328
1
import doctest import glob import importlib import inspect import os import re from contextlib import contextmanager from functools import wraps from unittest.mock import patch import numpy as np import pytest from absl.testing import parameterized import datasets from datasets import load_metric from .utils i...
328
from __future__ import annotations import json import requests from bsa import BeautifulSoup from fake_useragent import UserAgent _UpperCAmelCase = {'UserAgent': UserAgent().random} def lowerCAmelCase_ ( UpperCamelCase_ ) -> dict: UpperCamelCase_ = script.conte...
328
1
import requests from bsa import BeautifulSoup def lowerCAmelCase_ ( UpperCamelCase_ , UpperCamelCase_ ) -> str: UpperCamelCase_ = BeautifulSoup(requests.get(UpperCamelCase_ , params=UpperCamelCase_ ).content , "html.parser" ) UpperCam...
328
import io import math from typing import Dict, Optional, Union import numpy as np from huggingface_hub import hf_hub_download from ...image_processing_utils import BaseImageProcessor, BatchFeature from ...image_transforms import convert_to_rgb, normalize, to_channel_dimension_format, to_pil_image from ...image_...
328
1
from typing import Dict, List, Optional, Union import numpy as np from ...image_processing_utils import BaseImageProcessor, BatchFeature, get_size_dict from ...image_transforms import ( center_crop, get_resize_output_image_size, normalize, rescale, resize, to_channel_dimension_format, ) ...
328
from typing import Optional, Union import torch from torch import nn from ...configuration_utils import ConfigMixin, register_to_config from ...models.modeling_utils import ModelMixin class _UpperCamelCase ( lowerCAmelCase_ , lowerCAmelCase_ ): @register_to_config def __in...
328
1
from typing import List import jiwer import jiwer.transforms as tr from packaging import version import datasets from datasets.config import PY_VERSION if PY_VERSION < version.parse('3.8'): import importlib_metadata else: import importlib.metadata as importlib_metadata _UpperCAmelCase = '' ...
328
import argparse import json import logging import os import sys from unittest.mock import patch from transformers.testing_utils import TestCasePlus, get_gpu_count, slow _UpperCAmelCase = [ os.path.join(os.path.dirname(__file__), dirname) for dirname in [ 'text-classification', 'la...
328
1
import glob import os import random from string import ascii_lowercase, digits import cva import numpy as np # Parrameters _UpperCAmelCase = (7_2_0, 1_2_8_0) # Height, Width _UpperCAmelCase = (0.4, 0.6) # if height or width lower than this scale, drop it. _UpperCAmelCase = 1 / 1_0_0 _UpperCA...
328
from datetime import datetime import matplotlib.pyplot as plt import torch def lowerCAmelCase_ ( UpperCamelCase_ ) -> int: for param in module.parameters(): UpperCamelCase_ = False def lowerCAmelCase_ ( ) -> Dict: UpperCamelCa...
328
1
import numpy # List of input, output pairs _UpperCAmelCase = ( ((5, 2, 3), 1_5), ((6, 5, 9), 2_5), ((1_1, 1_2, 1_3), 4_1), ((1, 1, 1), 8), ((1_1, 1_2, 1_3), 4_1), ) _UpperCAmelCase = (((5_1_5, 2_2, 1_3), 5_5_5), ((6_1, 3_5, 4_9), 1_5_0)) _UpperCAmelCase = [2, 4, 1, 5] _Upper...
328
import os from shutil import copyfile from typing import Any, Dict, List, Optional, Tuple import sentencepiece as spm from ...tokenization_utils import PreTrainedTokenizer from ...utils import logging _UpperCAmelCase = '▁' _UpperCAmelCase = {'vocab_file': 'spiece.model'} _UpperCAmelCase = ...
328
1
from transformers import BertTokenizer, EncoderDecoderModel, SeqaSeqTrainer, SeqaSeqTrainingArguments from transformers.testing_utils import TestCasePlus, require_torch, slow from transformers.utils import is_datasets_available if is_datasets_available(): import datasets class _UpperCamelCase (...
328
from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tf_available, is_torch_available _UpperCAmelCase = { 'configuration_tapas': ['TAPAS_PRETRAINED_CONFIG_ARCHIVE_MAP', 'TapasConfig'], 'tokenization_tapas': ['TapasTokenizer'], } try: if not i...
328
1
import argparse import json import numpy import torch from transformers.models.xlm.tokenization_xlm import VOCAB_FILES_NAMES from transformers.utils import CONFIG_NAME, WEIGHTS_NAME, logging logging.set_verbosity_info() def lowerCAmelCase_ ( UpperCamelCase_ , UpperCamelCase_ ) -> ...
328
import argparse import json from tqdm import tqdm def lowerCAmelCase_ ( ) -> Tuple: UpperCamelCase_ = argparse.ArgumentParser() # Required parameters parser.add_argument( "--src_path" , type=UpperCamelCase_ , default="biencoder-nq-de...
328
1
# Copyright (c) 2021-, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required b...
328
import requests from bsa import BeautifulSoup def lowerCAmelCase_ ( UpperCamelCase_ , UpperCamelCase_ ) -> str: UpperCamelCase_ = BeautifulSoup(requests.get(UpperCamelCase_ , params=UpperCamelCase_ ).content , "html.parser" ) UpperCam...
328
1
import torch from torch import nn from ...configuration_utils import ConfigMixin, register_to_config from ...models import ModelMixin class _UpperCamelCase ( lowerCAmelCase_ , lowerCAmelCase_ ): @register_to_config def __init__( self: List[str] , *, _SC...
328
import torch from torch import nn from ...configuration_utils import ConfigMixin, register_to_config from ...models import ModelMixin class _UpperCamelCase ( lowerCAmelCase_ , lowerCAmelCase_ ): @register_to_config def __init__( self: List[str] , *, _SC...
328
1
import glob import os import random from string import ascii_lowercase, digits import cva _UpperCAmelCase = '' _UpperCAmelCase = '' _UpperCAmelCase = '' _UpperCAmelCase = 1 # (0 is vertical, 1 is horizontal) def lowerCAmelCase_ ( ) -> None: UpperCamelCase_ ...
328
from functools import lru_cache def lowerCAmelCase_ ( UpperCamelCase_ ) -> set: UpperCamelCase_ = 2 UpperCamelCase_ = set() while i * i <= n: if n % i: i += 1 else: n //= i ...
328
1
import itertools import random import unittest import numpy as np from transformers import ASTFeatureExtractor from transformers.testing_utils import require_torch, require_torchaudio from transformers.utils.import_utils import is_torch_available from ...test_sequence_feature_extraction_common import SequenceF...
328
def lowerCAmelCase_ ( UpperCamelCase_ ) -> int: UpperCamelCase_ = len(UpperCamelCase_ ) UpperCamelCase_ = len(matrix[0] ) UpperCamelCase_ = min(UpperCamelCase_ , UpperCamelCase_ ) for row in range(UpperCamelCase...
328
1
import argparse import logging import os from pathlib import Path from typing import Any, Dict import pytorch_lightning as pl from pytorch_lightning.utilities import rank_zero_info from transformers import ( AdamW, AutoConfig, AutoModel, AutoModelForPreTraining, AutoModelForQuestionAnswering...
328
import math def lowerCAmelCase_ ( UpperCamelCase_ , UpperCamelCase_ ) -> List[str]: if 0 not in (x, y): # We use the relation x^y = y*log10(x), where 10 is the base. return y * math.logaa(UpperCamelCase_ ) else: if x == 0...
328
1
from . import ( albert, align, altclip, audio_spectrogram_transformer, auto, autoformer, bark, bart, barthez, bartpho, beit, bert, bert_generation, bert_japanese, bertweet, big_bird, bigbird_pegasus, biogpt, bit, blenderbot, blend...
328
from typing import List, Optional, Tuple, Union import PIL import torch from torchvision import transforms from diffusers.pipeline_utils import DiffusionPipeline, ImagePipelineOutput from diffusers.schedulers import DDIMScheduler from diffusers.utils import randn_tensor _UpperCAmelCase = transforms.Comp...
328
1
import time import unittest from transformers import is_torch_available from transformers.testing_utils import require_torch, torch_device from ..test_modeling_common import ids_tensor if is_torch_available(): import torch from transformers.generation import ( MaxLengthCriteria, M...
328
import re from filelock import FileLock try: import nltk _UpperCAmelCase = True except (ImportError, ModuleNotFoundError): _UpperCAmelCase = False if NLTK_AVAILABLE: with FileLock('.lock') as lock: nltk.download('punkt', quiet=True) def lowerCAmelCase_ ( U...
328
1
import os import sys import warnings from dataclasses import dataclass, field from io import BytesIO from typing import TYPE_CHECKING, Any, ClassVar, Dict, List, Optional, Union import numpy as np import pyarrow as pa from .. import config from ..download.streaming_download_manager import xopen from ..table imp...
328
import gc import unittest import numpy as np import torch from diffusers import AutoencoderKL, DDIMScheduler, DiTPipeline, DPMSolverMultistepScheduler, TransformeraDModel from diffusers.utils import is_xformers_available, load_numpy, slow, torch_device from diffusers.utils.testing_utils import enable_full_deter...
328
1
from typing import Any class _UpperCamelCase : def __init__( self: Optional[Any] , _SCREAMING_SNAKE_CASE: Any ) -> List[str]: """simple docstring""" UpperCamelCase_ = data UpperCamelCase_ = None class ...
328
import copy import os import cva import numpy as np from matplotlib import pyplot as plt class _UpperCamelCase : def __init__( self: str ) -> Any: """simple docstring""" UpperCamelCase_ = "" UpperCamelCase_ = "...
328
1
def lowerCAmelCase_ ( UpperCamelCase_ ) -> list: UpperCamelCase_ = len(UpperCamelCase_ ) for i in range(1 , UpperCamelCase_ ): UpperCamelCase_ = collection[i] UpperCamelCase_ = 0 UpperCame...
328
from sklearn.metrics import fa_score, matthews_corrcoef import datasets from .record_evaluation import evaluate as evaluate_record _UpperCAmelCase = '\\n@article{wang2019superglue,\n title={SuperGLUE: A Stickier Benchmark for General-Purpose Language Understanding Systems},\n author={Wang, Alex and Pr...
328
1
import re from filelock import FileLock try: import nltk _UpperCAmelCase = True except (ImportError, ModuleNotFoundError): _UpperCAmelCase = False if NLTK_AVAILABLE: with FileLock('.lock') as lock: nltk.download('punkt', quiet=True) def lowerCAmelCase_ ( U...
328
from ...configuration_utils import PretrainedConfig from ...utils import logging _UpperCAmelCase = logging.get_logger(__name__) _UpperCAmelCase = { 'alibaba-damo/mgp-str-base': 'https://huggingface.co/alibaba-damo/mgp-str-base/resolve/main/config.json', } class _UpperCamelCase ( lo...
328
1
import unittest import numpy as np import torch from .utils_summarization import build_mask, compute_token_type_ids, process_story, truncate_or_pad class _UpperCamelCase ( unittest.TestCase ): def lowercase ( self: Union[str, Any] ) -> Any: """simple doc...
328
import logging import os import sys from dataclasses import dataclass, field from typing import Optional from seqaseq_trainer import SeqaSeqTrainer from seqaseq_training_args import SeqaSeqTrainingArguments import transformers from transformers import ( AutoConfig, AutoModelForSeqaSeqLM, AutoTokeniz...
328
1
import json from typing import Iterator, List, Union from tokenizers import AddedToken, Regex, Tokenizer, decoders, normalizers, pre_tokenizers, trainers from tokenizers.implementations.base_tokenizer import BaseTokenizer from tokenizers.models import Unigram from tokenizers.processors import TemplateProcessing ...
328
def lowerCAmelCase_ ( UpperCamelCase_ ) -> list: UpperCamelCase_ = int(UpperCamelCase_ ) if n_element < 1: UpperCamelCase_ = ValueError("a should be a positive number" ) raise my_error UpperCamelCase_ = ...
328
1
import unittest from transformers import AutoTokenizer, FalconConfig, is_torch_available from transformers.testing_utils import require_torch, slow, torch_device from ...generation.test_utils import GenerationTesterMixin from ...test_configuration_common import ConfigTester from ...test_modeling_common import M...
328
import random import unittest import torch from diffusers import IFImgaImgSuperResolutionPipeline from diffusers.utils import floats_tensor from diffusers.utils.import_utils import is_xformers_available from diffusers.utils.testing_utils import skip_mps, torch_device from ..pipeline_params import TEXT_GUIDED_I...
328
1
import gc import random import tempfile import unittest import numpy as np import torch from transformers import CLIPTextConfig, CLIPTextModel, CLIPTokenizer from diffusers import AutoencoderKL, DDIMScheduler, LMSDiscreteScheduler, PNDMScheduler, UNetaDConditionModel from diffusers.pipelines.stable_diffusion_sa...
328
from __future__ import annotations import json import requests from bsa import BeautifulSoup from fake_useragent import UserAgent _UpperCAmelCase = {'UserAgent': UserAgent().random} def lowerCAmelCase_ ( UpperCamelCase_ ) -> dict: UpperCamelCase_ = script.conte...
328
1
import argparse import tensorflow as tf import torch from transformers import BertConfig, BertForMaskedLM from transformers.models.bert.modeling_bert import ( BertIntermediate, BertLayer, BertOutput, BertPooler, BertSelfAttention, BertSelfOutput, ) from transformers.utils import logging ...
328
import io import math from typing import Dict, Optional, Union import numpy as np from huggingface_hub import hf_hub_download from ...image_processing_utils import BaseImageProcessor, BatchFeature from ...image_transforms import convert_to_rgb, normalize, to_channel_dimension_format, to_pil_image from ...image_...
328
1
from __future__ import annotations import time from math import sqrt # 1 for manhattan, 0 for euclidean _UpperCAmelCase = 0 _UpperCAmelCase = [ [0, 0, 0, 0, 0, 0, 0], [0, 1, 0, 0, 0, 0, 0], # 0 are free path whereas 1's are obstacles [0, 0, 0, 0, 0, 0, 0], [0, 0, 1, 0, 0, 0, 0], ...
328
from typing import Optional, Union import torch from torch import nn from ...configuration_utils import ConfigMixin, register_to_config from ...models.modeling_utils import ModelMixin class _UpperCamelCase ( lowerCAmelCase_ , lowerCAmelCase_ ): @register_to_config def __in...
328
1
from .dependency_versions_table import deps from .utils.versions import require_version, require_version_core # define which module versions we always want to check at run time # (usually the ones defined in `install_requires` in setup.py) # # order specific notes: # - tqdm must be checked before tokenizers _U...
328
import argparse import json import logging import os import sys from unittest.mock import patch from transformers.testing_utils import TestCasePlus, get_gpu_count, slow _UpperCAmelCase = [ os.path.join(os.path.dirname(__file__), dirname) for dirname in [ 'text-classification', 'la...
328
1
import argparse import json import os import fairseq import torch from fairseq.data import Dictionary from transformers import ( WavaVecaConfig, WavaVecaCTCTokenizer, WavaVecaFeatureExtractor, WavaVecaForCTC, WavaVecaForPreTraining, WavaVecaProcessor, logging, ) from transformers.mod...
328
from datetime import datetime import matplotlib.pyplot as plt import torch def lowerCAmelCase_ ( UpperCamelCase_ ) -> int: for param in module.parameters(): UpperCamelCase_ = False def lowerCAmelCase_ ( ) -> Dict: UpperCamelCa...
328
1
import gc import random import unittest import numpy as np import torch from transformers import CLIPImageProcessor, CLIPVisionConfig, CLIPVisionModel from diffusers import HeunDiscreteScheduler, PriorTransformer, ShapEImgaImgPipeline from diffusers.pipelines.shap_e import ShapERenderer from diffusers.utils imp...
328
import os from shutil import copyfile from typing import Any, Dict, List, Optional, Tuple import sentencepiece as spm from ...tokenization_utils import PreTrainedTokenizer from ...utils import logging _UpperCAmelCase = '▁' _UpperCAmelCase = {'vocab_file': 'spiece.model'} _UpperCAmelCase = ...
328
1
from typing import Any class _UpperCamelCase : def __init__( self: Union[str, Any] , _SCREAMING_SNAKE_CASE: Any ) -> str: """simple docstring""" UpperCamelCase_ = data UpperCamelCase_ = None def _...
328
from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tf_available, is_torch_available _UpperCAmelCase = { 'configuration_tapas': ['TAPAS_PRETRAINED_CONFIG_ARCHIVE_MAP', 'TapasConfig'], 'tokenization_tapas': ['TapasTokenizer'], } try: if not i...
328
1
_UpperCAmelCase = [4, 1, 7, 4, 2, 6, 4, 1, 5, 3, 7, 5] _UpperCAmelCase = [3, 7, 7, 4, 2, 6, 4, 1, 5, 3, 7, 5] _UpperCAmelCase = { 0: 'Sunday', 1: 'Monday', 2: 'Tuesday', 3: 'Wednesday', 4: 'Thursday', 5: 'Friday', 6: 'Saturday', } def lowerCAmelCase_ ( Uppe...
328
import argparse import json from tqdm import tqdm def lowerCAmelCase_ ( ) -> Tuple: UpperCamelCase_ = argparse.ArgumentParser() # Required parameters parser.add_argument( "--src_path" , type=UpperCamelCase_ , default="biencoder-nq-de...
328
1
from collections import OrderedDict from typing import Mapping from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfig from ...utils import logging _UpperCAmelCase = logging.get_logger(__name__) _UpperCAmelCase = { 'junnyu/roformer_chinese_small': 'https://huggingfac...
328
import requests from bsa import BeautifulSoup def lowerCAmelCase_ ( UpperCamelCase_ , UpperCamelCase_ ) -> str: UpperCamelCase_ = BeautifulSoup(requests.get(UpperCamelCase_ , params=UpperCamelCase_ ).content , "html.parser" ) UpperCam...
328
1
def lowerCAmelCase_ ( UpperCamelCase_ ) -> bool: return credit_card_number.startswith(("34", "35", "37", "4", "5", "6") ) def lowerCAmelCase_ ( UpperCamelCase_ ) -> bool: UpperCamelCase_ = credit_card_number UpperCamelCase_ ...
328
import torch from torch import nn from ...configuration_utils import ConfigMixin, register_to_config from ...models import ModelMixin class _UpperCamelCase ( lowerCAmelCase_ , lowerCAmelCase_ ): @register_to_config def __init__( self: List[str] , *, _SC...
328
1
from typing import List, Union from ..utils import ( add_end_docstrings, is_tf_available, is_torch_available, is_vision_available, logging, requires_backends, ) from .base import PIPELINE_INIT_ARGS, Pipeline if is_vision_available(): from PIL import Image from ..image_utils i...
328
from functools import lru_cache def lowerCAmelCase_ ( UpperCamelCase_ ) -> set: UpperCamelCase_ = 2 UpperCamelCase_ = set() while i * i <= n: if n % i: i += 1 else: n //= i ...
328
1
from typing import Dict, List from nltk.translate import gleu_score import datasets from datasets import MetricInfo _UpperCAmelCase = '\\n@misc{wu2016googles,\n title={Google\'s Neural Machine Translation System: Bridging the Gap between Human and Machine Translation},\n author={Yonghui Wu and...
328
def lowerCAmelCase_ ( UpperCamelCase_ ) -> int: UpperCamelCase_ = len(UpperCamelCase_ ) UpperCamelCase_ = len(matrix[0] ) UpperCamelCase_ = min(UpperCamelCase_ , UpperCamelCase_ ) for row in range(UpperCamelCase...
328
1
from PIL import Image def lowerCAmelCase_ ( UpperCamelCase_ ) -> Image: UpperCamelCase_ , UpperCamelCase_ = image.size UpperCamelCase_ = 0 UpperCamelCase_ = image.load() for i in range(UpperCamelCase_ ): fo...
328
import math def lowerCAmelCase_ ( UpperCamelCase_ , UpperCamelCase_ ) -> List[str]: if 0 not in (x, y): # We use the relation x^y = y*log10(x), where 10 is the base. return y * math.logaa(UpperCamelCase_ ) else: if x == 0...
328
1
import argparse import torch from transformers import YosoConfig, YosoForMaskedLM def lowerCAmelCase_ ( UpperCamelCase_ ) -> Optional[Any]: if "model" in orig_key: UpperCamelCase_ = orig_key.replace("model." , "" ) if "norm1" in orig_ke...
328
from typing import List, Optional, Tuple, Union import PIL import torch from torchvision import transforms from diffusers.pipeline_utils import DiffusionPipeline, ImagePipelineOutput from diffusers.schedulers import DDIMScheduler from diffusers.utils import randn_tensor _UpperCAmelCase = transforms.Comp...
328
1
from __future__ import annotations import inspect import unittest import numpy as np from transformers import ResNetConfig from transformers.testing_utils import require_tf, require_vision, slow from transformers.utils import cached_property, is_tf_available, is_vision_available from ...test_configuration_com...
328
import re from filelock import FileLock try: import nltk _UpperCAmelCase = True except (ImportError, ModuleNotFoundError): _UpperCAmelCase = False if NLTK_AVAILABLE: with FileLock('.lock') as lock: nltk.download('punkt', quiet=True) def lowerCAmelCase_ ( U...
328
1
def lowerCAmelCase_ ( UpperCamelCase_ ) -> None: UpperCamelCase_ = generate_pascal_triangle(UpperCamelCase_ ) for row_idx in range(UpperCamelCase_ ): # Print left spaces for _ in range(num_rows - row_idx - 1 ): ...
328
import gc import unittest import numpy as np import torch from diffusers import AutoencoderKL, DDIMScheduler, DiTPipeline, DPMSolverMultistepScheduler, TransformeraDModel from diffusers.utils import is_xformers_available, load_numpy, slow, torch_device from diffusers.utils.testing_utils import enable_full_deter...
328
1
import os import sys import unittest _UpperCAmelCase = os.path.abspath(os.path.dirname(os.path.dirname(os.path.dirname(__file__)))) sys.path.append(os.path.join(git_repo_path, 'utils')) import check_dummies # noqa: E402 from check_dummies import create_dummy_files, create_dummy_object, find_backend, rea...
328
import copy import os import cva import numpy as np from matplotlib import pyplot as plt class _UpperCamelCase : def __init__( self: str ) -> Any: """simple docstring""" UpperCamelCase_ = "" UpperCamelCase_ = "...
328
1
from ...configuration_utils import PretrainedConfig from ...utils import logging from ...utils.backbone_utils import BackboneConfigMixin, get_aligned_output_features_output_indices _UpperCAmelCase = logging.get_logger(__name__) _UpperCAmelCase = { 'facebook/convnextv2-tiny-1k-224': 'https://hug...
328
from sklearn.metrics import fa_score, matthews_corrcoef import datasets from .record_evaluation import evaluate as evaluate_record _UpperCAmelCase = '\\n@article{wang2019superglue,\n title={SuperGLUE: A Stickier Benchmark for General-Purpose Language Understanding Systems},\n author={Wang, Alex and Pr...
328
1
import string def lowerCAmelCase_ ( UpperCamelCase_ ) -> None: for key in range(len(string.ascii_uppercase ) ): UpperCamelCase_ = "" for symbol in message: if symbol in string.ascii_uppercase: ...
328
from ...configuration_utils import PretrainedConfig from ...utils import logging _UpperCAmelCase = logging.get_logger(__name__) _UpperCAmelCase = { 'alibaba-damo/mgp-str-base': 'https://huggingface.co/alibaba-damo/mgp-str-base/resolve/main/config.json', } class _UpperCamelCase ( lo...
328
1
import json import os import unittest from transformers import BatchEncoding, LEDTokenizer, LEDTokenizerFast from transformers.models.led.tokenization_led import VOCAB_FILES_NAMES from transformers.testing_utils import require_tokenizers, require_torch from transformers.utils import cached_property from ...test...
328
import logging import os import sys from dataclasses import dataclass, field from typing import Optional from seqaseq_trainer import SeqaSeqTrainer from seqaseq_training_args import SeqaSeqTrainingArguments import transformers from transformers import ( AutoConfig, AutoModelForSeqaSeqLM, AutoTokeniz...
328
1
from datetime import datetime import requests def lowerCAmelCase_ ( UpperCamelCase_ ) -> bytes: UpperCamelCase_ = "https://downloadgram.net/wp-json/wppress/video-downloader/video?url=" UpperCamelCase_ = requests.get(base_url + url ).json()[0]["url...
328
def lowerCAmelCase_ ( UpperCamelCase_ ) -> list: UpperCamelCase_ = int(UpperCamelCase_ ) if n_element < 1: UpperCamelCase_ = ValueError("a should be a positive number" ) raise my_error UpperCamelCase_ = ...
328
1
import importlib import math import os from dataclasses import dataclass from enum import Enum from typing import Any, Dict, Optional, Tuple, Union import flax import jax.numpy as jnp from ..utils import BaseOutput _UpperCAmelCase = 'scheduler_config.json' class _UpperCamelCase ( lowerCAmel...
328
import random import unittest import torch from diffusers import IFImgaImgSuperResolutionPipeline from diffusers.utils import floats_tensor from diffusers.utils.import_utils import is_xformers_available from diffusers.utils.testing_utils import skip_mps, torch_device from ..pipeline_params import TEXT_GUIDED_I...
328
1
from __future__ import annotations _UpperCAmelCase = '#' class _UpperCamelCase : def __init__( self: Dict ) -> None: """simple docstring""" UpperCamelCase_ = {} def lowercase ( self: Optional[int] , _SCREAMING_S...
328
from __future__ import annotations import json import requests from bsa import BeautifulSoup from fake_useragent import UserAgent _UpperCAmelCase = {'UserAgent': UserAgent().random} def lowerCAmelCase_ ( UpperCamelCase_ ) -> dict: UpperCamelCase_ = script.conte...
328
1
import numpy as np import torch from torch.nn import CrossEntropyLoss from transformers import AutoModelForCausalLM, AutoTokenizer import datasets from datasets import logging _UpperCAmelCase = '\\n\n' _UpperCAmelCase = '\nPerplexity (PPL) is one of the most common metrics for evaluating language ...
328
import io import math from typing import Dict, Optional, Union import numpy as np from huggingface_hub import hf_hub_download from ...image_processing_utils import BaseImageProcessor, BatchFeature from ...image_transforms import convert_to_rgb, normalize, to_channel_dimension_format, to_pil_image from ...image_...
328
1
from statistics import mean, stdev def lowerCAmelCase_ ( UpperCamelCase_ , UpperCamelCase_ = 3 ) -> list: UpperCamelCase_ = min(UpperCamelCase_ ) UpperCamelCase_ = max(UpperCamelCase_ ) # normalize data return [round((x - x...
328
from typing import Optional, Union import torch from torch import nn from ...configuration_utils import ConfigMixin, register_to_config from ...models.modeling_utils import ModelMixin class _UpperCamelCase ( lowerCAmelCase_ , lowerCAmelCase_ ): @register_to_config def __in...
328
1
from __future__ import annotations from collections.abc import Callable _UpperCAmelCase = list[list[float | int]] def lowerCAmelCase_ ( UpperCamelCase_ , UpperCamelCase_ ) -> Matrix: UpperCamelCase_ = len(UpperCamelCase_ ) UpperCamelCase_ ...
328
import argparse import json import logging import os import sys from unittest.mock import patch from transformers.testing_utils import TestCasePlus, get_gpu_count, slow _UpperCAmelCase = [ os.path.join(os.path.dirname(__file__), dirname) for dirname in [ 'text-classification', 'la...
328
1
from ..utils import DummyObject, requires_backends class _UpperCamelCase ( metaclass=lowerCAmelCase_ ): _UpperCamelCase : Optional[Any] = ['''flax''', '''transformers'''] def __init__( self: int , *_SCREAMING_SNAKE_CASE: Tuple , **_SCREAMING_SNAK...
328
from datetime import datetime import matplotlib.pyplot as plt import torch def lowerCAmelCase_ ( UpperCamelCase_ ) -> int: for param in module.parameters(): UpperCamelCase_ = False def lowerCAmelCase_ ( ) -> Dict: UpperCamelCa...
328
1
from __future__ import annotations import string from itertools import cycle, product from pathlib import Path _UpperCAmelCase = ( string.ascii_letters + string.digits + string.punctuation + string.whitespace ) _UpperCAmelCase = [ord(letter) for letter in string.ascii_lowercase] _UpperCAmelCase ...
328
import os from shutil import copyfile from typing import Any, Dict, List, Optional, Tuple import sentencepiece as spm from ...tokenization_utils import PreTrainedTokenizer from ...utils import logging _UpperCAmelCase = '▁' _UpperCAmelCase = {'vocab_file': 'spiece.model'} _UpperCAmelCase = ...
328
1
# flake8: noqa # Lint as: python3 _UpperCAmelCase = [ 'VerificationMode', 'Version', 'disable_progress_bar', 'enable_progress_bar', 'is_progress_bar_enabled', 'experimental', ] from .info_utils import VerificationMode from .logging import disable_progress_bar, enable_progress_bar, i...
328
from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tf_available, is_torch_available _UpperCAmelCase = { 'configuration_tapas': ['TAPAS_PRETRAINED_CONFIG_ARCHIVE_MAP', 'TapasConfig'], 'tokenization_tapas': ['TapasTokenizer'], } try: if not i...
328
1
import argparse import os import re # All paths are set with the intent you should run this script from the root of the repo with the command # python utils/check_dummies.py _UpperCAmelCase = 'src/diffusers' # Matches is_xxx_available() _UpperCAmelCase = re.compile(r'is\_([a-z_]*)_available\(\)') #...
328
import argparse import json from tqdm import tqdm def lowerCAmelCase_ ( ) -> Tuple: UpperCamelCase_ = argparse.ArgumentParser() # Required parameters parser.add_argument( "--src_path" , type=UpperCamelCase_ , default="biencoder-nq-de...
328
1
from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tf_available, is_torch_available _UpperCAmelCase = { 'configuration_tapas': ['TAPAS_PRETRAINED_CONFIG_ARCHIVE_MAP', 'TapasConfig'], 'tokenization_tapas': ['TapasTokenizer'], } try: if not i...
328
import requests from bsa import BeautifulSoup def lowerCAmelCase_ ( UpperCamelCase_ , UpperCamelCase_ ) -> str: UpperCamelCase_ = BeautifulSoup(requests.get(UpperCamelCase_ , params=UpperCamelCase_ ).content , "html.parser" ) UpperCam...
328
1
def lowerCAmelCase_ ( UpperCamelCase_ ) -> Tuple: UpperCamelCase_ = [] UpperCamelCase_ = set({"(", "[", "{"} ) UpperCamelCase_ = set({")", "]", "}"} ) UpperCamelCase_ = {"{": "}", "[": "]", "(": ")"} for ...
328
import torch from torch import nn from ...configuration_utils import ConfigMixin, register_to_config from ...models import ModelMixin class _UpperCamelCase ( lowerCAmelCase_ , lowerCAmelCase_ ): @register_to_config def __init__( self: List[str] , *, _SC...
328
1
import json import os from functools import lru_cache from typing import List, Optional, Tuple import regex as re from ...tokenization_utils import AddedToken, PreTrainedTokenizer from ...utils import logging _UpperCAmelCase = logging.get_logger(__name__) _UpperCAmelCase = {'vocab_file': 'vocab....
328
from functools import lru_cache def lowerCAmelCase_ ( UpperCamelCase_ ) -> set: UpperCamelCase_ = 2 UpperCamelCase_ = set() while i * i <= n: if n % i: i += 1 else: n //= i ...
328
1
def lowerCAmelCase_ ( UpperCamelCase_ = 4000000 ) -> int: UpperCamelCase_ = [] UpperCamelCase_ , UpperCamelCase_ = 0, 1 while b <= n: if b % 2 == 0: even_fibs.append(UpperCamelCase_ ) UpperCame...
328
def lowerCAmelCase_ ( UpperCamelCase_ ) -> int: UpperCamelCase_ = len(UpperCamelCase_ ) UpperCamelCase_ = len(matrix[0] ) UpperCamelCase_ = min(UpperCamelCase_ , UpperCamelCase_ ) for row in range(UpperCamelCase...
328
1
import logging import re import pytorch_quantization import pytorch_quantization.nn as quant_nn import torch from pytorch_quantization import calib from pytorch_quantization.tensor_quant import QuantDescriptor _UpperCAmelCase = logging.getLogger(__name__) _UpperCAmelCase = 5_0 # max width of laye...
328
import math def lowerCAmelCase_ ( UpperCamelCase_ , UpperCamelCase_ ) -> List[str]: if 0 not in (x, y): # We use the relation x^y = y*log10(x), where 10 is the base. return y * math.logaa(UpperCamelCase_ ) else: if x == 0...
328
1
def lowerCAmelCase_ ( UpperCamelCase_ ) -> int: UpperCamelCase_ = 0 while num > 0: digit_sum += num % 10 num //= 10 return digit_sum def lowerCAmelCase_ ( UpperCamelCase_ = 100 ) -> int: UpperCamelCa...
328
from typing import List, Optional, Tuple, Union import PIL import torch from torchvision import transforms from diffusers.pipeline_utils import DiffusionPipeline, ImagePipelineOutput from diffusers.schedulers import DDIMScheduler from diffusers.utils import randn_tensor _UpperCAmelCase = transforms.Comp...
328
1
def lowerCAmelCase_ ( UpperCamelCase_ = "The quick brown fox jumps over the lazy dog" , ) -> bool: UpperCamelCase_ = set() # Replace all the whitespace in our sentence UpperCamelCase_ = input_str.replace(" " , "" ) for alpha in...
328
import re from filelock import FileLock try: import nltk _UpperCAmelCase = True except (ImportError, ModuleNotFoundError): _UpperCAmelCase = False if NLTK_AVAILABLE: with FileLock('.lock') as lock: nltk.download('punkt', quiet=True) def lowerCAmelCase_ ( U...
328
1
from ...configuration_utils import PretrainedConfig from ...utils import logging _UpperCAmelCase = logging.get_logger(__name__) _UpperCAmelCase = { 'alibaba-damo/mgp-str-base': 'https://huggingface.co/alibaba-damo/mgp-str-base/resolve/main/config.json', } class _UpperCamelCase ( lo...
328
import gc import unittest import numpy as np import torch from diffusers import AutoencoderKL, DDIMScheduler, DiTPipeline, DPMSolverMultistepScheduler, TransformeraDModel from diffusers.utils import is_xformers_available, load_numpy, slow, torch_device from diffusers.utils.testing_utils import enable_full_deter...
328
1
from collections.abc import Generator from math import sin def lowerCAmelCase_ ( UpperCamelCase_ ) -> bytes: if len(UpperCamelCase_ ) != 32: raise ValueError("Input must be of length 32" ) UpperCamelCase_ = b"" for i in [3, 2, 1, 0]:...
328
import copy import os import cva import numpy as np from matplotlib import pyplot as plt class _UpperCamelCase : def __init__( self: str ) -> Any: """simple docstring""" UpperCamelCase_ = "" UpperCamelCase_ = "...
328
1
from __future__ import annotations from collections.abc import Callable def lowerCAmelCase_ ( UpperCamelCase_ , UpperCamelCase_ , UpperCamelCase_ , UpperCamelCase_ = 100 , ) -> float: UpperCamelCase_ = x_start UpperCamelCase_ = ...
328
from sklearn.metrics import fa_score, matthews_corrcoef import datasets from .record_evaluation import evaluate as evaluate_record _UpperCAmelCase = '\\n@article{wang2019superglue,\n title={SuperGLUE: A Stickier Benchmark for General-Purpose Language Understanding Systems},\n author={Wang, Alex and Pr...
328
1
def lowerCAmelCase_ ( UpperCamelCase_ ) -> int: assert column_title.isupper() UpperCamelCase_ = 0 UpperCamelCase_ = len(UpperCamelCase_ ) - 1 UpperCamelCase_ = 0 while index >= 0: UpperCamelCase_ = ...
328
from ...configuration_utils import PretrainedConfig from ...utils import logging _UpperCAmelCase = logging.get_logger(__name__) _UpperCAmelCase = { 'alibaba-damo/mgp-str-base': 'https://huggingface.co/alibaba-damo/mgp-str-base/resolve/main/config.json', } class _UpperCamelCase ( lo...
328
1
def lowerCAmelCase_ ( UpperCamelCase_ , UpperCamelCase_ ) -> int: return 1 if input_a == input_a else 0 def lowerCAmelCase_ ( ) -> None: assert xnor_gate(0 , 0 ) == 1 assert xnor_gate(0 , 1 ) == 0 assert xnor_gate...
328
import logging import os import sys from dataclasses import dataclass, field from typing import Optional from seqaseq_trainer import SeqaSeqTrainer from seqaseq_training_args import SeqaSeqTrainingArguments import transformers from transformers import ( AutoConfig, AutoModelForSeqaSeqLM, AutoTokeniz...
328
1
import tempfile import unittest from transformers import TaConfig, is_torch_available from transformers.testing_utils import ( require_sentencepiece, require_tokenizers, require_torch, slow, torch_device, ) from ...generation.test_utils import GenerationTesterMixin from ...test_modeling_comm...
328
def lowerCAmelCase_ ( UpperCamelCase_ ) -> list: UpperCamelCase_ = int(UpperCamelCase_ ) if n_element < 1: UpperCamelCase_ = ValueError("a should be a positive number" ) raise my_error UpperCamelCase_ = ...
328
1
import logging import os import quant_trainer import torch from torch.utils.data import DataLoader from transformers import Trainer, is_torch_tpu_available from transformers.trainer_utils import PredictionOutput _UpperCAmelCase = logging.getLogger(__name__) if is_torch_tpu_available(check_device=False)...
328
import random import unittest import torch from diffusers import IFImgaImgSuperResolutionPipeline from diffusers.utils import floats_tensor from diffusers.utils.import_utils import is_xformers_available from diffusers.utils.testing_utils import skip_mps, torch_device from ..pipeline_params import TEXT_GUIDED_I...
328
1
# Copyright 2021 The HuggingFace Team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by app...
328
from __future__ import annotations import json import requests from bsa import BeautifulSoup from fake_useragent import UserAgent _UpperCAmelCase = {'UserAgent': UserAgent().random} def lowerCAmelCase_ ( UpperCamelCase_ ) -> dict: UpperCamelCase_ = script.conte...
328
1
import copy import tempfile import unittest from huggingface_hub import HfFolder, delete_repo from parameterized import parameterized from requests.exceptions import HTTPError from transformers import AutoConfig, GenerationConfig from transformers.testing_utils import TOKEN, USER, is_staging_test class ...
328
import io import math from typing import Dict, Optional, Union import numpy as np from huggingface_hub import hf_hub_download from ...image_processing_utils import BaseImageProcessor, BatchFeature from ...image_transforms import convert_to_rgb, normalize, to_channel_dimension_format, to_pil_image from ...image_...
328
1
import argparse import json import re from pathlib import Path import requests import torch from huggingface_hub import hf_hub_download from PIL import Image from transformers import ( MobileNetVaConfig, MobileNetVaForImageClassification, MobileNetVaImageProcessor, load_tf_weights_in_mobilenet_v...
328
from typing import Optional, Union import torch from torch import nn from ...configuration_utils import ConfigMixin, register_to_config from ...models.modeling_utils import ModelMixin class _UpperCamelCase ( lowerCAmelCase_ , lowerCAmelCase_ ): @register_to_config def __in...
328
1
from copy import deepcopy import torch import torch.nn.functional as F from torch.optim import AdamW from torch.optim.lr_scheduler import LambdaLR from torch.utils.data import DataLoader from accelerate.accelerator import Accelerator from accelerate.state import GradientState from accelerate.test_utils import R...
328
import argparse import json import logging import os import sys from unittest.mock import patch from transformers.testing_utils import TestCasePlus, get_gpu_count, slow _UpperCAmelCase = [ os.path.join(os.path.dirname(__file__), dirname) for dirname in [ 'text-classification', 'la...
328
1
import itertools from dataclasses import dataclass from typing import Any, Callable, Dict, List, Optional, Union import pandas as pd import pyarrow as pa import datasets import datasets.config from datasets.features.features import require_storage_cast from datasets.table import table_cast from datasets.utils.p...
328
from datetime import datetime import matplotlib.pyplot as plt import torch def lowerCAmelCase_ ( UpperCamelCase_ ) -> int: for param in module.parameters(): UpperCamelCase_ = False def lowerCAmelCase_ ( ) -> Dict: UpperCamelCa...
328
1
import copy from dataclasses import dataclass, field from typing import ClassVar, Dict from ..features import Audio, ClassLabel, Features from .base import TaskTemplate @dataclass(frozen=lowerCAmelCase_ ) class _UpperCamelCase ( lowerCAmelCase_ ): _UpperCamelCase : str ...
328
import os from shutil import copyfile from typing import Any, Dict, List, Optional, Tuple import sentencepiece as spm from ...tokenization_utils import PreTrainedTokenizer from ...utils import logging _UpperCAmelCase = '▁' _UpperCAmelCase = {'vocab_file': 'spiece.model'} _UpperCAmelCase = ...
328
1
from collections import OrderedDict from typing import Mapping from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfig from ...utils import logging _UpperCAmelCase = logging.get_logger(__name__) _UpperCAmelCase = { 'bert-base-uncased': 'https://huggingface.co/bert-ba...
328
from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tf_available, is_torch_available _UpperCAmelCase = { 'configuration_tapas': ['TAPAS_PRETRAINED_CONFIG_ARCHIVE_MAP', 'TapasConfig'], 'tokenization_tapas': ['TapasTokenizer'], } try: if not i...
328
1
def lowerCAmelCase_ ( UpperCamelCase_ , UpperCamelCase_ ) -> str: if not (isinstance(UpperCamelCase_ , UpperCamelCase_ ) and isinstance(UpperCamelCase_ , UpperCamelCase_ )): raise ValueError("longest_common_substring() takes two strings for ...
328
import argparse import json from tqdm import tqdm def lowerCAmelCase_ ( ) -> Tuple: UpperCamelCase_ = argparse.ArgumentParser() # Required parameters parser.add_argument( "--src_path" , type=UpperCamelCase_ , default="biencoder-nq-de...
328
1
import json from typing import List, Optional, Tuple from tokenizers import pre_tokenizers, processors from ...tokenization_utils_base import AddedToken, BatchEncoding from ...tokenization_utils_fast import PreTrainedTokenizerFast from ...utils import logging from .tokenization_roberta import RobertaTokenizer ...
328
import requests from bsa import BeautifulSoup def lowerCAmelCase_ ( UpperCamelCase_ , UpperCamelCase_ ) -> str: UpperCamelCase_ = BeautifulSoup(requests.get(UpperCamelCase_ , params=UpperCamelCase_ ).content , "html.parser" ) UpperCam...
328
1
def lowerCAmelCase_ ( UpperCamelCase_ , UpperCamelCase_ , UpperCamelCase_ ) -> Optional[Any]: if n == 0: return 1 elif n % 2 == 1: return (binary_exponentiation(UpperCamelCase_ , n - 1 , UpperCamelCase_ ) * a) % mod ...
328
import torch from torch import nn from ...configuration_utils import ConfigMixin, register_to_config from ...models import ModelMixin class _UpperCamelCase ( lowerCAmelCase_ , lowerCAmelCase_ ): @register_to_config def __init__( self: List[str] , *, _SC...
328
1
import inspect from typing import Callable, List, Optional, Union import torch from transformers import ( CLIPImageProcessor, CLIPTextModel, CLIPTokenizer, WhisperForConditionalGeneration, WhisperProcessor, ) from diffusers import ( AutoencoderKL, DDIMScheduler, DiffusionPipeline...
328
from functools import lru_cache def lowerCAmelCase_ ( UpperCamelCase_ ) -> set: UpperCamelCase_ = 2 UpperCamelCase_ = set() while i * i <= n: if n % i: i += 1 else: n //= i ...
328
1
import math def lowerCAmelCase_ ( UpperCamelCase_ , UpperCamelCase_ ) -> List[str]: if 0 not in (x, y): # We use the relation x^y = y*log10(x), where 10 is the base. return y * math.logaa(UpperCamelCase_ ) else: if x == 0...
328
def lowerCAmelCase_ ( UpperCamelCase_ ) -> int: UpperCamelCase_ = len(UpperCamelCase_ ) UpperCamelCase_ = len(matrix[0] ) UpperCamelCase_ = min(UpperCamelCase_ , UpperCamelCase_ ) for row in range(UpperCamelCase...
328
1
import os from datetime import datetime as dt from github import Github _UpperCAmelCase = [ 'good first issue', 'feature request', 'wip', ] def lowerCAmelCase_ ( ) -> Union[str, Any]: UpperCamelCase_ = Github(os.environ["GITHUB_TOKEN"] ) Uppe...
328
import math def lowerCAmelCase_ ( UpperCamelCase_ , UpperCamelCase_ ) -> List[str]: if 0 not in (x, y): # We use the relation x^y = y*log10(x), where 10 is the base. return y * math.logaa(UpperCamelCase_ ) else: if x == 0...
328
1
from __future__ import annotations from collections import deque from collections.abc import Iterator from dataclasses import dataclass @dataclass class _UpperCamelCase : _UpperCamelCase : int _UpperCamelCase : int class _UpperCamelCase : def __init__( s...
328
from typing import List, Optional, Tuple, Union import PIL import torch from torchvision import transforms from diffusers.pipeline_utils import DiffusionPipeline, ImagePipelineOutput from diffusers.schedulers import DDIMScheduler from diffusers.utils import randn_tensor _UpperCAmelCase = transforms.Comp...
328
1
from copy import deepcopy class _UpperCamelCase : def __init__( self: List[str] , _SCREAMING_SNAKE_CASE: list[int] | None = None , _SCREAMING_SNAKE_CASE: int | None = None ) -> None: """simple docstring""" if arr is None and size is not ...
328
import re from filelock import FileLock try: import nltk _UpperCAmelCase = True except (ImportError, ModuleNotFoundError): _UpperCAmelCase = False if NLTK_AVAILABLE: with FileLock('.lock') as lock: nltk.download('punkt', quiet=True) def lowerCAmelCase_ ( U...
328
1
import argparse from transformers import ( TapasConfig, TapasForMaskedLM, TapasForQuestionAnswering, TapasForSequenceClassification, TapasModel, TapasTokenizer, load_tf_weights_in_tapas, ) from transformers.utils import logging logging.set_verbosity_info() def lowerCAmelCase_ (...
328
import gc import unittest import numpy as np import torch from diffusers import AutoencoderKL, DDIMScheduler, DiTPipeline, DPMSolverMultistepScheduler, TransformeraDModel from diffusers.utils import is_xformers_available, load_numpy, slow, torch_device from diffusers.utils.testing_utils import enable_full_deter...
328
1
import inspect import math import tempfile import unittest import numpy as np from transformers import ViTMAEConfig from transformers.testing_utils import require_torch, require_vision, slow, torch_device from transformers.utils import cached_property, is_torch_available, is_vision_available from ...test_confi...
328
import copy import os import cva import numpy as np from matplotlib import pyplot as plt class _UpperCamelCase : def __init__( self: str ) -> Any: """simple docstring""" UpperCamelCase_ = "" UpperCamelCase_ = "...
328
1
import os import unittest from transformers.models.cpmant.tokenization_cpmant import VOCAB_FILES_NAMES, CpmAntTokenizer from transformers.testing_utils import require_jieba, tooslow from ...test_tokenization_common import TokenizerTesterMixin @require_jieba class _UpperCamelCase ( lowerCAmelCase_ ...
328
from sklearn.metrics import fa_score, matthews_corrcoef import datasets from .record_evaluation import evaluate as evaluate_record _UpperCAmelCase = '\\n@article{wang2019superglue,\n title={SuperGLUE: A Stickier Benchmark for General-Purpose Language Understanding Systems},\n author={Wang, Alex and Pr...
328
1