content stringlengths 5 1.05M |
|---|
from typing import Optional, Any
import torch
from transformers import AutoModel
from flambe.nn import Module
class PretrainedTransformerEmbedder(Module):
"""Embedder intergation of the transformers library.
Instantiate this object using any alias available in the
`transformers` library. More informati... |
import torch
import torch.nn as nn
import torch.nn.functional as F
IMG_LEN = 1024
TXT_LEN = 300
N_TOPICS = 50
N_WORTHINESSES = 2
class MultitargetTridentModelBN(nn.Module):
def __init__(self, d=128, drop=0.25, worthiness_trident=False):
super().__init__()
self.worthiness_trident = worthiness_trid... |
# Copyright 2016 OSNEXUS Corporation
# See LICENSE file for details.
import requests
import json
from uuid import UUID
from requests.auth import HTTPBasicAuth
import socket
import time
import subprocess
import uuid
from twisted.python.filepath import FilePath
from flocker.node.agents.blockdevice import (
AlreadyA... |
# Copyright (c) 2022 McCoy Software Solutions
# Apache License 2.0
import random
chanceToLove = 33.3
chanceToFight = 33.3
chanceToBefriend = 33.3
decision = random.randint(0,100)
print("Chance To Love: %d" % chanceToLove)
print("Chance To Fight: %d" % chanceToFight)
print("Chance To Befriend: %d" % chanceToBefrien... |
print('File IO')
import os
test_file=open("test.txt","wb")
print(test_file.mode)
print(test_file.name)
test_file.write(bytes("Write me to the file \n",'UTF-8'))
test_file.close()
test_file= open("test.txt","r+")
text_in_file= test_file.read()
print(text_in_file)
test_file.close()
os.remove("test.txt")
os.remove("te... |
primeiroTermo = int(input('Primeiro termo: '))
razao = int(input('Razão da PA: '))
termo = primeiroTermo
cont = 0
total = 0
maisTermos = 10
while maisTermos != 0:
total += maisTermos
while cont < total:
print(f'{termo} -> ', end='')
termo += razao
cont += 1
print('PAUSA')
maisTer... |
# create fake data
from django.core.management.base import BaseCommand, CommandError
from faker import Faker
from dashboard.serializers import create_aclpermissions_for_role
fake = Faker()
from systemconfig.models import Country, Nationality, State, CityTown, Religion, Languages, Relationship, Occupation, \
Hobby... |
from flask import Flask
from flask import request
from flask import jsonify
from flask_cors import CORS
import os
import sys
import pandas as pd
import numpy as np
import joblib
import pickle
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from utils.preprocess_data import remove_time_sta... |
"""
Unit tests.
See doc/devel.md for usage instructions.
"""
import json
import logging
import re
import sys
from os import environ
from traceback import format_exc
from unittest import TestCase
import psycopg2
import pg_jts
logging.basicConfig()
verbosity = sys.argv.count('-v')
if verbosity == 0:
level = loggi... |
import urllib
import urllib.request
import json
import os
class Version:
def __init__(self, versionString):
"""semantic version (major.minor.patch)"""
self.setString(versionString)
def __str__(self):
return f"{self.major}.{self.minor}.{self.patch}"
def setNumbers(self, major=None,... |
from django.http import HttpRequest
from rest_framework.views import APIView
from rest_framework.response import Response
from genie.services import NotebookJobServices, Connections, NotebookTemplateService
from rest_framework.decorators import api_view
class NotebookOperationsView(APIView):
"""
Class to get n... |
# -*- coding: utf-8 -*-
# Operational Libs
import collections
import functools
import logging
import os
# Dash Libs
import dash
import dash_core_components as dcc
import dash_html_components as html
import plotly.graph_objects as go
from dash.dependencies import Input, Output, ALL
# Analytic Libs
import pandas as pd
... |
#!/usr/bin/env python3
""" simplest flask example
run from CLI with:
python flask_app.py
"""
from flask import Flask
APP = Flask(__name__)  # module-level WSGI application object
@APP.route('/')
def hello_world():
    """Return a plain-text greeting for the site root ('/')."""
    return "Hello, world!"
if __name__ == "__main__":
    # Development server only; binds all interfaces (0.0.0.0) with debug on.
    APP.run(debug=True, host='0.0.0.0')
|
import mmcv
import numpy as np
import pycocotools.mask as mask_util
import torch
import torch.nn as nn
import torch.nn.functional as F
from ..builder import build_loss
from ..registry import HEADS
from ..utils import ConvModule
from mmdet.core import mask_target, force_fp32, auto_fp16
import matplotlib.pyplot as plt ... |
# Basic arcade program using objects
# Draw shapes on screen
# Imports
import arcade
# Constants
SCREEN_WIDTH = 600
SCREEN_HEIGHT = 650
SCREEN_TITLE = "Draw Shapes"
# Classes
class Welcome(arcade.Window):
"""Our main welcome window
"""
def __init__(self):
"""Initialize the window
"""
... |
from Operations.MiscUtil import namedtuple, ApplyToResult
from Operations.Shari_Operations.localize.PopConsts import AllAges, AllPops, AllFreqs
import re
class Scenario(object):
"""Represents one simulation scenario."""
@staticmethod
def fromString( s, mutAge = 10 ):
"""Return a scenario based on... |
import dateutil.parser
from datetime import datetime
from flask import render_template
from natural.date import duration
from logviewer2.log_utils.formatter import format_content_html
class LogEntry:
def __init__(self, data, evidence=False):
self.evidence = evidence
self.key = data["key"]
... |
from flaskr import create_app
#from flaskr.routes import socketio
application = create_app()  # WSGI app produced by the flaskr factory
if __name__ == "__main__":
    # Run the Flask development server when executed directly.
    application.run(debug=True)
from django.conf import settings
from mighty.functions import make_searchable
from company.backends.search import SearchBackend
from company.choices.fr import LEGALFORM, APE
from io import BytesIO
import base64, pycurl, json, re, logging, time, datetime
logger = logging.getLogger(__name__)
class SearchBackend(Searc... |
from __future__ import print_function
from bokeh.layouts import layout, column
from bokeh.models import Div
from bokeh.models.widgets import DateRangeSlider, Select, MultiSelect
from plots import *
class Page(object):
def __init__(self, data, sizing_mode='stretch_both'):
self.full_data = data
se... |
#!/usr/bin/env python3
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or... |
#!/usr/bin/env python -u
# Clinton Cario
# 02/05/2016
# Rewritten based on SSM_populator for memsql, no dependency on peewee and cleaner more efficient tables
# 02/09/2016
# Fixed bug where single quotes ruin syntax by incorporating a strip_invalid function that sanitizes syntax inputs. (featurizer)
# ... |
import networkx as nx
import os
from diagram import Diagram
from spf import spf
class lfa:
"""This class provides RFC5286 lfa calculations"""
def __init__(self, debug=0):
"""
Init the lfa class.
:param int debug: debug level, 0 is disabled.
:return None: __init__ shouldn't re... |
#!/usr/bin/env python3
"""
Functions for Dataset Caching
=============================
"""
import os
import pickle
def cached_to_file(filename):
"""Decorator to cache the output of a function to a file
Sometimes your workflow will contain functions that are executed once but
take a lot of time (typically... |
import subprocess
import pytest
class TestCli:
@pytest.fixture
def example_files(self, tmp_path):
tmp_dir = tmp_path / 'example'
tmp_dir.mkdir()
return tmp_dir / 'data.json', tmp_dir / 'schema.json'
def test_cli(self, example_files):
data_file, schema_file = example_files... |
# Problem: Third Maximum Number
#
# Given a non-empty array of integers, return the third maximum number in this array.
# If it does not exist, return the maximum number. The time complexity must be in O(n).
#
# Example 1:
# Input: [3, 2, 1]
# Output: 1
# Explanation: The third maximum is 1.
#
# Example 2:
# In... |
import os
import json
import tqdm
import dataset_maker
from argparse import ArgumentParser
# TODO
# 1. rename ids
# 2. upload make_dataset code
# 3. write readme.md file for constructing dataset
# 4. erase other stuff
def arg_parse():
parser = ArgumentParser()
parser.add_argument('--dataset', type=str, defa... |
# Print a sorted list of entities and their kind
# Todo: Don't print entites from the Ada Standard library
# Hint: See sample 3
import understand
import sys
def sortedEntities(db):
for ent in sorted(db.ents(),key= lambda ent: ent.name()):
print (ent.name()," [",ent.kindname(),"]",sep="",end="\n")
if... |
from .tsv_utils import baseline_df
import shutil
from sklearn.model_selection import StratifiedKFold
sex_dict = {'M': 0, 'F': 1}
if __name__ == "__main__":
import argparse
import pandas as pd
import os
from os import path
import numpy as np
parser = argparse.ArgumentParser(description="Argp... |
import pytest
from CreeDictionary.API.models import Wordform
from CreeDictionary.CreeDictionary.paradigm.filler import (
EmptyRowType,
TitleRow,
InflectionCell,
Layout,
)
from CreeDictionary.utils import ParadigmSize
from CreeDictionary.CreeDictionary.paradigm.generation import generate_paradigm
@pyt... |
import logging
import struct
import six
from voltron.view import *
from voltron.plugin import *
from voltron.api import *
log = logging.getLogger("view")
class MemoryView (TerminalView):
printable_filter = ''.join([(len(repr(chr(x))) == 3) and chr(x) or '.' for x in range(256)])
async = True
@classmeth... |
#!/usr/bin/env python3
# ------------------------------------------------------------------------------
# stress test waflz_server
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
# Imports
# ---------------... |
# -*- coding: utf-8 -*-
"""Tests for :mod:`~astronat.utils.table.utils`."""
__all__ = [
"test_rename_columns",
"test_cast_columns",
]
##############################################################################
# IMPORTS
import astropy.units as u
from astropy.table import QTable
from astronat.utils.tabl... |
import idiokit
class Counter(object):
def __init__(self):
self.keys = dict()
def get(self, key):
return self.keys.get(key, ())
def contains(self, key, value=None):
self.inc(key, value)
return not self.dec(key, value)
def inc(self, key, value=None):
if key not... |
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
monday = pd.read_csv('data/monday.csv', delimiter =';')
tuesday = pd.read_csv('data/tuesday.csv', delimiter =';' )
wednesday = pd.read_csv('data/wednesday.csv', delimiter =';')
thursday = pd.read_csv('data/thursday.csv', del... |
from vyked import Bus
from ..golem.golem import Golem
REGISTRY_HOST = '127.0.0.1'
REGISTRY_PORT = 4500
REDIS_HOST = '127.0.0.1'
REDIS_PORT = 6379
if __name__ == '__main__':
bus = Bus()
Article, tcp_service, http_service = Golem.generate("Article", [('username', 'id'), ('email', str)])
tcp_service.ronin =... |
import base64
import json
import os
import requests
from apmserver import ServerBaseTest, ElasticTest
from apmserver import TimeoutError, integration_test
from test_apikey_cmd import APIKeyHelper
from helper import wait_until
def headers(auth=None, content_type='application/x-ndjson'):
h = {'content-type': conte... |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.18 on 2019-02-21 22:30
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('guides', '0002_add_languages'),
]
operations = [
migrations.AddField(
... |
from rayintegral_kernels.kernel import TrapezoidKernel, RandomKernel, RBF, M52, M32, M12, RQ
import tensorflow as tf
from rayintegral_kernels import float_type
import numpy as np
import pylab as plt
def main():
"""
D(R) = <(phi(r) - phi(r+R))^2>
= <K^2(TEC(r) - TEC(r + R))^2>
= K^2<DTEC(r, -R... |
"""
graph.py
An ad-hoc implementation of a graph. NetworkX was inneficient for what I was
trying to do, matching many small graphs agains a small set of target graphs.
Graph bags are a set of graphs, which test for containment quickly.
>>> g1 = Graph() ... |
from django.apps import AppConfig
class JacApiConfig(AppConfig):
    """Django application configuration for the jaseci_serv.jac_api app."""
    name = "jaseci_serv.jac_api"
|
from django.urls import include, path
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
path('', include('home.urls')),
path('calendar/', include('events.urls')),
path('resources/', include('resources.urls')),
path('recruiters/... |
# Copyright (c) 2010 Doug Hellmann. All rights reserved.
#
"""Factorial
"""
# end_pymotw_header
import math
# math.gamma(x) extends the factorial: gamma(n) == (n-1)! for positive
# integers.  It raises ValueError at the poles (x == 0, -1, -2, ...),
# which the first element (0) of this list deliberately triggers.
for i in [0, 1.1, 2.2, 3.3, 4.4, 5.5, 6.6]:
    try:
        print("{:2.1f} {:6.2f}".format(i, math.gamma(i)))
    except ValueError as err:
        # gamma is undefined here; report instead of crashing.
        print("Error computing gamma({}): {}".format(i, err))
|
import threading
import time
import datetime
from config import Page_URL
from config import ANALYSER
from spider import Spider
def go():
spider = Spider(Page_URL, ANALYSER)
spider.go()
print(str(datetime.datetime.now())+'------------------------------------------------')
time.sleep(60)
timer ... |
'''
https://leetcode.com/problems/generate-parentheses/
Given n pairs of parentheses, write a function to generate all combinations of well-formed parentheses.
'''
'''
Accepted
'''
class Solution:
def generateParenthesisHelper(self, stack_open, stack_closed, result, solutions):
if len(stack_op... |
import numpy as np
from pyglet.gl import *
from fos import Actor
from fos.data import get_sphere
class SphereCloud(Actor):
def __init__(self, positions,
radii = None,
colors = None,
force_centering = False,
affine = None,
... |
"""
File : MSRuleCleanerWflow.py
Description: Provides a document Template for the MSRuleCleaner MicroServices
"""
# futures
from __future__ import division, print_function
from copy import deepcopy
class MSRuleCleanerWflow(dict):
"""
A minimal workflow and transfer information representation to serve... |
from .atss import ATSS
from .base import BaseDetector
from .cascade_rcnn import CascadeRCNN
from .fast_rcnn import FastRCNN
from .faster_rcnn import FasterRCNN
from .fcos import FCOS
from .fovea import FOVEA
from .fsaf import FSAF
from .grid_rcnn import GridRCNN
from .htc import HybridTaskCascade
from .mask_r... |
# Copyright 2019 AUI, Inc. Washington DC, USA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable ... |
"""Library for implementing cascade (sequences) of different neural modules.
Authors
* Anonymous
"""
import torch
import inspect
import logging
import operator
import functools
from speechbrain.nnet.linear import Linear
from speechbrain.utils.callchains import lengths_arg_exists
logger = logging.getLogger(__name__)... |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICEN... |
from __future__ import division, print_function, absolute_import
__author__ = 'Alex Rogozhnikov'
import functools
import nose
def known_failure(test):
"""
Decorator to mark known failures in tests
"""
@functools.wraps(test)
def inner(*args, **kwargs):
try:
test(*args, **kwarg... |
# -*- coding: utf-8 -*-
"""
Template to generate the input files for the FEM code solids_ISO.
The script uses module meshio.py to read a GMSH mesh and produce
text files nodes.txt, eles.txt , mater.txt and loads.txt
@authors: Juan Gomez
Nicolas Guarin-Zapata
"""
from __future__ import division, print... |
def statistical_error(actual, predicted):
    """Return the fractional error of *predicted* relative to *actual*.

    Computed as abs(predicted - actual) divided by actual (the divisor
    keeps its sign).  Raises ZeroDivisionError when *actual* is 0.
    """
    deviation = abs(predicted - actual)
    return deviation / actual
from PyQt5.QtCore import QThread, QObject, pyqtSignal, pyqtSlot, QRunnable
import numpy as np
class WorkerSignals(QObject):
finished = pyqtSignal()
output = pyqtSignal(np.ndarray)
class GoL_Worker(QRunnable):
def __init__(self, input_array, height, width):
super(GoL_Worker, self).__init__()
... |
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to... |
import os
import queue
import threading
from time import sleep
import numpy as np
import tensorflow as tf
from tensorflow.python.training.adam import AdamOptimizer
from env import MultiArmTorqueEnvironment
from models import autoencoder_seq
N_ITERATIONS = 10000
N_JOINTS = 2
SEQ_LEN = 16
BATCH_SIZE = 1024 * 16
MOTION... |
from .schema import *
import pyarrow
def read_type(doc):
t = doc[TYPE]
if PARAM in doc:
tp = doc[PARAM]
else:
tp = None
if t == 'null':
return pyarrow.null()
if t == 'bool':
return pyarrow.bool_()
if t == 'int8':
return pyarrow.int8()
if t == 'i... |
# Copyright (c) ZenML GmbH 2021. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applic... |
#!/usr/bin/python3
import sys, os
filename = sys.argv[1];
output = filename + ".c";
try:
compiler = sys.argv[2]
except:
compiler = "/usr/bin/gcc"
f = open(filename, "r");
content = f.read();
f.close();
del f;
if os.path.exists(output):
os.remove(output);
def fileappend(filename, content):
f = open... |
#from __future__ import print_function
import ast
import os
import sys
import codecs
import subprocess
from fnmatch import fnmatchcase
from distutils.util import convert_path
from setuptools import setup, find_packages
def find_version(*parts):
version_py = os.path.join(os.path.dirname(__file__), 'version.py')
... |
from pypy.module.micronumpy.interp_iter import ViewIterator
class TestIterDirect(object):
def test_C_viewiterator(self):
#Let's get started, simple iteration in C order with
#contiguous layout => strides[-1] is 1
start = 0
shape = [3, 5]
strides = [5, 1]
backstrides... |
"""
This is a place to create a python wrapper for the BASGRA fortran model in fortarn_BASGRA_NZ
Author: Matt Hanson
Created: 12/08/2020 9:32 AM
"""
import os
import ctypes as ct
import numpy as np
import pandas as pd
from subprocess import Popen
from copy import deepcopy
from input_output_keys import param_keys, o... |
from django.urls import (
path,
)
from .views import (
proxy_document,
proxy_pdf,
)
app_name = 'django_simple_file_handler'
urlpatterns = [
path(
'documents/<proxy_slug>',
proxy_document,
name='proxy_document',
),
path(
'pdf/<proxy_slug>',
proxy_pdf,... |
#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
#
# This modules allows for querying and inserting perf result data into the
# perf datastore. Currently it has very basic functionality supported with
# little error handling.
# TODO: Make this more robust, add better logging
#
import MySQ... |
import requests
from .config import SUPERADMIN, KEEHOST_URL, KEEHOST_APIKEY
def create_super_admin():
""" Create a base super user based on the configuration """
r = requests.post(KEEHOST_URL + '/accounts',
json=SUPERADMIN,
headers={'Authorization': KEEHOST_APIKEY... |
import unittest.mock
from typing import cast, Any, List, Dict
import pykube.exceptions
import pytest
from pykube import HTTPClient
from pykube.objects import NamespacedAPIObject
from pytest_mock import MockerFixture, MockFixture
from pytest_helm_charts.utils import wait_for_namespaced_objects_condition
MockCR = Name... |
# Copyright Contributors to the Pyro project.
# SPDX-License-Identifier: Apache-2.0
from functools import singledispatch
import warnings
from jax import config, nn, random, tree_util
import jax.numpy as jnp
try:
# jaxns changes the default precision to double precision
# so here we undo that action
use_x... |
# Compute x**y
def exp(x: int, y: int) -> int:
    """Compute x**y by repeated multiplication.

    Mirrors the original accumulator behavior: any y <= 0 yields 1
    (no multiplications are performed).
    """
    result: int = 1
    remaining: int = y
    while remaining > 0:
        result = result * x
        remaining = remaining - 1
    return result
# Input parameter
# n:int = 42
# # Run [0, n]
# i:int = 0
# # Crunch
# while i <= n:
# print(exp(2, ... |
# Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applica... |
class SolidCurveIntersection(object,IEnumerable[Curve],IEnumerable,IDisposable):
""" This class represents the results of a calculation of intersection between a solid volume and a curve. """
def Dispose(self):
""" Dispose(self: SolidCurveIntersection) """
pass
def GetCurveSegment(self,index):
"""
Get... |
from xml.etree import ElementTree as ET
class ShardParser:
    """
    Wraps a raw XML string and parses it with xml.etree.ElementTree.
    """
    def __init__(self, xml):
        # xml: the raw XML document as a string; not validated until parse().
        self.xml = xml
    def parse(self):
        """
        Parse the stored XML string and return the root Element.
        Raises xml.etree.ElementTree.ParseError on malformed input.
        """
        return ET.fromstring(self.xml)
def search_for_element(self,... |
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
def freertos_repos():
http_archive(
name = "freertos",
build_file = Label("//third... |
from app.libraries.token_generator import TokenGenerator
from flask_jwt_extended.internal_utils import verify_token_type
from flask_jwt_extended.utils import get_jwt, get_jwt_identity
from flask_restful import Resource
from flask import request
from app.models.user import User
from app.response import response
from a... |
# coding=utf-8
from __future__ import unicode_literals
import re
import unittest
from ukpostcodeparser.parser import parse_uk_postcode
from faker import Faker
from faker.providers.address.de_AT import Provider as DeAtProvider
from faker.providers.address.de_DE import Provider as DeProvider
from faker.providers.addr... |
from .node import Node
from .problem import Problem
from .search import depth_limited_search
from .search import iterative_deepening_search
|
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import sys, os, re
import numpy as np
from collections import OrderedDict
import sys, os, re
import numpy as np
from collections import OrderedDict
import pickle
import glob
import datetime, time
import scipy.spatial.distance as distance
from typi... |
'''
Created on Jul 16, 2018
@author: yiyedang
'''
import turtle
from Stack import Stack
class Disk:
def __init__(self, color, shape, width, x, y, speed):
self.x = x
self.y = y
self.shape = shape
self.color = color
self.t = turtle.Turtle()
self.height = 20
se... |
from pyprint.ConsolePrinter import ConsolePrinter
from coala_utils.string_processing.StringConverter import StringConverter
def ask_question(question,
default=None,
printer=ConsolePrinter(),
typecast=str,
**kwargs):
"""
Asks the user a questi... |
from django.urls import path
from . import views
app_name = 'ask'
urlpatterns = [
path(r'continent/', views.continent, name='continent'),
path(r'continent_no_user/', views.continent_no_user, name='continent_no_user'),
path(r'region/', views.region, name='region'),
path(r'country/', views.country, na... |
from django.contrib import admin
from apps.applications.models import Application
@admin.register(Application)
class ApplicationAdmin(admin.ModelAdmin):
    """
    Defines the admin model for the Application Model
    """
    # Columns shown on the admin changelist page for Application rows.
    list_display = ("__str__", "id", "application_owner", "updated_at")
|
# Volatility
# Copyright (C) 2007-2013 Volatility Foundation
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your o... |
from os.path import expanduser
import toml
class Config:
def __init__(self):
self._path = '{}/.config/dtt/config.toml'.format(expanduser("~"))
self._toml_string = ""
try:
with open(self._path) as f:
self._toml_string = f.read()
except FileNotFoundError:
... |
# Algoritmo genético
from trabalho2.modules import calc_individuos, calc_populacao, calc_fitness, calc_fitness_filhos, calc_roletaSimplesProporcional, calc_pontoCorte, calc_mutacoes
from random import seed
from matplotlib import pyplot as plt
import numpy as np
len_pop = 100
num_individuos = 3000000
print('Tamanho da ... |
import pathlib
from typing import Sequence
import pandas as pd
from visions.relations import IdentityRelation, TypeRelation
from visions.types.type import VisionsBaseType
from visions.utils.series_utils import nullable_series_contains
def _get_relations(cls) -> Sequence[TypeRelation]:
from visions.types import ... |
from etl.workflow.readers.ontolia_reader import read_ontolia_file
from tests.etl.workflow.readers.ontolia.expected_outputs import expected_raw_ontolia_output
from tests.util import convert_to_dataframe, assert_df_are_equal_ignore_id
def test_read_ontolia_file(spark_session):
ontolia_data_df = read_ontolia_file(sp... |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law o... |
from flask import Flask, Response, request
import os
import json
import logging
import pika
from entity_json import entities_to_json
import xmltodict
app = Flask(__name__)
logger = logging.getLogger('service')
logging.basicConfig(level=os.environ.get("LOGLEVEL", "INFO"))
config = json.loads(os.environ["CONFIG"])
... |
import numpy as np
from pymul.layers.layer import Layer
class NeuronLayer(Layer):
def __init__(self, input_size, output_size):
self.weights = np.random.rand(input_size, output_size) * 2 - 1
self.biases = np.random.rand(1, output_size) * 2 - 1
def forward_propagate(self, inputs):
self.... |
'''
get_transform(args, eval_stage)
get_dataset(args, transform, eval_stage)
In pretraining stage, eval_stage set to 'none'
'''
from metric.stat_metric import StatMetric
from dataloader import get_transform
from dataloader import get_dataset
from ckpt import get_model_ckpt, save_ckpt
from model import get_model
from lo... |
""" Provides a solution (`solve`) to the EMST problem. """
from .edist import edist
from operator import itemgetter
# Euclidean Minimum Spanning Tree (MST) algorithm
#
# input: a list of n Point objects
#
# output: a list of (p, q) tuples, where p and q are each input Point
# objects, and (p, q) should be connected i... |
import os
import pickle
import time
import torchvision.utils as vutils
from torch import optim
from torch.utils.data import DataLoader
from zo.models import Discriminator, Generator, device
from zo.zo_opt import GradientEstimate_dicrs, GradientEstimate, zoVIA, zoESVIA, zoscESVIA
from zo.log_likelihood import log_like... |
import pytest
import numpy as np
import torch
import network
import flow
import variational
import time
import itertools
from torch import nn
torch.manual_seed(2)
use_gpu = True
device = torch.device('cuda:0' if use_gpu else 'cpu')
@pytest.fixture(params=[4])
def L(request):
return request.param
def conv2d... |
import matplotlib.pyplot as plt
from matplotlib import cm
from mpl_toolkits.mplot3d import Axes3D
try:
import numpy as np
except:
exit()
import random
rnd = random.Random()
rnd.seed(128)
from surrogate.benchmarks import movingpeaks
sc = movingpeaks.SCENARIO_1
sc["uniform_height"] = 0
sc["uniform_width"] = ... |
from django.contrib import admin
from ticketing.models import *
class BrownPaperSettingsAdmin(admin.ModelAdmin):
    """Admin changelist for the BrownPaperSettings model."""
    # Columns shown in the admin list view.
    list_display = ('developer_token', 'client_username', 'last_poll_time')
class PayPalSettingsAdmin(admin.ModelAdmin):
    """Admin changelist for the PayPalSettings model."""
    # Single-column admin list view (note the 1-tuple).
    list_display = ('business_email', )
class TransactionAdmin(admin.ModelAdmin)... |
"""
Challenge 2: Create a Multi Operation Transaction
"""
from stellar_sdk import Server, Keypair, TransactionBuilder, Network, FeeBumpTransaction
import requests
# 1. Load Keys
server = Server("https://horizon-testnet.stellar.org")
#stellar_quest_keypair = Keypair.from_secret("Shhhhhhh")
stellar_quest_keypair = Keypa... |
# -*- coding: utf-8 -*-
from .setup import *
from scipy.ndimage import gaussian_filter1d
from scipy.sparse import block_diag, identity, bmat, diags, spdiags
from scipy.sparse.linalg import gmres, spsolve
from lmfit import minimize, Parameters, report_fit # for pp vs b1
#from scikits.umfpack import spsolve
#from... |
#!/usr/bin/python
import lutin.debug as debug
import lutin.tools as tools
def get_type():
    """Return the lutin module type: this module builds a binary."""
    module_type = "BINARY"
    return module_type
def get_sub_type():
    """Return the lutin module sub-type: a test binary."""
    sub_type = "TEST"
    return sub_type
def get_desc():
    """Return the human-readable module description."""
    description = "test chunkware"
    return description
def get_licence():
    """Return the module licence identifier."""
    licence = "MPL-2"
    return licence
def get_compagny_type():
    """Return the company type tag used by lutin ('com')."""
    company_type = "com"
    return company_type
def get_compagny_name():
    """Return the company name associated with this module."""
    company_name = "atria-soft"
    return company_name
def get_m... |
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
def isValidBST(self, root: Optional[TreeNode]) -> bool:
ary = []
self.traverse(root, ary)
... |
from requests.auth import AuthBase
class TokenAuthenticator(AuthBase):
"""Token bases authenticator
This authenticator will add the token in the Authorization header of the
request
"""
def __init__(self, token, authentication_type=None):
"""Create a new TokenAuthenticator object
... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.