Dataset schema (column: type, observed min-max; ⌀ marks a nullable column):

- hexsha: string, length 40
- size: int64, 4 - 1.02M
- ext: string, 8 classes
- lang: string, 1 class
- max_stars_repo_path: string, length 4 - 209
- max_stars_repo_name: string, length 5 - 121
- max_stars_repo_head_hexsha: string, length 40
- max_stars_repo_licenses: list, length 1 - 10
- max_stars_count: int64, 1 - 191k ⌀
- max_stars_repo_stars_event_min_datetime: string, length 24 ⌀
- max_stars_repo_stars_event_max_datetime: string, length 24 ⌀
- max_issues_repo_path: string, length 4 - 209
- max_issues_repo_name: string, length 5 - 121
- max_issues_repo_head_hexsha: string, length 40
- max_issues_repo_licenses: list, length 1 - 10
- max_issues_count: int64, 1 - 67k ⌀
- max_issues_repo_issues_event_min_datetime: string, length 24 ⌀
- max_issues_repo_issues_event_max_datetime: string, length 24 ⌀
- max_forks_repo_path: string, length 4 - 209
- max_forks_repo_name: string, length 5 - 121
- max_forks_repo_head_hexsha: string, length 40
- max_forks_repo_licenses: list, length 1 - 10
- max_forks_count: int64, 1 - 105k ⌀
- max_forks_repo_forks_event_min_datetime: string, length 24 ⌀
- max_forks_repo_forks_event_max_datetime: string, length 24 ⌀
- content: string, length 4 - 1.02M
- avg_line_length: float64, 1.07 - 66.1k
- max_line_length: int64, 4 - 266k
- alphanum_fraction: float64, 0.01 - 1

One record per block below; the `content` field holds the raw source file.
---
hexsha: a63dd6ae87dac33dcdfc59cc58a963bf0e291080 | size: 3,817 | ext: py | lang: Python
max_stars: python/acc_sweep.py | tkoziara/parmec @ fefe0586798cd65744334f9abeab183159bd3d7a | ["MIT"] | count: null | events: null
max_issues: python/acc_sweep.py | tkoziara/parmec @ fefe0586798cd65744334f9abeab183159bd3d7a | ["MIT"] | count: 15 | events: 2017-06-09T12:05:27.000Z to 2018-10-25T13:59:58.000Z
max_forks: python/acc_sweep.py | parmes/parmec @ fefe0586798cd65744334f9abeab183159bd3d7a | ["MIT"] | count: null | events: null
content:
# acceleration sine sweep signal generation
import matplotlib.pyplot as plt
from math import sin, cos, pi
# acc_sweep: generate constant magnitude acceleration sine sweep signal
# ---------------------------------------------------------------------
# step - signal time step
# stop - duration of the signal
# lofq - low frequency for the sweep
# hifq - high frequency for the sweep
# amag - acceleration magnitude
# acc_plot - path to acceleration signal plot (time-acc)
# vel_plot - path to velocity signal plot (time-velo)
# dsp_plot - path to displacement signal plot (time-disp)
# dsp_envelope - path to displacement signal envelope (frequency-disp)
# -------------------------------------------------------------------------------
# returned: (vt, vd, vv, va), where
# - vt is a list of time instants
# - vd is a list of displacement values
# - vv is a list of velocity values
# - va is a list of acceleration values, at those time instants
# -------------------------------------------------------------------------------
def acc_sweep (step, stop, lofq, hifq, amag, acc_plot = None, vel_plot = None, dsp_plot = None, dsp_envelope = None):
t = 0.0
v = 0.0
va = []
vv = []
vf = []
extend = 0.0
while t < stop+extend:
x = t + step/2. # mid-step time
a = amag * sin (2*pi*(lofq+(hifq-lofq)*x/stop)*x) # mid-step acceleration
v = v + a * step # mid-step integration of dv / dt = a into v
va.append (a)
vv.append (v)
vf.append (lofq + (hifq-lofq)*(t/stop))
if extend == 0.0 and len(vv) > 2 and vv[-1] < vv[-2]: extend = t
t += step
# find stabilized velocity level
# by averaging the last 5 minima and maxima
imax = 0.0
vmax = 0.0
imin = 0.0
vmin = 0.0
i = len(vv)-2
while i > 0:
if vv[i-1] < vv[i] and vv[i] > vv[i+1]:
imax = imax + 1.0
vmax = vmax + vv[i]
if vv[i-1] > vv[i] and vv[i] < vv[i+1]:
imin = imin + 1.0
vmin = vmin + vv[i]
if imax == 5.0 and imin == 5.0: break
i = i - 1
vlevel = 0.1*(vmax+vmin)
# find when this level is crossed from the start
i = 0
while vv[i] < vlevel: i = i + 1
# trim histories to this moment
while i > 0:
va.pop(0)
vv.pop(0)
vf.pop(0)
i -= 1
# now produce displacement and time history
vt = []
vd = []
d = 0.0
t = 0.0
for v in vv:
vt.append (t)
vd.append (d)
t = t + step
d = d + v * step # integration of dd / dt = v
# displacement has positive drift => find the tangent of the drift angle
i = len(vd)-1
while vd[i-1] > vd[i]: i -= 1 # first maximum
while vd[i-1] < vd[i]: i -= 1 # previous minimum
j = i
while vd[j-1] > vd[i]: j += 1 # previous maximum
# shift velocity down by the tangent of the drift angle
vshift = (vd[i]+vd[j]) / (vt[i]+vt[j])
for i in range (0, len(vv)): vv[i] -= vshift
# after velocity has been shifted down, produce displacement envelope
vd = []
d = 0.0
for v in vv:
d = d + v * step # integration of dd / dt = v
vd.append (d)
if acc_plot != None:
plt.clf ()
plt.plot (vt, va)
plt.xlim ((vt[0], vt[-1]))
plt.xlabel ('time $(s)$')
plt.ylabel ('acceleration $(m/s^2)$')
plt.savefig (acc_plot)
if vel_plot != None:
plt.clf ()
plt.plot (vt, vv)
plt.xlim ((vt[0], vt[-1]))
plt.xlabel ('time $(s)$')
plt.ylabel ('velocity $(m/s)$')
plt.savefig (vel_plot)
if dsp_plot != None:
plt.clf ()
plt.plot (vt, vd)
plt.xlim ((vt[0], vt[-1]))
plt.xlabel ('time $(s)$')
plt.ylabel ('displacement $(m)$')
plt.savefig (dsp_plot)
if dsp_envelope != None:
plt.clf ()
plt.plot (vf, vd)
plt.xlim ((vf[0], vf[-1]))
plt.xlabel ('frequency $(Hz)$')
plt.ylabel ('displacement $(m)$')
plt.savefig (dsp_envelope)
return (vt, vd, vv, va)
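A minimal usage sketch of the function above (the parameter values and the output file name are illustrative assumptions, not taken from the repository):

```python
# Sweep from 1 Hz to 10 Hz over 5 s at 2 m/s^2 amplitude, 1 ms time step;
# also write the acceleration plot to disk.
vt, vd, vv, va = acc_sweep(step=0.001, stop=5.0, lofq=1.0, hifq=10.0,
                           amag=2.0, acc_plot='acc_sweep.png')
print(len(vt), 'samples; peak |a| =', max(abs(a) for a in va))
```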
avg_line_length: 28.916667 | max_line_length: 117 | alphanum_fraction: 0.550694
---
hexsha: 423ef8a3bf8541ae6cf186e2376a44c7a5f4370d | size: 1,312 | ext: py | lang: Python
max_stars: kh_site/blog/migrations/0001_initial.py | nhuntwalker/kyeisha-site @ ccccc4a277f354a5f64c38476885a499483f80fc | ["MIT"] | count: null | events: null
max_issues: kh_site/blog/migrations/0001_initial.py | nhuntwalker/kyeisha-site @ ccccc4a277f354a5f64c38476885a499483f80fc | ["MIT"] | count: 7 | events: 2020-06-05T18:51:41.000Z to 2022-03-11T23:28:14.000Z
max_forks: kh_site/blog/migrations/0001_initial.py | nhuntwalker/kyeisha-site @ ccccc4a277f354a5f64c38476885a499483f80fc | ["MIT"] | count: null | events: null
content:
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-12-19 09:27
from __future__ import unicode_literals
import blog.models
from django.db import migrations, models
import redactor.fields
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Article',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(blank=True, max_length=200)),
('slug', models.SlugField()),
('content', redactor.fields.RedactorField()),
('excerpt', redactor.fields.RedactorField(blank=True, null=True)),
('blog_photo', models.ImageField(blank=True, upload_to=blog.models._image_path)),
('date_created', models.DateTimeField(auto_now_add=True)),
('last_modified', models.DateTimeField(auto_now=True)),
('date_published', models.DateTimeField()),
('status', models.CharField(choices=[('pb', 'Public'), ('prv', 'Private'), ('dr', 'Draft')], default='dr', max_length=10)),
('tags', models.CharField(blank=True, max_length=30)),
],
),
]
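For context, a hedged sketch of applying this generated migration programmatically via Django's standard management API; the settings module path below is an assumption, not taken from the repository:

```python
# Equivalent to running `python manage.py migrate blog 0001` from the shell.
import os
import django
from django.core.management import call_command

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "kh_site.settings")  # assumed path
django.setup()
call_command("migrate", "blog", "0001")
```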
avg_line_length: 37.485714 | max_line_length: 139 | alphanum_fraction: 0.59375
---
hexsha: 7014d5dcf0f54c8ce75d5e7899ae7fd0cb983b24 | size: 1,198 | ext: py | lang: Python
max_stars: boto3_type_annotations/boto3_type_annotations/machinelearning/paginator.py | cowboygneox/boto3_type_annotations @ 450dce1de4e066b939de7eac2ec560ed1a7ddaa2 | ["MIT"] | count: 119 | events: 2018-12-01T18:20:57.000Z to 2022-02-02T10:31:29.000Z
max_issues: boto3_type_annotations/boto3_type_annotations/machinelearning/paginator.py | cowboygneox/boto3_type_annotations @ 450dce1de4e066b939de7eac2ec560ed1a7ddaa2 | ["MIT"] | count: 15 | events: 2018-11-16T00:16:44.000Z to 2021-11-13T03:44:18.000Z
max_forks: boto3_type_annotations/boto3_type_annotations/machinelearning/paginator.py | cowboygneox/boto3_type_annotations @ 450dce1de4e066b939de7eac2ec560ed1a7ddaa2 | ["MIT"] | count: 11 | events: 2019-05-06T05:26:51.000Z to 2021-09-28T15:27:59.000Z
content:
from typing import Dict
from botocore.paginate import Paginator
class DescribeBatchPredictions(Paginator):
def paginate(self, FilterVariable: str = None, EQ: str = None, GT: str = None, LT: str = None, GE: str = None, LE: str = None, NE: str = None, Prefix: str = None, SortOrder: str = None, PaginationConfig: Dict = None) -> Dict:
pass
class DescribeDataSources(Paginator):
def paginate(self, FilterVariable: str = None, EQ: str = None, GT: str = None, LT: str = None, GE: str = None, LE: str = None, NE: str = None, Prefix: str = None, SortOrder: str = None, PaginationConfig: Dict = None) -> Dict:
pass
class DescribeEvaluations(Paginator):
def paginate(self, FilterVariable: str = None, EQ: str = None, GT: str = None, LT: str = None, GE: str = None, LE: str = None, NE: str = None, Prefix: str = None, SortOrder: str = None, PaginationConfig: Dict = None) -> Dict:
pass
class DescribeMLModels(Paginator):
def paginate(self, FilterVariable: str = None, EQ: str = None, GT: str = None, LT: str = None, GE: str = None, LE: str = None, NE: str = None, Prefix: str = None, SortOrder: str = None, PaginationConfig: Dict = None) -> Dict:
pass
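These classes are static type-annotation stubs; at runtime the actual paginator comes from the boto3 client. A hedged sketch of the corresponding usage (assumes AWS credentials are configured; the region is illustrative):

```python
import boto3

client = boto3.client('machinelearning', region_name='us-east-1')
paginator = client.get_paginator('describe_ml_models')
# Each page is a dict; DescribeMLModels results live under 'Results'.
for page in paginator.paginate(SortOrder='asc'):
    for model in page.get('Results', []):
        print(model.get('MLModelId'))
```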
avg_line_length: 52.086957 | max_line_length: 229 | alphanum_fraction: 0.661937
---
hexsha: 6f8f0bb4ef0d2e5e49466f611f019c593f10f35d | size: 6,244 | ext: py | lang: Python
max_stars: notebooks/_build/jupyter_execute/content/analysis.py | LandryBulls/py-feat @ 764763270e4da5bc4614d7f61e48ddae5f855d8d | ["MIT"] | count: 5 | events: 2021-04-20T01:06:29.000Z to 2021-12-09T06:26:14.000Z
max_issues: notebooks/_build/jupyter_execute/content/analysis.py | LiQiang0307/py-feat @ 46c018bc469fdb7afd5e189b73714fbc263eb8b3 | ["MIT"] | count: null | events: null
max_forks: notebooks/_build/jupyter_execute/content/analysis.py | LiQiang0307/py-feat @ 46c018bc469fdb7afd5e189b73714fbc263eb8b3 | ["MIT"] | count: null | events: null
content:
# Preprocessing FEX data
## How to preprocess and analyze facial expression data with Feat.
*Written by Jin Hyun Cheong*
Here we will be using a sample dataset by David Watson on ["A Data-Driven Characterisation Of Natural Facial Expressions When Giving Good And Bad News"](https://journals.plos.org/ploscompbiol/article/peerReview?id=10.1371/journal.pcbi.1008335) by Watson & Johnston 2020. The full dataset is available on [OSF](https://osf.io/6tbwj/).
Let's start by installing Py-FEAT, if you have not already done so or if you are running this on Google Colab.
!pip install -q py-feat
First, we download the necessary files & videos.
import subprocess
files_to_download = ["4c5mb", "n6rt3", "3gh8v", "twqxs", "nc7d9", "nrwcm", "2rk9c", "mxkzq", "c2na7", "wj7zy", "mxywn",
"6bn3g", "jkwsp", "54gtv", "c3hpm", "utdqj", "hpw4a", "94swe", "qte5y", "aykvu", "3d5ry"]
for fid in files_to_download:
subprocess.run(f"wget --content-disposition https://osf.io/{fid}/download".split())
Check that the videos have been downloaded, along with the attributes file `clip_attrs.csv`, which describes each clip's condition.
import os, glob
import numpy as np
import pandas as pd
import seaborn as sns
sns.set_context("talk")
clip_attrs = pd.read_csv("clip_attrs.csv")
videos = np.sort(glob.glob("*.mp4"))
print(videos)
Process each video using our detector.
from feat import Detector
detector = Detector(au_model = "rf", emotion_model = "resmasknet")
for video in videos:
detector.detect_video(video, outputFname = video.replace(".mp4", ".csv"))
from feat.utils import read_feat
import pandas as pd
for ix ,video in enumerate(videos):
outputF = video.replace(".mp4", ".csv")
if ix == 0:
fex = read_feat(outputF)
else:
fex = pd.concat([fex, read_feat(outputF)])
fex = fex.dropna()
# Load in conditions
clip_attrs = pd.read_csv("clip_attrs.csv")
clip_attrs = clip_attrs.assign(input = clip_attrs.clipN.apply(lambda x: str(x).zfill(3)+".mp4"),
condition = clip_attrs['class'].replace({"gn":"goodNews", "ists":"badNews"}))
input_class_map = dict(zip(clip_attrs.input, clip_attrs['condition']))
clip_attrs.head()
## Extract features
You can set the `sessions` attribute to provide a grouping of your experimental setup. This could be the name of each video if you want to extract features per video or it could be conditions to extract features per condition.
# Extract features for the two conditions (gn: good news, ists: bad news)
conditions = dict(zip(clip_attrs.input, clip_attrs['condition']))
fex.sessions = fex.input().map(conditions)
average_au_intensity_per_video = fex.extract_mean()
display(average_au_intensity_per_video.head())
Or simply extract features per video
# Extract features per video
fex.sessions = fex.input()
average_au_intensity_per_video = fex.extract_mean()
display(average_au_intensity_per_video.head())
# Analyzing FEX data
## Simple t-test
You can use a simple t-test to test if the average activation of a certain AU is significantly higher than .5 (chance). The results suggest that AU10 (upper lip raiser), AU12 (lip corner puller), and AU14 (dimpler) are significantly activated when delivering good news.
average_au_intensity_per_video.sessions = average_au_intensity_per_video.index.map(input_class_map)
t, p = average_au_intensity_per_video[average_au_intensity_per_video.sessions=="goodNews"].aus().ttest_1samp(.5)
pd.DataFrame({"t": t, "p": p}, index= average_au_intensity_per_video.au_columns)
## Two sample independent t-test
You can also perform an independent two sample ttest between two sessions which in this case is goodNews vs badNews.
columns2compare = "mean_AU12"
sessions = ("goodNews", "badNews")
t, p = average_au_intensity_per_video.ttest_ind(col = columns2compare, sessions=sessions)
print(f"T-test between {sessions[0]} vs {sessions[1]}: t={t:.2g}, p={p:.3g}")
sns.barplot(x = average_au_intensity_per_video.sessions,
y = columns2compare,
data = average_au_intensity_per_video);
## Prediction
If you want to know what combination of features predicts the good news or bad news condition, you can train a Logistic Regression model using emotion labels to predict the conditions. Results suggest that detections of happy expressions predict the delivery of good news.
fex.sessions = fex.input().map(input_class_map)
from sklearn.linear_model import LogisticRegression
clf = fex.predict(X=fex.emotions(), y = fex.sessions, model = LogisticRegression, solver="liblinear")
print(f"score: {clf.score(fex.emotions(), fex.sessions):.3g}")
print(f"coefficients for predicting class: {clf.classes_[1]}")
display(pd.DataFrame(clf.coef_, columns = fex.emotions().columns))
## Regression
We can also run an fMRI style regression to predict the Action Unit activities from a contrast of conditions. This analysis can be conducted through the `regress` method. In this example, we identify the action units that are significantly more active in the good news versus the bad news conditions.
fex.sessions = fex.input().map(input_class_map).replace({"goodNews":.5, "badNews":-.5})
X = pd.DataFrame(fex.sessions)
X['intercept'] = 1
b, t, p, df, residuals = fex.regress(X = X, y = fex.aus())
print("Betas predicting good news estimated for each emotion.")
results = pd.concat([b.round(3).loc[[0]].rename(index={0:"betas"}),
t.round(3).loc[[0]].rename(index={0:"t-stats"}),
p.round(3).loc[[0]].rename(index={0:"p-values"})])
display(results)
## Intersubject (or intervideo) correlations
To compare the similarity of signals over time between subjects or videos, you can use the `isc` method. You can get a sense of how much two signals, such as a certain action unit activity, correlates over time.
In this example, we are calculating the ISC over videos. We want to check how similar AU01 activations are across videos so our session is set to the `input` which is the video name. Executing the `isc` method shows that the temporal profile of AU01 activations form two clusters between the goodNews and the badNews conditions.
fex.sessions = fex.input()
isc = fex.isc(col = "AU01")
sns.heatmap(isc.corr(), center=0, vmin=-1, vmax=1, cmap="RdBu_r");
avg_line_length: 49.165354 | max_line_length: 333 | alphanum_fraction: 0.741031
---
hexsha: 41431e0ec29993b52dba2b51fe2cf6b5797ed6fe | size: 1,056 | ext: py | lang: Python
max_stars: example/example_holdout.py | jimgoo/auto-sklearn @ a263efb49f7b7f597963bc1e787105ea7615ea75 | ["BSD-3-Clause"] | count: 1 | events: 2017-08-13T13:57:40.000Z to 2017-08-13T13:57:40.000Z
max_issues: example/example_holdout.py | jimgoo/auto-sklearn @ a263efb49f7b7f597963bc1e787105ea7615ea75 | ["BSD-3-Clause"] | count: null | events: null
max_forks: example/example_holdout.py | jimgoo/auto-sklearn @ a263efb49f7b7f597963bc1e787105ea7615ea75 | ["BSD-3-Clause"] | count: 1 | events: 2020-05-06T14:47:17.000Z to 2020-05-06T14:47:17.000Z
content:
import sklearn.model_selection
import sklearn.datasets
import sklearn.metrics
import autosklearn.classification
def main():
X, y = sklearn.datasets.load_digits(return_X_y=True)
X_train, X_test, y_train, y_test = \
sklearn.model_selection.train_test_split(X, y, random_state=1)
automl = autosklearn.classification.AutoSklearnClassifier(
time_left_for_this_task=120, per_run_time_limit=30,
        tmp_folder='/tmp/autosklearn_holdout_example_tmp',
output_folder='/tmp/autosklearn_holdout_example_out',
disable_evaluator_output=False)
automl.fit(X_train, y_train, dataset_name='digits')
# Print the final ensemble constructed by auto-sklearn.
print(automl.show_models())
predictions = automl.predict(X_test)
# Print statistics about the auto-sklearn run such as number of
# iterations, number of models failed with a time out.
print(automl.sprint_statistics())
print("Accuracy score", sklearn.metrics.accuracy_score(y_test, predictions))
if __name__ == '__main__':
main()
avg_line_length: 34.064516 | max_line_length: 80 | alphanum_fraction: 0.747159
---
hexsha: 20a1185fe043c5de5e460b334ea81e6a3a3c603b | size: 4,530 | ext: py | lang: Python
max_stars: models/sac_models.py | hyyh28/SAIL @ 125ad3e64eefcf532931f567b95a5320737851e9 | ["MIT"] | count: 16 | events: 2020-04-29T03:25:41.000Z to 2022-03-22T02:19:38.000Z
max_issues: models/sac_models.py | hyyh28/SAIL @ 125ad3e64eefcf532931f567b95a5320737851e9 | ["MIT"] | count: null | events: null
max_forks: models/sac_models.py | hyyh28/SAIL @ 125ad3e64eefcf532931f567b95a5320737851e9 | ["MIT"] | count: 4 | events: 2020-04-29T03:22:53.000Z to 2021-12-01T02:40:16.000Z
content:
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.distributions import Normal
LOG_SIG_MAX = 2
LOG_SIG_MIN = -20
epsilon = 1e-6
# Initialize Policy weights
def weights_init_(m):
if isinstance(m, nn.Linear):
torch.nn.init.xavier_uniform_(m.weight, gain=1)
torch.nn.init.constant_(m.bias, 0)
class QNetwork(nn.Module):
def __init__(self, num_inputs, num_actions, hidden_dim):
super(QNetwork, self).__init__()
# Q1 architecture
self.linear1 = nn.Linear(num_inputs + num_actions, hidden_dim)
self.linear2 = nn.Linear(hidden_dim, hidden_dim)
self.linear3 = nn.Linear(hidden_dim, 1)
# Q2 architecture
self.linear4 = nn.Linear(num_inputs + num_actions, hidden_dim)
self.linear5 = nn.Linear(hidden_dim, hidden_dim)
self.linear6 = nn.Linear(hidden_dim, 1)
self.apply(weights_init_)
def forward(self, state, action):
xu = torch.cat([state, action], 1)
x1 = F.relu(self.linear1(xu))
x1 = F.relu(self.linear2(x1))
x1 = self.linear3(x1)
x2 = F.relu(self.linear4(xu))
x2 = F.relu(self.linear5(x2))
x2 = self.linear6(x2)
return x1, x2
class GaussianPolicy(nn.Module):
def __init__(self, num_inputs, num_actions, hidden_dim, action_space=None):
super(GaussianPolicy, self).__init__()
self.linear1 = nn.Linear(num_inputs, hidden_dim)
self.linear2 = nn.Linear(hidden_dim, hidden_dim)
self.mean_linear = nn.Linear(hidden_dim, num_actions)
self.log_std_linear = nn.Linear(hidden_dim, num_actions)
self.apply(weights_init_)
# action rescaling
if action_space is None:
self.action_scale = torch.tensor(1.)
self.action_bias = torch.tensor(0.)
else:
self.action_scale = torch.FloatTensor(
(action_space.high - action_space.low) / 2.)
self.action_bias = torch.FloatTensor(
(action_space.high + action_space.low) / 2.)
def forward(self, state):
x = F.relu(self.linear1(state))
x = F.relu(self.linear2(x))
mean = self.mean_linear(x)
log_std = self.log_std_linear(x)
log_std = torch.clamp(log_std, min=LOG_SIG_MIN, max=LOG_SIG_MAX)
return mean, log_std
def sample(self, state):
mean, log_std = self.forward(state)
std = log_std.exp()
normal = Normal(mean, std)
x_t = normal.rsample() # for reparameterization trick (mean + std * N(0,1))
y_t = torch.tanh(x_t)
action = y_t * self.action_scale + self.action_bias
log_prob = normal.log_prob(x_t)
# Enforcing Action Bound
log_prob -= torch.log(self.action_scale * (1 - y_t.pow(2)) + epsilon)
log_prob = log_prob.sum(1, keepdim=True)
return action, log_prob, torch.tanh(mean)
def to(self, device):
self.action_scale = self.action_scale.to(device)
self.action_bias = self.action_bias.to(device)
return super(GaussianPolicy, self).to(device)
class DeterministicPolicy(nn.Module):
def __init__(self, num_inputs, num_actions, hidden_dim, action_space=None):
super(DeterministicPolicy, self).__init__()
self.linear1 = nn.Linear(num_inputs, hidden_dim)
self.linear2 = nn.Linear(hidden_dim, hidden_dim)
self.mean = nn.Linear(hidden_dim, num_actions)
self.noise = torch.Tensor(num_actions)
self.apply(weights_init_)
# action rescaling
if action_space is None:
self.action_scale = 1.
self.action_bias = 0.
else:
self.action_scale = torch.FloatTensor(
(action_space.high - action_space.low) / 2.)
self.action_bias = torch.FloatTensor(
(action_space.high + action_space.low) / 2.)
def forward(self, state):
x = F.relu(self.linear1(state))
x = F.relu(self.linear2(x))
mean = torch.tanh(self.mean(x)) * self.action_scale + self.action_bias
return mean
def sample(self, state):
mean = self.forward(state)
noise = self.noise.normal_(0., std=0.1)
noise = noise.clamp(-0.25, 0.25)
action = mean + noise
return action, torch.tensor(0.), mean
def to(self, device):
self.action_scale = self.action_scale.to(device)
self.action_bias = self.action_bias.to(device)
return super(DeterministicPolicy, self).to(device)
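A minimal smoke-test sketch for these modules (the observation/action dimensions and batch size are illustrative assumptions, not taken from the source):

```python
# Hypothetical shapes: 8-dim observations, 2-dim actions, batch of 4.
policy = GaussianPolicy(num_inputs=8, num_actions=2, hidden_dim=256)
critic = QNetwork(num_inputs=8, num_actions=2, hidden_dim=256)

state = torch.randn(4, 8)
action, log_prob, det_action = policy.sample(state)  # stochastic + tanh(mean)
q1, q2 = critic(state, action)                       # twin Q-values
print(action.shape, log_prob.shape, q1.shape)        # (4, 2), (4, 1), (4, 1)
```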
avg_line_length: 33.80597 | max_line_length: 84 | alphanum_fraction: 0.630022
---
hexsha: bf7548fa775412ccfda1f3da20225332c08f0e89 | size: 264 | ext: py | lang: Python
max_stars: Practice/balance2.py | ashishjayamohan/competitive-programming @ 05c5c560c2c2eb36121c52693b8c7d084f435f9e | ["MIT"] | count: null | events: null
max_issues: Practice/balance2.py | ashishjayamohan/competitive-programming @ 05c5c560c2c2eb36121c52693b8c7d084f435f9e | ["MIT"] | count: null | events: null
max_forks: Practice/balance2.py | ashishjayamohan/competitive-programming @ 05c5c560c2c2eb36121c52693b8c7d084f435f9e | ["MIT"] | count: null | events: null
content:
# "Balanced array" construction: for each query b, build an array of length b
# whose first half is even, second half is odd, and whose halves have equal
# sums; this is possible only when b is divisible by 4.
a = int(input())                       # number of test cases
for i in range(a):
    b = int(input())                   # requested array length
    if b % 4 == 0:
        print("YES")
        c = []
        for j in range(2, b + 1, 2):   # first half: the evens 2, 4, ..., b
            c.append(j)
        k = 1
        for j in range(b // 2 - 1):    # all but one of the odds 1, 3, 5, ...
            c.append(k)
            k += 2
        # final odd element chosen so the two halves sum to the same value
        c.append(sum(c[:b // 2 + 1]) - sum(c[b // 2 + 1:]) - 2)
        print(*c)
    else:
        print("NO")
avg_line_length: 15.529412 | max_line_length: 45 | alphanum_fraction: 0.5
---
hexsha: f225a44918d26de90bc9db9b9d74934a0f22c030 | size: 10,054 | ext: py | lang: Python
max_stars: CharakterUebernatuerlich.py | brzGatsu/Sephrasto @ 4586ffb87506fd776fc6d6f37d5b222884adb4e9 | ["MIT"] | count: null | events: null
max_issues: CharakterUebernatuerlich.py | brzGatsu/Sephrasto @ 4586ffb87506fd776fc6d6f37d5b222884adb4e9 | ["MIT"] | count: null | events: null
max_forks: CharakterUebernatuerlich.py | brzGatsu/Sephrasto @ 4586ffb87506fd776fc6d6f37d5b222884adb4e9 | ["MIT"] | count: null | events: null
content:
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'CharakterUebernatuerlich.ui'
#
# Created by: PyQt5 UI code generator 5.12.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName("Form")
Form.resize(872, 460)
self.gridLayout = QtWidgets.QGridLayout(Form)
self.gridLayout.setObjectName("gridLayout")
self.splitter = QtWidgets.QSplitter(Form)
self.splitter.setOrientation(QtCore.Qt.Horizontal)
self.splitter.setObjectName("splitter")
self.tableWidget = QtWidgets.QTableWidget(self.splitter)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.tableWidget.sizePolicy().hasHeightForWidth())
self.tableWidget.setSizePolicy(sizePolicy)
self.tableWidget.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
self.tableWidget.setAlternatingRowColors(True)
self.tableWidget.setSelectionMode(QtWidgets.QAbstractItemView.SingleSelection)
self.tableWidget.setSelectionBehavior(QtWidgets.QAbstractItemView.SelectRows)
self.tableWidget.setHorizontalScrollMode(QtWidgets.QAbstractItemView.ScrollPerPixel)
self.tableWidget.setShowGrid(True)
self.tableWidget.setObjectName("tableWidget")
self.tableWidget.setColumnCount(3)
self.tableWidget.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
item.setTextAlignment(QtCore.Qt.AlignCenter)
self.tableWidget.setHorizontalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
item.setTextAlignment(QtCore.Qt.AlignCenter)
self.tableWidget.setHorizontalHeaderItem(2, item)
self.tableWidget.horizontalHeader().setHighlightSections(False)
self.tableWidget.horizontalHeader().setMinimumSectionSize(80)
self.tableWidget.verticalHeader().setVisible(False)
self.tableWidget.verticalHeader().setDefaultSectionSize(40)
self.tableWidget.verticalHeader().setHighlightSections(True)
self.tableWidget.verticalHeader().setMinimumSectionSize(40)
self.scrollArea = QtWidgets.QScrollArea(self.splitter)
self.scrollArea.setMinimumSize(QtCore.QSize(0, 0))
self.scrollArea.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.scrollArea.setFrameShape(QtWidgets.QFrame.Box)
self.scrollArea.setWidgetResizable(True)
self.scrollArea.setObjectName("scrollArea")
self.scrollAreaWidgetContents = QtWidgets.QWidget()
self.scrollAreaWidgetContents.setGeometry(QtCore.QRect(0, 0, 258, 434))
self.scrollAreaWidgetContents.setObjectName("scrollAreaWidgetContents")
self.gridLayout_2 = QtWidgets.QGridLayout(self.scrollAreaWidgetContents)
self.gridLayout_2.setObjectName("gridLayout_2")
self.label_5 = QtWidgets.QLabel(self.scrollAreaWidgetContents)
self.label_5.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_5.setObjectName("label_5")
self.gridLayout_2.addWidget(self.label_5, 2, 0, 1, 1)
self.spinSF = QtWidgets.QSpinBox(self.scrollAreaWidgetContents)
self.spinSF.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.spinSF.setReadOnly(True)
self.spinSF.setButtonSymbols(QtWidgets.QAbstractSpinBox.NoButtons)
self.spinSF.setObjectName("spinSF")
self.gridLayout_2.addWidget(self.spinSF, 2, 1, 1, 1)
self.line = QtWidgets.QFrame(self.scrollAreaWidgetContents)
self.line.setFrameShape(QtWidgets.QFrame.VLine)
self.line.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line.setObjectName("line")
self.gridLayout_2.addWidget(self.line, 2, 2, 2, 1)
self.label_7 = QtWidgets.QLabel(self.scrollAreaWidgetContents)
self.label_7.setObjectName("label_7")
self.gridLayout_2.addWidget(self.label_7, 2, 3, 1, 1)
self.spinFW = QtWidgets.QSpinBox(self.scrollAreaWidgetContents)
self.spinFW.setAlignment(QtCore.Qt.AlignCenter)
self.spinFW.setReadOnly(False)
self.spinFW.setButtonSymbols(QtWidgets.QAbstractSpinBox.PlusMinus)
self.spinFW.setObjectName("spinFW")
self.gridLayout_2.addWidget(self.spinFW, 2, 4, 1, 1)
self.label_6 = QtWidgets.QLabel(self.scrollAreaWidgetContents)
self.label_6.setObjectName("label_6")
self.gridLayout_2.addWidget(self.label_6, 3, 0, 1, 1)
self.spinBasis = QtWidgets.QSpinBox(self.scrollAreaWidgetContents)
self.spinBasis.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.spinBasis.setReadOnly(True)
self.spinBasis.setButtonSymbols(QtWidgets.QAbstractSpinBox.NoButtons)
self.spinBasis.setObjectName("spinBasis")
self.gridLayout_2.addWidget(self.spinBasis, 3, 1, 1, 1)
self.label_8 = QtWidgets.QLabel(self.scrollAreaWidgetContents)
self.label_8.setObjectName("label_8")
self.gridLayout_2.addWidget(self.label_8, 3, 3, 1, 1)
self.spinPW = QtWidgets.QSpinBox(self.scrollAreaWidgetContents)
self.spinPW.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.spinPW.setReadOnly(True)
self.spinPW.setButtonSymbols(QtWidgets.QAbstractSpinBox.NoButtons)
self.spinPW.setObjectName("spinPW")
self.gridLayout_2.addWidget(self.spinPW, 3, 4, 1, 1)
self.label_9 = QtWidgets.QLabel(self.scrollAreaWidgetContents)
self.label_9.setObjectName("label_9")
self.gridLayout_2.addWidget(self.label_9, 4, 0, 1, 2)
self.buttonAdd = QtWidgets.QPushButton(self.scrollAreaWidgetContents)
self.buttonAdd.setMaximumSize(QtCore.QSize(25, 20))
self.buttonAdd.setObjectName("buttonAdd")
self.gridLayout_2.addWidget(self.buttonAdd, 4, 4, 1, 1)
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
self.listTalente = QtWidgets.QListView(self.scrollAreaWidgetContents)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.listTalente.sizePolicy().hasHeightForWidth())
self.listTalente.setSizePolicy(sizePolicy)
self.listTalente.setMaximumSize(QtCore.QSize(16777215, 80))
self.listTalente.setObjectName("listTalente")
self.horizontalLayout.addWidget(self.listTalente)
self.gridLayout_2.addLayout(self.horizontalLayout, 5, 0, 1, 5)
self.plainText = QtWidgets.QPlainTextEdit(self.scrollAreaWidgetContents)
self.plainText.setFrameShape(QtWidgets.QFrame.Box)
self.plainText.setFrameShadow(QtWidgets.QFrame.Sunken)
self.plainText.setLineWidth(1)
self.plainText.setReadOnly(True)
self.plainText.setBackgroundVisible(False)
self.plainText.setObjectName("plainText")
self.gridLayout_2.addWidget(self.plainText, 6, 0, 1, 5)
self.labelAttribute = QtWidgets.QLabel(self.scrollAreaWidgetContents)
self.labelAttribute.setMinimumSize(QtCore.QSize(0, 18))
font = QtGui.QFont()
font.setItalic(True)
self.labelAttribute.setFont(font)
self.labelAttribute.setObjectName("labelAttribute")
self.gridLayout_2.addWidget(self.labelAttribute, 1, 0, 1, 5)
self.labelFertigkeit = QtWidgets.QLabel(self.scrollAreaWidgetContents)
self.labelFertigkeit.setMinimumSize(QtCore.QSize(0, 20))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.labelFertigkeit.setFont(font)
self.labelFertigkeit.setObjectName("labelFertigkeit")
self.gridLayout_2.addWidget(self.labelFertigkeit, 0, 0, 1, 5)
self.scrollArea.setWidget(self.scrollAreaWidgetContents)
self.gridLayout.addWidget(self.splitter, 0, 0, 1, 1)
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
Form.setTabOrder(self.tableWidget, self.scrollArea)
Form.setTabOrder(self.scrollArea, self.spinSF)
Form.setTabOrder(self.spinSF, self.spinFW)
Form.setTabOrder(self.spinFW, self.spinBasis)
Form.setTabOrder(self.spinBasis, self.spinPW)
Form.setTabOrder(self.spinPW, self.buttonAdd)
Form.setTabOrder(self.buttonAdd, self.listTalente)
Form.setTabOrder(self.listTalente, self.plainText)
def retranslateUi(self, Form):
_translate = QtCore.QCoreApplication.translate
Form.setWindowTitle(_translate("Form", "Form"))
item = self.tableWidget.horizontalHeaderItem(0)
item.setText(_translate("Form", "Fertigkeitsname"))
item = self.tableWidget.horizontalHeaderItem(1)
item.setText(_translate("Form", "FW"))
item = self.tableWidget.horizontalHeaderItem(2)
item.setText(_translate("Form", "Talente"))
self.label_5.setText(_translate("Form", "SF:"))
self.label_7.setText(_translate("Form", "FW:"))
self.label_6.setText(_translate("Form", "Basis:"))
self.label_8.setText(_translate("Form", "PW:"))
self.label_9.setText(_translate("Form", "Erworbene Talente:"))
self.buttonAdd.setText(_translate("Form", "+"))
self.labelAttribute.setText(_translate("Form", "Attribute"))
self.labelFertigkeit.setText(_translate("Form", "Fertigkeit"))
if __name__ == "__main__":
import sys
app = QtWidgets.QApplication(sys.argv)
Form = QtWidgets.QWidget()
ui = Ui_Form()
ui.setupUi(Form)
Form.show()
sys.exit(app.exec_())
avg_line_length: 54.053763 | max_line_length: 108 | alphanum_fraction: 0.720708
---
hexsha: 1bbfa0c687b579c87b34df398882316e47f40da5 | size: 1,353 | ext: py | lang: Python
max_stars: Implementation/DataStructures/python/ArrayStack.py | J0sueTM/Competitive-Programming @ e83418a1ae3feb34917ac17835195964c6ef143a | ["MIT"] | count: 2 | events: 2020-08-20T23:48:02.000Z to 2020-11-22T18:35:08.000Z
max_issues: Implementation/DataStructures/python/ArrayStack.py | J0sueTM/Competitive-Programming @ e83418a1ae3feb34917ac17835195964c6ef143a | ["MIT"] | count: null | events: null
max_forks: Implementation/DataStructures/python/ArrayStack.py | J0sueTM/Competitive-Programming @ e83418a1ae3feb34917ac17835195964c6ef143a | ["MIT"] | count: null | events: null
content:
import sys
class Stack:
def __init__(self, stack_capacity):
self.stack = [0] * (stack_capacity + 1)
self.stack_size = stack_capacity
self.current_top_pos = 0
def is_empty(self):
return self.current_top_pos == 0
    def is_full(self):
        # slot 0 is unused, so the stack is full when the top index reaches
        # the capacity itself, not capacity - 1
        return self.current_top_pos == self.stack_size
def push(self, new_data):
if self.is_full():
return sys.maxsize # stack overflow
        self.current_top_pos = self.current_top_pos + 1
self.stack[self.current_top_pos] = new_data
return self.stack[self.current_top_pos]
def pop(self):
if self.is_empty():
return (-sys.maxsize) # stack underflow
        popped = self.stack[self.current_top_pos]
        self.stack[self.current_top_pos] = 0
        self.current_top_pos = self.current_top_pos - 1
return popped
def top(self):
if self.is_empty():
return (-sys.maxsize) # stack underflow
return self.stack[self.current_top_pos]
test_stack = Stack(4)
test_stack.push(10)
test_stack.push(52)
test_stack.push(238)
test_stack.push(528)
test_stack.push(962)
print(test_stack.pop())
if test_stack.is_empty():
print("empty")
else:
print("not empty")
if test_stack.is_full():
print("full")
else:
print("not full")
print(test_stack.top())
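A hand-derived trace of the driver above, assuming the `is_full` fix (the fifth push of 962 overflows and is dropped):

```python
# Expected output:
#   528          -> 962 overflowed, so 528 was the top and is popped
#   not empty    -> three items remain
#   not full     -> one of four slots is free after the pop
#   238          -> new top of the stack
```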
avg_line_length: 22.180328 | max_line_length: 60 | alphanum_fraction: 0.641537
---
hexsha: 8abad7c8acdb0b4da22945151e79e39e2bbfafb6 | size: 29,574 | ext: py | lang: Python
max_stars: python_modules/dagster/dagster_tests/check_tests/test_check.py | shahvineet98/dagster @ 2471d39c52f660e23e8c0d8e8ded873ddc3df036 | ["Apache-2.0"] | count: 1 | events: 2019-11-25T19:03:32.000Z to 2019-11-25T19:03:32.000Z
max_issues: python_modules/dagster/dagster_tests/check_tests/test_check.py | shahvineet98/dagster @ 2471d39c52f660e23e8c0d8e8ded873ddc3df036 | ["Apache-2.0"] | count: null | events: null
max_forks: python_modules/dagster/dagster_tests/check_tests/test_check.py | shahvineet98/dagster @ 2471d39c52f660e23e8c0d8e8ded873ddc3df036 | ["Apache-2.0"] | count: null | events: null
content:
import sys
from collections import defaultdict
from contextlib import contextmanager
import pytest
from dagster import check
from dagster.check import (
CheckError,
ElementCheckError,
NotImplementedCheckError,
ParameterCheckError,
)
def test_int_param():
assert check.int_param(-1, 'param_name') == -1
assert check.int_param(0, 'param_name') == 0
assert check.int_param(1, 'param_name') == 1
with pytest.raises(ParameterCheckError):
check.int_param(None, 'param_name')
with pytest.raises(ParameterCheckError):
check.int_param('s', 'param_name')
def test_int_value_param():
assert check.int_value_param(-1, -1, 'param_name') == -1
with pytest.raises(ParameterCheckError):
check.int_value_param(None, -1, 'param_name')
with pytest.raises(ParameterCheckError):
check.int_value_param(1, 0, 'param_name')
def test_opt_int_param():
assert check.opt_int_param(-1, 'param_name') == -1
assert check.opt_int_param(0, 'param_name') == 0
assert check.opt_int_param(1, 'param_name') == 1
assert check.opt_int_param(None, 'param_name') is None
with pytest.raises(ParameterCheckError):
check.opt_int_param('s', 'param_name')
def test_float_param():
assert check.float_param(-1.0, 'param_name') == -1.0
assert check.float_param(0.0, 'param_name') == 0.0
assert check.float_param(1.1, 'param_name') == 1.1
with pytest.raises(ParameterCheckError):
check.float_param(None, 'param_name')
with pytest.raises(ParameterCheckError):
check.float_param('s', 'param_name')
with pytest.raises(ParameterCheckError):
check.float_param(1, 'param_name')
with pytest.raises(ParameterCheckError):
check.float_param(0, 'param_name')
def test_opt_float_param():
assert check.opt_float_param(-1.0, 'param_name') == -1.0
assert check.opt_float_param(0.0, 'param_name') == 0.0
assert check.opt_float_param(1.1, 'param_name') == 1.1
assert check.opt_float_param(None, 'param_name') is None
with pytest.raises(ParameterCheckError):
check.opt_float_param('s', 'param_name')
def test_list_param():
assert check.list_param([], 'list_param') == []
with pytest.raises(ParameterCheckError):
check.list_param(None, 'list_param')
with pytest.raises(ParameterCheckError):
check.list_param('3u4', 'list_param')
def test_set_param():
assert check.set_param(set(), 'set_param') == set()
with pytest.raises(ParameterCheckError):
check.set_param(None, 'set_param')
with pytest.raises(ParameterCheckError):
check.set_param('3u4', 'set_param')
obj_set = {1}
assert check.set_param(obj_set, 'set_param') == obj_set
obj_set_two = {1, 1, 2}
obj_set_two_deduped = {1, 2}
assert check.set_param(obj_set_two, 'set_param') == obj_set_two_deduped
assert check.set_param(obj_set_two, 'set_param', of_type=int) == obj_set_two_deduped
with pytest.raises(CheckError, match='Did you pass a class'):
check.set_param({str}, 'set_param', of_type=int)
with pytest.raises(CheckError, match='Member of set mismatches type'):
check.set_param({'foo'}, 'set_param', of_type=int)
def test_is_list():
assert check.is_list([]) == []
with pytest.raises(CheckError):
check.is_list(None)
with pytest.raises(CheckError):
check.is_list('3u4')
with pytest.raises(CheckError, match='Did you pass a class'):
check.is_list([str], of_type=int)
def test_typed_list_param():
class Foo(object):
pass
class Bar(object):
pass
assert check.list_param([], 'list_param', Foo) == []
foo_list = [Foo()]
assert check.list_param(foo_list, 'list_param', Foo) == foo_list
with pytest.raises(CheckError):
check.list_param([Bar()], 'list_param', Foo)
with pytest.raises(CheckError):
check.list_param([None], 'list_param', Foo)
def test_typed_is_list():
class Foo(object):
pass
class Bar(object):
pass
assert check.is_list([], Foo) == []
foo_list = [Foo()]
assert check.is_list(foo_list, Foo) == foo_list
with pytest.raises(CheckError):
check.is_list([Bar()], Foo)
with pytest.raises(CheckError):
check.is_list([None], Foo)
def test_opt_list_param():
assert check.opt_list_param(None, 'list_param') == []
assert check.opt_list_param([], 'list_param') == []
obj_list = [1]
assert check.list_param(obj_list, 'list_param') == obj_list
with pytest.raises(ParameterCheckError):
check.opt_list_param(0, 'list_param')
with pytest.raises(ParameterCheckError):
check.opt_list_param('', 'list_param')
with pytest.raises(ParameterCheckError):
check.opt_list_param('3u4', 'list_param')
def test_opt_set_param():
assert check.opt_set_param(None, 'set_param') == set()
assert check.opt_set_param(set(), 'set_param') == set()
assert check.opt_set_param({3}, 'set_param') == {3}
with pytest.raises(ParameterCheckError):
check.opt_set_param(0, 'set_param')
with pytest.raises(ParameterCheckError):
check.opt_set_param('', 'set_param')
with pytest.raises(ParameterCheckError):
check.opt_set_param('3u4', 'set_param')
def test_opt_nullable_list_param():
assert check.opt_nullable_list_param(None, 'list_param') is None
assert check.opt_nullable_list_param([], 'list_param') == []
obj_list = [1]
assert check.opt_nullable_list_param(obj_list, 'list_param') == obj_list
with pytest.raises(ParameterCheckError):
check.opt_nullable_list_param(0, 'list_param')
with pytest.raises(ParameterCheckError):
check.opt_nullable_list_param('', 'list_param')
with pytest.raises(ParameterCheckError):
check.opt_nullable_list_param('3u4', 'list_param')
def test_opt_typed_list_param():
class Foo(object):
pass
class Bar(object):
pass
assert check.opt_list_param(None, 'list_param', Foo) == []
assert check.opt_list_param([], 'list_param', Foo) == []
foo_list = [Foo()]
assert check.opt_list_param(foo_list, 'list_param', Foo) == foo_list
with pytest.raises(CheckError):
check.opt_list_param([Bar()], 'list_param', Foo)
with pytest.raises(CheckError):
check.opt_list_param([None], 'list_param', Foo)
def test_dict_param():
assert check.dict_param({}, 'dict_param') == {}
ddict = {'a': 2}
assert check.dict_param(ddict, 'dict_param') == ddict
with pytest.raises(ParameterCheckError):
check.dict_param(None, 'dict_param')
with pytest.raises(ParameterCheckError):
check.dict_param(0, 'dict_param')
with pytest.raises(ParameterCheckError):
check.dict_param(1, 'dict_param')
with pytest.raises(ParameterCheckError):
check.dict_param('foo', 'dict_param')
with pytest.raises(ParameterCheckError):
check.dict_param(['foo'], 'dict_param')
with pytest.raises(ParameterCheckError):
check.dict_param([], 'dict_param')
def test_dict_param_with_type():
str_to_int = {'str': 1}
assert check.dict_param(str_to_int, 'str_to_int', key_type=str, value_type=int)
assert check.dict_param(str_to_int, 'str_to_int', value_type=int)
assert check.dict_param(str_to_int, 'str_to_int', key_type=str)
assert check.dict_param(str_to_int, 'str_to_int')
assert check.dict_param({}, 'str_to_int', key_type=str, value_type=int) == {}
assert check.dict_param({}, 'str_to_int', value_type=int) == {}
assert check.dict_param({}, 'str_to_int', key_type=str) == {}
assert check.dict_param({}, 'str_to_int') == {}
class Wrong(object):
pass
with pytest.raises(CheckError):
assert check.dict_param(str_to_int, 'str_to_int', key_type=Wrong, value_type=Wrong)
with pytest.raises(CheckError):
assert check.dict_param(str_to_int, 'str_to_int', key_type=Wrong, value_type=int)
with pytest.raises(CheckError):
assert check.dict_param(str_to_int, 'str_to_int', key_type=str, value_type=Wrong)
with pytest.raises(CheckError):
assert check.dict_param(str_to_int, 'str_to_int', key_type=Wrong)
with pytest.raises(CheckError):
assert check.dict_param(str_to_int, 'str_to_int', value_type=Wrong)
def test_opt_dict_param_with_type():
str_to_int = {'str': 1}
assert check.opt_dict_param(str_to_int, 'str_to_int', key_type=str, value_type=int)
assert check.opt_dict_param(str_to_int, 'str_to_int', value_type=int)
assert check.opt_dict_param(str_to_int, 'str_to_int', key_type=str)
assert check.opt_dict_param(str_to_int, 'str_to_int')
assert check.opt_dict_param({}, 'str_to_int', key_type=str, value_type=int) == {}
assert check.opt_dict_param({}, 'str_to_int', value_type=int) == {}
assert check.opt_dict_param({}, 'str_to_int', key_type=str) == {}
assert check.opt_dict_param({}, 'str_to_int') == {}
assert check.opt_dict_param(None, 'str_to_int', key_type=str, value_type=int) == {}
assert check.opt_dict_param(None, 'str_to_int', value_type=int) == {}
assert check.opt_dict_param(None, 'str_to_int', key_type=str) == {}
assert check.opt_dict_param(None, 'str_to_int') == {}
class Wrong(object):
pass
with pytest.raises(CheckError):
assert check.opt_dict_param(str_to_int, 'str_to_int', key_type=Wrong, value_type=Wrong)
with pytest.raises(CheckError):
assert check.opt_dict_param(str_to_int, 'str_to_int', key_type=Wrong, value_type=int)
with pytest.raises(CheckError):
assert check.opt_dict_param(str_to_int, 'str_to_int', key_type=str, value_type=Wrong)
with pytest.raises(CheckError):
assert check.opt_dict_param(str_to_int, 'str_to_int', key_type=Wrong)
with pytest.raises(CheckError):
assert check.opt_dict_param(str_to_int, 'str_to_int', value_type=Wrong)
def test_opt_dict_param():
assert check.opt_dict_param(None, 'opt_dict_param') == {}
assert check.opt_dict_param({}, 'opt_dict_param') == {}
ddict = {'a': 2}
assert check.opt_dict_param(ddict, 'opt_dict_param') == ddict
with pytest.raises(ParameterCheckError):
check.opt_dict_param(0, 'opt_dict_param')
with pytest.raises(ParameterCheckError):
check.opt_dict_param(1, 'opt_dict_param')
with pytest.raises(ParameterCheckError):
check.opt_dict_param('foo', 'opt_dict_param')
with pytest.raises(ParameterCheckError):
check.opt_dict_param(['foo'], 'opt_dict_param')
with pytest.raises(ParameterCheckError):
check.opt_dict_param([], 'opt_dict_param')
def test_opt_nullable_dict_param():
assert check.opt_nullable_dict_param(None, 'opt_nullable_dict_param') is None
assert check.opt_nullable_dict_param({}, 'opt_nullable_dict_param') == {}
ddict = {'a': 2}
assert check.opt_nullable_dict_param(ddict, 'opt_nullable_dict_param') == ddict
class Foo:
pass
class Bar(Foo):
pass
ddict_class = {'a': Bar}
assert (
check.opt_nullable_dict_param(ddict_class, 'opt_nullable_dict_param', value_class=Foo)
== ddict_class
)
with pytest.raises(ParameterCheckError):
check.opt_nullable_dict_param(1, 'opt_nullable_dict_param')
with pytest.raises(ParameterCheckError):
check.opt_nullable_dict_param('foo', 'opt_nullable_dict_param')
def test_str_param():
assert check.str_param('a', 'str_param') == 'a'
assert check.str_param('', 'str_param') == ''
assert check.str_param(u'a', 'unicode_param') == u'a'
with pytest.raises(ParameterCheckError):
check.str_param(None, 'str_param')
with pytest.raises(ParameterCheckError):
check.str_param(0, 'str_param')
with pytest.raises(ParameterCheckError):
check.str_param(1, 'str_param')
def test_opt_str_param():
assert check.opt_str_param('a', 'str_param') == 'a'
assert check.opt_str_param('', 'str_param') == ''
assert check.opt_str_param(u'a', 'unicode_param') == u'a'
assert check.opt_str_param(None, 'str_param') is None
assert check.opt_str_param(None, 'str_param', 'foo') == 'foo'
with pytest.raises(ParameterCheckError):
check.opt_str_param(0, 'str_param')
with pytest.raises(ParameterCheckError):
check.opt_str_param(1, 'str_param')
def test_opt_nonempty_str_param():
assert check.opt_nonempty_str_param('a', 'str_param') == 'a'
assert check.opt_nonempty_str_param('', 'str_param') is None
assert check.opt_nonempty_str_param('', 'str_param', 'foo') == 'foo'
assert check.opt_nonempty_str_param(u'a', 'unicode_param') == u'a'
assert check.opt_nonempty_str_param(None, 'str_param') is None
assert check.opt_nonempty_str_param(None, 'str_param', 'foo') == 'foo'
with pytest.raises(ParameterCheckError):
check.opt_nonempty_str_param(0, 'str_param')
with pytest.raises(ParameterCheckError):
check.opt_nonempty_str_param(1, 'str_param')
def test_bool_param():
assert check.bool_param(True, 'b') is True
assert check.bool_param(False, 'b') is False
with pytest.raises(ParameterCheckError):
check.bool_param(None, 'b')
with pytest.raises(ParameterCheckError):
check.bool_param(0, 'b')
with pytest.raises(ParameterCheckError):
check.bool_param('val', 'b')
def test_opt_bool_param():
assert check.opt_bool_param(True, 'b') is True
assert check.opt_bool_param(False, 'b') is False
assert check.opt_bool_param(None, 'b') is None
assert check.opt_bool_param(None, 'b', True) is True
assert check.opt_bool_param(None, 'b', False) is False
with pytest.raises(ParameterCheckError):
check.opt_bool_param(0, 'b')
with pytest.raises(ParameterCheckError):
check.opt_bool_param('val', 'b')
def test_callable_param():
lamb = lambda: 1
assert check.callable_param(lamb, 'lamb') == lamb
with pytest.raises(ParameterCheckError):
check.callable_param(None, 'lamb')
with pytest.raises(ParameterCheckError):
check.callable_param(2, 'lamb')
def test_opt_callable_param():
lamb = lambda: 1
assert check.opt_callable_param(lamb, 'lamb') == lamb
assert check.opt_callable_param(None, 'lamb') is None
assert check.opt_callable_param(None, 'lamb', default=None) is None
assert check.opt_callable_param(None, 'lamb', default=lamb) == lamb
with pytest.raises(ParameterCheckError):
check.opt_callable_param(2, 'lamb')
def test_param_invariant():
check.param_invariant(True, 'some_param')
num_to_check = 1
check.param_invariant(num_to_check == 1, 'some_param')
with pytest.raises(ParameterCheckError):
check.param_invariant(num_to_check == 2, 'some_param')
with pytest.raises(ParameterCheckError):
check.param_invariant(False, 'some_param')
with pytest.raises(ParameterCheckError):
check.param_invariant(0, 'some_param')
check.param_invariant(1, 'some_param')
with pytest.raises(ParameterCheckError):
check.param_invariant('', 'some_param')
check.param_invariant('1kjkjsf', 'some_param')
with pytest.raises(ParameterCheckError):
check.param_invariant({}, 'some_param')
check.param_invariant({234: '1kjkjsf'}, 'some_param')
with pytest.raises(ParameterCheckError):
check.param_invariant([], 'some_param')
check.param_invariant([234], 'some_param')
def test_string_elem():
ddict = {'a_str': 'a', 'a_num': 1, 'a_none': None}
assert check.str_elem(ddict, 'a_str') == 'a'
with pytest.raises(ElementCheckError):
assert check.str_elem(ddict, 'a_none')
with pytest.raises(ElementCheckError):
check.str_elem(ddict, 'a_num')
def test_opt_string_elem():
ddict = {'a_str': 'a', 'a_num': 1, 'a_none': None}
assert check.opt_str_elem(ddict, 'a_str') == 'a'
    assert check.opt_str_elem(ddict, 'a_none') is None
    assert check.opt_str_elem(ddict, 'nonexistentkey') is None
with pytest.raises(ElementCheckError):
check.opt_str_elem(ddict, 'a_num')
def test_bool_elem():
ddict = {'a_true': True, 'a_str': 'a', 'a_num': 1, 'a_none': None}
assert check.bool_elem(ddict, 'a_true') is True
with pytest.raises(ElementCheckError):
check.bool_elem(ddict, 'a_none')
with pytest.raises(ElementCheckError):
check.bool_elem(ddict, 'a_num')
with pytest.raises(ElementCheckError):
check.bool_elem(ddict, 'a_str')
def test_invariant():
assert check.invariant(True)
with pytest.raises(CheckError):
check.invariant(False)
with pytest.raises(CheckError, match='Some Unique String'):
check.invariant(False, 'Some Unique String')
empty_list = []
with pytest.raises(CheckError, match='Invariant failed'):
check.invariant(empty_list)
def test_failed():
with pytest.raises(CheckError, match='some desc'):
check.failed('some desc')
with pytest.raises(CheckError, match='must be a string'):
check.failed(0)
def test_not_implemented():
with pytest.raises(NotImplementedCheckError, match='some string'):
check.not_implemented('some string')
with pytest.raises(CheckError, match='desc argument must be a string'):
check.not_implemented(None)
def test_inst():
class Foo(object):
pass
class Bar(object):
pass
obj = Foo()
assert check.inst(obj, Foo) == obj
with pytest.raises(CheckError, match='not a Bar'):
check.inst(Foo(), Bar)
with pytest.raises(CheckError, match='Desc: Expected only a Bar'):
check.inst(Foo(), Bar, 'Expected only a Bar')
def test_inst_param():
class Foo(object):
pass
class Bar(object):
pass
class Baaz(object):
pass
obj = Foo()
assert check.inst_param(obj, 'obj', Foo) == obj
with pytest.raises(ParameterCheckError, match='not a Bar'):
check.inst_param(None, 'obj', Bar)
with pytest.raises(ParameterCheckError, match='not a Bar'):
check.inst_param(Bar, 'obj', Bar)
with pytest.raises(ParameterCheckError, match='not a Bar'):
check.inst_param(Foo(), 'obj', Bar)
with pytest.raises(ParameterCheckError, match=r"not one of \['Bar', 'Foo'\]"):
check.inst_param(None, 'obj', (Foo, Bar))
with pytest.raises(ParameterCheckError, match=r"not one of \['Bar', 'Foo'\]"):
check.inst_param(Baaz(), 'obj', (Foo, Bar))
def test_opt_inst_param():
class Foo(object):
pass
class Bar(object):
pass
class Baaz(object):
pass
obj = Foo()
assert check.opt_inst_param(obj, 'obj', Foo) == obj
assert check.opt_inst_param(None, 'obj', Foo) is None
assert check.opt_inst_param(None, 'obj', Bar) is None
with pytest.raises(ParameterCheckError, match='not a Bar'):
check.opt_inst_param(Bar, 'obj', Bar)
with pytest.raises(ParameterCheckError, match='not a Bar'):
check.opt_inst_param(Foo(), 'obj', Bar)
# check defaults
default_obj = Foo()
assert check.opt_inst_param(None, 'obj', Foo, default_obj) is default_obj
assert check.opt_inst_param(None, 'obj', (Foo, Bar)) is None
with pytest.raises(ParameterCheckError, match=r"not one of \['Bar', 'Foo'\]"):
check.inst_param(Baaz(), 'obj', (Foo, Bar))
def test_dict_elem():
dict_value = {'blah': 'blahblah'}
ddict = {'dictkey': dict_value, 'stringkey': 'A', 'nonekey': None}
assert check.dict_elem(ddict, 'dictkey') == dict_value
with pytest.raises(CheckError):
check.dict_elem(ddict, 'stringkey')
with pytest.raises(CheckError):
check.dict_elem(ddict, 'nonekey')
with pytest.raises(CheckError):
check.dict_elem(ddict, 'nonexistantkey')
def test_opt_dict_elem():
dict_value = {'blah': 'blahblah'}
ddict = {'dictkey': dict_value, 'stringkey': 'A', 'nonekey': None}
assert check.opt_dict_elem(ddict, 'dictkey') == dict_value
assert check.opt_dict_elem(ddict, 'nonekey') == {}
assert check.opt_dict_elem(ddict, 'nonexistantkey') == {}
with pytest.raises(CheckError):
check.opt_dict_elem(ddict, 'stringkey')
def test_list_elem():
list_value = ['blah', 'blahblah']
ddict = {'listkey': list_value, 'stringkey': 'A', 'nonekey': None}
assert check.list_elem(ddict, 'listkey') == list_value
with pytest.raises(CheckError):
assert check.list_elem(ddict, 'nonekey') == []
with pytest.raises(CheckError):
assert check.list_elem(ddict, 'nonexistantkey') == []
with pytest.raises(CheckError):
check.list_elem(ddict, 'stringkey')
def test_opt_list_elem():
list_value = ['blah', 'blahblah']
ddict = {'listkey': list_value, 'stringkey': 'A', 'nonekey': None}
assert check.opt_list_elem(ddict, 'listkey') == list_value
assert check.opt_list_elem(ddict, 'nonekey') == []
assert check.opt_list_elem(ddict, 'nonexistantkey') == []
with pytest.raises(CheckError):
check.opt_list_elem(ddict, 'stringkey')
def test_not_none_param():
assert check.not_none_param(1, 'fine')
check.not_none_param(0, 'zero is fine')
check.not_none_param('', 'empty str is fine')
with pytest.raises(CheckError):
check.not_none_param(None, 'none fails')
def test_is_callable():
def fn():
pass
assert check.is_callable(fn) == fn
assert check.is_callable(lambda: None)
assert check.is_callable(lambda: None, 'some desc')
with pytest.raises(CheckError):
check.is_callable(None)
with pytest.raises(CheckError):
check.is_callable(1)
with pytest.raises(CheckError, match='some other desc'):
check.is_callable(1, 'some other desc')
def test_tuple_param():
assert check.tuple_param((1, 2), 'something')
with pytest.raises(CheckError):
assert check.tuple_param(None, 'something')
with pytest.raises(CheckError):
assert check.tuple_param(1, 'something')
with pytest.raises(CheckError):
assert check.tuple_param([1], 'something')
with pytest.raises(CheckError):
assert check.tuple_param({1: 2}, 'something')
with pytest.raises(CheckError):
assert check.tuple_param('kdjfkd', 'something')
def test_opt_tuple_param():
assert check.opt_tuple_param((1, 2), 'something')
assert check.opt_tuple_param(None, 'something') is None
    assert check.opt_tuple_param(None, 'something', (2,)) == (2,)
with pytest.raises(CheckError):
assert check.opt_tuple_param(1, 'something')
with pytest.raises(CheckError):
assert check.opt_tuple_param([1], 'something')
with pytest.raises(CheckError):
assert check.opt_tuple_param({1: 2}, 'something')
with pytest.raises(CheckError):
assert check.opt_tuple_param('kdjfkd', 'something')
def test_opt_type_param():
class Foo(object):
pass
assert check.opt_type_param(int, 'foo')
assert check.opt_type_param(Foo, 'foo')
assert check.opt_type_param(None, 'foo') is None
assert check.opt_type_param(None, 'foo', Foo) is Foo
with pytest.raises(CheckError):
check.opt_type_param(check, 'foo')
with pytest.raises(CheckError):
check.opt_type_param(234, 'foo')
with pytest.raises(CheckError):
check.opt_type_param('bar', 'foo')
with pytest.raises(CheckError):
check.opt_type_param(Foo(), 'foo')
def test_type_param():
class Bar(object):
pass
assert check.type_param(int, 'foo')
assert check.type_param(Bar, 'foo')
with pytest.raises(CheckError):
check.type_param(None, 'foo')
with pytest.raises(CheckError):
check.type_param(check, 'foo')
with pytest.raises(CheckError):
check.type_param(234, 'foo')
with pytest.raises(CheckError):
check.type_param('bar', 'foo')
with pytest.raises(CheckError):
check.type_param(Bar(), 'foo')
def test_subclass_param():
class Super(object):
pass
class Sub(Super):
pass
class Alone(object):
pass
assert check.subclass_param(Sub, 'foo', Super)
with pytest.raises(CheckError):
assert check.subclass_param(Alone, 'foo', Super)
with pytest.raises(CheckError):
assert check.subclass_param('value', 'foo', Super)
assert check.opt_subclass_param(Sub, 'foo', Super)
assert check.opt_subclass_param(None, 'foo', Super) is None
with pytest.raises(CheckError):
assert check.opt_subclass_param(Alone, 'foo', Super)
with pytest.raises(CheckError):
assert check.opt_subclass_param('value', 'foo', Super)
@contextmanager
def raises_with_message(exc_type, message_text):
with pytest.raises(exc_type) as exc_info:
yield
assert str(exc_info.value) == message_text
def is_python_three():
return sys.version_info[0] >= 3
def test_two_dim_dict():
assert check.two_dim_dict_param({}, 'foo') == {}
assert check.two_dim_dict_param({'key': {}}, 'foo')
assert check.two_dim_dict_param({'key': {'key2': 2}}, 'foo')
# make sure default dict passes
default_dict = defaultdict(dict)
default_dict['key']['key2'] = 2
assert check.two_dim_dict_param(default_dict, 'foo')
with raises_with_message(
CheckError,
'''Param "foo" is not a dict. Got None which is type <class 'NoneType'>.'''
if is_python_three()
else '''Param "foo" is not a dict. Got None which is type <type 'NoneType'>.''',
):
check.two_dim_dict_param(None, 'foo')
with raises_with_message(
CheckError,
"Value in dictionary mismatches expected type for key int_value. Expected value "
"of type <class 'dict'>. Got value 2 of type <class 'int'>."
if is_python_three()
else "Value in dictionary mismatches expected type for key int_value. Expected value "
"of type <type 'dict'>. Got value 2 of type <type 'int'>.",
):
check.two_dim_dict_param({'int_value': 2}, 'foo')
with raises_with_message(
CheckError,
"Value in dictionary mismatches expected type for key level_two_value_mismatch. "
"Expected value of type (<class 'str'>,). Got value 2 of type <class 'int'>."
if is_python_three()
else "Value in dictionary mismatches expected type for key level_two_value_mismatch. "
"Expected value of type (<type 'basestring'>,). Got value 2 of type <type 'int'>.",
):
check.two_dim_dict_param(
{'level_one_key': {'level_two_value_mismatch': 2}}, 'foo', value_type=str
)
with raises_with_message(
CheckError,
"Key in dictionary mismatches type. Expected <class 'int'>. Got 'key'"
if is_python_three()
else "Key in dictionary mismatches type. Expected <type 'int'>. Got 'key'",
):
assert check.two_dim_dict_param({'key': {}}, 'foo', key_type=int)
with raises_with_message(
CheckError,
"Key in dictionary mismatches type. Expected <class 'int'>. Got 'level_two_key'"
if is_python_three()
else "Key in dictionary mismatches type. Expected <type 'int'>. Got 'level_two_key'",
):
assert check.two_dim_dict_param({1: {'level_two_key': 'something'}}, 'foo', key_type=int)
def test_opt_two_dim_dict_parm():
assert check.opt_two_dim_dict_param({}, 'foo') == {}
assert check.opt_two_dim_dict_param({'key': {}}, 'foo')
assert check.opt_two_dim_dict_param({'key': {'key2': 2}}, 'foo')
assert check.opt_two_dim_dict_param(None, 'foo') == {}
with pytest.raises(CheckError):
assert check.opt_two_dim_dict_param('str', 'foo')
def test_generator_param():
def _test_gen():
yield 1
assert check.generator_param(_test_gen(), 'gen')
gen = _test_gen()
assert check.generator(gen)
assert list(gen) == [1]
assert check.generator(gen)
assert list(gen) == []
with pytest.raises(ParameterCheckError):
assert check.generator_param(list(gen), 'gen')
with pytest.raises(ParameterCheckError):
assert check.generator_param(None, 'gen')
with pytest.raises(ParameterCheckError):
assert check.generator_param(_test_gen, 'gen')
def test_opt_generator_param():
def _test_gen():
yield 1
assert check.opt_generator_param(_test_gen(), 'gen')
assert check.opt_generator_param(None, 'gen') is None
with pytest.raises(ParameterCheckError):
assert check.opt_generator_param(_test_gen, 'gen')
def test_generator():
def _test_gen():
yield 1
assert check.generator(_test_gen())
gen = _test_gen()
assert check.generator(gen)
with pytest.raises(ParameterCheckError):
assert check.generator(list(gen))
with pytest.raises(ParameterCheckError):
assert check.generator(None)
with pytest.raises(ParameterCheckError):
assert check.generator(_test_gen)
def test_opt_generator():
def _test_gen():
yield 1
assert check.opt_generator(_test_gen())
gen = _test_gen()
assert check.opt_generator(gen)
assert check.opt_generator(None) is None
with pytest.raises(ParameterCheckError):
assert check.opt_generator(list(gen))
with pytest.raises(ParameterCheckError):
assert check.opt_generator(_test_gen)
def test_internals():
with pytest.raises(CheckError):
check._check_key_value_types(None, str, str) # pylint: disable=protected-access
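Taken together, these tests document a compact validation API. A minimal usage sketch (assuming the same check module is importable; make_registry and its parameter names are hypothetical, for illustration only):

def make_registry(types, aliases=None):
    # validate a nested dict-of-dicts, plus an optional one that defaults to {}
    types = check.two_dim_dict_param(types, 'types', key_type=str)
    aliases = check.opt_two_dim_dict_param(aliases, 'aliases')
    return {'types': types, 'aliases': aliases}

make_registry({'group': {'name': 1}})   # passes validation
# make_registry([('group', {})])        # would raise CheckError: not a dict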
| 29.963526
| 97
| 0.675492
|
c7290f865a95e7d9c0b93eb60eb594cdcfcbc698
| 11,356
|
py
|
Python
|
library/azure_rm_devtestlabschedule.py
|
portos060474/azure_modules
|
008332fb07f79507cf00ac71f8249bc668bd7c7e
|
[
"MIT"
] | 31
|
2018-01-24T08:39:30.000Z
|
2021-12-19T00:01:04.000Z
|
library/azure_rm_devtestlabschedule.py
|
portos060474/azure_modules
|
008332fb07f79507cf00ac71f8249bc668bd7c7e
|
[
"MIT"
] | 17
|
2018-02-01T12:35:57.000Z
|
2020-04-22T20:47:52.000Z
|
library/azure_rm_devtestlabschedule.py
|
portos060474/azure_modules
|
008332fb07f79507cf00ac71f8249bc668bd7c7e
|
[
"MIT"
] | 39
|
2018-02-01T11:56:03.000Z
|
2021-12-19T18:34:49.000Z
|
#!/usr/bin/python
#
# Copyright (c) 2019 Zim Kalinowski, (@zikalino)
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_devtestlabschedule
version_added: "2.8"
short_description: Manage Azure DevTest Lab Schedule instance
description:
    - Create, update and delete instance of Azure DevTest Lab Schedule.
options:
resource_group:
description:
- The name of the resource group.
required: True
lab_name:
description:
- The name of the lab.
required: True
name:
description:
- The name of the schedule.
required: True
choices:
- lab_vms_startup
- lab_vms_shutdown
time:
description:
- The time of day the schedule will occur.
time_zone_id:
description:
- The time zone ID.
state:
description:
- Assert the state of the Schedule.
- Use C(present) to create or update an Schedule and C(absent) to delete it.
default: present
choices:
- absent
- present
extends_documentation_fragment:
- azure
- azure_tags
author:
- Zim Kalinowski (@zikalino)
'''
EXAMPLES = '''
- name: Create (or update) DevTest Lab Schedule
azure_rm_devtestlabschedule:
resource_group: myResourceGroup
lab_name: myLab
name: lab_vms_shutdown
time: "1030"
time_zone_id: "UTC+12"
'''
RETURN = '''
id:
description:
- The identifier of the resource.
returned: always
type: str
    sample: "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourcegroups/myResourceGroup/providers/microsoft.devtestlab/labs/myLab/schedules/labVmsShutdown"
'''
import time
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
from ansible.module_utils.common.dict_transformations import _snake_to_camel
try:
from msrestazure.azure_exceptions import CloudError
from msrest.polling import LROPoller
from msrestazure.azure_operation import AzureOperationPoller
from azure.mgmt.devtestlabs import DevTestLabsClient
from msrest.serialization import Model
except ImportError:
# This is handled in azure_rm_common
pass
class Actions:
NoAction, Create, Update, Delete = range(4)
class AzureRMSchedule(AzureRMModuleBase):
"""Configuration class for an Azure RM Schedule resource"""
def __init__(self):
self.module_arg_spec = dict(
resource_group=dict(
type='str',
required=True
),
lab_name=dict(
type='str',
required=True
),
name=dict(
type='str',
required=True,
choices=['lab_vms_startup', 'lab_vms_shutdown']
),
time=dict(
type='str'
),
time_zone_id=dict(
type='str'
),
state=dict(
type='str',
default='present',
choices=['present', 'absent']
)
)
self.resource_group = None
self.lab_name = None
self.name = None
self.schedule = dict()
self.results = dict(changed=False)
self.mgmt_client = None
self.state = None
self.to_do = Actions.NoAction
required_if = [
('state', 'present', ['time', 'time_zone_id'])
]
super(AzureRMSchedule, self).__init__(derived_arg_spec=self.module_arg_spec,
supports_check_mode=True,
supports_tags=True,
required_if=required_if)
def exec_module(self, **kwargs):
"""Main module execution method"""
for key in list(self.module_arg_spec.keys()) + ['tags']:
if hasattr(self, key):
setattr(self, key, kwargs[key])
elif kwargs[key] is not None:
self.schedule[key] = kwargs[key]
self.schedule['status'] = "Enabled"
if self.name == 'lab_vms_startup':
self.name = 'LabVmsStartup'
self.schedule['task_type'] = 'LabVmsStartupTask'
elif self.name == 'lab_vms_shutdown':
self.name = 'LabVmsShutdown'
self.schedule['task_type'] = 'LabVmsShutdownTask'
if self.state == 'present':
self.schedule['daily_recurrence'] = {'time': self.schedule.pop('time')}
self.schedule['time_zone_id'] = self.schedule['time_zone_id'].upper()
response = None
self.mgmt_client = self.get_mgmt_svc_client(DevTestLabsClient,
base_url=self._cloud_environment.endpoints.resource_manager)
resource_group = self.get_resource_group(self.resource_group)
old_response = self.get_schedule()
if not old_response:
self.log("Schedule instance doesn't exist")
if self.state == 'absent':
self.log("Old instance didn't exist")
else:
self.to_do = Actions.Create
else:
self.log("Schedule instance already exists")
if self.state == 'absent':
self.to_do = Actions.Delete
elif self.state == 'present':
if (not default_compare(self.schedule, old_response, '', self.results)):
self.to_do = Actions.Update
if (self.to_do == Actions.Create) or (self.to_do == Actions.Update):
self.log("Need to Create / Update the Schedule instance")
if self.check_mode:
self.results['changed'] = True
return self.results
response = self.create_update_schedule()
self.results['changed'] = True
self.log("Creation / Update done")
elif self.to_do == Actions.Delete:
self.log("Schedule instance deleted")
self.results['changed'] = True
if self.check_mode:
return self.results
self.delete_schedule()
            # This currently doesn't work as there is a bug in SDK / Service
if isinstance(response, LROPoller) or isinstance(response, AzureOperationPoller):
response = self.get_poller_result(response)
else:
self.log("Schedule instance unchanged")
self.results['changed'] = False
response = old_response
if self.state == 'present':
self.results.update({
'id': response.get('id', None)
})
return self.results
def create_update_schedule(self):
'''
Creates or updates Schedule with the specified configuration.
:return: deserialized Schedule instance state dictionary
'''
self.log("Creating / Updating the Schedule instance {0}".format(self.name))
try:
response = self.mgmt_client.schedules.create_or_update(resource_group_name=self.resource_group,
lab_name=self.lab_name,
name=self.name,
schedule=self.schedule)
if isinstance(response, LROPoller) or isinstance(response, AzureOperationPoller):
response = self.get_poller_result(response)
except CloudError as exc:
self.log('Error attempting to create the Schedule instance.')
self.fail("Error creating the Schedule instance: {0}".format(str(exc)))
return response.as_dict()
def delete_schedule(self):
'''
Deletes specified Schedule instance in the specified subscription and resource group.
:return: True
'''
self.log("Deleting the Schedule instance {0}".format(self.name))
try:
response = self.mgmt_client.schedules.delete(resource_group_name=self.resource_group,
lab_name=self.lab_name,
name=self.name)
except CloudError as e:
self.log('Error attempting to delete the Schedule instance.')
self.fail("Error deleting the Schedule instance: {0}".format(str(e)))
return True
def get_schedule(self):
'''
Gets the properties of the specified Schedule.
:return: deserialized Schedule instance state dictionary
'''
self.log("Checking if the Schedule instance {0} is present".format(self.name))
found = False
try:
response = self.mgmt_client.schedules.get(resource_group_name=self.resource_group,
lab_name=self.lab_name,
name=self.name)
found = True
self.log("Response : {0}".format(response))
self.log("Schedule instance : {0} found".format(response.name))
except CloudError as e:
self.log('Did not find the Schedule instance.')
if found is True:
return response.as_dict()
return False
def default_compare(new, old, path, result):
if new is None:
return True
elif isinstance(new, dict):
if not isinstance(old, dict):
result['compare'] = 'changed [' + path + '] old dict is null'
return False
for k in new.keys():
if not default_compare(new.get(k), old.get(k, None), path + '/' + k, result):
return False
return True
elif isinstance(new, list):
if not isinstance(old, list) or len(new) != len(old):
result['compare'] = 'changed [' + path + '] length is different or null'
return False
if isinstance(old[0], dict):
key = None
if 'id' in old[0] and 'id' in new[0]:
key = 'id'
elif 'name' in old[0] and 'name' in new[0]:
key = 'name'
else:
key = list(old[0])[0]
new = sorted(new, key=lambda x: x.get(key, None))
old = sorted(old, key=lambda x: x.get(key, None))
else:
new = sorted(new)
old = sorted(old)
for i in range(len(new)):
if not default_compare(new[i], old[i], path + '/*', result):
return False
return True
else:
        if path == '/location':
            new = new.replace(' ', '').lower()
            old = old.replace(' ', '').lower()
if new == old:
return True
else:
result['compare'] = 'changed [' + path + '] ' + str(new) + ' != ' + str(old)
return False
def main():
"""Main execution"""
AzureRMSchedule()
if __name__ == '__main__':
main()
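Because the module's idempotency decision rests entirely on default_compare(), its rules are worth seeing in isolation; this snippet exercises the helper on plain dicts (illustrative only, assuming default_compare is in scope):

result = {}
# a None in the requested state means 'leave unchanged'
assert default_compare({'time': None}, {'time': '1030'}, '', result)
# nested dicts are compared key-by-key; extra keys in the old state are ignored
assert default_compare({'time_zone_id': 'UTC'}, {'time_zone_id': 'UTC', 'id': 'x'}, '', result)
# a genuine difference fails the compare and records why
assert not default_compare({'time': '1030'}, {'time': '1100'}, '', result)
print(result['compare'])   # changed [/time] 1030 != 1100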
| 33.204678
| 149
| 0.554773
|
657fc33c4ceb0bf4f03a5a5419b3d2c05c7d7ca1
| 1,133
|
py
|
Python
|
designate-8.0.0/designate/backend/impl_powerdns/migrate_repo/versions/007_add_recordset_id_col.py
|
scottwedge/OpenStack-Stein
|
7077d1f602031dace92916f14e36b124f474de15
|
[
"Apache-2.0"
] | null | null | null |
designate-8.0.0/designate/backend/impl_powerdns/migrate_repo/versions/007_add_recordset_id_col.py
|
scottwedge/OpenStack-Stein
|
7077d1f602031dace92916f14e36b124f474de15
|
[
"Apache-2.0"
] | 5
|
2019-08-14T06:46:03.000Z
|
2021-12-13T20:01:25.000Z
|
designate-8.0.0/designate/backend/impl_powerdns/migrate_repo/versions/007_add_recordset_id_col.py
|
scottwedge/OpenStack-Stein
|
7077d1f602031dace92916f14e36b124f474de15
|
[
"Apache-2.0"
] | 2
|
2020-03-15T01:24:15.000Z
|
2020-07-22T20:34:26.000Z
|
# Copyright 2012 Managed I.T.
#
# Author: Kiall Mac Innes <kiall@managedit.ie>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import MetaData, Table, Column
from designate.sqlalchemy.types import UUID
meta = MetaData()
def upgrade(migrate_engine):
meta.bind = migrate_engine
records_table = Table('records', meta, autoload=True)
recordset_id = Column('designate_recordset_id', UUID())
recordset_id.create(records_table)
def downgrade(migrate_engine):
meta.bind = migrate_engine
records_table = Table('records', meta, autoload=True)
records_table.c.designate_recordset_id.drop()
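Outside of designate's own migration runner, the upgrade can be exercised directly against any SQLAlchemy engine. A rough sketch, assuming sqlalchemy-migrate is installed and a scratch SQLite file is acceptable:

from sqlalchemy import create_engine

engine = create_engine('sqlite:///scratch.db')
engine.execute('CREATE TABLE records (id INTEGER PRIMARY KEY)')
upgrade(engine)   # adds the designate_recordset_id UUID column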
| 29.051282
| 75
| 0.752868
|
86dde59ae68b9c2defc8e727af3b19c1754dbf09
| 3,226
|
py
|
Python
|
rnnparser/RecursiveNN/npRNN/rnn.py
|
uphere-co/nlp-prototype
|
c4623927e5c5c5f9c3e702eb36497ea1d9fd1ff3
|
[
"BSD-3-Clause"
] | null | null | null |
rnnparser/RecursiveNN/npRNN/rnn.py
|
uphere-co/nlp-prototype
|
c4623927e5c5c5f9c3e702eb36497ea1d9fd1ff3
|
[
"BSD-3-Clause"
] | null | null | null |
rnnparser/RecursiveNN/npRNN/rnn.py
|
uphere-co/nlp-prototype
|
c4623927e5c5c5f9c3e702eb36497ea1d9fd1ff3
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
import numpy as np
from plain import *
from param import Param
from node import RNNnode
from tree_utils import Node, NodeTree
float_type = np.float32
int_type = np.int64
class Parser(object):
def __init__(self, activation_f, activation_df):
self.activation_f = activation_f
self.activation_df= activation_df
def forward(self, words_vec, param):
n_word = len(words_vec)
n_phrase = n_word - 1
W = np.tile(param.W,(n_phrase,1,1))
bias = np.tile(param.bias,(n_phrase,1))
u_score = param.u_score
        return self.forward_separated_param(words_vec, W, bias, u_score)
    def forward_separated_param(self, words_vec, W, bias, u_score):
n_word = len(words_vec)
n_phrase = n_word - 1
n_iter= n_phrase
merge_history=[]
phrases=np.empty((n_phrase,words_vec.shape[1]))
words = words_vec
scores = np.empty(n_iter)
for i in range(n_iter):
wordLRs = np.concatenate([words[:-1],words[1:]], axis=1)
xs = hidden_vectorized(W[i],bias[i],wordLRs)
hs = self.activation_f(xs)
hs_scores = scoring(u_score, hs)
loc=np.argmax(hs_scores)
words = np.concatenate([words[:loc+1], words[loc+2:]],axis=0)
words[loc]=hs[loc]
merge_history.append(loc)
phrases[i]=hs[loc]
scores[i]=hs_scores[loc]
whole_words = np.concatenate([words_vec, phrases],axis=0)
return merge_history, scores, whole_words
def backward(self, phrases, param):
grad =param.zero()
grad.W =self.backward_W(phrases, param)
grad.bias =self.backward_b(phrases, param)
grad.u_score =self.backward_u(phrases, param)
return grad
def backward_W_partial(self, node, param, n_phrase):
        left_factor, W, b = param.u_score, param.W, param.bias
grads=np.zeros((n_phrase,)+W.shape)
back_propagation(node, left_factor,W,b, grads, is_W=True)
return np.sum(grads,0)
def backward_W(self, phrases, param):
grad = np.zeros(param.W.shape)
for node in phrases:
grad +=self.backward_W_partial(node, param, len(phrases))
return grad
def backward_b_partial(self, node, param, n_phrase):
        left_factor, W, b = param.u_score, param.W, param.bias
grads=np.zeros((n_phrase,)+b.shape)
back_propagation(node, left_factor,W,b, grads, is_W=False)
return np.sum(grads,0)
def backward_b(self, phrases, param):
grad = np.zeros(param.bias.shape)
for node in phrases:
grad +=self.backward_b_partial(node, param, len(phrases))
return grad
def backward_u(self, phrases, param):
grad = np.zeros(param.u_score.shape)
for node in phrases:
grad += node.vec
return grad
def merge_words(self, words,wordvec, param):
merge_history,_, wordvecs=self.forward(wordvec, param)
leaf_nodes=[RNNnode(word) for word in words]
nodes, _=NodeTree.directed_merge(leaf_nodes,merge_history)
RNNnode.set_value_views(nodes, wordvecs)
phrases=nodes[len(words):]
return phrases
| 37.511628
| 73
| 0.631432
|
0aba517604f9ab2af30a112098e8f56e814d8aff
| 7,658
|
py
|
Python
|
contrib/devtools/update-translations.py
|
herzogvonbelder/Alpenschilling
|
74f0b13f2feaae6e84c82b416cacaf18167573b5
|
[
"MIT"
] | 4
|
2018-04-26T15:39:34.000Z
|
2018-10-05T18:27:53.000Z
|
contrib/devtools/update-translations.py
|
herzogvonbelder/Alpenschilling
|
74f0b13f2feaae6e84c82b416cacaf18167573b5
|
[
"MIT"
] | 1
|
2018-06-05T23:31:25.000Z
|
2018-06-05T23:31:25.000Z
|
contrib/devtools/update-translations.py
|
Alpenschilling/Alpenschilling
|
76ba3422d26b63228a8465adeadc6f6827e846b9
|
[
"MIT"
] | 5
|
2018-06-23T12:54:53.000Z
|
2018-12-14T07:43:31.000Z
|
#!/usr/bin/python
# Copyright (c) 2014 Wladimir J. van der Laan
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Run this script from the root of the repository to update all translations from
transifex.
It will do the following automatically:
- fetch all translations using the tx tool
- post-process them into valid and committable format
- remove invalid control characters
- remove location tags (makes diffs less noisy)
TODO:
- auto-add new translations to the build system according to the translation process
'''
from __future__ import division, print_function
import subprocess
import re
import sys
import os
import io
import xml.etree.ElementTree as ET
# Name of transifex tool
TX = 'tx'
# Name of source language file
SOURCE_LANG = 'alps_en.ts'
# Directory with locale files
LOCALE_DIR = 'src/qt/locale'
# Minimum number of messages for translation to be considered at all
MIN_NUM_MESSAGES = 10
def check_at_repository_root():
if not os.path.exists('.git'):
print('No .git directory found')
print('Execute this script at the root of the repository', file=sys.stderr)
exit(1)
def fetch_all_translations():
if subprocess.call([TX, 'pull', '-f', '-a']):
print('Error while fetching translations', file=sys.stderr)
exit(1)
def find_format_specifiers(s):
'''Find all format specifiers in a string.'''
pos = 0
specifiers = []
while True:
percent = s.find('%', pos)
if percent < 0:
break
try:
specifiers.append(s[percent+1])
        except IndexError:
            print('Failed to get specifier: string ends with a bare %')
pos = percent+2
return specifiers
def split_format_specifiers(specifiers):
'''Split format specifiers between numeric (Qt) and others (strprintf)'''
numeric = []
other = []
for s in specifiers:
if s in {'1','2','3','4','5','6','7','8','9'}:
numeric.append(s)
else:
other.append(s)
# numeric (Qt) can be present in any order, others (strprintf) must be in specified order
return set(numeric),other
def sanitize_string(s):
'''Sanitize string for printing'''
return s.replace('\n',' ')
def check_format_specifiers(source, translation, errors, numerus):
source_f = split_format_specifiers(find_format_specifiers(source))
# assert that no source messages contain both Qt and strprintf format specifiers
# if this fails, go change the source as this is hacky and confusing!
#assert(not(source_f[0] and source_f[1]))
try:
translation_f = split_format_specifiers(find_format_specifiers(translation))
except IndexError:
errors.append("Parse error in translation for '%s': '%s'" % (sanitize_string(source), sanitize_string(translation)))
return False
else:
if source_f != translation_f:
if numerus and source_f == (set(), ['n']) and translation_f == (set(), []) and translation.find('%') == -1:
# Allow numerus translations to omit %n specifier (usually when it only has one possible value)
return True
errors.append("Mismatch between '%s' and '%s'" % (sanitize_string(source), sanitize_string(translation)))
return False
return True
def all_ts_files(suffix=''):
for filename in os.listdir(LOCALE_DIR):
# process only language files, and do not process source language
if not filename.endswith('.ts'+suffix) or filename == SOURCE_LANG+suffix:
continue
if suffix: # remove provided suffix
filename = filename[0:-len(suffix)]
filepath = os.path.join(LOCALE_DIR, filename)
yield(filename, filepath)
FIX_RE = re.compile(b'[\x00-\x09\x0b\x0c\x0e-\x1f]')
def remove_invalid_characters(s):
'''Remove invalid characters from translation string'''
return FIX_RE.sub(b'', s)
# Override cdata escape function to make our output match Qt's (optional, just for cleaner diffs for
# comparison, disable by default)
_orig_escape_cdata = None
def escape_cdata(text):
text = _orig_escape_cdata(text)
text = text.replace("'", ''')
text = text.replace('"', '"')
return text
def postprocess_translations(reduce_diff_hacks=False):
print('Checking and postprocessing...')
if reduce_diff_hacks:
global _orig_escape_cdata
_orig_escape_cdata = ET._escape_cdata
ET._escape_cdata = escape_cdata
for (filename,filepath) in all_ts_files():
os.rename(filepath, filepath+'.orig')
have_errors = False
for (filename,filepath) in all_ts_files('.orig'):
# pre-fixups to cope with transifex output
        parser = ET.XMLParser(encoding='utf-8') # need to override encoding because 'utf8' is not understood, only 'utf-8'
with open(filepath + '.orig', 'rb') as f:
data = f.read()
# remove control characters; this must be done over the entire file otherwise the XML parser will fail
data = remove_invalid_characters(data)
tree = ET.parse(io.BytesIO(data), parser=parser)
# iterate over all messages in file
root = tree.getroot()
for context in root.findall('context'):
for message in context.findall('message'):
numerus = message.get('numerus') == 'yes'
source = message.find('source').text
translation_node = message.find('translation')
# pick all numerusforms
if numerus:
translations = [i.text for i in translation_node.findall('numerusform')]
else:
translations = [translation_node.text]
for translation in translations:
if translation is None:
continue
errors = []
valid = check_format_specifiers(source, translation, errors, numerus)
for error in errors:
print('%s: %s' % (filename, error))
if not valid: # set type to unfinished and clear string if invalid
translation_node.clear()
translation_node.set('type', 'unfinished')
have_errors = True
# Remove location tags
for location in message.findall('location'):
message.remove(location)
# Remove entire message if it is an unfinished translation
if translation_node.get('type') == 'unfinished':
context.remove(message)
# check if document is (virtually) empty, and remove it if so
num_messages = 0
for context in root.findall('context'):
for message in context.findall('message'):
num_messages += 1
if num_messages < MIN_NUM_MESSAGES:
print('Removing %s, as it contains only %i messages' % (filepath, num_messages))
continue
# write fixed-up tree
# if diff reduction requested, replace some XML to 'sanitize' to qt formatting
if reduce_diff_hacks:
out = io.BytesIO()
tree.write(out, encoding='utf-8')
out = out.getvalue()
out = out.replace(b' />', b'/>')
with open(filepath, 'wb') as f:
f.write(out)
else:
tree.write(filepath, encoding='utf-8')
return have_errors
if __name__ == '__main__':
check_at_repository_root()
# fetch_all_translations()
postprocess_translations()
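The two specifier helpers carry most of the validation logic; their behaviour on a mixed message is easy to check in isolation (illustrative):

specs = find_format_specifiers('%1 connections (%n of %s)')
assert specs == ['1', 'n', 's']
# numeric (Qt-style) specifiers may appear in any order; the rest must stay ordered
assert split_format_specifiers(specs) == ({'1'}, ['n', 's'])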
| 37.539216
| 124
| 0.629277
|
0c7eaaf3e1f46c8fa1c25824d0632d5c3bfa58e8
| 141
|
py
|
Python
|
neo4j/datadog_checks/neo4j/__init__.py
|
Dcanzano/integrations-extras
|
0e5c9cde116798a724d9903ab2cefb356318f1d0
|
[
"BSD-3-Clause"
] | null | null | null |
neo4j/datadog_checks/neo4j/__init__.py
|
Dcanzano/integrations-extras
|
0e5c9cde116798a724d9903ab2cefb356318f1d0
|
[
"BSD-3-Clause"
] | 3
|
2020-03-30T12:26:28.000Z
|
2021-08-25T11:58:47.000Z
|
neo4j/datadog_checks/neo4j/__init__.py
|
Dcanzano/integrations-extras
|
0e5c9cde116798a724d9903ab2cefb356318f1d0
|
[
"BSD-3-Clause"
] | 1
|
2017-09-01T04:29:17.000Z
|
2017-09-01T04:29:17.000Z
|
from .__about__ import __version__
from .neo4j import GLOBAL_DB_NAME, NAMESPACE, Config, Neo4jCheck
__all__ = ['__version__', 'Neo4jCheck']
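Consumers of the wheel then only need the re-exported names (assuming the package is installed as datadog_checks.neo4j):

from datadog_checks.neo4j import Neo4jCheck, __version__

print('neo4j check version:', __version__)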
| 28.2
| 64
| 0.794326
|
c42ab6756aba551e795bc3e8909f6c90016cc406
| 1,109
|
py
|
Python
|
setup.py
|
ine-rmotr-projects/itp-w2-hangman
|
39dce7d585cca9f6d523ede868e20a7a7985c93e
|
[
"MIT"
] | null | null | null |
setup.py
|
ine-rmotr-projects/itp-w2-hangman
|
39dce7d585cca9f6d523ede868e20a7a7985c93e
|
[
"MIT"
] | 1
|
2017-02-25T21:25:43.000Z
|
2017-02-25T21:25:43.000Z
|
setup.py
|
ine-rmotr-projects/itp-w2-hangman
|
39dce7d585cca9f6d523ede868e20a7a7985c93e
|
[
"MIT"
] | 89
|
2016-11-01T21:53:47.000Z
|
2019-01-11T05:02:54.000Z
|
import os
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")]
def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = ["tests/"]
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
# import here, cause outside the eggs aren't loaded
import sys, pytest
errno = pytest.main(self.pytest_args)
sys.exit(errno)
setup(
name='rmotr.com | Hangman',
version='0.0.1',
description="rmotr.com Group Project | Hangman",
author='rmotr.com',
author_email='questions@rmotr.com',
license='CC BY-SA 4.0 License',
packages=['hangman'],
maintainer='rmotr.com',
tests_require=[
'pytest==3.0.5',
'pytest-cov==2.4.0',
'coverage==4.2',
'six==1.10.0',
'mock==2.0.0'
],
zip_safe=False,
cmdclass={'test': PyTest},
)
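With this cmdclass, python setup.py test forwards to py.test; running the suite directly boils down to the same call (equivalent sketch):

import sys
import pytest

sys.exit(pytest.main(['tests/']))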
| 25.204545
| 74
| 0.616772
|
3762c83f090f017376061eb5658b1171eb447bca
| 506
|
py
|
Python
|
opencv_threshold_python.py
|
vaishnavidatir/Opencv
|
a35edde63bae336e27ecbaa99c72335ea041f94d
|
[
"Apache-2.0"
] | null | null | null |
opencv_threshold_python.py
|
vaishnavidatir/Opencv
|
a35edde63bae336e27ecbaa99c72335ea041f94d
|
[
"Apache-2.0"
] | null | null | null |
opencv_threshold_python.py
|
vaishnavidatir/Opencv
|
a35edde63bae336e27ecbaa99c72335ea041f94d
|
[
"Apache-2.0"
] | null | null | null |
import cv2
img = cv2.imread("lena.jpg")
_, th1 = cv2.threshold(img, 50, 250, cv2.THRESH_BINARY)
_, th2 = cv2.threshold(img, 50, 250, cv2.THRESH_BINARY_INV)
_, th3 = cv2.threshold(img, 50, 250, cv2.THRESH_TRUNC)
_, th4 = cv2.threshold(img, 50, 250, cv2.THRESH_TOZERO)
_, th5 = cv2.threshold(img, 50, 250, cv2.THRESH_TOZERO_INV)
cv2.imshow("image", img)
cv2.imshow("th1", th1)
cv2.imshow("th2", th2)
cv2.imshow("th3", th3)
cv2.imshow("th4", th4)
cv2.imshow("th5", th5)
cv2.waitKey(0)
cv2.destroyAllWindows()
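The calls above threshold the BGR image channel by channel; for the more common single-channel workflow one would convert to grayscale first, optionally letting Otsu's method pick the threshold (a variant sketch, still assuming the lena.jpg loaded above):

gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
_, th_otsu = cv2.threshold(gray, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)
cv2.imshow("otsu", th_otsu)
cv2.waitKey(0)
cv2.destroyAllWindows()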
| 23
| 56
| 0.70751
|
3c29b9dcb2f7b4b008f02e8f29f17d0abebbaeb7
| 520
|
py
|
Python
|
audiovisual/indico_audiovisual/blueprint.py
|
ThiefMaster/indico-plugins-cern
|
0082a66dd21ac093c1a31316d12c338f52ffe2d0
|
[
"MIT"
] | null | null | null |
audiovisual/indico_audiovisual/blueprint.py
|
ThiefMaster/indico-plugins-cern
|
0082a66dd21ac093c1a31316d12c338f52ffe2d0
|
[
"MIT"
] | null | null | null |
audiovisual/indico_audiovisual/blueprint.py
|
ThiefMaster/indico-plugins-cern
|
0082a66dd21ac093c1a31316d12c338f52ffe2d0
|
[
"MIT"
] | null | null | null |
# This file is part of the CERN Indico plugins.
# Copyright (C) 2014 - 2021 CERN
#
# The CERN Indico plugins are free software; you can redistribute
# them and/or modify them under the terms of the MIT License; see
# the LICENSE file for more details.
from indico.core.plugins import IndicoPluginBlueprint
from indico_audiovisual.controllers import RHRequestList
blueprint = IndicoPluginBlueprint('audiovisual', __name__, url_prefix='/service/audiovisual')
blueprint.add_url_rule('/', 'request_list', RHRequestList)
| 34.666667
| 93
| 0.792308
|
e0c8b028e6c51153b0c2b7e69226cd2691f29ec7
| 2,092
|
py
|
Python
|
code.py
|
debapriyaroy95/python-mini-challenges
|
3a28b0ecf778eeefda524e4e849243613242d131
|
[
"MIT"
] | null | null | null |
code.py
|
debapriyaroy95/python-mini-challenges
|
3a28b0ecf778eeefda524e4e849243613242d131
|
[
"MIT"
] | null | null | null |
code.py
|
debapriyaroy95/python-mini-challenges
|
3a28b0ecf778eeefda524e4e849243613242d131
|
[
"MIT"
] | null | null | null |
# --------------
#Code starts here
import sys
#Function to find the smallest palindrome strictly greater than num
def palindrome(num):
    for i in range(num+1, sys.maxsize):
        if str(i) == str(i)[::-1]:
            return i
print(palindrome(123))    # -> 131
print(palindrome(1331))   # -> 1441
#Code ends here
# --------------
# code starts here
from collections import Counter
def a_scramble(str_1,str_2):
"""Test if all the letters of word a are contained in word b"""
str_1 = str_1.upper()
str_2 = str_2.upper()
letters = Counter(str_1)
letters.subtract(Counter(str_2))
return all(v >= 0 for v in letters.values())
# --------------
#Code starts here
from math import sqrt
#Function to check for perfect square
def is_perfect_square(n):
    root = int(sqrt(n))
    return root * root == n
#Function to check for fibonacci number
# (num is a Fibonacci number iff 5*num^2 + 4 or 5*num^2 - 4 is a perfect square)
def check_fib(num):
    return is_perfect_square(5*num*num + 4) or is_perfect_square(5*num*num - 4)
print(check_fib(145))   # -> False
print(check_fib(377))   # -> True
# --------------
#Code starts here
#Function to compress string
def compress(word):
    word = word.lower()
    if not word:
        return ''
    counter = 1
    #Add in first character
    word_copy = word[0]
    for i in range(len(word)-1):
        if word[i] == word[i+1]:
            counter = counter+1
        else:
            word_copy = word_copy+str(counter)
            word_copy = word_copy+word[i+1]
            counter = 1
    word_copy = word_copy+str(counter)
    return word_copy
print(compress("xxcccdex"))   # -> 'x2c3d1e1x1'
#Code ends here
# --------------
#Code starts here
def k_distinct(string, k):
    """Check whether string has exactly k distinct letters (case-insensitive)."""
    return len(set(string.upper())) == k
print(k_distinct('Messoptamia',8))
print(k_distinct('banana',4))
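A few sanity checks for the solutions above, with expected values worked out by hand (illustrative):

assert palindrome(123) == 131
assert a_scramble('scramble', 'amber') is True
assert a_scramble('cat', 'dog') is False
assert check_fib(377) is True
assert check_fib(145) is False
assert compress('xxcccdex') == 'x2c3d1e1x1'
assert k_distinct('Messoptamia', 8) is True
assert k_distinct('banana', 4) is False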
| 19.018182
| 68
| 0.585086
|
246f2c39576be72c148dd79124a1467b91827e75
| 33,185
|
py
|
Python
|
conference.py
|
chenlu2015/AppEngineConferenceCenter
|
11e8920c59f05bdc026ddd172e49a723a17aea6c
|
[
"Apache-2.0"
] | 1
|
2016-01-12T21:44:54.000Z
|
2016-01-12T21:44:54.000Z
|
conference.py
|
chenlu2015/AppEngineConferenceCenter
|
11e8920c59f05bdc026ddd172e49a723a17aea6c
|
[
"Apache-2.0"
] | null | null | null |
conference.py
|
chenlu2015/AppEngineConferenceCenter
|
11e8920c59f05bdc026ddd172e49a723a17aea6c
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
"""
conference.py -- Udacity conference server-side Python App Engine API;
uses Google Cloud Endpoints
$Id: conference.py,v 1.25 2014/05/24 23:42:19 wesc Exp wesc $
created by wesc on 2014 apr 21
"""
__author__ = 'wesc+api@google.com (Wesley Chun)'
from datetime import datetime
import endpoints
from protorpc import messages
from protorpc import message_types
from protorpc import remote
from google.appengine.api import memcache
from google.appengine.api import taskqueue
from google.appengine.ext import ndb
from models import ConflictException
from models import Profile
from models import ProfileMiniForm
from models import ProfileForm
from models import ProfileForms
from models import StringMessage
from models import BooleanMessage
from models import Conference
from models import ConferenceForm
from models import ConferenceForms
from models import ConferenceQueryForm
from models import ConferenceQueryForms
from models import TeeShirtSize
from models import SessionType
from models import Session
from models import SessionForm
from models import SessionForms
from models import SessionWishList
from settings import WEB_CLIENT_ID
from settings import ANDROID_CLIENT_ID
from settings import IOS_CLIENT_ID
from settings import ANDROID_AUDIENCE
from utils import getUserId
EMAIL_SCOPE = endpoints.EMAIL_SCOPE
API_EXPLORER_CLIENT_ID = endpoints.API_EXPLORER_CLIENT_ID
MEMCACHE_ANNOUNCEMENTS_KEY = "RECENT_ANNOUNCEMENTS"
ANNOUNCEMENT_TPL = ('Last chance to attend! The following conferences '
'are nearly sold out: %s')
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
DEFAULTS = {
"city": "Default City",
"maxAttendees": 0,
"seatsAvailable": 0,
"topics": [ "Default", "Topic" ],
}
SESSION_DEFAULTS = {
"highlights": "Default Highlights",
"speaker": "To Be Announced",
"duration": 60,
"location": "Room TBD",
# "typeOfSession": str(SessionType.NOT_SPECIFIED)
}
OPERATORS = {
'EQ': '=',
'GT': '>',
'GTEQ': '>=',
'LT': '<',
'LTEQ': '<=',
'NE': '!='
}
FIELDS = {
'CITY': 'city',
'TOPIC': 'topics',
'MONTH': 'month',
'MAX_ATTENDEES': 'maxAttendees',
}
CONF_GET_REQUEST = endpoints.ResourceContainer(
message_types.VoidMessage,
websafeConferenceKey=messages.StringField(1),
)
CONF_POST_REQUEST = endpoints.ResourceContainer(
ConferenceForm,
websafeConferenceKey=messages.StringField(1),
)
SESSION_GET_REQUEST = endpoints.ResourceContainer(
message_types.VoidMessage,
websafeConferenceKey=messages.StringField(1),
)
SESSION_GET_BY_TYPE_REQUEST = endpoints.ResourceContainer(
message_types.VoidMessage,
websafeConferenceKey=messages.StringField(1),
sessionType=messages.StringField(2),
)
SESSION_GET_BY_SPEAKER_REQUEST = endpoints.ResourceContainer(
message_types.VoidMessage,
speaker=messages.StringField(1),
)
SESSION_POST_REQUEST = endpoints.ResourceContainer(
SessionForm,
websafeConferenceKey=messages.StringField(1)
)
WISHLIST_REQUEST = endpoints.ResourceContainer(
message_types.VoidMessage,
sessionKey=messages.StringField(1)
)
FEATURED_SPEAKER_REQUEST = endpoints.ResourceContainer(
message_types.VoidMessage,
websafeConferenceKey=messages.StringField(1)
)
GET_POPULAR_CONF_REQUEST = endpoints.ResourceContainer(
message_types.VoidMessage,
    num=messages.IntegerField(1)
)
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@endpoints.api(name='conference', version='v1', audiences=[ANDROID_AUDIENCE],
allowed_client_ids=[WEB_CLIENT_ID, API_EXPLORER_CLIENT_ID, ANDROID_CLIENT_ID, IOS_CLIENT_ID],
scopes=[EMAIL_SCOPE])
class ConferenceApi(remote.Service):
"""Conference API v0.1"""
# - - - Conference objects - - - - - - - - - - - - - - - - -
def _copyConferenceToForm(self, conf, displayName=None):
"""Copy relevant fields from Conference to ConferenceForm."""
cf = ConferenceForm()
for field in cf.all_fields():
if hasattr(conf, field.name):
# convert Date to date string; just copy others
if field.name.endswith('Date'):
setattr(cf, field.name, str(getattr(conf, field.name)))
else:
setattr(cf, field.name, getattr(conf, field.name))
elif field.name == "websafeKey":
setattr(cf, field.name, conf.key.urlsafe())
if displayName:
setattr(cf, 'organizerDisplayName', displayName)
cf.check_initialized()
return cf
def _createConferenceObject(self, request):
"""Create or update Conference object, returning ConferenceForm/request."""
# preload necessary data items
user = endpoints.get_current_user()
if not user:
raise endpoints.UnauthorizedException('Authorization required')
user_id = getUserId(user)
if not request.name:
raise endpoints.BadRequestException("Conference 'name' field required")
# copy ConferenceForm/ProtoRPC Message into dict
data = {field.name: getattr(request, field.name) for field in request.all_fields()}
del data['websafeKey']
del data['organizerDisplayName']
# add default values for those missing (both data model & outbound Message)
for df in DEFAULTS:
if data[df] in (None, []):
data[df] = DEFAULTS[df]
setattr(request, df, DEFAULTS[df])
# if data['maxAttendees'] > 0:
# data['ratioAvailable'] = float(data['seatsAvailable'])/float(data['maxAttendees'])
# request.ratioAvailable = data['ratioAvailable']
# convert dates from strings to Date objects; set month based on start_date
if data['startDate']:
data['startDate'] = datetime.strptime(data['startDate'][:10], "%Y-%m-%d").date()
data['month'] = data['startDate'].month
else:
data['month'] = 0
if data['endDate']:
data['endDate'] = datetime.strptime(data['endDate'][:10], "%Y-%m-%d").date()
# set seatsAvailable to be same as maxAttendees on creation
if data["maxAttendees"] > 0:
data["seatsAvailable"] = data["maxAttendees"]
# generate Profile Key based on user ID and Conference
# ID based on Profile key get Conference key from ID
p_key = ndb.Key(Profile, user_id)
c_id = Conference.allocate_ids(size=1, parent=p_key)[0]
c_key = ndb.Key(Conference, c_id, parent=p_key)
data['key'] = c_key
data['organizerUserId'] = request.organizerUserId = user_id
# create Conference, send email to organizer confirming
# creation of Conference & return (modified) ConferenceForm
Conference(**data).put()
taskqueue.add(params={'email': user.email(),
'conferenceInfo': repr(request)},
url='/tasks/send_confirmation_email'
)
return request
@ndb.transactional()
def _updateConferenceObject(self, request):
user = endpoints.get_current_user()
if not user:
raise endpoints.UnauthorizedException('Authorization required')
user_id = getUserId(user)
# update existing conference
conf = ndb.Key(urlsafe=request.websafeConferenceKey).get()
# check that conference exists
if not conf:
raise endpoints.NotFoundException(
'No conference found with key: %s' % request.websafeConferenceKey)
# check that user is owner
if user_id != conf.organizerUserId:
raise endpoints.ForbiddenException(
'Only the owner can update the conference.')
# Not getting all the fields, so don't create a new object; just
# copy relevant fields from ConferenceForm to Conference object
for field in request.all_fields():
data = getattr(request, field.name)
# only copy fields where we get data
if data not in (None, []):
# special handling for dates (convert string to Date)
if field.name in ('startDate', 'endDate'):
data = datetime.strptime(data, "%Y-%m-%d").date()
if field.name == 'startDate':
conf.month = data.month
# write to Conference object
setattr(conf, field.name, data)
conf.put()
prof = ndb.Key(Profile, user_id).get()
return self._copyConferenceToForm(conf, getattr(prof, 'displayName'))
@endpoints.method(ConferenceForm, ConferenceForm, path='conference',
http_method='POST', name='createConference')
def createConference(self, request):
"""Create new conference."""
return self._createConferenceObject(request)
@endpoints.method(CONF_POST_REQUEST, ConferenceForm,
path='conference/{websafeConferenceKey}',
http_method='PUT', name='updateConference')
def updateConference(self, request):
"""Update conference w/provided fields & return w/updated info."""
return self._updateConferenceObject(request)
@endpoints.method(CONF_GET_REQUEST, ConferenceForm,
path='conference/{websafeConferenceKey}',
http_method='GET', name='getConference')
def getConference(self, request):
"""Return requested conference (by websafeConferenceKey)."""
# get Conference object from request; bail if not found
conf = ndb.Key(urlsafe=request.websafeConferenceKey).get()
if not conf:
raise endpoints.NotFoundException(
'No conference found with key: %s' % request.websafeConferenceKey)
prof = conf.key.parent().get()
# return ConferenceForm
return self._copyConferenceToForm(conf, getattr(prof, 'displayName'))
@endpoints.method(message_types.VoidMessage, ConferenceForms,
path='getConferencesCreated',
http_method='POST', name='getConferencesCreated')
def getConferencesCreated(self, request):
"""Return conferences created by user."""
# make sure user is authed
user = endpoints.get_current_user()
if not user:
raise endpoints.UnauthorizedException('Authorization required')
user_id = getUserId(user)
# create ancestor query for all key matches for this user
confs = Conference.query(ancestor=ndb.Key(Profile, user_id))
prof = ndb.Key(Profile, user_id).get()
# return set of ConferenceForm objects per Conference
return ConferenceForms(
items=[self._copyConferenceToForm(conf, getattr(prof, 'displayName')) for conf in confs]
)
def _getQuery(self, request):
"""Return formatted query from the submitted filters."""
q = Conference.query()
inequality_filter, filters = self._formatFilters(request.filters)
# If exists, sort on inequality filter first
if not inequality_filter:
q = q.order(Conference.name)
else:
q = q.order(ndb.GenericProperty(inequality_filter))
q = q.order(Conference.name)
for filtr in filters:
if filtr["field"] in ["month", "maxAttendees"]:
filtr["value"] = int(filtr["value"])
formatted_query = ndb.query.FilterNode(filtr["field"], filtr["operator"], filtr["value"])
q = q.filter(formatted_query)
return q
def _formatFilters(self, filters):
"""Parse, check validity and format user supplied filters."""
formatted_filters = []
inequality_field = None
for f in filters:
filtr = {field.name: getattr(f, field.name) for field in f.all_fields()}
try:
filtr["field"] = FIELDS[filtr["field"]]
filtr["operator"] = OPERATORS[filtr["operator"]]
except KeyError:
raise endpoints.BadRequestException("Filter contains invalid field or operator.")
# Every operation except "=" is an inequality
if filtr["operator"] != "=":
# check if inequality operation has been used in previous filters
# disallow the filter if inequality was performed on a different field before
# track the field on which the inequality operation is performed
if inequality_field and inequality_field != filtr["field"]:
raise endpoints.BadRequestException("Inequality filter is allowed on only one field.")
else:
inequality_field = filtr["field"]
formatted_filters.append(filtr)
return (inequality_field, formatted_filters)
@endpoints.method(ConferenceQueryForms, ConferenceForms,
path='queryConferences',
http_method='POST',
name='queryConferences')
def queryConferences(self, request):
"""Query for conferences."""
conferences = self._getQuery(request)
# need to fetch organiser displayName from profiles
# get all keys and use get_multi for speed
organisers = [(ndb.Key(Profile, conf.organizerUserId)) for conf in conferences]
profiles = ndb.get_multi(organisers)
# put display names in a dict for easier fetching
names = {}
for profile in profiles:
names[profile.key.id()] = profile.displayName
# return individual ConferenceForm object per Conference
return ConferenceForms(
items=[self._copyConferenceToForm(conf, names[conf.organizerUserId]) for conf in \
conferences]
)
# - - - Profile objects - - - - - - - - - - - - - - - - - - -
def _copyProfileToForm(self, prof):
"""Copy relevant fields from Profile to ProfileForm."""
# copy relevant fields from Profile to ProfileForm
pf = ProfileForm()
for field in pf.all_fields():
if hasattr(prof, field.name):
# convert t-shirt string to Enum; just copy others
if field.name == 'teeShirtSize':
setattr(pf, field.name, getattr(TeeShirtSize, getattr(prof, field.name)))
else:
setattr(pf, field.name, getattr(prof, field.name))
pf.check_initialized()
return pf
def _getProfileFromUser(self):
"""Return user Profile from datastore, creating new one if non-existent."""
# make sure user is authed
user = endpoints.get_current_user()
if not user:
raise endpoints.UnauthorizedException('Authorization required')
# get Profile from datastore
user_id = getUserId(user)
p_key = ndb.Key(Profile, user_id)
profile = p_key.get()
# create new Profile if not there
if not profile:
profile = Profile(
key = p_key,
displayName = user.nickname(),
mainEmail= user.email(),
teeShirtSize = str(TeeShirtSize.NOT_SPECIFIED),
)
profile.put()
return profile # return Profile
def _doProfile(self, save_request=None):
"""Get user Profile and return to user, possibly updating it first."""
# get user Profile
prof = self._getProfileFromUser()
# if saveProfile(), process user-modifyable fields
if save_request:
for field in ('displayName', 'teeShirtSize'):
if hasattr(save_request, field):
val = getattr(save_request, field)
if val:
setattr(prof, field, str(val))
#if field == 'teeShirtSize':
# setattr(prof, field, str(val).upper())
#else:
# setattr(prof, field, val)
prof.put()
# return ProfileForm
return self._copyProfileToForm(prof)
@endpoints.method(message_types.VoidMessage, ProfileForm,
path='profile', http_method='GET', name='getProfile')
def getProfile(self, request):
"""Return user profile."""
return self._doProfile()
@endpoints.method(ProfileMiniForm, ProfileForm,
path='profile', http_method='POST', name='saveProfile')
def saveProfile(self, request):
"""Update & return user profile."""
return self._doProfile(request)
# - - - Announcements - - - - - - - - - - - - - - - - - - - -
@staticmethod
def _cacheAnnouncement():
"""Create Announcement & assign to memcache; used by
memcache cron job & putAnnouncement().
"""
confs = Conference.query(ndb.AND(
Conference.seatsAvailable <= 5,
Conference.seatsAvailable > 0)
).fetch(projection=[Conference.name])
if confs:
# If there are almost sold out conferences,
# format announcement and set it in memcache
announcement = ANNOUNCEMENT_TPL % (
', '.join(conf.name for conf in confs))
memcache.set(MEMCACHE_ANNOUNCEMENTS_KEY, announcement)
else:
# If there are no sold out conferences,
# delete the memcache announcements entry
announcement = ""
memcache.delete(MEMCACHE_ANNOUNCEMENTS_KEY)
return announcement
@endpoints.method(message_types.VoidMessage, StringMessage,
path='conference/announcement/get',
http_method='GET', name='getAnnouncement')
def getAnnouncement(self, request):
"""Return Announcement from memcache."""
return StringMessage(data=memcache.get(MEMCACHE_ANNOUNCEMENTS_KEY) or "")
# - - - Featured Speaker - - - - - - - - - - - - - - - - - -
@staticmethod
def _cacheFeaturedSpeaker(websafeConferenceKey, speaker_name):
"""Create a string containing featured speaker / session names & assign to memcache;
"""
#use websafeConferenceKey + speaker_name as key
MEMCACHE_FEATURED_SPEAKER_KEY = websafeConferenceKey
ss = Session.query(ancestor=ndb.Key(urlsafe=websafeConferenceKey))
ss = ss.filter(Session.speaker == speaker_name)
if ss and ss.count() > 1:
message ="Featuring: "+ speaker_name + " Sessions: "
for s in ss:
message += s.name + ", "
memcache.set(MEMCACHE_FEATURED_SPEAKER_KEY, message)
@endpoints.method(FEATURED_SPEAKER_REQUEST, StringMessage,
path='conference/{websafeConferenceKey}/featured',
http_method='GET', name='getFeaturedSpeaker')
def getFeaturedSpeaker(self, request):
"""Return featured speaker from memcache."""
KEY = request.websafeConferenceKey
return StringMessage(data=memcache.get(KEY) or "")
# - - - Registration - - - - - - - - - - - - - - - - - - - -
@ndb.transactional(xg=True)
def _conferenceRegistration(self, request, reg=True):
"""Register or unregister user for selected conference."""
retval = None
prof = self._getProfileFromUser() # get user Profile
# check if conf exists given websafeConfKey
# get conference; check that it exists
wsck = request.websafeConferenceKey
conf = ndb.Key(urlsafe=wsck).get()
if not conf:
raise endpoints.NotFoundException(
'No conference found with key: %s' % wsck)
# register
if reg:
# check if user already registered otherwise add
if wsck in prof.conferenceKeysToAttend:
raise ConflictException(
"You have already registered for this conference")
# check if seats avail
if conf.seatsAvailable <= 0:
raise ConflictException(
"There are no seats available.")
# register user, take away one seat
prof.conferenceKeysToAttend.append(wsck)
conf.seatsAvailable -= 1
retval = True
# unregister
else:
# check if user already registered
if wsck in prof.conferenceKeysToAttend:
# unregister user, add back one seat
prof.conferenceKeysToAttend.remove(wsck)
conf.seatsAvailable += 1
retval = True
else:
retval = False
# conf.ratioAvailable = float(conf.seatsAvailable)/float(conf.maxAttendees)
# write things back to the datastore & return
prof.put()
conf.put()
return BooleanMessage(data=retval)
@endpoints.method(message_types.VoidMessage, ConferenceForms,
path='conferences/attending',
http_method='GET', name='getConferencesToAttend')
def getConferencesToAttend(self, request):
"""Get list of conferences that user has registered for."""
prof = self._getProfileFromUser() # get user Profile
conf_keys = [ndb.Key(urlsafe=wsck) for wsck in prof.conferenceKeysToAttend]
conferences = ndb.get_multi(conf_keys)
# get organizers
organisers = [ndb.Key(Profile, conf.organizerUserId) for conf in conferences]
profiles = ndb.get_multi(organisers)
# put display names in a dict for easier fetching
names = {}
for profile in profiles:
names[profile.key.id()] = profile.displayName
# return set of ConferenceForm objects per Conference
return ConferenceForms(items=[self._copyConferenceToForm(conf, names[conf.organizerUserId])\
for conf in conferences]
)
@endpoints.method(CONF_GET_REQUEST, BooleanMessage,
path='conference/{websafeConferenceKey}',
http_method='POST', name='registerForConference')
def registerForConference(self, request):
"""Register user for selected conference."""
return self._conferenceRegistration(request)
@endpoints.method(CONF_GET_REQUEST, BooleanMessage,
path='conference/{websafeConferenceKey}',
http_method='DELETE', name='unregisterFromConference')
def unregisterFromConference(self, request):
"""Unregister user for selected conference."""
return self._conferenceRegistration(request, reg=False)
@endpoints.method(message_types.VoidMessage, ConferenceForms,
path='filterPlayground',
http_method='GET', name='filterPlayground')
def filterPlayground(self, request):
"""Filter Playground"""
q = Conference.query()
# field = "city"
# operator = "="
# value = "London"
# f = ndb.query.FilterNode(field, operator, value)
# q = q.filter(f)
q = q.filter(Conference.city=="London")
q = q.filter(Conference.topics=="Medical Innovations")
q = q.filter(Conference.month==6)
return ConferenceForms(
items=[self._copyConferenceToForm(conf, "") for conf in q]
)
# - - - Sessions - - - - - - - - - - - - - - - - - - - -
@endpoints.method(SessionForm, SessionForm, path='session',
http_method='POST', name='createSession')
def createSession(self, request):
"""Create new session."""
return self._createSessionObject(request)
def _createSessionObject(self, request):
"""Create or update _createSessionObject object, returning SessionForm/request."""
# preload necessary data items
user = endpoints.get_current_user()
if not user:
raise endpoints.UnauthorizedException('Authorization required')
#check if authorized
user_id = getUserId(user)
conf = ndb.Key(urlsafe=request.websafeConferenceKey).get()
if conf.organizerUserId != user_id:
raise endpoints.UnauthorizedException('Incorrect Authorization')
if not request.name:
raise endpoints.BadRequestException("Session 'name' field required")
#copy data into a dict
data = {field.name: getattr(request, field.name) for field in request.all_fields()}
del data['websafeConferenceKey']
del data['websafeSessionKey']
if not data['typeOfSession']:
data['typeOfSession'] = str(SessionType.NOT_SPECIFIED)
else:
data['typeOfSession']= str(data['typeOfSession'])
for df in SESSION_DEFAULTS:
if data[df] in (None, []):
data[df] = SESSION_DEFAULTS[df]
setattr(request, df, SESSION_DEFAULTS[df])
# convert dates from strings to Date objects; set month based on start_date
if data['date']:
data['date'] = datetime.strptime(data['date'][:10], "%Y-%m-%d").date()
else:
data['date'] = datetime.now()
if data['startTime']:
data['startTime'] = datetime.strptime(data['startTime'], '%I:%M%p').time()
else:
data['startTime'] = datetime.strptime('12:00AM', '%I:%M%p').time()
# generate session Key/ID based on conference key
c_key = ndb.Key(urlsafe=request.websafeConferenceKey)
s_id = Session.allocate_ids(size=1, parent=c_key)[0]
s_key = ndb.Key(Session, s_id, parent=c_key)
data['key'] = s_key
#create session
Session(**data).put()
taskqueue.add(params={'websafeConferenceKey': c_key.urlsafe(),
'speaker_name': data['speaker']},
url='/tasks/set_featured_speaker'
)
        return self._copySessionToForm(s_key.get())
@endpoints.method(SESSION_GET_REQUEST, SessionForms,
path='conference/{websafeConferenceKey}/session',
http_method='GET', name='getSessionsCreated')
def getSessionsCreated(self, request):
"""Return sessions for conference"""
# create ancestor query for all key matches for this user
ss = Session.query(ancestor=ndb.Key(urlsafe=request.websafeConferenceKey))
# return set of ConferenceForm objects per Conference
return SessionForms(
sessions=[self._copySessionToForm(s) for s in ss]
)
@endpoints.method(SESSION_GET_BY_TYPE_REQUEST, SessionForms,
path='conference/{websafeConferenceKey}/session/{sessionType}',
http_method='GET', name='getSessionsByType')
def getSessionsByType(self, request):
"""Return sessions for conference with requested type"""
if request.sessionType not in SessionType.to_dict():
raise endpoints.BadRequestException('Invalid session type')
# create ancestor query for all key matches for this conference
ss = Session.query(ancestor=ndb.Key(urlsafe=request.websafeConferenceKey)).filter(
Session.typeOfSession==str(request.sessionType))
# return set of SessionForm objects per session
return SessionForms(
sessions=[self._copySessionToForm(s) for s in ss]
)
@endpoints.method(SESSION_GET_BY_SPEAKER_REQUEST, SessionForms,
path='/session/{speaker}',
http_method='GET', name='getSessionsBySpeaker')
def getSessionsBySpeaker(self, request):
"""Return sessions for conference with requested type"""
# get all sessions
ss = Session.query().filter(Session.speaker==request.speaker) #filter by speaker
# return set of SessionForm objects per session
return SessionForms(
sessions=[self._copySessionToForm(s) for s in ss]
)
def _copySessionToForm(self, sess):
"""Copy relevant fields from Conference to ConferenceForm."""
sf = SessionForm()
        for field in sf.all_fields():
            if hasattr(sess, field.name):
# convert Dates to date string;
if field.name == 'date' or field.name == 'startTime':
setattr(sf, field.name, str(getattr(sess, field.name)))
# convert t-shirt string to Enum;
elif field.name == 'typeOfSession':
setattr(sf, field.name, getattr(SessionType, getattr(sess, field.name)))
else:
setattr(sf, field.name, getattr(sess, field.name))
elif field.name == 'websafeSessionKey':
setattr(sf, field.name, sess.key.urlsafe())
sf.check_initialized()
return sf
# - - - Wishlist - - - - - - - - - - - - - - - - - - - -
@endpoints.method(WISHLIST_REQUEST, ProfileForm,
path='wishlist',
http_method='POST', name='addToWishlist')
def addToWishlist(self, request):
"""Add session with given key into wishlist"""
user = endpoints.get_current_user()
if not user:
raise endpoints.UnauthorizedException('Authorization required')
profile = self._getProfileFromUser()
if request.sessionKey not in profile.SessionKeysInWishList:
profile.SessionKeysInWishList.append(request.sessionKey)
else:
raise endpoints.BadRequestException('Session already in wishlist')
profile.put()# save profile
return self._copyProfileToForm(profile) # return Profile
@endpoints.method(message_types.VoidMessage, SessionForms,
path='wishlist',
http_method='GET', name='getWishlist')
def getWishlist(self, request):
"""Return sessions in wishlist"""
user = endpoints.get_current_user()
if not user:
raise endpoints.UnauthorizedException('Authorization required')
user_id = getUserId(user)
p_key = ndb.Key(Profile, user_id)
profile = p_key.get()
        session_keys = [ndb.Key(urlsafe=s_key) for s_key in profile.SessionKeysInWishList]
        sessions = ndb.get_multi(session_keys)
return SessionForms(
sessions=[self._copySessionToForm(s) for s in sessions]
)
@endpoints.method(WISHLIST_REQUEST, BooleanMessage,
path='wishlist',
http_method='DELETE', name='deleteFromWishlist')
def deleteFromWishlist(self, request):
"""Remove session with given key from wishlist."""
user = endpoints.get_current_user()
if not user:
raise endpoints.UnauthorizedException('Authorization required')
user_id = getUserId(user)
p_key = ndb.Key(Profile, user_id)
profile = p_key.get()
session = ndb.Key(urlsafe=request.sessionKey).get()
if not session:
raise endpoints.NotFoundException(
'No session found with key: %s' %request.sessionKey)
if request.sessionKey in profile.SessionKeysInWishList:
profile.SessionKeysInWishList.remove(request.sessionKey)
profile.put()
return BooleanMessage(data=True)
# - - - TASK 3 Queries - - - - - - - - - - - - - - - - - - - - - -
@endpoints.method(GET_POPULAR_CONF_REQUEST, ConferenceForms,
path='popularConferences',
http_method='POST', name='getPopularConferences')
def getPopularConferences(self, request):
"""Return x number of most popular conferences."""
# make sure user is authed
if request.num < 1:
raise endpoints.BadRequestException('Please choose a number > 0.')
confs = Conference.query().order(Conference.ratioAvailable) #ascending, less available = more popular
confs = confs.fetch(request.num)
# return set of ConferenceForm objects per Conference
return ConferenceForms(
items=[self._copyConferenceToForm(conf) for conf in confs]
)
@endpoints.method(CONF_GET_REQUEST, ProfileForms,
path='conference/{websafeConferenceKey}/attending',
http_method='GET', name='getAttendeesForConference')
def getAttendeesForConference(self, request):
"""Get attendees for given conference"""
#check if user is owner
user = endpoints.get_current_user()
if not user:
raise endpoints.UnauthorizedException('Authorization required')
user_id = getUserId(user)
conf = ndb.Key(urlsafe=request.websafeConferenceKey).get()
if conf.organizerUserId != user_id:
raise endpoints.UnauthorizedException('Incorrect Authorization')
ps = Profile.query(Profile.conferenceKeysToAttend == request.websafeConferenceKey)
return ProfileForms(
profiles=[self._copyProfileToForm(p) for p in ps]
)
api = endpoints.api_server([ConferenceApi]) # register API
| 37.753129
| 109
| 0.626789
|
3da7d04b8af0cd886f8e74ecde7189ada89ac7c1
| 1,633
|
py
|
Python
|
keras_frcnn/config.py
|
TuanDH94/AutoLicensePlateRecognition
|
41d4d68b1d3ded6729067ab3469657d0d5b700df
|
[
"Apache-2.0"
] | null | null | null |
keras_frcnn/config.py
|
TuanDH94/AutoLicensePlateRecognition
|
41d4d68b1d3ded6729067ab3469657d0d5b700df
|
[
"Apache-2.0"
] | null | null | null |
keras_frcnn/config.py
|
TuanDH94/AutoLicensePlateRecognition
|
41d4d68b1d3ded6729067ab3469657d0d5b700df
|
[
"Apache-2.0"
] | null | null | null |
from keras import backend as K
import math
class Config:
def __init__(self):
self.verbose = True
# self.network = 'resnet50'
self.network = 'vgg'
# setting for data augmentation
self.use_horizontal_flips = False
self.use_vertical_flips = False
self.rot_90 = False
# anchor box scales
self.anchor_box_scales = [128, 256, 512]
# anchor box ratios
self.anchor_box_ratios = [[1, 1], [1./math.sqrt(2), 2./math.sqrt(2)], [2./math.sqrt(2), 1./math.sqrt(2)]]
# size to resize the smallest side of the image
self.im_size = 600
# image channel-wise mean to subtract
self.img_channel_mean = [103.939, 116.779, 123.68]
self.img_scaling_factor = 1.0
# number of ROIs at once
self.num_rois = 4
# stride at the RPN (this depends on the network configuration)
self.rpn_stride = 16
self.balanced_classes = False
# scaling the stdev
self.std_scaling = 4.0
self.classifier_regr_std = [8.0, 8.0, 4.0, 4.0]
# overlaps for RPN
self.rpn_min_overlap = 0.3
self.rpn_max_overlap = 0.7
# overlaps for classifier ROIs
self.classifier_min_overlap = 0.1
self.classifier_max_overlap = 0.5
# placeholder for the class mapping, automatically generated by the parser
self.class_mapping = None
#location of pretrained weights for the base network
# weight files can be found at:
# https://github.com/fchollet/deep-learning-models/releases/download/v0.2/resnet50_weights_th_dim_ordering_th_kernels_notop.h5
# https://github.com/fchollet/deep-learning-models/releases/download/v0.2/resnet50_weights_tf_dim_ordering_tf_kernels_notop.h5
self.model_path = 'model_frcnn.vgg.hdf5'
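# Hedged usage sketch (not part of this module): keras_frcnn's train/test
# scripts conventionally pickle a Config instance so both stages share the
# same settings; the filename below is illustrative, not taken from this file.
# import pickle
# from keras_frcnn.config import Config
# C = Config()
# C.num_rois = 32                      # larger ROI batch for training
# with open('config.pickle', 'wb') as f:
#     pickle.dump(C, f)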
| 27.216667
| 128
| 0.731782
|
5a9bd2173680fb9e521a3900429f5220f9084347
| 1,820
|
py
|
Python
|
quantum_regression/protac_linear_regression_aq.py
|
jt667/protac
|
94eecd9b8deec0a9745d1b49f8f875d71798c6b1
|
[
"Apache-2.0"
] | 1
|
2022-02-25T08:24:29.000Z
|
2022-02-25T08:24:29.000Z
|
quantum_regression/protac_linear_regression_aq.py
|
jt667/protac
|
94eecd9b8deec0a9745d1b49f8f875d71798c6b1
|
[
"Apache-2.0"
] | null | null | null |
quantum_regression/protac_linear_regression_aq.py
|
jt667/protac
|
94eecd9b8deec0a9745d1b49f8f875d71798c6b1
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
import pandas as pd
from sklearn.preprocessing import StandardScaler
#import dimod
from azure.quantum import Workspace
from azure.quantum.optimization import Problem, ProblemType, Term, SimulatedAnnealing
# Quantum Workspace details
# These can be found in the Azure Portal in the workspace overview
workspace = Workspace (
subscription_id = "",
resource_group = "",
name = "",
location = ""
)
df = pd.read_csv("protac_cleaned.csv")
df = df.drop(df[ df["DC50 (nM)"] > 30000].index)
y = np.log2(df["DC50 (nM)"])
df = df.drop(["DC50 (nM)"],axis=1)
scaler = StandardScaler()
X = scaler.fit_transform(df)
X = np.hstack((X,np.ones([X.shape[0],1])))
P = np.array([-1, -0.5, 0.5, 1, 2, 4])
curlyP = np.kron(np.eye(12),P)
XP = np.matmul(X,curlyP)
A = np.matmul(XP.transpose(),XP)
b = -2*np.matmul(XP.transpose(), y)  # linear term of ||XP x - y||^2, dropping the constant
# Create problem terms
terms = []
#bqm = dimod.AdjVectorBQM(dimod.Vartype.BINARY)
# Add linear terms
for k in range(A.shape[0]):
terms.append(Term(c=float(b[k]), indices=[k]))
#bqm.set_linear('x' + str(k), b[k])
# Add quadratic terms
for i in range(A.shape[0]):
for j in range(i + 1, A.shape[0]):
if not A[i,j] == 0:
terms.append(Term(c=float(A[i,j]), indices=[i,j]))
#bqm.set_quadratic('x' + str(i), 'x' + str(j), A[i,j])
#sampler = EmbeddingComposite(DWaveSampler())
#sampleset = sampler.sample(bqm)
#, num_reads=5000, label='Protac Regression Test 1'
#print(sampleset)
# Name our problem
problem = Problem(name="Protac", problem_type=ProblemType.pubo)
problem.add_terms(terms=terms)
# Choose our solver
solver = SimulatedAnnealing(workspace, timeout=100)
# Submit the problem to the solver
result = solver.optimize(problem)
print(result)
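# Hedged post-processing sketch (not part of the original script): the PUBO
# encodes each regression weight as a signed sum of the fixed coefficients in
# P, so a returned bit string decodes back to weights via curlyP. The
# result["configuration"] access assumes the dict shape Azure Quantum solvers
# typically return; adjust the lookup for your SDK version.
# config = result["configuration"]
# x_bits = np.array([config[str(k)] for k in range(curlyP.shape[1])])
# w = curlyP.dot(x_bits)       # weights in the scaled feature space (one per column of X)
# y_pred = X.dot(w)            # in-sample predictions of log2(DC50)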
| 27.575758
| 86
| 0.655495
|
a1f0182d61839374f6eca81c3ce9708c6b61ad39
| 4,154
|
py
|
Python
|
platform_test/cores/larson.py
|
zignig/tinybx_stuff
|
5c62665c87c30fb7eb34a651992041bbdef10b0b
|
[
"MIT"
] | 5
|
2019-03-30T01:08:13.000Z
|
2020-05-04T06:02:41.000Z
|
platform_test/cores/larson.py
|
zignig/tinybx_stuff
|
5c62665c87c30fb7eb34a651992041bbdef10b0b
|
[
"MIT"
] | null | null | null |
platform_test/cores/larson.py
|
zignig/tinybx_stuff
|
5c62665c87c30fb7eb34a651992041bbdef10b0b
|
[
"MIT"
] | 1
|
2020-05-04T06:02:17.000Z
|
2020-05-04T06:02:17.000Z
|
from nmigen import *
from nmigen.cli import main, pysim
class OnOff(Elaboratable):
def __init__(self, stretch=200):
self.stretcher = Signal(max=stretch + 1)
self.stretch = stretch
self.o = Signal()
def elaborate(self, platform):
m = Module()
m.d.sync += self.stretcher.eq(self.stretcher + 1)
with m.If(self.stretch == self.stretcher):
m.d.sync += self.stretcher.eq(0)
m.d.sync += self.o.eq(~self.o)
return m
# PWM up and down ramping on enable
class Fade(Elaboratable):
def __init__(self, width=8, stretch=3):
self.width = width
self.counter = Signal(width)
self.value = Signal(width)
self.enable = Signal(reset=1)
self.active = Signal()
# Attach this to your led
self.o = Signal()
self.stretcher = Signal(max=stretch + 1)
self.stretch = stretch
def elaborate(self, platform):
m = Module()
with m.If(self.enable):
# PWM
            with m.If(self.value == 2 ** self.width - 1):  # value saturated: LED fully on
m.d.comb += self.o.eq(1)
with m.Elif(self.counter <= self.value):
m.d.comb += self.o.eq(1)
with m.If(self.value == 0):
m.d.comb += self.o.eq(0)
with m.Elif(self.counter > self.value):
m.d.comb += self.o.eq(0)
m.d.sync += self.counter.eq(self.counter + 1)
# Fade up on active
m.d.sync += self.stretcher.eq(self.stretcher + 1)
with m.If(self.stretcher == self.stretch):
m.d.sync += self.stretcher.eq(0)
with m.If(self.active == 1):
with m.If(self.value < 2 ** self.width - 1):
m.d.sync += self.value.eq(self.value + 1)
with m.If(self.active == 0):
with m.If(self.value > 0):
m.d.sync += self.value.eq(self.value - 1)
with m.Else():
m.d.comb += self.o.eq(0)
return m
class Larson(Elaboratable):
def __init__(self, width=5, stretch=10):
# width of the scanner
self.width = width
self.enable = Signal(reset=1)
self.track = Signal(width + 2, reset=1)
        self.stretcher = Signal(max=stretch + 1)
self.stretch = stretch
self.updown = Signal(reset=1)
def elaborate(self, platform):
m = Module()
with m.If(self.enable):
m.d.sync += self.stretcher.eq(self.stretcher + 1)
with m.If(self.stretcher == self.stretch):
m.d.sync += self.stretcher.eq(0)
with m.If(self.updown):
with m.If(self.track[self.width + 1] == 1):
# m.d.sync += self.track[0].eq(1)
m.d.sync += self.updown.eq(0)
with m.Else():
m.d.sync += self.track.eq(self.track << 1)
with m.If(~self.updown):
with m.If(self.track[0] == 1):
m.d.sync += self.updown.eq(1)
with m.Else():
m.d.sync += self.track.eq(self.track >> 1)
with m.Else():
m.d.sync += self.track.eq(0)
return m
class FadeTest(Elaboratable):
def __init__(self, stretch=500):
self.o = Signal()
self.stretch = stretch
def elaborate(self, platform):
m = Module()
fader = Fade()
onoff = OnOff(stretch=self.stretch)
m.d.sync += fader.active.eq(onoff.o)
m.d.comb += self.o.eq(fader.o)
m.submodules.fader = fader
m.submodules.onoff = onoff
return m
if __name__ == "__main__":
# b = Larson(width=4, stretch=50)
# pins = (b.track, b.stretcher, b.stretch, b.updown)
# main(b, pins, name="top")
# f = Fade()
# pins = (f.o,f.active,f.counter,f.value,f.enable)
# main(f,pins,name="top")
# c = OnOff(stretch=500)
# pins = (c.o,c.stretch,c.stretcher)
# main(c,pins,name="top")
f = FadeTest(stretch=4000)
pins = f.o
main(f, pins, name="fader")
| 31.469697
| 66
| 0.508907
|
3ecdff2445beca50741f74ec44e20db58c28e7d2
| 8,477
|
py
|
Python
|
src/clifunzone/xml2json.py
|
Justin-W/clifunland
|
c3d5b3d3a5b72611b215d6d793de752e061fefa4
|
[
"BSD-2-Clause"
] | 1
|
2016-02-26T02:55:06.000Z
|
2016-02-26T02:55:06.000Z
|
src/clifunzone/xml2json.py
|
Justin-W/clifunland
|
c3d5b3d3a5b72611b215d6d793de752e061fefa4
|
[
"BSD-2-Clause"
] | null | null | null |
src/clifunzone/xml2json.py
|
Justin-W/clifunland
|
c3d5b3d3a5b72611b215d6d793de752e061fefa4
|
[
"BSD-2-Clause"
] | null | null | null |
#!/usr/bin/env python
"""xml2json.py Convert XML to JSON
Relies on ElementTree for the XML parsing. This is based on
pesterfish.py but uses a different XML->JSON mapping.
The XML->JSON mapping is described at
http://www.xml.com/pub/a/2006/05/31/converting-between-xml-and-json.html
Rewritten to a command line utility by Hay Kranen < github.com/hay > with
contributions from George Hamilton (gmh04) and Dan Brown (jdanbrown)
XML JSON
<e/> "e": null
<e>text</e> "e": "text"
<e name="value" /> "e": { "@name": "value" }
<e name="value">text</e> "e": { "@name": "value", "#text": "text" }
<e> <a>text</a><b>text</b> </e>  "e": { "a": "text", "b": "text" }
<e> <a>text</a> <a>text</a> </e> "e": { "a": ["text", "text"] }
<e> text <a>text</a> </e> "e": { "#text": "text", "a": "text" }
This is very similar to the mapping used for Yahoo Web Services
(http://developer.yahoo.com/common/json.html#xml).
This is a mess in that it is so unpredictable -- it requires lots of testing
(e.g. to see if values are lists or strings or dictionaries). For use
in Python this could be vastly cleaner. Think about whether the internal
form can be more self-consistent while maintaining good external
characteristics for the JSON.
Look at the Yahoo version closely to see how it works. Maybe can adopt
that completely if it makes more sense...
R. White, 2006 November 6
"""
import json
import optparse
import sys
import xml.etree.cElementTree as ET
from collections import OrderedDict
def strip_tag(tag):
strip_ns_tag = tag
split_array = tag.split('}')
if len(split_array) > 1:
strip_ns_tag = split_array[1]
tag = strip_ns_tag
return tag
def elem_to_internal(elem, strip_attribute=False, strip_namespace=True, strip_whitespace=True, factory=None):
"""
Convert an Element into an internal dictionary (not JSON!).
:param elem: the <ElementTree.Element> to convert.
:param strip_attribute: If True, attributes will be ignored.
:param strip_namespace: If True, namespaces will be ignored.
:param strip_whitespace: If True, 'unimportant' whitespace will be ignored.
:param factory: a dict-like object type. Defaults to <collections.OrderedDict>.
:return: a dict-like object.
"""
if factory is None:
# factory = dict
factory = OrderedDict
d = factory()
elem_tag = elem.tag
if strip_namespace:
elem_tag = strip_tag(elem.tag)
if not strip_attribute:
for key, value in list(elem.attrib.items()):
d['@' + key] = value
# loop over subelements to merge them
for subelem in elem:
v = elem_to_internal(subelem, strip_attribute=strip_attribute, strip_namespace=strip_namespace,
strip_whitespace=strip_whitespace, factory=factory)
tag = subelem.tag
if strip_namespace:
tag = strip_tag(subelem.tag)
value = v[tag]
try:
# add to existing list for this tag
d[tag].append(value)
except AttributeError:
# turn existing entry into a list
d[tag] = [d[tag], value]
except KeyError:
# add a new non-list entry
d[tag] = value
text = elem.text
tail = elem.tail
if strip_whitespace:
# ignore leading and trailing whitespace
if text:
text = text.strip()
if tail:
tail = tail.strip()
if tail:
d['#tail'] = tail
if d:
# use #text element if other attributes exist
if text:
d["#text"] = text
else:
# text is the value if no attributes
d = text or None
return factory([(elem_tag, d)])
def internal_to_elem(pfsh, factory=ET.Element):
"""Convert an internal dictionary (not JSON!) into an Element.
Whatever Element implementation we could import will be
used by default; if you want to use something else, pass the
Element class as the factory parameter.
"""
attribs = {}
text = None
tail = None
sublist = []
tag = list(pfsh.keys())
if len(tag) != 1:
raise ValueError("Illegal structure with multiple tags: %s" % tag)
tag = tag[0]
value = pfsh[tag]
if isinstance(value, dict):
for k, v in list(value.items()):
if k[:1] == "@":
attribs[k[1:]] = v
elif k == "#text":
text = v
elif k == "#tail":
tail = v
elif isinstance(v, list):
for v2 in v:
sublist.append(internal_to_elem({k: v2}, factory=factory))
else:
sublist.append(internal_to_elem({k: v}, factory=factory))
else:
text = value
e = factory(tag, attribs)
for sub in sublist:
e.append(sub)
e.text = text
e.tail = tail
return e
def elem2json(elem, strip_attribute=False, strip_namespace=True, strip_whitespace=True, factory=None, pretty=False):
"""Convert an ElementTree or Element into a JSON string."""
if hasattr(elem, 'getroot'):
elem = elem.getroot()
d = elem_to_internal(elem, strip_attribute=strip_attribute, strip_namespace=strip_namespace,
strip_whitespace=strip_whitespace, factory=factory)
if pretty:
return json.dumps(d, indent=4, separators=(',', ': '))
else:
return json.dumps(d)
def json2elem(json_data, factory=ET.Element):
"""Convert a JSON string into an Element.
Whatever Element implementation we could import will be used by
default; if you want to use something else, pass the Element class
as the factory parameter.
"""
return internal_to_elem(json.loads(json_data), factory)
def xml2json(xmlstring, strip_attribute=0, strip_namespace=1, strip_whitespace=1, factory=None, pretty=False):
"""Convert an XML string into a JSON string."""
elem = ET.fromstring(xmlstring)
return elem2json(elem, strip_attribute=strip_attribute, strip_namespace=strip_namespace,
strip_whitespace=strip_whitespace, factory=factory, pretty=pretty)
def json2xml(json_data, factory=ET.Element):
"""Convert a JSON string into an XML string.
Whatever Element implementation we could import will be used by
default; if you want to use something else, pass the Element class
as the factory parameter.
"""
if not isinstance(json_data, dict):
json_data = json.loads(json_data)
elem = internal_to_elem(json_data, factory)
return ET.tostring(elem)
def main():
p = optparse.OptionParser(
description='Converts XML to JSON or the other way around. Reads from standard input by default, or from file if given.',
prog='xml2json',
usage='%prog -t xml2json -o file.json [file]'
)
p.add_option('--type', '-t', help="'xml2json' or 'json2xml'", default="xml2json")
p.add_option('--out', '-o', help="Write to OUT instead of stdout")
p.add_option(
'--strip_text', action="store_true",
dest="strip_text", help="Strip text for xml2json")
p.add_option(
'--pretty', action="store_true",
dest="pretty", help="Format JSON output so it is easier to read")
p.add_option(
'--strip_namespace', action="store_true",
dest="strip_ns", help="Strip namespace for xml2json")
p.add_option(
'--strip_newlines', action="store_true",
dest="strip_nl", help="Strip newlines for xml2json")
options, arguments = p.parse_args()
inputstream = sys.stdin
if len(arguments) == 1:
try:
inputstream = open(arguments[0])
        except IOError:
sys.stderr.write("Problem reading '{0}'\n".format(arguments[0]))
p.print_help()
sys.exit(-1)
input = inputstream.read()
strip = 0
strip_ns = 0
if options.strip_text:
strip = 1
if options.strip_ns:
strip_ns = 1
if options.strip_nl:
input = input.replace('\n', '').replace('\r', '')
if (options.type == "xml2json"):
out = xml2json(input, pretty=options.pretty, strip_namespace=strip_ns, strip_whitespace=strip)
else:
out = json2xml(input)
if (options.out):
file = open(options.out, 'w')
file.write(out)
file.close()
else:
print(out)
if __name__ == "__main__":
main()
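# Hedged round-trip sketch (not in the original file), illustrating the
# mapping documented in the module docstring with the default options:
# >>> xml2json('<e name="value">text</e>')
# '{"e": {"@name": "value", "#text": "text"}}'
# >>> json2xml('{"e": {"@name": "value", "#text": "text"}}')
# '<e name="value">text</e>'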
| 33.374016
| 130
| 0.62121
|
13cb0ffa79b3d1ef4b793641b9cc37330e2a4212
| 150
|
py
|
Python
|
setup.py
|
d1b/fetch-grsec-patches-and-kernel
|
4a2a4710d62bdfab8d6c2249623d1f1a65f6b503
|
[
"MIT"
] | 1
|
2017-08-04T16:52:26.000Z
|
2017-08-04T16:52:26.000Z
|
setup.py
|
d1b/fetch-grsec-patches-and-kernel
|
4a2a4710d62bdfab8d6c2249623d1f1a65f6b503
|
[
"MIT"
] | null | null | null |
setup.py
|
d1b/fetch-grsec-patches-and-kernel
|
4a2a4710d62bdfab8d6c2249623d1f1a65f6b503
|
[
"MIT"
] | 1
|
2021-08-10T13:00:44.000Z
|
2021-08-10T13:00:44.000Z
|
#!/usr/bin/python
from setuptools import setup
setup(
setup_requires=['pbr==3.0.1'],
pbr=True,
platforms=['any'],
zip_safe=False,
)
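# Hedged note (not in the original file): with pbr=True, setuptools metadata
# (name, version, entry points, etc.) is read from setup.cfg and the git
# history rather than being passed to setup() directly.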
| 13.636364
| 34
| 0.626667
|
224644798efa0013bcfa56c4b212c47e73ff970b
| 3,169
|
py
|
Python
|
run_webapp.py
|
AlphaStan/alphacross
|
11dbcb3a9490e9547dc9fb30c62e8beff0489e05
|
[
"BSD-3-Clause"
] | 2
|
2020-06-19T21:29:16.000Z
|
2020-10-27T08:50:21.000Z
|
run_webapp.py
|
AlphaStan/alphacross
|
11dbcb3a9490e9547dc9fb30c62e8beff0489e05
|
[
"BSD-3-Clause"
] | 10
|
2019-11-10T17:42:38.000Z
|
2020-05-27T15:09:11.000Z
|
run_webapp.py
|
AlphaStan/alphacross
|
11dbcb3a9490e9547dc9fb30c62e8beff0489e05
|
[
"BSD-3-Clause"
] | null | null | null |
from flask import render_template, current_app
import json
from src.main.environment.errors import ColumnIsFullError
from webapp.factory import create_app
app = create_app()
@app.route("/")
def home():
return render_template('index.html',
title=app.config["PANE_TITLE"],
n_rows=current_app.game.get_n_rows(current_app.game.get_state()),
n_columns=current_app.game.get_n_columns(current_app.game.get_state()),
grid=current_app.game.get_state())
@app.route("/<column_id>/<is_ai_active>", methods=['GET'])
def update_grid(column_id, is_ai_active):
column_id = int(column_id)
    is_ai_active = is_ai_active != 'false'
player_id = current_app.game.current_token_id
column_is_full = False
has_won = False
is_blocked = False
try:
current_app.game.apply_action(column_id)
if current_app.game._is_winning_move(current_app.game.get_state(), column_id, player_id):
has_won = True
elif current_app.game.is_blocked():
is_blocked = True
except ColumnIsFullError:
column_is_full = True
row_id = 0
for i, token in enumerate(current_app.game._grid[column_id][::-1]):
if token == player_id:
break
row_id += 1
update = {'player_id': player_id,
'has_won': has_won,
'draw': is_blocked,
'row_id': row_id,
'col_id': column_id,
'column_is_full': column_is_full}
# TODO: on page refresh ai_active stays True but AI token are not displayed, reset to False upon refresh
if is_ai_active and not has_won and not column_is_full:
agent_id = current_app.game.current_token_id
agent_has_won = False
agent_has_played = False
agent_column_id = 0
agent_row_id = 0
while not agent_has_played:
try:
agent_column_id = current_app.agent.select_action(current_app.game)
current_app.game.apply_action(agent_column_id)
agent_has_played = True
if current_app.game._is_winning_move(current_app.game.get_state(), agent_column_id, agent_id):
agent_has_won = True
elif current_app.game.is_blocked():
is_blocked = True
except ColumnIsFullError:
pass
for i, token in enumerate(current_app.game._grid[agent_column_id][::-1]):
if token == agent_id:
break
agent_row_id += 1
update['agent_id'] = agent_id
update['agent_has_won'] = agent_has_won
update['draw'] = is_blocked
update['agent_row_id'] = agent_row_id
update['agent_col_id'] = int(agent_column_id) # needs conversion because numpy.int64 is not JSON serializable
json_file = json.dumps(update)
return json_file
@app.route("/reset", methods=['GET', 'POST'])
def reset():
current_app.game.reset()
return json.dumps({"result": "SUCCESS"})
if __name__ == "__main__":
app.run(debug=app.config["DEBUG"])
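# Hedged usage sketch (not part of the original script): exercising the
# routes above with curl once the app is running on Flask's default port;
# the column index and AI flag below are illustrative values.
#   curl http://127.0.0.1:5000/               -> renders the grid page
#   curl http://127.0.0.1:5000/3/false        -> drops a token in column 3, AI off
#   curl -X POST http://127.0.0.1:5000/reset  -> {"result": "SUCCESS"}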
| 36.848837
| 118
| 0.628274
|
5ce9a5384f62df9675655ea29fc80c53d23d68f6
| 2,069
|
py
|
Python
|
scripts/util/list_stale_autoplots.py
|
jamayfieldjr/iem
|
275b77a65f3b12e26e6cbdb230786b9c7d2b9c9a
|
[
"MIT"
] | 1
|
2019-10-07T17:01:24.000Z
|
2019-10-07T17:01:24.000Z
|
scripts/util/list_stale_autoplots.py
|
jamayfieldjr/iem
|
275b77a65f3b12e26e6cbdb230786b9c7d2b9c9a
|
[
"MIT"
] | null | null | null |
scripts/util/list_stale_autoplots.py
|
jamayfieldjr/iem
|
275b77a65f3b12e26e6cbdb230786b9c7d2b9c9a
|
[
"MIT"
] | null | null | null |
"""Look into which autoplots have not been used in a while"""
from __future__ import print_function
import re
import pandas as pd
from pyiem.util import get_dbconn
QRE = re.compile("q=([0-9]+)")
NO_FEATURES = [
17, # is referenced by canonical page
28, # too complex and generally hated by the public
    31,  # temp change over x-days, too pigeonholed
38, # radiation plot that is complex
68, # nws unique VTEC types per year
96, # one-off showing precip biases
94, # one-off showing temp biases
102, # LSR report types
110, 111, 112, 113, 114, 115, 117,
118, 119, 120, 121, 122, 123, 124, # climodat text-only reports
143, 141, # yieldfx plots
144, # soil temp periods, too fragile of data to be useful
    152,  # growing season differences, too noisy
158, # Tall towers plot
177, # ISUSM plot linked to other app
203, # Handled by dedicated PHP page
]
def main():
"""DO Something"""
pgconn = get_dbconn('mesosite', user='nobody')
cursor = pgconn.cursor()
cursor.execute("""SELECT valid, appurl from feature WHERE appurl is not null
and appurl != ''
""")
rows = {}
for row in cursor:
appurl = row[1]
valid = row[0]
if appurl.find("/plotting/auto/") != 0:
continue
tokens = QRE.findall(appurl)
if not tokens:
print("appurl: %s valid: %s failed RE" % (appurl, valid))
continue
appid = int(tokens[0])
if appid in NO_FEATURES:
continue
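        # keep only the most recent feature date seen for each autoplot id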
res = rows.setdefault(appid, valid)
if res < valid:
rows[appid] = valid
if not rows:
print("No data found")
return
df = pd.DataFrame.from_dict(rows, orient='index')
df.columns = ['valid']
maxval = df.index.max()
for i in range(1, maxval):
if i not in rows and i not in NO_FEATURES:
print("No entries for: %4i" % (i, ))
df.sort_values(by='valid', inplace=True)
print(df.head(10))
if __name__ == '__main__':
main()
| 29.985507
| 80
| 0.59884
|
2d80ea2ee36b625968c2a5fdbfa324e90b01da42
| 200
|
py
|
Python
|
About_KV/python/global.py
|
NCDerek/duedge-recipes
|
b52358b9f8128c5e349d1f1420ada886624e6302
|
[
"MIT"
] | null | null | null |
About_KV/python/global.py
|
NCDerek/duedge-recipes
|
b52358b9f8128c5e349d1f1420ada886624e6302
|
[
"MIT"
] | null | null | null |
About_KV/python/global.py
|
NCDerek/duedge-recipes
|
b52358b9f8128c5e349d1f1420ada886624e6302
|
[
"MIT"
] | null | null | null |
async def handler(event):
v = await event.kv.get_global('my_key1')
if v is not None:
return {'status': 200, 'body': v}
else:
return {'status': 404, 'body': b'not found!'}
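# Hedged note (not in the original recipe): the DuEdge runtime is assumed to
# await handler(event) once per request; event.kv.get_global resolves to the
# stored bytes for the key, or None when the key is absent, which is what the
# 404 branch above handles.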
| 25
| 53
| 0.57
|
d0f988883a222e62685366a308101dde3703e9a4
| 22,831
|
py
|
Python
|
tencentcloud/rce/v20201103/models.py
|
qin5506/tencentcloud-sdk-python
|
e9c59d80beabf75fb96456bb8d7a53400346fe9a
|
[
"Apache-2.0"
] | null | null | null |
tencentcloud/rce/v20201103/models.py
|
qin5506/tencentcloud-sdk-python
|
e9c59d80beabf75fb96456bb8d7a53400346fe9a
|
[
"Apache-2.0"
] | null | null | null |
tencentcloud/rce/v20201103/models.py
|
qin5506/tencentcloud-sdk-python
|
e9c59d80beabf75fb96456bb8d7a53400346fe9a
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf8 -*-
# Copyright (c) 2017-2018 THL A29 Limited, a Tencent company. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import warnings
from tencentcloud.common.abstract_model import AbstractModel
class AccountInfo(AbstractModel):
"""账号信息。
"""
def __init__(self):
"""
:param AccountType: 用户账号类型(默认开通 QQ 开放账号、手机号,手机 MD5 账号类型查询。如需使用微信开放账号,则需要 提交工单 由腾讯云进行资格审核,审核通过后方可正常使用微信开放账号):
1:QQ开放账号。
2:微信开放账号。
4:手机号(暂仅支持国内手机号)。
8:设备号(imei/imeiMD5/idfa/idfaMD5)。
0:其他。
10004:支持手机号MD5加密和sha256加密;
标准中国大陆手机号11位,MD5加密后取长度32位小写字符串;例如:手机号13112345678的Md5加密结果为手"dafc728802534d51fbf85c70313a2bd2"
标准中国大陆手机号11位,sha256加密后取长度为64位的小写字符串;例如:手机号13112345678的sha256加密的结果为“9f46715cff1a9ac969ec01924111f7f3697a97ad98a4fd53e15a78d79d1f3551”
:type AccountType: int
:param QQAccount: QQ账号信息,AccountType是1时,该字段必填。
:type QQAccount: :class:`tencentcloud.rce.v20201103.models.QQAccountInfo`
:param WeChatAccount: 微信账号信息,AccountType是2时,该字段必填。
:type WeChatAccount: :class:`tencentcloud.rce.v20201103.models.WeChatAccountInfo`
:param OtherAccount: 其它账号信息,AccountType是0、4、8或10004时,该字段必填。
:type OtherAccount: :class:`tencentcloud.rce.v20201103.models.OtherAccountInfo`
"""
self.AccountType = None
self.QQAccount = None
self.WeChatAccount = None
self.OtherAccount = None
def _deserialize(self, params):
self.AccountType = params.get("AccountType")
if params.get("QQAccount") is not None:
self.QQAccount = QQAccountInfo()
self.QQAccount._deserialize(params.get("QQAccount"))
if params.get("WeChatAccount") is not None:
self.WeChatAccount = WeChatAccountInfo()
self.WeChatAccount._deserialize(params.get("WeChatAccount"))
if params.get("OtherAccount") is not None:
self.OtherAccount = OtherAccountInfo()
self.OtherAccount._deserialize(params.get("OtherAccount"))
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set), Warning)
class InputDetails(AbstractModel):
"""入参的详细参数信息
"""
def __init__(self):
"""
:param FieldName: 字段名称
:type FieldName: str
:param FieldValue: 字段值
:type FieldValue: str
"""
self.FieldName = None
self.FieldValue = None
def _deserialize(self, params):
self.FieldName = params.get("FieldName")
self.FieldValue = params.get("FieldValue")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set), Warning)
class InputManageMarketingRisk(AbstractModel):
"""全栈式风控引擎入参
"""
def __init__(self):
"""
:param Account: 账号信息。
:type Account: :class:`tencentcloud.rce.v20201103.models.AccountInfo`
:param SceneCode: 场景类型:场景SceneCode, 控制台上新建对应的场景并获取对应的值;
例如:e_register_protection_1521184361
控制台链接:https://console.cloud.tencent.com/rce/risk/sceneroot;
:type SceneCode: str
:param UserIp: 登录来源的外网IP
:type UserIp: str
:param PostTime: 用户操作时间戳,单位秒(格林威治时间精确到秒,如1501590972)。
:type PostTime: int
:param UserId: 用户唯一标识。
:type UserId: str
:param DeviceToken: 设备指纹token。
:type DeviceToken: str
:param DeviceBusinessId: 设备指纹BusinessId
:type DeviceBusinessId: int
:param BusinessId: 业务ID。网站或应用在多个业务中使用此服务,通过此ID区分统计数据。
:type BusinessId: int
:param Nickname: 昵称,UTF-8 编码。
:type Nickname: str
:param EmailAddress: 用户邮箱地址(非系统自动生成)。
:type EmailAddress: str
:param CheckDevice: 是否识别设备异常:
0:不识别。
1:识别。
:type CheckDevice: int
:param CookieHash: 用户HTTP请求中的Cookie进行2次hash的值,只要保证相同Cookie的hash值一致即可。
:type CookieHash: str
:param Referer: 用户HTTP请求的Referer值。
:type Referer: str
:param UserAgent: 用户HTTP请求的User-Agent值。
:type UserAgent: str
:param XForwardedFor: 用户HTTP请求的X-Forwarded-For值。
:type XForwardedFor: str
:param MacAddress: MAC地址或设备唯一标识。
:type MacAddress: str
:param VendorId: 手机制造商ID,如果手机注册,请带上此信息。
:type VendorId: str
:param DeviceType: 设备类型:
1:Android
2:IOS
:type DeviceType: int
:param Details: 详细信息
FieldName 清单
Android serial_number String 否 设备序列号
Android carrier String 否 运营商;-1: 获取失败,0: 其他,1: 移动,2: 联通,3: 电信,4: 铁通
Android mcc_mnc String 否 netOperator MCC+MNC
Android model String 否 手机型号
Android os_system String 否 操作系统
Android vendor_id String 否 设备品牌 “华为”“oppo”“小米”
Android device_version String 否 设备版本
Android android_api_level String 否 安卓API等级
Android phone_chip_info String 否 手机芯片信息
Android resolution_w String 否 屏幕分辨率宽,保留整数
Android resolution_h String 否 屏幕分辨率高,保留整数
Android brightness String 否 屏幕亮度
Android bluetooth_address String 否 蓝牙地址
Android baseband_version String 否 基带版本
Android kernel_version String 否 kernel 版本
Android cpu_core String 否 CPU 核数
Android cpu_model String 否 CPU 型号
Android memory String 否 内存容量,单位转换为 GB
Android storage String 否 存储容量,单位转换为 GB
Android volume String 否 手机音量
Android battery_power String 否 电池电量
Android language String 否 语言
Android package_name String 否 软件包名
Android App_version String 否 App 版本号
Android App_name String 否 App 显示名称
Android is_debug String 否 是否 debug;0 为正常模式,1 为 debug 模式;其他值无效
Android is_root String 否 是否越狱;0 为正常,1 为越狱;其他值无效
Android is_proxy String 否 是否启动代理;0 为未开启,1 为开启;其他值无效
Android is_emulator String 否 是否模拟器;0 为未开启,1 为开启;其他值无效
Android charge_status String 否 充电状态;1-不在充电,2-USB充电,3-电源充电
Android network_type String 否 网络类型:2G/3G/4G/5G/WiFi/WWAN/other
Android wifi_mac String 否 WiFi MAC地址
IOS model String 否 机器型号 iPhone11
IOS memory String 否 内存容量,单位转换为 GB
IOS os_system String 否 操作系统
IOS device_version String 否 设备版本
IOS phone_chip_info String 否 手机芯片信息
IOS device_name String 否 设备名称 "xxx 的 iPhone","xxx's IPhone" 等等
IOS uptime String 否 开机时间
IOS language String 否 系统语言
IOS carrier String 否 运营商
IOS cpu_model String 否 CPU 型号
IOS cpu_core String 否 CPU 个数
IOS volume String 否 手机音量
IOS battery_power String 否 电池电量
IOS resolution_w String 否 屏幕分辨率宽,保留整数
IOS resolution_h String 否 屏幕分辨率高,保留整数
IOS package_name String 否 App 包名
IOS App_version String 否 App 版本号
IOS App_name String 否 App 显示名称
IOS is_debug String 否 是否 debug;0 为正常模式,1 为 debug 模式;其他值无效
IOS is_root String 否 是否越狱;0 为正常,1 为越狱;其他值无效
IOS is_proxy String 否 是否启动代理;0 为未开启,1 为开启;其他值无效
IOS is_emulator String 否 是否模拟器;0 为未开启,1 为开启;其他值无效
IOS charge_status String 否 充电状态;1-不在充电,2-USB充电,3-电源充电
IOS network_type String 否 网络类型:2G/3G/4G/5G/WiFi/WWAN/other
IOS wifi_mac String 否 WiFi MAC地址
其他 os_system String 否 操作系统
其他 browser String 否 浏览器信息
其他 from_url String 否 来源链接
:type Details: list of InputDetails
:param Sponsor: 可选填写。详情请跳转至SponsorInfo查看。
:type Sponsor: :class:`tencentcloud.rce.v20201103.models.SponsorInfo`
:param OnlineScam: 可选填写。详情请跳转至OnlineScamInfo查看。
:type OnlineScam: :class:`tencentcloud.rce.v20201103.models.OnlineScamInfo`
"""
self.Account = None
self.SceneCode = None
self.UserIp = None
self.PostTime = None
self.UserId = None
self.DeviceToken = None
self.DeviceBusinessId = None
self.BusinessId = None
self.Nickname = None
self.EmailAddress = None
self.CheckDevice = None
self.CookieHash = None
self.Referer = None
self.UserAgent = None
self.XForwardedFor = None
self.MacAddress = None
self.VendorId = None
self.DeviceType = None
self.Details = None
self.Sponsor = None
self.OnlineScam = None
def _deserialize(self, params):
if params.get("Account") is not None:
self.Account = AccountInfo()
self.Account._deserialize(params.get("Account"))
self.SceneCode = params.get("SceneCode")
self.UserIp = params.get("UserIp")
self.PostTime = params.get("PostTime")
self.UserId = params.get("UserId")
self.DeviceToken = params.get("DeviceToken")
self.DeviceBusinessId = params.get("DeviceBusinessId")
self.BusinessId = params.get("BusinessId")
self.Nickname = params.get("Nickname")
self.EmailAddress = params.get("EmailAddress")
self.CheckDevice = params.get("CheckDevice")
self.CookieHash = params.get("CookieHash")
self.Referer = params.get("Referer")
self.UserAgent = params.get("UserAgent")
self.XForwardedFor = params.get("XForwardedFor")
self.MacAddress = params.get("MacAddress")
self.VendorId = params.get("VendorId")
self.DeviceType = params.get("DeviceType")
if params.get("Details") is not None:
self.Details = []
for item in params.get("Details"):
obj = InputDetails()
obj._deserialize(item)
self.Details.append(obj)
if params.get("Sponsor") is not None:
self.Sponsor = SponsorInfo()
self.Sponsor._deserialize(params.get("Sponsor"))
if params.get("OnlineScam") is not None:
self.OnlineScam = OnlineScamInfo()
self.OnlineScam._deserialize(params.get("OnlineScam"))
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set), Warning)
class ManageMarketingRiskRequest(AbstractModel):
"""ManageMarketingRisk请求参数结构体
"""
def __init__(self):
"""
:param BusinessSecurityData: 业务入参
:type BusinessSecurityData: :class:`tencentcloud.rce.v20201103.models.InputManageMarketingRisk`
"""
self.BusinessSecurityData = None
def _deserialize(self, params):
if params.get("BusinessSecurityData") is not None:
self.BusinessSecurityData = InputManageMarketingRisk()
self.BusinessSecurityData._deserialize(params.get("BusinessSecurityData"))
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set), Warning)
class ManageMarketingRiskResponse(AbstractModel):
"""ManageMarketingRisk返回参数结构体
"""
def __init__(self):
"""
:param Data: 业务出参
:type Data: :class:`tencentcloud.rce.v20201103.models.OutputManageMarketingRisk`
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Data = None
self.RequestId = None
def _deserialize(self, params):
if params.get("Data") is not None:
self.Data = OutputManageMarketingRisk()
self.Data._deserialize(params.get("Data"))
self.RequestId = params.get("RequestId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set), Warning)
class OnlineScamInfo(AbstractModel):
"""诈骗信息。
"""
def __init__(self):
"""
:param ContentLabel: 内容标签。
:type ContentLabel: str
:param ContentRiskLevel: 内容风险等级:
0:正常。
1:可疑。
:type ContentRiskLevel: int
:param ContentType: 内容产生形式:
0:对话。
1:广播。
:type ContentType: int
:param FraudType: 诈骗账号类型:
1:11位手机号。
2:QQ账号。
:type FraudType: int
:param FraudAccount: 诈骗账号,手机号或QQ账号。
:type FraudAccount: str
"""
self.ContentLabel = None
self.ContentRiskLevel = None
self.ContentType = None
self.FraudType = None
self.FraudAccount = None
def _deserialize(self, params):
self.ContentLabel = params.get("ContentLabel")
self.ContentRiskLevel = params.get("ContentRiskLevel")
self.ContentType = params.get("ContentType")
self.FraudType = params.get("FraudType")
self.FraudAccount = params.get("FraudAccount")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set), Warning)
class OtherAccountInfo(AbstractModel):
"""其它账号信息。
"""
def __init__(self):
"""
:param AccountId: 其它账号信息:
AccountType是4时,填入真实的手机号(如13123456789)。
AccountType是8时,支持 imei、idfa、imeiMD5、idfaMD5 入参。
AccountType是0时,填入账号信息。
AccountType是10004时,填入手机号的MD5值。
注:imeiMd5 加密方式为:imei 明文小写后,进行 MD5 加密,加密后取小写值。IdfaMd5 加密方式为:idfa 明文大写后,进行 MD5 加密,加密后取小写值。
:type AccountId: str
:param MobilePhone: 手机号,若 AccountType 是4(手机号)、或10004(手机号 MD5),则无需重复填写,否则填入对应的手机号(如13123456789)。
:type MobilePhone: str
:param DeviceId: 用户设备号。若 AccountType 是8(设备号),则无需重复填写,否则填入对应的设备号。
:type DeviceId: str
"""
self.AccountId = None
self.MobilePhone = None
self.DeviceId = None
def _deserialize(self, params):
self.AccountId = params.get("AccountId")
self.MobilePhone = params.get("MobilePhone")
self.DeviceId = params.get("DeviceId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set), Warning)
class OutputManageMarketingRisk(AbstractModel):
"""全栈式风控引擎出参
"""
def __init__(self):
"""
:param Code: 返回码。0表示成功,非0标识失败错误码。
注意:此字段可能返回 null,表示取不到有效值。
:type Code: int
:param Message: UTF-8编码,出错消息。
注意:此字段可能返回 null,表示取不到有效值。
:type Message: str
:param Value: 业务详情。
注意:此字段可能返回 null,表示取不到有效值。
:type Value: :class:`tencentcloud.rce.v20201103.models.OutputManageMarketingRiskValue`
:param UUid: 控制台显示的req_id。
注意:此字段可能返回 null,表示取不到有效值。
:type UUid: str
"""
self.Code = None
self.Message = None
self.Value = None
self.UUid = None
def _deserialize(self, params):
self.Code = params.get("Code")
self.Message = params.get("Message")
if params.get("Value") is not None:
self.Value = OutputManageMarketingRiskValue()
self.Value._deserialize(params.get("Value"))
self.UUid = params.get("UUid")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set), Warning)
class OutputManageMarketingRiskValue(AbstractModel):
"""全栈式风控引擎出参值
"""
def __init__(self):
"""
:param UserId: 账号ID。对应输入参数:
AccountType是1时,对应QQ的OpenID。
AccountType是2时,对应微信的OpenID/UnionID。
AccountType是4时,对应手机号。
AccountType是8时,对应imei、idfa、imeiMD5或者idfaMD5。
AccountType是0时,对应账号信息。
AccountType是10004时,对应手机号的MD5。
注意:此字段可能返回 null,表示取不到有效值。
:type UserId: str
:param PostTime: 操作时间戳,单位秒(对应输入参数)。
注意:此字段可能返回 null,表示取不到有效值。
:type PostTime: int
:param AssociateAccount: 对应输入参数,AccountType 是 QQ 或微信开放账号时,用于标识 QQ 或微信用户登录后关联业务自身的账号ID。
注意:此字段可能返回 null,表示取不到有效值。
:type AssociateAccount: str
:param UserIp: 操作来源的外网IP(对应输入参数)。
注意:此字段可能返回 null,表示取不到有效值。
:type UserIp: str
:param RiskLevel: 风险值
pass : 无恶意
review:需要人工审核
reject:拒绝,高风险恶意
注意:此字段可能返回 null,表示取不到有效值。
:type RiskLevel: str
:param RiskType: 风险类型,请参考官网风险类型
账号风险
1 账号信用低,账号近期存在因恶意被处罚历史,网络低活跃,被举报等因素
11 疑似 低活跃账号,账号活跃度与正常用户有差异
2 垃圾账号 疑似批量注册小号,近期存在严重违规或大量举报
21 疑似小号 账号有疑似线上养号,小号等行为
22 疑似违规账号 账号曾有违规行为、曾被举报过、曾因违规被处罚过等
3 无效账号 送检账号参数无法成功解析,请检查微信 openid 是否有误/appid与QQopenid无法关联/微信openid权限是否有开通/手机号是否为中国大陆手机号;
4 黑名单 该账号在业务侧有过拉黑记录
5 白名单 业务自行有添加过白名单记录
行为风险
101 批量操作 存在 ip/设备/环境等因素的聚集性异常
1011 疑似 IP 属性聚集,出现 IP 聚集
1012 疑似 设备属性聚集 出现设备聚集
102 自动机 疑似自动机批量请求
103 恶意行为-网赚 疑似网赚
104 微信登录态无效 检查 WeChatAccessToken 参数,是否已经失效;
201 环境风险 环境异常 操作 ip/设备/环境存在异常。当前 ip 为非常用 ip 或恶意 ip 段
2011 疑似 非常用IP 请求 当前请求 IP 非该账号常用 IP
2012 疑似 IP 异常 使用 idc 机房 ip 或 使用代理 ip 或 使用恶意 ip 等
205 非公网有效ip 传进来的 IP 地址为内网 ip 地址或者 ip 保留地址;
设备风险
206 设备异常 该设备存在异常的使用行为
2061 疑似 非常用设备 当前请求的设备非该账号常用设备
2062 疑似 虚拟设备 请求设备为模拟器、脚本、云设备等虚拟设备
2063 疑似 群控设备 请求设备为猫池、手机墙等群控设备
注意:此字段可能返回 null,表示取不到有效值。
:type RiskType: list of int
"""
self.UserId = None
self.PostTime = None
self.AssociateAccount = None
self.UserIp = None
self.RiskLevel = None
self.RiskType = None
def _deserialize(self, params):
self.UserId = params.get("UserId")
self.PostTime = params.get("PostTime")
self.AssociateAccount = params.get("AssociateAccount")
self.UserIp = params.get("UserIp")
self.RiskLevel = params.get("RiskLevel")
self.RiskType = params.get("RiskType")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set), Warning)
class QQAccountInfo(AbstractModel):
"""QQ账号信息。
"""
def __init__(self):
"""
:param QQOpenId: QQ的OpenID。
:type QQOpenId: str
:param AppIdUser: QQ分配给网站或应用的AppId,用来唯一标识网站或应用。
:type AppIdUser: str
:param AssociateAccount: 用于标识QQ用户登录后所关联业务自身的账号ID。
:type AssociateAccount: str
:param MobilePhone: 账号绑定的手机号。
:type MobilePhone: str
:param DeviceId: 用户设备号。
:type DeviceId: str
"""
self.QQOpenId = None
self.AppIdUser = None
self.AssociateAccount = None
self.MobilePhone = None
self.DeviceId = None
def _deserialize(self, params):
self.QQOpenId = params.get("QQOpenId")
self.AppIdUser = params.get("AppIdUser")
self.AssociateAccount = params.get("AssociateAccount")
self.MobilePhone = params.get("MobilePhone")
self.DeviceId = params.get("DeviceId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set), Warning)
class SponsorInfo(AbstractModel):
"""网赚防刷相关参数
"""
def __init__(self):
"""
:param SponsorOpenId: 助力场景建议填写:活动发起人微信OpenID。
:type SponsorOpenId: str
:param SponsorDeviceNumber: 助力场景建议填写:发起人设备号。
:type SponsorDeviceNumber: str
:param SponsorPhone: 助力场景建议填写:发起人手机号。
:type SponsorPhone: str
:param SponsorIp: 助力场景建议填写:发起人IP。
:type SponsorIp: str
:param CampaignUrl: 助力场景建议填写:活动链接。
:type CampaignUrl: str
"""
self.SponsorOpenId = None
self.SponsorDeviceNumber = None
self.SponsorPhone = None
self.SponsorIp = None
self.CampaignUrl = None
def _deserialize(self, params):
self.SponsorOpenId = params.get("SponsorOpenId")
self.SponsorDeviceNumber = params.get("SponsorDeviceNumber")
self.SponsorPhone = params.get("SponsorPhone")
self.SponsorIp = params.get("SponsorIp")
self.CampaignUrl = params.get("CampaignUrl")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set), Warning)
class WeChatAccountInfo(AbstractModel):
"""微信账号信息。
"""
def __init__(self):
"""
:param WeChatOpenId: 微信的OpenID/UnionID 。
:type WeChatOpenId: str
:param WeChatSubType: 微信开放账号类型:
1:微信公众号/微信第三方登录。
2:微信小程序。
:type WeChatSubType: int
:param RandStr: 随机串。如果WeChatSubType是2,该字段必填。Token签名随机数,建议16个字符。
:type RandStr: str
:param WeChatAccessToken: 如果WeChatSubType是1,填入授权的access_token(注意:不是普通access_token,详情请参阅官方说明文档。获取网页版本的access_token时,scope字段必需填写snsapi_userinfo。
如果WeChatSubType是2,填入以session_key为密钥签名随机数RandStr(hmac_sha256签名算法)得到的字符串。
:type WeChatAccessToken: str
:param AssociateAccount: 用于标识微信用户登录后所关联业务自身的账号ID。
:type AssociateAccount: str
:param MobilePhone: 账号绑定的手机号。
:type MobilePhone: str
:param DeviceId: 用户设备号。
:type DeviceId: str
"""
self.WeChatOpenId = None
self.WeChatSubType = None
self.RandStr = None
self.WeChatAccessToken = None
self.AssociateAccount = None
self.MobilePhone = None
self.DeviceId = None
def _deserialize(self, params):
self.WeChatOpenId = params.get("WeChatOpenId")
self.WeChatSubType = params.get("WeChatSubType")
self.RandStr = params.get("RandStr")
self.WeChatAccessToken = params.get("WeChatAccessToken")
self.AssociateAccount = params.get("AssociateAccount")
self.MobilePhone = params.get("MobilePhone")
self.DeviceId = params.get("DeviceId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set), Warning)
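# Hedged usage sketch (not part of this models module): building a minimal
# phone-number query against ManageMarketingRisk. The client module name
# (rce_client) and credential setup follow the usual tencentcloud-sdk-python
# layout and are assumptions here, not taken from this file.
# from tencentcloud.common import credential
# from tencentcloud.rce.v20201103 import rce_client, models
# cred = credential.Credential("SecretId", "SecretKey")
# client = rce_client.RceClient(cred, "ap-guangzhou")
# data = models.InputManageMarketingRisk()
# data.Account = models.AccountInfo()
# data.Account.AccountType = 4  # phone number
# data.Account.OtherAccount = models.OtherAccountInfo()
# data.Account.OtherAccount.AccountId = "13123456789"
# data.SceneCode = "e_register_protection_1521184361"
# data.UserIp = "1.2.3.4"
# data.PostTime = 1501590972
# req = models.ManageMarketingRiskRequest()
# req.BusinessSecurityData = data
# resp = client.ManageMarketingRisk(req)
# print(resp.Data.Value.RiskLevel)  # "pass" / "review" / "reject"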
| 33.873887
| 150
| 0.656958
|
1587d74590cffaa9e439da7e4c2f6753d79c5a14
| 817
|
py
|
Python
|
award/urls.py
|
stevekibe/Awards
|
b24f5fb7ac9e1e47ecd365f7794931f7cea0fa2d
|
[
"MIT"
] | null | null | null |
award/urls.py
|
stevekibe/Awards
|
b24f5fb7ac9e1e47ecd365f7794931f7cea0fa2d
|
[
"MIT"
] | null | null | null |
award/urls.py
|
stevekibe/Awards
|
b24f5fb7ac9e1e47ecd365f7794931f7cea0fa2d
|
[
"MIT"
] | null | null | null |
from django.conf import settings
from django.conf.urls.static import static
from django.conf.urls import url
from . import views
urlpatterns = [
    url(r'^$', views.index, name='index'),
    url(r'^new/post$', views.new_project, name='new-project'),
    url(r'^votes/$', views.vote_project, name='vote_project'),
    url(r'^user/(\d+)$', views.detail, name='detail'),
    url(r'^detail/edit/$', views.edit_detail, name='edit-detail'),
    url(r'^viewproject/(\d+)$', views.view_project, name='viewproject'),
    url(r'^search/$', views.search_results, name='search-project'),
    url(r'^comment/(?P<project_id>\d+)', views.add_comment, name='comment'),
    url(r'^vote/(?P<project_id>\d+)', views.vote, name='vote'),
]
if settings.DEBUG:
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
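# Hedged note (not in the original file): in Django 2.0+, django.conf.urls.url
# is an alias for django.urls.re_path (and was removed in Django 4.0), so
# these regex patterns port to re_path unchanged.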
| 40.85
| 81
| 0.681763
|
e90472183c9a1e713f54a7502ceddf4c5bb42818
| 25,076
|
py
|
Python
|
example/ui/dw_buttons_pyside2_ui.py
|
dpizetta/QDarkStyleSheet
|
f41405ad791fb118387482ae39b94665c35df662
|
[
"CC-BY-4.0"
] | null | null | null |
example/ui/dw_buttons_pyside2_ui.py
|
dpizetta/QDarkStyleSheet
|
f41405ad791fb118387482ae39b94665c35df662
|
[
"CC-BY-4.0"
] | null | null | null |
example/ui/dw_buttons_pyside2_ui.py
|
dpizetta/QDarkStyleSheet
|
f41405ad791fb118387482ae39b94665c35df662
|
[
"CC-BY-4.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'dw_buttons.ui'
#
# Created: Sat Oct 27 00:00:52 2018
# by: pyside2-uic 2.0.0 running on PySide2 5.6.0~a1
#
# WARNING! All changes made in this file will be lost!
from PySide2 import QtCore, QtGui, QtWidgets
class Ui_DockWidget(object):
def setupUi(self, DockWidget):
DockWidget.setObjectName("DockWidget")
DockWidget.resize(527, 460)
self.dockWidgetContents = QtWidgets.QWidget()
self.dockWidgetContents.setObjectName("dockWidgetContents")
self.gridLayout = QtWidgets.QGridLayout(self.dockWidgetContents)
self.gridLayout.setObjectName("gridLayout")
self.label_72 = QtWidgets.QLabel(self.dockWidgetContents)
font = QtGui.QFont()
font.setWeight(75)
font.setBold(True)
self.label_72.setFont(font)
self.label_72.setObjectName("label_72")
self.gridLayout.addWidget(self.label_72, 0, 1, 1, 1)
self.label_73 = QtWidgets.QLabel(self.dockWidgetContents)
font = QtGui.QFont()
font.setWeight(75)
font.setBold(True)
self.label_73.setFont(font)
self.label_73.setObjectName("label_73")
self.gridLayout.addWidget(self.label_73, 0, 2, 1, 1)
self.label_26 = QtWidgets.QLabel(self.dockWidgetContents)
self.label_26.setMinimumSize(QtCore.QSize(0, 0))
self.label_26.setMaximumSize(QtCore.QSize(16777215, 16777215))
font = QtGui.QFont()
font.setWeight(75)
font.setBold(True)
self.label_26.setFont(font)
self.label_26.setObjectName("label_26")
self.gridLayout.addWidget(self.label_26, 1, 0, 1, 1)
self.pushButton = QtWidgets.QPushButton(self.dockWidgetContents)
self.pushButton.setMinimumSize(QtCore.QSize(0, 0))
self.pushButton.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.pushButton.setObjectName("pushButton")
self.gridLayout.addWidget(self.pushButton, 1, 1, 1, 1)
self.pushButtonDis = QtWidgets.QPushButton(self.dockWidgetContents)
self.pushButtonDis.setEnabled(False)
self.pushButtonDis.setMinimumSize(QtCore.QSize(0, 0))
self.pushButtonDis.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.pushButtonDis.setDefault(False)
self.pushButtonDis.setObjectName("pushButtonDis")
self.gridLayout.addWidget(self.pushButtonDis, 1, 2, 1, 1)
self.label_74 = QtWidgets.QLabel(self.dockWidgetContents)
font = QtGui.QFont()
font.setWeight(75)
font.setBold(True)
self.label_74.setFont(font)
self.label_74.setObjectName("label_74")
self.gridLayout.addWidget(self.label_74, 2, 0, 1, 1)
self.pushButtonChecked = QtWidgets.QPushButton(self.dockWidgetContents)
self.pushButtonChecked.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.pushButtonChecked.setCheckable(True)
self.pushButtonChecked.setChecked(True)
self.pushButtonChecked.setObjectName("pushButtonChecked")
self.gridLayout.addWidget(self.pushButtonChecked, 2, 1, 1, 1)
self.pushButtonCheckedDis = QtWidgets.QPushButton(self.dockWidgetContents)
self.pushButtonCheckedDis.setEnabled(False)
self.pushButtonCheckedDis.setCheckable(True)
self.pushButtonCheckedDis.setChecked(True)
self.pushButtonCheckedDis.setObjectName("pushButtonCheckedDis")
self.gridLayout.addWidget(self.pushButtonCheckedDis, 2, 2, 1, 1)
self.label_76 = QtWidgets.QLabel(self.dockWidgetContents)
font = QtGui.QFont()
font.setWeight(75)
font.setBold(True)
self.label_76.setFont(font)
self.label_76.setObjectName("label_76")
self.gridLayout.addWidget(self.label_76, 3, 0, 1, 1)
self.pushButtonUnchecked = QtWidgets.QPushButton(self.dockWidgetContents)
self.pushButtonUnchecked.setCheckable(True)
self.pushButtonUnchecked.setObjectName("pushButtonUnchecked")
self.gridLayout.addWidget(self.pushButtonUnchecked, 3, 1, 1, 1)
self.pushButtonUncheckedDis = QtWidgets.QPushButton(self.dockWidgetContents)
self.pushButtonUncheckedDis.setEnabled(False)
self.pushButtonUncheckedDis.setCheckable(True)
self.pushButtonUncheckedDis.setObjectName("pushButtonUncheckedDis")
self.gridLayout.addWidget(self.pushButtonUncheckedDis, 3, 2, 1, 1)
self.label_33 = QtWidgets.QLabel(self.dockWidgetContents)
self.label_33.setMinimumSize(QtCore.QSize(0, 0))
self.label_33.setMaximumSize(QtCore.QSize(16777215, 16777215))
font = QtGui.QFont()
font.setWeight(75)
font.setBold(True)
self.label_33.setFont(font)
self.label_33.setObjectName("label_33")
self.gridLayout.addWidget(self.label_33, 4, 0, 1, 1)
self.toolButton = QtWidgets.QToolButton(self.dockWidgetContents)
self.toolButton.setMinimumSize(QtCore.QSize(0, 0))
self.toolButton.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.toolButton.setObjectName("toolButton")
self.gridLayout.addWidget(self.toolButton, 4, 1, 1, 1)
self.toolButtonDis = QtWidgets.QToolButton(self.dockWidgetContents)
self.toolButtonDis.setEnabled(False)
self.toolButtonDis.setMinimumSize(QtCore.QSize(0, 0))
self.toolButtonDis.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.toolButtonDis.setObjectName("toolButtonDis")
self.gridLayout.addWidget(self.toolButtonDis, 4, 2, 1, 1)
self.label_75 = QtWidgets.QLabel(self.dockWidgetContents)
font = QtGui.QFont()
font.setWeight(75)
font.setBold(True)
self.label_75.setFont(font)
self.label_75.setObjectName("label_75")
self.gridLayout.addWidget(self.label_75, 5, 0, 1, 1)
self.radioButtonChecked = QtWidgets.QRadioButton(self.dockWidgetContents)
self.radioButtonChecked.setChecked(True)
self.radioButtonChecked.setAutoExclusive(False)
self.radioButtonChecked.setObjectName("radioButtonChecked")
self.gridLayout.addWidget(self.radioButtonChecked, 5, 1, 1, 1)
self.radioButtonCheckedDis = QtWidgets.QRadioButton(self.dockWidgetContents)
self.radioButtonCheckedDis.setEnabled(False)
self.radioButtonCheckedDis.setChecked(True)
self.radioButtonCheckedDis.setAutoExclusive(False)
self.radioButtonCheckedDis.setObjectName("radioButtonCheckedDis")
self.gridLayout.addWidget(self.radioButtonCheckedDis, 5, 2, 1, 1)
self.label_29 = QtWidgets.QLabel(self.dockWidgetContents)
self.label_29.setMinimumSize(QtCore.QSize(0, 0))
self.label_29.setMaximumSize(QtCore.QSize(16777215, 16777215))
font = QtGui.QFont()
font.setWeight(75)
font.setBold(True)
self.label_29.setFont(font)
self.label_29.setObjectName("label_29")
self.gridLayout.addWidget(self.label_29, 6, 0, 1, 1)
self.radioButtonUnchecked = QtWidgets.QRadioButton(self.dockWidgetContents)
self.radioButtonUnchecked.setMinimumSize(QtCore.QSize(0, 0))
self.radioButtonUnchecked.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.radioButtonUnchecked.setAutoExclusive(False)
self.radioButtonUnchecked.setObjectName("radioButtonUnchecked")
self.gridLayout.addWidget(self.radioButtonUnchecked, 6, 1, 1, 1)
self.radioButtonUncheckedDis = QtWidgets.QRadioButton(self.dockWidgetContents)
self.radioButtonUncheckedDis.setEnabled(False)
self.radioButtonUncheckedDis.setMinimumSize(QtCore.QSize(0, 0))
self.radioButtonUncheckedDis.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.radioButtonUncheckedDis.setChecked(False)
self.radioButtonUncheckedDis.setAutoExclusive(False)
self.radioButtonUncheckedDis.setObjectName("radioButtonUncheckedDis")
self.gridLayout.addWidget(self.radioButtonUncheckedDis, 6, 2, 1, 1)
self.label_53 = QtWidgets.QLabel(self.dockWidgetContents)
font = QtGui.QFont()
font.setWeight(75)
font.setBold(True)
self.label_53.setFont(font)
self.label_53.setObjectName("label_53")
self.gridLayout.addWidget(self.label_53, 7, 0, 1, 1)
self.checkBoxChecked = QtWidgets.QCheckBox(self.dockWidgetContents)
self.checkBoxChecked.setChecked(True)
self.checkBoxChecked.setObjectName("checkBoxChecked")
self.gridLayout.addWidget(self.checkBoxChecked, 7, 1, 1, 1)
self.checkBoxCheckedDis = QtWidgets.QCheckBox(self.dockWidgetContents)
self.checkBoxCheckedDis.setEnabled(False)
self.checkBoxCheckedDis.setChecked(True)
self.checkBoxCheckedDis.setObjectName("checkBoxCheckedDis")
self.gridLayout.addWidget(self.checkBoxCheckedDis, 7, 2, 1, 1)
self.label_30 = QtWidgets.QLabel(self.dockWidgetContents)
self.label_30.setMinimumSize(QtCore.QSize(0, 0))
self.label_30.setMaximumSize(QtCore.QSize(16777215, 16777215))
font = QtGui.QFont()
font.setWeight(75)
font.setBold(True)
self.label_30.setFont(font)
self.label_30.setObjectName("label_30")
self.gridLayout.addWidget(self.label_30, 8, 0, 1, 1)
self.checkBoxEnabled = QtWidgets.QCheckBox(self.dockWidgetContents)
self.checkBoxEnabled.setMinimumSize(QtCore.QSize(0, 0))
self.checkBoxEnabled.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.checkBoxEnabled.setTristate(False)
self.checkBoxEnabled.setObjectName("checkBoxEnabled")
self.gridLayout.addWidget(self.checkBoxEnabled, 8, 1, 1, 1)
self.checkBoxUncheckedDis = QtWidgets.QCheckBox(self.dockWidgetContents)
self.checkBoxUncheckedDis.setEnabled(False)
self.checkBoxUncheckedDis.setMinimumSize(QtCore.QSize(0, 0))
self.checkBoxUncheckedDis.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.checkBoxUncheckedDis.setChecked(False)
self.checkBoxUncheckedDis.setObjectName("checkBoxUncheckedDis")
self.gridLayout.addWidget(self.checkBoxUncheckedDis, 8, 2, 1, 1)
self.label_31 = QtWidgets.QLabel(self.dockWidgetContents)
self.label_31.setMinimumSize(QtCore.QSize(0, 0))
self.label_31.setMaximumSize(QtCore.QSize(16777215, 16777215))
font = QtGui.QFont()
font.setWeight(75)
font.setBold(True)
self.label_31.setFont(font)
self.label_31.setObjectName("label_31")
self.gridLayout.addWidget(self.label_31, 10, 0, 1, 1)
self.commandLinkButton = QtWidgets.QCommandLinkButton(self.dockWidgetContents)
self.commandLinkButton.setMinimumSize(QtCore.QSize(0, 0))
self.commandLinkButton.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.commandLinkButton.setObjectName("commandLinkButton")
self.gridLayout.addWidget(self.commandLinkButton, 10, 1, 1, 1)
self.commandLinkButtonDIs = QtWidgets.QCommandLinkButton(self.dockWidgetContents)
self.commandLinkButtonDIs.setEnabled(False)
self.commandLinkButtonDIs.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.commandLinkButtonDIs.setObjectName("commandLinkButtonDIs")
self.gridLayout.addWidget(self.commandLinkButtonDIs, 10, 2, 1, 1)
self.label_32 = QtWidgets.QLabel(self.dockWidgetContents)
self.label_32.setMinimumSize(QtCore.QSize(0, 0))
self.label_32.setMaximumSize(QtCore.QSize(16777215, 16777215))
font = QtGui.QFont()
font.setWeight(75)
font.setBold(True)
self.label_32.setFont(font)
self.label_32.setObjectName("label_32")
self.gridLayout.addWidget(self.label_32, 11, 0, 1, 1)
self.buttonBox = QtWidgets.QDialogButtonBox(self.dockWidgetContents)
self.buttonBox.setMinimumSize(QtCore.QSize(0, 0))
self.buttonBox.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
self.buttonBox.setObjectName("buttonBox")
self.gridLayout.addWidget(self.buttonBox, 11, 1, 1, 1)
self.buttonBoxDis = QtWidgets.QDialogButtonBox(self.dockWidgetContents)
self.buttonBoxDis.setEnabled(False)
self.buttonBoxDis.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
self.buttonBoxDis.setObjectName("buttonBoxDis")
self.gridLayout.addWidget(self.buttonBoxDis, 11, 2, 1, 1)
spacerItem = QtWidgets.QSpacerItem(20, 4, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout.addItem(spacerItem, 12, 0, 1, 1)
self.label_36 = QtWidgets.QLabel(self.dockWidgetContents)
self.label_36.setAlignment(QtCore.Qt.AlignCenter)
self.label_36.setObjectName("label_36")
self.gridLayout.addWidget(self.label_36, 13, 0, 1, 3)
self.label = QtWidgets.QLabel(self.dockWidgetContents)
self.label.setObjectName("label")
self.gridLayout.addWidget(self.label, 9, 0, 1, 1)
self.checkBoxTristate = QtWidgets.QCheckBox(self.dockWidgetContents)
self.checkBoxTristate.setChecked(False)
self.checkBoxTristate.setTristate(True)
self.checkBoxTristate.setObjectName("checkBoxTristate")
self.gridLayout.addWidget(self.checkBoxTristate, 9, 1, 1, 1)
self.checkBoxTristateDis = QtWidgets.QCheckBox(self.dockWidgetContents)
self.checkBoxTristateDis.setEnabled(False)
self.checkBoxTristateDis.setChecked(False)
self.checkBoxTristateDis.setTristate(True)
self.checkBoxTristateDis.setObjectName("checkBoxTristateDis")
self.gridLayout.addWidget(self.checkBoxTristateDis, 9, 2, 1, 1)
DockWidget.setWidget(self.dockWidgetContents)
self.retranslateUi(DockWidget)
QtCore.QObject.connect(self.radioButtonChecked, QtCore.SIGNAL("clicked(bool)"), self.radioButtonCheckedDis.setChecked)
QtCore.QObject.connect(self.radioButtonUnchecked, QtCore.SIGNAL("clicked(bool)"), self.radioButtonUncheckedDis.setChecked)
QtCore.QObject.connect(self.checkBoxChecked, QtCore.SIGNAL("clicked(bool)"), self.checkBoxCheckedDis.setChecked)
QtCore.QObject.connect(self.checkBoxEnabled, QtCore.SIGNAL("clicked(bool)"), self.checkBoxUncheckedDis.setChecked)
QtCore.QObject.connect(self.checkBoxTristate, QtCore.SIGNAL("clicked(bool)"), self.checkBoxTristateDis.setChecked)
QtCore.QObject.connect(self.commandLinkButton, QtCore.SIGNAL("clicked(bool)"), self.commandLinkButtonDIs.setChecked)
QtCore.QObject.connect(self.toolButton, QtCore.SIGNAL("clicked(bool)"), self.toolButtonDis.setChecked)
QtCore.QObject.connect(self.pushButtonChecked, QtCore.SIGNAL("clicked(bool)"), self.pushButtonCheckedDis.setChecked)
QtCore.QObject.connect(self.pushButtonUnchecked, QtCore.SIGNAL("clicked(bool)"), self.pushButtonUncheckedDis.setChecked)
QtCore.QObject.connect(self.pushButton, QtCore.SIGNAL("clicked(bool)"), self.pushButtonDis.click)
QtCore.QMetaObject.connectSlotsByName(DockWidget)
def retranslateUi(self, DockWidget):
DockWidget.setWindowTitle(QtWidgets.QApplication.translate("DockWidget", "Buttons", None, -1))
self.label_72.setText(QtWidgets.QApplication.translate("DockWidget", "Enabled", None, -1))
self.label_73.setText(QtWidgets.QApplication.translate("DockWidget", "Disabled", None, -1))
self.label_26.setToolTip(QtWidgets.QApplication.translate("DockWidget", "This is a tool tip", None, -1))
self.label_26.setStatusTip(QtWidgets.QApplication.translate("DockWidget", "This is a status tip", None, -1))
self.label_26.setWhatsThis(QtWidgets.QApplication.translate("DockWidget", "This is \"what is this\"", None, -1))
self.label_26.setText(QtWidgets.QApplication.translate("DockWidget", "PushButton", None, -1))
self.pushButton.setToolTip(QtWidgets.QApplication.translate("DockWidget", "This is a tool tip", None, -1))
self.pushButton.setStatusTip(QtWidgets.QApplication.translate("DockWidget", "This is a status tip", None, -1))
self.pushButton.setWhatsThis(QtWidgets.QApplication.translate("DockWidget", "This is \"what is this\"", None, -1))
self.pushButton.setText(QtWidgets.QApplication.translate("DockWidget", "OK", None, -1))
self.pushButtonDis.setToolTip(QtWidgets.QApplication.translate("DockWidget", "This is a tool tip", None, -1))
self.pushButtonDis.setStatusTip(QtWidgets.QApplication.translate("DockWidget", "This is a status tip", None, -1))
self.pushButtonDis.setWhatsThis(QtWidgets.QApplication.translate("DockWidget", "This is \"what is this\"", None, -1))
self.pushButtonDis.setText(QtWidgets.QApplication.translate("DockWidget", "OK", None, -1))
self.label_74.setText(QtWidgets.QApplication.translate("DockWidget", "PushButton", None, -1))
self.pushButtonChecked.setText(QtWidgets.QApplication.translate("DockWidget", "Checked", None, -1))
self.pushButtonCheckedDis.setText(QtWidgets.QApplication.translate("DockWidget", "Checked", None, -1))
self.label_76.setText(QtWidgets.QApplication.translate("DockWidget", "PushButton", None, -1))
self.pushButtonUnchecked.setText(QtWidgets.QApplication.translate("DockWidget", "Unchecked", None, -1))
self.pushButtonUncheckedDis.setText(QtWidgets.QApplication.translate("DockWidget", "Unchecked", None, -1))
self.label_33.setToolTip(QtWidgets.QApplication.translate("DockWidget", "This is a tool tip", None, -1))
self.label_33.setStatusTip(QtWidgets.QApplication.translate("DockWidget", "This is a status tip", None, -1))
self.label_33.setWhatsThis(QtWidgets.QApplication.translate("DockWidget", "This is \"what is this\"", None, -1))
self.label_33.setText(QtWidgets.QApplication.translate("DockWidget", "ToolButton", None, -1))
self.toolButton.setToolTip(QtWidgets.QApplication.translate("DockWidget", "This is a tool tip", None, -1))
self.toolButton.setStatusTip(QtWidgets.QApplication.translate("DockWidget", "This is a status tip", None, -1))
self.toolButton.setWhatsThis(QtWidgets.QApplication.translate("DockWidget", "This is \"what is this\"", None, -1))
self.toolButton.setText(QtWidgets.QApplication.translate("DockWidget", "Tool", None, -1))
self.toolButtonDis.setToolTip(QtWidgets.QApplication.translate("DockWidget", "This is a tool tip", None, -1))
self.toolButtonDis.setStatusTip(QtWidgets.QApplication.translate("DockWidget", "This is a status tip", None, -1))
self.toolButtonDis.setWhatsThis(QtWidgets.QApplication.translate("DockWidget", "This is \"what is this\"", None, -1))
self.toolButtonDis.setText(QtWidgets.QApplication.translate("DockWidget", "Tool", None, -1))
self.label_75.setText(QtWidgets.QApplication.translate("DockWidget", "RadioButton", None, -1))
self.radioButtonChecked.setText(QtWidgets.QApplication.translate("DockWidget", "Checked", None, -1))
self.radioButtonCheckedDis.setText(QtWidgets.QApplication.translate("DockWidget", "Checked", None, -1))
self.label_29.setToolTip(QtWidgets.QApplication.translate("DockWidget", "This is a tool tip", None, -1))
self.label_29.setStatusTip(QtWidgets.QApplication.translate("DockWidget", "This is a status tip", None, -1))
self.label_29.setWhatsThis(QtWidgets.QApplication.translate("DockWidget", "This is \"what is this\"", None, -1))
self.label_29.setText(QtWidgets.QApplication.translate("DockWidget", "RadioButton", None, -1))
self.radioButtonUnchecked.setToolTip(QtWidgets.QApplication.translate("DockWidget", "This is a tool tip", None, -1))
self.radioButtonUnchecked.setStatusTip(QtWidgets.QApplication.translate("DockWidget", "This is a status tip", None, -1))
self.radioButtonUnchecked.setWhatsThis(QtWidgets.QApplication.translate("DockWidget", "This is \"what is this\"", None, -1))
self.radioButtonUnchecked.setText(QtWidgets.QApplication.translate("DockWidget", "Unchecked", None, -1))
self.radioButtonUncheckedDis.setToolTip(QtWidgets.QApplication.translate("DockWidget", "This is a tool tip", None, -1))
self.radioButtonUncheckedDis.setStatusTip(QtWidgets.QApplication.translate("DockWidget", "This is a status tip", None, -1))
self.radioButtonUncheckedDis.setWhatsThis(QtWidgets.QApplication.translate("DockWidget", "This is \"what is this\"", None, -1))
self.radioButtonUncheckedDis.setText(QtWidgets.QApplication.translate("DockWidget", "Unchecked", None, -1))
self.label_53.setText(QtWidgets.QApplication.translate("DockWidget", "CheckBox", None, -1))
self.checkBoxChecked.setText(QtWidgets.QApplication.translate("DockWidget", "Checked", None, -1))
self.checkBoxCheckedDis.setText(QtWidgets.QApplication.translate("DockWidget", "Checked", None, -1))
self.label_30.setToolTip(QtWidgets.QApplication.translate("DockWidget", "This is a tool tip", None, -1))
self.label_30.setStatusTip(QtWidgets.QApplication.translate("DockWidget", "This is a status tip", None, -1))
self.label_30.setWhatsThis(QtWidgets.QApplication.translate("DockWidget", "This is \"what is this\"", None, -1))
self.label_30.setText(QtWidgets.QApplication.translate("DockWidget", "CheckBox", None, -1))
self.checkBoxEnabled.setToolTip(QtWidgets.QApplication.translate("DockWidget", "This is a tool tip", None, -1))
self.checkBoxEnabled.setStatusTip(QtWidgets.QApplication.translate("DockWidget", "This is a status tip", None, -1))
self.checkBoxEnabled.setWhatsThis(QtWidgets.QApplication.translate("DockWidget", "This is \"what is this\"", None, -1))
self.checkBoxEnabled.setText(QtWidgets.QApplication.translate("DockWidget", "Unchecked", None, -1))
self.checkBoxUncheckedDis.setToolTip(QtWidgets.QApplication.translate("DockWidget", "This is a tool tip", None, -1))
self.checkBoxUncheckedDis.setStatusTip(QtWidgets.QApplication.translate("DockWidget", "This is a status tip", None, -1))
self.checkBoxUncheckedDis.setWhatsThis(QtWidgets.QApplication.translate("DockWidget", "This is \"what is this\"", None, -1))
self.checkBoxUncheckedDis.setText(QtWidgets.QApplication.translate("DockWidget", "Unchecked", None, -1))
self.label_31.setToolTip(QtWidgets.QApplication.translate("DockWidget", "This is a tool tip", None, -1))
self.label_31.setStatusTip(QtWidgets.QApplication.translate("DockWidget", "This is a status tip", None, -1))
self.label_31.setWhatsThis(QtWidgets.QApplication.translate("DockWidget", "This is \"what is this\"", None, -1))
self.label_31.setText(QtWidgets.QApplication.translate("DockWidget", "CommandLinkButton", None, -1))
self.commandLinkButton.setToolTip(QtWidgets.QApplication.translate("DockWidget", "This is a tool tip", None, -1))
self.commandLinkButton.setStatusTip(QtWidgets.QApplication.translate("DockWidget", "This is a status tip", None, -1))
self.commandLinkButton.setWhatsThis(QtWidgets.QApplication.translate("DockWidget", "This is \"what is this\"", None, -1))
self.commandLinkButton.setText(QtWidgets.QApplication.translate("DockWidget", "Command", None, -1))
self.commandLinkButtonDIs.setText(QtWidgets.QApplication.translate("DockWidget", "Command", None, -1))
self.label_32.setToolTip(QtWidgets.QApplication.translate("DockWidget", "This is a tool tip", None, -1))
self.label_32.setStatusTip(QtWidgets.QApplication.translate("DockWidget", "This is a status tip", None, -1))
self.label_32.setWhatsThis(QtWidgets.QApplication.translate("DockWidget", "This is \"what is this\"", None, -1))
self.label_32.setText(QtWidgets.QApplication.translate("DockWidget", "ButtonBox", None, -1))
self.buttonBox.setToolTip(QtWidgets.QApplication.translate("DockWidget", "This is a tool tip", None, -1))
self.buttonBox.setStatusTip(QtWidgets.QApplication.translate("DockWidget", "This is a status tip", None, -1))
self.buttonBox.setWhatsThis(QtWidgets.QApplication.translate("DockWidget", "This is \"what is this\"", None, -1))
self.label_36.setToolTip(QtWidgets.QApplication.translate("DockWidget", "This is a tool tip", None, -1))
self.label_36.setStatusTip(QtWidgets.QApplication.translate("DockWidget", "This is a status tip", None, -1))
self.label_36.setWhatsThis(QtWidgets.QApplication.translate("DockWidget", "This is \"what is this\"", None, -1))
self.label_36.setText(QtWidgets.QApplication.translate("DockWidget", "Inside DockWidget", None, -1))
self.label.setText(QtWidgets.QApplication.translate("DockWidget", "CheckBox", None, -1))
self.checkBoxTristate.setText(QtWidgets.QApplication.translate("DockWidget", "Tristate", None, -1))
self.checkBoxTristateDis.setText(QtWidgets.QApplication.translate("DockWidget", "Tristate", None, -1))
| 71.441595
| 135
| 0.724717
|
f7379f0a932918d82994fe95749b56a6277fc071
| 13,020
|
py
|
Python
|
gans/experiments/emnist/preprocessing/filtered_emnist_data_utils.py
|
alshedivat/federated
|
100f0e0940282818c42c39156407ae419f26de50
|
[
"Apache-2.0"
] | 2
|
2021-10-19T13:55:11.000Z
|
2021-11-11T11:26:05.000Z
|
federated/gans/experiments/emnist/preprocessing/filtered_emnist_data_utils.py
|
luke-who/TFF
|
fe9f44a504bc51b603a3ab9a181148da0aa9612f
|
[
"MIT"
] | null | null | null |
federated/gans/experiments/emnist/preprocessing/filtered_emnist_data_utils.py
|
luke-who/TFF
|
fe9f44a504bc51b603a3ab9a181148da0aa9612f
|
[
"MIT"
] | null | null | null |
# Copyright 2019, Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility for filtering (via class. accuracy) the Federated EMNIST dataset."""
import csv
import functools
import os.path
import tensorflow as tf
import tensorflow_federated as tff
from gans.experiments.emnist import emnist_data_utils
BASE_URL = 'https://storage.googleapis.com/tff-experiments-public/'
CSVS_BASE_PATH = 'gans/csvs/'
@functools.lru_cache(maxsize=1)
def get_unfiltered_client_data_for_training(batch_size):
r"""Returns `tff.simulation.datasets.ClientData` of unfiltered Federated EMNIST data.
The data returned will neither be filtered by user nor by example, so training
can take place with all users and all examples for each user.
Args:
batch_size: Batch size of output dataset. If None, don't batch.
Returns:
    A `tff.simulation.datasets.ClientData` of real images of numbers/letters.
    The data has not been filtered.
"""
return get_filtered_client_data_for_training(None, None, batch_size)
@functools.lru_cache(maxsize=1)
def get_filtered_by_user_client_data_for_training(invert_imagery_probability,
accuracy_threshold,
batch_size,
cache_dir=None):
r"""Returns `tff.simulation.datasets.ClientData` of filtered Federated EMNIST data.
Input data gets filtered on a per-user basis; users get selected via the
`accuracy_threshold` criterion, and then training can take place with all
examples from only the selected users.
Args:
    invert_imagery_probability: The probability that a user's image data has
      pixel intensity inverted. E.g., `0p1` corresponds to 0.1, or a 10%
      probability that a user's data is flipped. Note that to save time in
      experiment execution, this is precomputed via the ./filter_users.py
      script, and the selection here controls which file to read from.
    accuracy_threshold: Indicates the classification threshold by which a user
      is included in the training population. E.g., `lt0p882` means any user
      whose data cumulatively classifies with <0.882 accuracy would be used
      for training; `gt0p939` means any user whose data cumulatively
      classifies with >0.939 accuracy would be used for training. To save time
      in experiment execution, this assignment is precomputed via the
      ./filter_users.py script, and the flag selection here is to indicate
      which file to read from.
batch_size: Batch size of output dataset. If None, don't batch.
cache_dir: (Optional) base directory to cache the downloaded files. If None,
caches in Keras' default cache directory.
Returns:
    A `tff.simulation.datasets.ClientData` of real images of numbers/letters.
    The data has been filtered by user classification accuracy as per the
    input arguments.
"""
path_to_data = os.path.join(CSVS_BASE_PATH,
'inv_prob_{}'.format(invert_imagery_probability),
'filter_by_user',
'acc_{}'.format(accuracy_threshold))
try:
filename = 'client_ids.csv'
path_to_read_inversions_csv = tf.keras.utils.get_file(
fname=filename,
cache_subdir=path_to_data,
cache_dir=cache_dir,
origin=os.path.join(BASE_URL, path_to_data, filename))
except Exception:
msg = ('A URL fetch failure was encountered when trying to retrieve '
'filter-by-user generated csv file with invert_imagery_probability '
'`{}` and accuracy_threshold `{}`. Please run the ./filter_users.py '
'script to generate the missing data, and use the `cache_dir` '
'argument to this method to specify the location of the generated '
'data csv file.'.format(invert_imagery_probability,
accuracy_threshold))
raise ValueError(msg)
return get_filtered_client_data_for_training(path_to_read_inversions_csv,
None, batch_size)
@functools.lru_cache(maxsize=1)
def get_filtered_by_example_client_data_for_training(invert_imagery_probability,
min_num_examples,
example_class_selection,
batch_size,
cache_dir=None):
r"""Returns `tff.simulation.datasets.ClientData` of filtered Federated EMNIST data.
Input data gets filtered on a per-example basis. Any user meeting the
`min_num_examples` criterion is included. The examples are limited to those
  that are classified according to the `example_class_selection` criterion.
Args:
    invert_imagery_probability: The probability that a user's image data has
      pixel intensity inverted. E.g., `0p1` corresponds to 0.1, or a 10%
      probability that a user's data is flipped. Note that to save time in
      experiment execution, this is precomputed via the ./filter_examples.py
      script, and the selection here controls which file to read from.
    min_num_examples: Indicates the minimum number of examples that are either
      correct or incorrect (as set by the `example_class_selection` argument)
      in a client's local dataset for that client to be considered as part of
      the training sub-population. To save time in experiment execution, this
      assignment is precomputed via the ./filter_examples.py script, and the
      flag selection here is to indicate which file to read from.
    example_class_selection: Indicates whether to train on a client's correct
      or incorrect examples. To save time in experiment execution, this
      assignment is precomputed via the ./filter_examples.py script, and the
      flag selection here is to indicate which file to read from.
batch_size: Batch size of output dataset. If None, don't batch.
cache_dir: (Optional) base directory to cache the downloaded files. If None,
caches in Keras' default cache directory.
Returns:
    A `tff.simulation.datasets.ClientData` of real images of numbers/letters.
    The data has been filtered as per the input arguments (either not
    filtered, filtered by user classification accuracy, or filtered by
    example classification correctness).
"""
path_to_data = os.path.join(CSVS_BASE_PATH,
'inv_prob_{}'.format(invert_imagery_probability),
'filter_by_example',
'min_num_examples_{}'.format(min_num_examples),
'{}'.format(example_class_selection))
try:
filename = 'client_ids.csv'
path_to_read_inversions_csv = tf.keras.utils.get_file(
fname=filename,
cache_subdir=path_to_data,
cache_dir=cache_dir,
origin=os.path.join(BASE_URL, path_to_data, filename))
filename = 'example_indices_map.csv'
path_to_read_example_indices_csv = tf.keras.utils.get_file(
fname=filename,
cache_subdir=path_to_data,
cache_dir=cache_dir,
origin=os.path.join(BASE_URL, path_to_data, filename))
except Exception:
msg = ('A URL fetch failure was encountered when trying to retrieve '
'filter-by-example generated csv files with '
'invert_imagery_probability `{}`, min_num_examples `{}`, and '
'example_class_selection `{}`. Please run the ./filter_examples.py '
'script to generate the missing data, and use the `cache_dir` '
'argument to this method to specify the location of the generated '
'data csv files.'.format(invert_imagery_probability,
min_num_examples, example_class_selection))
raise ValueError(msg)
return get_filtered_client_data_for_training(
path_to_read_inversions_csv, path_to_read_example_indices_csv, batch_size)
def get_filtered_client_data_for_training(path_to_read_inversions_csv,
path_to_read_example_indices_csv,
batch_size):
"""Form ClientData using paths to pixel inversion, example selection data."""
raw_client_data = emnist_data_utils.create_real_images_tff_client_data(
'train')
client_ids = raw_client_data.client_ids
selected_client_ids_inversion_map = None
client_ids_example_indices_map = None
# If filter-by-user or filter-by-example, load the csv data into maps, and
# update the client IDs to just the users that will be part of training.
if path_to_read_inversions_csv is not None:
selected_client_ids_inversion_map, client_ids_example_indices_map = (
_get_client_ids_inversion_and_example_indices_maps(
path_to_read_inversions_csv, path_to_read_example_indices_csv))
client_ids = list(selected_client_ids_inversion_map.keys())
def _get_dataset(client_id):
"""Retrieve/preprocess a tf.data.Dataset for a given client_id."""
raw_ds = raw_client_data.create_tf_dataset_for_client(client_id)
invert_imagery = False
if selected_client_ids_inversion_map:
invert_imagery = selected_client_ids_inversion_map[client_id]
# If filter-by-example, do it here.
if client_ids_example_indices_map:
raw_ds = _filter_by_example(raw_ds, client_ids_example_indices_map,
client_id)
return emnist_data_utils.preprocess_img_dataset(
raw_ds,
invert_imagery=invert_imagery,
include_label=False,
batch_size=batch_size,
shuffle=True,
repeat=False)
return tff.simulation.datasets.ClientData.from_clients_and_fn(
client_ids, _get_dataset)
def _filter_by_example(raw_ds, client_ids_example_indices_map, client_id):
"""Form a tf.data.Dataset from the examples in the map for the client_id."""
example_indices = client_ids_example_indices_map[client_id]
  # B/c the csv stores the list as a string, we need to do some slightly
  # kludgey conversion from a string to a list. (We strip off the first and
  # last characters in the string, which are [ and ], and then split on
  # commas as delimiters, to recover the original list of ints.)
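  # e.g. the stored string '[3, 7, 9]' becomes the list [3, 7, 9].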
example_indices = [int(s) for s in example_indices[1:-1].split(',')]
# Get the elements (OrderedDicts) in the raw data which are at the indices
# indicated by the list above.
elements = []
index = 0
for element in raw_ds:
if index in example_indices:
elements.append(element)
index += 1
# Bind the elements (via a generator fn) into a new tf.data.Dataset.
def _generator():
for element in elements:
yield element
return tf.data.Dataset.from_generator(_generator, raw_ds.output_types,
raw_ds.output_shapes)
def _get_client_ids_inversion_and_example_indices_maps(
path_to_read_inversions_csv, path_to_read_example_indices_csv):
"""Return paths to csv files storing maps indicating the data to train on."""
if path_to_read_inversions_csv is None:
raise ValueError(
'No path provided to the CSV file that stores map from client ids to '
'image inversion data.')
# Load (from CSV file) the specific client IDs that the GAN will train on, and
# whether or not the images on that client are inverted.
selected_client_ids_inversion_map = {}
with tf.io.gfile.GFile(path_to_read_inversions_csv, 'r') as csvfile:
csvreader = csv.reader(csvfile)
for [key, val] in csvreader:
selected_client_ids_inversion_map[key] = (val == 'True')
# If specified (via CSV file), the specific examples on each client ID that
# the GAN will be trained on.
client_ids_example_indices_map = None
if path_to_read_example_indices_csv:
client_ids_example_indices_map = {}
with tf.io.gfile.GFile(path_to_read_example_indices_csv, 'r') as csvfile:
csvreader = csv.reader(csvfile)
for [key, val] in csvreader:
client_ids_example_indices_map[key] = val
set_1 = set(client_ids_example_indices_map.keys())
set_2 = set(selected_client_ids_inversion_map.keys())
symmetric_diff = set_1 ^ set_2
if symmetric_diff:
raise ValueError(
'The CSV files at path_to_read_inversions_csv and '
'path_to_read_example_indices_csv contain different keys.')
return selected_client_ids_inversion_map, client_ids_example_indices_map
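# --- Editor's sketch (not part of the original module): minimal usage of the
# loaders above. Running it requires the TFF environment assumed by the
# imports; the batch size below is an illustrative assumption. ---
if __name__ == '__main__':
  client_data = get_unfiltered_client_data_for_training(batch_size=32)
  print('Loaded unfiltered EMNIST ClientData with %d clients.' %
        len(client_data.client_ids))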
| 45.365854
| 87
| 0.701613
|
913b3d2800a3b757b0ac532e70b0143facd53384
| 1,503
|
py
|
Python
|
test/ipr_tests.py
|
genomeannotation/Annie-new
|
4bb39804c4d51877907f531d72e8b2e841c58243
|
[
"MIT"
] | 7
|
2018-06-14T18:19:14.000Z
|
2022-01-12T12:31:50.000Z
|
test/ipr_tests.py
|
genomeannotation/Annie-new
|
4bb39804c4d51877907f531d72e8b2e841c58243
|
[
"MIT"
] | 10
|
2015-05-01T00:42:32.000Z
|
2022-02-08T17:27:31.000Z
|
test/ipr_tests.py
|
genomeannotation/Annie
|
4bb39804c4d51877907f531d72e8b2e841c58243
|
[
"MIT"
] | 5
|
2016-01-22T11:59:08.000Z
|
2022-03-14T06:21:13.000Z
|
#!/usr/bin/env python
import unittest
import io
from src.ipr import read_ipr
class TestIPR(unittest.TestCase):
def setUp(self):
self.ipr_file = io.StringIO(\
'm.98281\tc95b0824ccd627403aa63f9e474649cc\t7571\tSuperFamily\tSSF48726\t5997\t6096\t6.42E-13\tT\t04-04-2014\n\
m.98281\tc95b0824ccd627403aa63f9e474649cc\t7571\tProSiteProfiles\tPS50835\tIg-like domain profile.\t6294\t6382\t12.15\tT\t04-04-2014\tIPR007110\tImmunoglobulin-like domain\tGO:0005515\n\
m.98281\tc95b0824ccd627403aa63f9e474649cc\t7571\tProSiteProfiles\tPS50835\tFibronectin type-III domain profile.\t2548\t2639\t21.089\tT\t04-04-2014\tIPR003961\tFibronectin, type III\tGO:0005515\n')
def test_read_ipr(self):
whitelist = ["superfamily", "prositeprofiles"]
ipr_list = read_ipr(self.ipr_file, whitelist)
        self.assertEqual(5, len(ipr_list))
mrna_ids = [ipr_list[i].feature_id for i in range(len(ipr_list))]
keys = [ipr_list[i].key for i in range(len(ipr_list))]
values = [ipr_list[i].value for i in range(len(ipr_list))]
self.assertTrue("m.98281" in mrna_ids)
self.assertTrue("Dbxref" in keys)
self.assertTrue("SUPERFAMILY:SSF48726" in values)
self.assertTrue("GO:0005515" in values)
self.assertTrue("InterPro:IPR007110" in values)
##########################
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TestIPR))
return suite
if __name__ == '__main__':
unittest.main()
| 39.552632
| 196
| 0.709914
|
994bdbc6d7e396dc47edaad36bf697a72e1abe9c
| 2,020
|
py
|
Python
|
ooobuild/dyn/smarttags/smart_tag_recognizer_mode.py
|
Amourspirit/ooo_uno_tmpl
|
64e0c86fd68f24794acc22d63d8d32ae05dd12b8
|
[
"Apache-2.0"
] | null | null | null |
ooobuild/dyn/smarttags/smart_tag_recognizer_mode.py
|
Amourspirit/ooo_uno_tmpl
|
64e0c86fd68f24794acc22d63d8d32ae05dd12b8
|
[
"Apache-2.0"
] | null | null | null |
ooobuild/dyn/smarttags/smart_tag_recognizer_mode.py
|
Amourspirit/ooo_uno_tmpl
|
64e0c86fd68f24794acc22d63d8d32ae05dd12b8
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
#
# Copyright 2022 :Barry-Thomas-Paul: Moss
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Enum Class
# this is an auto-generated file produced by Cheetah
# Namespace: com.sun.star.smarttags
# Libre Office Version: 7.3
from typing import TYPE_CHECKING
from ooo.oenv.env_const import UNO_ENVIRONMENT, UNO_RUNTIME
_DYNAMIC = False
if (not TYPE_CHECKING) and UNO_RUNTIME and UNO_ENVIRONMENT:
_DYNAMIC = True
if not TYPE_CHECKING and _DYNAMIC:
from ooo.helper.enum_helper import uno_enum_class_new
from com.sun.star.smarttags.SmartTagRecognizerMode import (CELL, CHAR, PARAGRAPH, SINGLE_WORD)
def _get_enum():
# Dynamically create class that actually contains UNO enum instances
_dict = {
"__doc__": "Dynamically created class that represents com.sun.star.smarttags.SmartTagRecognizerMode Enum. Class loosly mimics Enum",
"__new__": uno_enum_class_new,
"__ooo_ns__": "com.sun.star.smarttags",
"__ooo_full_ns__": "com.sun.star.smarttags.SmartTagRecognizerMode",
"__ooo_type_name__": "enum",
"CELL": CELL,
"CHAR": CHAR,
"PARAGRAPH": PARAGRAPH,
"SINGLE_WORD": SINGLE_WORD,
}
result = type('SmartTagRecognizerMode', (object,), _dict)
return result
SmartTagRecognizerMode = _get_enum()
else:
from ...lo.smarttags.smart_tag_recognizer_mode import SmartTagRecognizerMode as SmartTagRecognizerMode
__all__ = ['SmartTagRecognizerMode']
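# Editor's sketch: a quick smoke test. Either branch above yields a class with
# the CELL/CHAR/PARAGRAPH/SINGLE_WORD members, assuming the ooo/UNO environment
# required by the imports is available.
if __name__ == "__main__":
    print(SmartTagRecognizerMode.PARAGRAPH)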
| 38.113208
| 144
| 0.718812
|
6db7266170a73cbead4ac4c78a57137569947340
| 3,524
|
py
|
Python
|
py/ffmpeg/enc_dec_so_part2.py
|
YodaEmbedding/experiments
|
567c6a1c18fac2d951fe2af54aaa4917b7d529d2
|
[
"MIT"
] | null | null | null |
py/ffmpeg/enc_dec_so_part2.py
|
YodaEmbedding/experiments
|
567c6a1c18fac2d951fe2af54aaa4917b7d529d2
|
[
"MIT"
] | null | null | null |
py/ffmpeg/enc_dec_so_part2.py
|
YodaEmbedding/experiments
|
567c6a1c18fac2d951fe2af54aaa4917b7d529d2
|
[
"MIT"
] | null | null | null |
import subprocess
from queue import Queue
from threading import Thread
from time import sleep, time
import numpy as np
WIDTH = 224
HEIGHT = 224
NUM_FRAMES = 16
def t(epoch=time()):
return round(time() - epoch, 2)
def make_frames(num_frames):
x = np.arange(WIDTH, dtype=np.uint8)
x = np.broadcast_to(x, (num_frames, HEIGHT, WIDTH))
x = x[..., np.newaxis].repeat(3, axis=-1)
x[..., 1] = x[:, :, ::-1, 1]
scale = np.arange(1, len(x) + 1, dtype=np.uint8)
scale = scale[:, np.newaxis, np.newaxis, np.newaxis]
x *= scale
return x
def encoder_write(writer):
"""Feeds encoder frames to encode"""
frames = make_frames(num_frames=NUM_FRAMES)
for i, frame in enumerate(frames):
writer.write(frame.tobytes())
writer.flush()
print(f"time={t()} frames={i + 1:<3} encoder_write")
sleep(0.1)
writer.close()
def encoder_read(reader, queue):
"""Puts chunks of encoded bytes into queue"""
with open("out_tmp.264", "wb") as f:
while chunk := reader.read1():
queue.put(chunk)
f.write(chunk)
f.flush()
print(f"time={t()} chunk={len(chunk):<4} encoder_read")
queue.put(None)
def decoder_write(writer, queue):
"""Feeds decoder bytes to decode"""
while chunk := queue.get():
writer.write(chunk)
writer.flush()
print(f"time={t()} chunk={len(chunk):<4} decoder_write")
writer.close()
def decoder_read(reader):
"""Retrieves decoded frames"""
buffer = b""
frame_len = HEIGHT * WIDTH * 3
targets = make_frames(num_frames=NUM_FRAMES)
i = 0
while chunk := reader.read1():
buffer += chunk
while len(buffer) >= frame_len:
frame = np.frombuffer(buffer[:frame_len], dtype=np.uint8)
frame = frame.reshape(HEIGHT, WIDTH, 3)
            # cast to float first so the uint8 subtraction cannot wrap around
            psnr = 10 * np.log10(255 ** 2 / np.mean((frame.astype(np.float64) - targets[i]) ** 2))
buffer = buffer[frame_len:]
i += 1
print(f"time={t()} frames={i:<3} decoder_read psnr={psnr:.1f}")
cmd = (
"ffmpeg "
"-f rawvideo -pix_fmt rgb24 -s 224x224 "
"-i pipe: "
"-vcodec libx264 "
"-f flv "
# "-c:v libx264 "
# "-x264-params aud=1 "
# "-f h264 "
# "-f rtp "
# "-f mpegts "
# "-preset ultrafast "
# "-bf 0 "
"-tune zerolatency "
# "-rtsp_transport tcp "
# "-enable-muxer=rtsp "
# "-enable-muxer=rtp "
# "-enable-protocol=rtp "
# "-enable-protocol=rtsp "
# "rtsp://localhost:5678"
# "-f rtp "
# "rtp://127.0.0.1:5678"
"pipe:"
)
encoder_process = subprocess.Popen(
cmd.split(), stdin=subprocess.PIPE, stdout=subprocess.PIPE
)
cmd = (
"ffmpeg "
"-probesize 32 "
"-flags low_delay "
# "-f h264 "
"-f flv "
"-vcodec h264 "
# "-f mpegts "
# "-f rtp "
# "-i rtp://127.0.0.1:5678 "
"-i pipe: "
# "-x264-params aud=1 "
"-f rawvideo -pix_fmt rgb24 -s 224x224 "
"pipe:"
)
decoder_process = subprocess.Popen(
cmd.split(), stdin=subprocess.PIPE, stdout=subprocess.PIPE
)
queue = Queue()
threads = [
Thread(
target=encoder_write,
args=(encoder_process.stdin,),
),
Thread(
target=encoder_read,
args=(encoder_process.stdout, queue),
),
Thread(
target=decoder_write,
args=(decoder_process.stdin, queue),
),
Thread(
target=decoder_read,
args=(decoder_process.stdout,),
),
]
for thread in threads:
thread.start()
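# Editor's addition (sketch): the original snippet starts the threads but never
# joins them; waiting here lets the pipeline drain before the script exits.
for thread in threads:
    thread.join()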
| 24.136986
| 79
| 0.570091
|
ea6fc9c6db1cfbfc59ede603a8ea211252bbcf8b
| 125
|
py
|
Python
|
python/config.open.py
|
ruslo/configs
|
db80431964e3b252cd03b1b60ad61a9ff44b17a0
|
[
"BSD-2-Clause"
] | 3
|
2016-03-08T04:13:01.000Z
|
2017-05-21T23:55:14.000Z
|
python/config.open.py
|
ruslo/configs
|
db80431964e3b252cd03b1b60ad61a9ff44b17a0
|
[
"BSD-2-Clause"
] | null | null | null |
python/config.open.py
|
ruslo/configs
|
db80431964e3b252cd03b1b60ad61a9ff44b17a0
|
[
"BSD-2-Clause"
] | null | null | null |
[default]
open = gvim.py
[extensions]
# text format
.txt = gvim.py
# c++ sources
.cpp = gvim.py
.hpp = gvim.py
| 11.363636
| 16
| 0.576
|
c4d407a9a3595f64376a7d345444d142c2778e3f
| 729
|
py
|
Python
|
downstream/sws2013/model.py
|
OlegJakushkin/s3prl
|
c0e41f07fa56f0f79b5bf3839b4d0a4cf7c421bf
|
[
"MIT"
] | 856
|
2021-01-15T15:40:32.000Z
|
2022-03-31T07:08:17.000Z
|
downstream/sws2013/model.py
|
shuchengzhang92/s3prl
|
39460b7ab8d19cd2fbe3406ed1073a5dbe96bfc0
|
[
"MIT"
] | 210
|
2021-01-15T13:28:50.000Z
|
2022-03-30T06:13:51.000Z
|
downstream/sws2013/model.py
|
shuchengzhang92/s3prl
|
39460b7ab8d19cd2fbe3406ed1073a5dbe96bfc0
|
[
"MIT"
] | 208
|
2021-01-15T03:03:12.000Z
|
2022-03-31T08:33:27.000Z
|
import torch
import torch.nn as nn
import torch.nn.functional as F
class Model(nn.Module):
def __init__(self, input_dim, bottleneck_dim, hidden_dim, **kwargs):
super(Model, self).__init__()
self.connector = nn.Linear(input_dim, bottleneck_dim)
self.fc1 = nn.Linear(bottleneck_dim, hidden_dim)
self.attention_linear = nn.Linear(hidden_dim, 1)
def forward(self, features):
# transforming
hiddens = F.relu(self.connector(features))
hiddens = torch.tanh(self.fc1(hiddens))
# attentive pooling
attention_weights = F.softmax(self.attention_linear(hiddens), dim=1)
embeds = torch.sum(hiddens * attention_weights, dim=1)
return embeds
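# --- Editor's sketch: a quick shape check for the attentive pooling above.
# The dimensions below are illustrative assumptions, not values from s3prl. ---
if __name__ == "__main__":
    model = Model(input_dim=768, bottleneck_dim=64, hidden_dim=128)
    features = torch.randn(4, 50, 768)  # (batch, frames, input_dim)
    print(model(features).shape)        # expected: torch.Size([4, 128])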
| 30.375
| 76
| 0.676269
|
3c6a34d920437dcb3a333dde9cf36b236a88649f
| 3,284
|
py
|
Python
|
CIM100/IEC61970/Base/LoadModel/SeasonDayTypeSchedule.py
|
MaximeBaudette/PyCIM
|
d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14
|
[
"MIT"
] | null | null | null |
CIM100/IEC61970/Base/LoadModel/SeasonDayTypeSchedule.py
|
MaximeBaudette/PyCIM
|
d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14
|
[
"MIT"
] | null | null | null |
CIM100/IEC61970/Base/LoadModel/SeasonDayTypeSchedule.py
|
MaximeBaudette/PyCIM
|
d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14
|
[
"MIT"
] | null | null | null |
# Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM100.IEC61970.Base.Core.RegularIntervalSchedule import RegularIntervalSchedule
class SeasonDayTypeSchedule(RegularIntervalSchedule):
"""The schedule specialize RegularIntervalSchedule with type curve data for a specific type of day and season. This means that curves of this type cover a 24 hour period.The schedule specialize RegularIntervalSchedule with type curve data for a specific type of day and season. This means that curves of this type cover a 24 hour period.
"""
def __init__(self, Season=None, DayType=None, *args, **kw_args):
"""Initialises a new 'SeasonDayTypeSchedule' instance.
@param Season: Season for the Schedule.
@param DayType: DayType for the Schedule.
"""
self._Season = None
self.Season = Season
self._DayType = None
self.DayType = DayType
super(SeasonDayTypeSchedule, self).__init__(*args, **kw_args)
_attrs = []
_attr_types = {}
_defaults = {}
_enums = {}
_refs = ["Season", "DayType"]
_many_refs = []
def getSeason(self):
"""Season for the Schedule.
"""
return self._Season
def setSeason(self, value):
if self._Season is not None:
filtered = [x for x in self.Season.SeasonDayTypeSchedules if x != self]
self._Season._SeasonDayTypeSchedules = filtered
self._Season = value
if self._Season is not None:
if self not in self._Season._SeasonDayTypeSchedules:
self._Season._SeasonDayTypeSchedules.append(self)
Season = property(getSeason, setSeason)
def getDayType(self):
"""DayType for the Schedule.
"""
return self._DayType
def setDayType(self, value):
if self._DayType is not None:
filtered = [x for x in self.DayType.SeasonDayTypeSchedules if x != self]
self._DayType._SeasonDayTypeSchedules = filtered
self._DayType = value
if self._DayType is not None:
if self not in self._DayType._SeasonDayTypeSchedules:
self._DayType._SeasonDayTypeSchedules.append(self)
DayType = property(getDayType, setDayType)
| 40.04878
| 341
| 0.702192
|
d62de82ec2e842142b14bb6044c24a0db960ae32
| 2,043
|
py
|
Python
|
plotMeanOfACF.py
|
Yu-Chuan/Dendrite_Growth_Model
|
5145a5232712ed26700036fb06265b85a91b8429
|
[
"MIT"
] | null | null | null |
plotMeanOfACF.py
|
Yu-Chuan/Dendrite_Growth_Model
|
5145a5232712ed26700036fb06265b85a91b8429
|
[
"MIT"
] | null | null | null |
plotMeanOfACF.py
|
Yu-Chuan/Dendrite_Growth_Model
|
5145a5232712ed26700036fb06265b85a91b8429
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Tue Sep 24 11:13:56 2019
@author: Yu-Chuan Chen
"""
#%% library
import numpy as np
import os
import matplotlib.pyplot as plt
#%% create folder
dataPath = '/Yu-Chuan/ForYuChuan/python program/dedritic growth model/meanOfACF/'
neuronType = 'Tm'
labname = 'LeeCH'
neuron = ['Tm1', 'Tm2', 'Tm9', 'Tm20']
dendrite = 'Dendrite'
Segment_name = ['Br', 'Te']
folder = 'figure/'
f_dir = dataPath + folder
def folderCreate(f_dir):
try:
os.makedirs(f_dir)
    except FileExistsError:
        print("The directory already exists: %s" % f_dir)
    except OSError:
        print("Creation of the directory %s failed" % f_dir)
    else:
        print("Successfully created the directory %s" % f_dir)
return
folderCreate(f_dir)
#%% Running program
neuronPath = []
meanOfACF = []
for name in neuron:
neuron_filesName = []
data_kb_kt = []
for Prop in Segment_name:
fileName = dataPath + 'MeanOfACF_' + neuronType + '_' + labname + '_' + name + '_' +\
dendrite + '_' + Prop + '.csv'
data_kb_kt.append(np.genfromtxt(fileName, delimiter=','))
neuron_filesName.append(fileName)
neuronPath.append(neuron_filesName)
meanOfACF.append(data_kb_kt)
#%% Figure output
##plot kb
savename = 'meanOfACF_' + labname + '_' + neuronType + '_' + Segment_name[0] + '.png'
plt.figure(figsize=(10,6), linewidth = 1.5)
for neuron_n in meanOfACF:
plt.plot(neuron_n[0])
plt.legend(neuron)
plt.xlabel("lags", fontsize = 18)
plt.ylabel("ACF", fontsize = 18)
plt.xlim([-0.5, 60])
plt.savefig(f_dir + savename)
##plot kt
savename = 'meanOfACF_' + labname + '_' + neuronType + '_' + Segment_name[1] + '.png'
plt.figure(figsize=(10,6), linewidth = 1.5)
for neuron_n in meanOfACF:
plt.plot(neuron_n[1])
plt.legend(neuron)
plt.xlabel("lags", fontsize = 18)
plt.xlim([-0.5, 120])
plt.ylabel("ACF", fontsize = 18)
plt.savefig(f_dir + savename)
| 28.375
| 95
| 0.617719
|
1de8a8b7b14279768eced360023467d3be9b4367
| 20,103
|
py
|
Python
|
python/cudf/cudf/core/column/numerical.py
|
mt-jones/cudf
|
1a70ee21aacef4c83971ddf14e1df98448b3fb81
|
[
"Apache-2.0"
] | null | null | null |
python/cudf/cudf/core/column/numerical.py
|
mt-jones/cudf
|
1a70ee21aacef4c83971ddf14e1df98448b3fb81
|
[
"Apache-2.0"
] | null | null | null |
python/cudf/cudf/core/column/numerical.py
|
mt-jones/cudf
|
1a70ee21aacef4c83971ddf14e1df98448b3fb81
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2018-2020, NVIDIA CORPORATION.
import numpy as np
import pandas as pd
import pyarrow as pa
from pandas.api.types import is_integer_dtype
import rmm
import cudf._lib as libcudf
import cudf._libxx as libcudfxx
from cudf.core._sort import get_sorted_inds
from cudf.core.buffer import Buffer
from cudf.core.column import as_column, column
from cudf.utils import cudautils, utils
from cudf.utils.dtypes import (
min_numeric_column_type,
min_signed_type,
np_to_pa_dtype,
numeric_normalize_types,
)
class NumericalColumn(column.ColumnBase):
def __init__(self, data, dtype, mask=None, size=None, offset=0):
"""
Parameters
----------
data : Buffer
dtype : np.dtype
The dtype associated with the data Buffer
mask : Buffer, optional
"""
dtype = np.dtype(dtype)
if data.size % dtype.itemsize:
raise ValueError("Buffer size must be divisible by element size")
if size is None:
size = data.size // dtype.itemsize
size = size - offset
super().__init__(
data, size=size, dtype=dtype, mask=mask, offset=offset
)
def __contains__(self, item):
"""
Returns True if column contains item, else False.
"""
        # Handle improper item types: np.can_cast raises if item is None,
        # hence the try/except handler below.
try:
if np.can_cast(item, self.data_array_view.dtype):
item = self.data_array_view.dtype.type(item)
else:
return False
except Exception:
return False
# TODO: Use `scalar`-based `contains` wrapper
return libcudfxx.search.contains(
self, column.as_column([item], dtype=self.dtype)
).any()
def binary_operator(self, binop, rhs, reflect=False):
int_dtypes = [
np.dtype("int8"),
np.dtype("int16"),
np.dtype("int32"),
np.dtype("int64"),
]
tmp = rhs
if reflect:
tmp = self
if isinstance(rhs, NumericalColumn) or np.isscalar(rhs):
out_dtype = np.result_type(self.dtype, rhs.dtype)
if binop in ["mod", "floordiv"]:
if (tmp.dtype in int_dtypes) and (
(np.isscalar(tmp) and (0 == tmp))
or ((isinstance(tmp, NumericalColumn)) and (0.0 in tmp))
):
out_dtype = np.dtype("float_")
elif rhs is None:
out_dtype = self.dtype
else:
msg = "{!r} operator not supported between {} and {}"
raise TypeError(msg.format(binop, type(self), type(rhs)))
return _numeric_column_binop(
lhs=self, rhs=rhs, op=binop, out_dtype=out_dtype, reflect=reflect
)
def unary_operator(self, unaryop):
return _numeric_column_unaryop(self, op=unaryop)
def unordered_compare(self, cmpop, rhs):
return _numeric_column_compare(self, rhs, op=cmpop)
def ordered_compare(self, cmpop, rhs):
return _numeric_column_compare(self, rhs, op=cmpop)
def _apply_scan_op(self, op):
out_col = column.column_empty_like_same_mask(self, dtype=self.dtype)
libcudf.reduce.scan(self, out_col, op, inclusive=True)
return out_col
def normalize_binop_value(self, other):
if other is None:
return other
other_dtype = np.min_scalar_type(other)
if other_dtype.kind in "biuf":
other_dtype = np.promote_types(self.dtype, other_dtype)
if other_dtype == np.dtype("float16"):
other = np.dtype("float32").type(other)
other_dtype = other.dtype
if other_dtype.kind in "u":
other_dtype = min_signed_type(other)
if np.isscalar(other):
other = np.dtype(other_dtype).type(other)
return other
else:
ary = utils.scalar_broadcast_to(
other, size=len(self), dtype=other_dtype
)
return column.build_column(
                data=Buffer.from_array_like(ary),
dtype=ary.dtype,
mask=self.mask,
)
else:
raise TypeError("cannot broadcast {}".format(type(other)))
def as_string_column(self, dtype, **kwargs):
from cudf.core.column import string, as_column
if len(self) > 0:
return string._numeric_to_str_typecast_functions[
np.dtype(self.dtype)
](self, **kwargs)
else:
return as_column([], dtype="object")
def as_datetime_column(self, dtype, **kwargs):
from cudf.core.column import build_column
return build_column(
data=self.astype("int64").data, dtype=dtype, mask=self.mask
)
def as_numerical_column(self, dtype, **kwargs):
dtype = np.dtype(dtype)
if dtype == self.dtype:
return self
return libcudfxx.unary.cast(self, dtype)
def sort_by_values(self, ascending=True, na_position="last"):
sort_inds = get_sorted_inds(self, ascending, na_position)
col_keys = self[sort_inds]
col_inds = column.build_column(
sort_inds.data, dtype=sort_inds.dtype, mask=sort_inds.mask
)
return col_keys, col_inds
def to_pandas(self, index=None):
if self.has_nulls and self.dtype == np.bool:
# Boolean series in Pandas that contains None/NaN is of dtype
# `np.object`, which is not natively supported in GDF.
ret = self.astype(np.int8).to_array(fillna=-1)
ret = pd.Series(ret, index=index)
ret = ret.where(ret >= 0, other=None)
ret.replace(to_replace=1, value=True, inplace=True)
ret.replace(to_replace=0, value=False, inplace=True)
return ret
else:
return pd.Series(self.to_array(fillna="pandas"), index=index)
def to_arrow(self):
mask = None
if self.nullable:
mask = pa.py_buffer(self.mask_array_view.copy_to_host())
data = pa.py_buffer(self.data_array_view.copy_to_host())
pa_dtype = np_to_pa_dtype(self.dtype)
out = pa.Array.from_buffers(
type=pa_dtype,
length=len(self),
buffers=[mask, data],
null_count=self.null_count,
)
if self.dtype == np.bool:
return out.cast(pa.bool_())
else:
return out
def unique(self, method="sort"):
# method variable will indicate what algorithm to use to
# calculate unique, not used right now
if method != "sort":
msg = "non sort based unique() not implemented yet"
raise NotImplementedError(msg)
segs, sortedvals = self._unique_segments()
# gather result
out_col = column.as_column(sortedvals)[segs]
return out_col
def all(self):
return bool(libcudf.reduce.reduce("all", self, dtype=np.bool_))
def any(self):
if self.valid_count == 0:
return False
return bool(libcudf.reduce.reduce("any", self, dtype=np.bool_))
def min(self, dtype=None):
return libcudf.reduce.reduce("min", self, dtype=dtype)
def max(self, dtype=None):
return libcudf.reduce.reduce("max", self, dtype=dtype)
def sum(self, dtype=None):
return libcudf.reduce.reduce("sum", self, dtype=dtype)
def product(self, dtype=None):
return libcudf.reduce.reduce("product", self, dtype=dtype)
def mean(self, dtype=np.float64):
return libcudf.reduce.reduce("mean", self, dtype=dtype)
def var(self, ddof=1, dtype=np.float64):
return libcudf.reduce.reduce("var", self, dtype=dtype, ddof=ddof)
def std(self, ddof=1, dtype=np.float64):
return libcudf.reduce.reduce("std", self, dtype=dtype, ddof=ddof)
def sum_of_squares(self, dtype=None):
return libcudf.reduce.reduce("sum_of_squares", self, dtype=dtype)
def round(self, decimals=0):
if decimals < 0:
msg = "Decimal values < 0 are not yet supported."
raise NotImplementedError(msg)
if np.issubdtype(self.dtype, np.integer):
return self
data = Buffer(cudautils.apply_round(self.data_array_view, decimals))
return column.build_column(data=data, dtype=self.dtype, mask=self.mask)
def applymap(self, udf, out_dtype=None):
"""Apply a elemenwise function to transform the values in the Column.
Parameters
----------
udf : function
Wrapped by numba jit for call on the GPU as a device function.
out_dtype : numpy.dtype; optional
The dtype for use in the output.
By default, use the same dtype as *self.dtype*.
Returns
-------
result : Column
The mask is preserved.
"""
if out_dtype is None:
out_dtype = self.dtype
out = column.column_applymap(udf=udf, column=self, out_dtype=out_dtype)
return out
def default_na_value(self):
"""Returns the default NA value for this column
"""
dkind = self.dtype.kind
if dkind == "f":
return self.dtype.type(np.nan)
elif dkind in "iu":
return -1
elif dkind == "b":
return False
else:
raise TypeError(
"numeric column of {} has no NaN value".format(self.dtype)
)
def find_and_replace(self, to_replace, replacement, all_nan):
"""
Return col with *to_replace* replaced with *value*.
"""
to_replace_col = _normalize_find_and_replace_input(
self.dtype, to_replace
)
if all_nan:
replacement_col = column.as_column(replacement, dtype=self.dtype)
else:
replacement_col = _normalize_find_and_replace_input(
self.dtype, replacement
)
if len(replacement_col) == 1 and len(to_replace_col) > 1:
replacement_col = column.as_column(
utils.scalar_broadcast_to(
replacement[0], (len(to_replace_col),), self.dtype
)
)
replaced = self.copy()
to_replace_col, replacement_col, replaced = numeric_normalize_types(
to_replace_col, replacement_col, replaced
)
return libcudfxx.replace.replace(
replaced, to_replace_col, replacement_col
)
def fillna(self, fill_value):
"""
Fill null values with *fill_value*
"""
if np.isscalar(fill_value):
            # cast safely to the same dtype as self
fill_value_casted = self.dtype.type(fill_value)
if not np.isnan(fill_value) and (fill_value_casted != fill_value):
raise TypeError(
"Cannot safely cast non-equivalent {} to {}".format(
type(fill_value).__name__, self.dtype.name
)
)
fill_value = fill_value_casted
else:
fill_value = column.as_column(fill_value, nan_as_null=False)
# cast safely to the same dtype as self
if is_integer_dtype(self.dtype):
fill_value = _safe_cast_to_int(fill_value, self.dtype)
else:
fill_value = fill_value.astype(self.dtype)
result = libcudfxx.replace.replace_nulls(self, fill_value)
result = column.build_column(result.data, result.dtype, mask=None)
return result
def find_first_value(self, value, closest=False):
"""
Returns offset of first value that matches. For monotonic
columns, returns the offset of the first larger value
if closest=True.
"""
found = 0
if len(self):
found = cudautils.find_first(self.data_array_view, value)
if found == -1 and self.is_monotonic and closest:
if value < self.min():
found = 0
elif value > self.max():
found = len(self)
else:
found = cudautils.find_first(
self.data_array_view, value, compare="gt"
)
if found == -1:
raise ValueError("value not found")
elif found == -1:
raise ValueError("value not found")
return found
def find_last_value(self, value, closest=False):
"""
Returns offset of last value that matches. For monotonic
columns, returns the offset of the last smaller value
if closest=True.
"""
found = 0
if len(self):
found = cudautils.find_last(self.data_array_view, value)
if found == -1 and self.is_monotonic and closest:
if value < self.min():
found = -1
elif value > self.max():
found = len(self) - 1
else:
found = cudautils.find_last(
self.data_array_view, value, compare="lt"
)
if found == -1:
raise ValueError("value not found")
elif found == -1:
raise ValueError("value not found")
return found
@property
def is_monotonic_increasing(self):
if not hasattr(self, "_is_monotonic_increasing"):
if self.nullable and self.has_nulls:
self._is_monotonic_increasing = False
else:
self._is_monotonic_increasing = libcudf.issorted.issorted(
[self]
)
return self._is_monotonic_increasing
@property
def is_monotonic_decreasing(self):
if not hasattr(self, "_is_monotonic_decreasing"):
if self.nullable and self.has_nulls:
self._is_monotonic_decreasing = False
else:
self._is_monotonic_decreasing = libcudf.issorted.issorted(
[self], [1]
)
return self._is_monotonic_decreasing
def can_cast_safely(self, to_dtype):
"""
Returns true if all the values in self can be
safely cast to dtype
"""
if self.dtype.kind == to_dtype.kind:
if self.dtype <= to_dtype:
return True
else:
# Kinds are the same but to_dtype is smaller
if "float" in to_dtype.name:
info = np.finfo(to_dtype)
elif "int" in to_dtype.name:
info = np.iinfo(to_dtype)
min_, max_ = info.min, info.max
if (self.min() > min_) and (self.max() < max_):
return True
else:
return False
# want to cast int to float
elif to_dtype.kind == "f" and self.dtype.kind == "i":
info = np.finfo(to_dtype)
biggest_exact_int = 2 ** (info.nmant + 1)
if (self.min() >= -biggest_exact_int) and (
self.max() <= biggest_exact_int
):
return True
else:
from cudf import Series
if (
Series(self).astype(to_dtype).astype(self.dtype)
== Series(self)
).all():
return True
else:
return False
# want to cast float to int:
elif to_dtype.kind == "i" and self.dtype.kind == "f":
info = np.iinfo(to_dtype)
min_, max_ = info.min, info.max
            # cheap range check first; only then do the elementwise compare
if (self.min() >= min_) and (self.max() <= max_):
from cudf import Series
if (Series(self) % 1 == 0).all():
return True
else:
return False
else:
return False
def _numeric_column_binop(lhs, rhs, op, out_dtype, reflect=False):
if reflect:
lhs, rhs = rhs, lhs
libcudf.nvtx.nvtx_range_push("CUDF_BINARY_OP", "orange")
# Allocate output
masked = False
if np.isscalar(lhs):
masked = rhs.nullable
row_count = len(rhs)
elif np.isscalar(rhs):
masked = lhs.nullable
row_count = len(lhs)
elif rhs is None:
masked = True
row_count = len(lhs)
elif lhs is None:
masked = True
row_count = len(rhs)
else:
masked = lhs.nullable or rhs.nullable
row_count = len(lhs)
is_op_comparison = op in ["lt", "gt", "le", "ge", "eq", "ne"]
out = column.column_empty(row_count, dtype=out_dtype, masked=masked)
_ = libcudf.binops.apply_op(lhs, rhs, out, op)
if is_op_comparison:
out = out.fillna(op == "ne")
libcudf.nvtx.nvtx_range_pop()
return out
def _numeric_column_unaryop(operand, op):
if callable(op):
return libcudfxx.transform.transform(operand, op)
op = libcudfxx.unary.UnaryOp[op.upper()]
return libcudfxx.unary.unary_operation(operand, op)
def _numeric_column_compare(lhs, rhs, op):
return _numeric_column_binop(lhs, rhs, op, out_dtype=np.bool_)
def _safe_cast_to_int(col, dtype):
"""
Cast given NumericalColumn to given integer dtype safely.
"""
assert is_integer_dtype(dtype)
if col.dtype == dtype:
return col
new_col = col.astype(dtype)
if new_col.unordered_compare("eq", col).all():
return new_col
else:
raise TypeError(
"Cannot safely cast non-equivalent {} to {}".format(
col.dtype.type.__name__, np.dtype(dtype).type.__name__
)
)
def _normalize_find_and_replace_input(input_column_dtype, col_to_normalize):
normalized_column = column.as_column(col_to_normalize)
col_to_normalize_dtype = normalized_column.dtype
if isinstance(col_to_normalize, list):
col_to_normalize_dtype = min_numeric_column_type(normalized_column)
# Scalar case
if len(col_to_normalize) == 1:
col_to_normalize_casted = input_column_dtype.type(
col_to_normalize[0]
)
if not np.isnan(col_to_normalize_casted) and (
col_to_normalize_casted != col_to_normalize[0]
):
raise TypeError(
f"Cannot safely cast non-equivalent "
f"{col_to_normalize[0]} "
f"to {input_column_dtype.name}"
)
else:
col_to_normalize_dtype = input_column_dtype
elif hasattr(col_to_normalize, "dtype"):
col_to_normalize_dtype = col_to_normalize.dtype
else:
raise TypeError(f"Type {type(col_to_normalize)} not supported")
if (
col_to_normalize_dtype.kind == "f" and input_column_dtype.kind == "i"
) or (col_to_normalize_dtype > input_column_dtype):
raise TypeError(
f"Potentially unsafe cast for non-equivalent "
f"{col_to_normalize_dtype.name} "
f"to {input_column_dtype.name}"
)
return normalized_column.astype(input_column_dtype)
def digitize(column, bins, right=False):
"""Return the indices of the bins to which each value in column belongs.
Parameters
----------
column : Column
Input column.
bins : np.array
1-D monotonically increasing array of bins with same type as `column`.
right : bool
Indicates whether interval contains the right or left bin edge.
Returns
-------
A device array containing the indices
"""
assert column.dtype == bins.dtype
bins_buf = Buffer(rmm.to_device(bins))
bin_col = NumericalColumn(data=bins_buf, dtype=bins.dtype)
return as_column(
libcudfxx.sort.digitize(column.as_frame(), bin_col.as_frame(), right)
)
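# --- Editor's aside (sketch): for intuition, NumPy's host-side digitize has
# the same bin-index semantics as the GPU version above; the sample values
# below are illustrative. ---
if __name__ == "__main__":
    _bins = np.array([0.0, 1.0, 2.0])
    _vals = np.array([-0.5, 0.5, 1.0, 2.5])
    print(np.digitize(_vals, _bins, right=False))  # -> [0 1 2 3]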
| 34.422945
| 79
| 0.57663
|
ea9c33521dcee3f2e9528dc15523aeaeb060e349
| 433
|
py
|
Python
|
cool_mammals_web_application/wsgi.py
|
luiztauffer/cool_mammals_web_application
|
d2f6a5f1e083fc300b9b3a6d525ae548eb8e073f
|
[
"MIT"
] | null | null | null |
cool_mammals_web_application/wsgi.py
|
luiztauffer/cool_mammals_web_application
|
d2f6a5f1e083fc300b9b3a6d525ae548eb8e073f
|
[
"MIT"
] | null | null | null |
cool_mammals_web_application/wsgi.py
|
luiztauffer/cool_mammals_web_application
|
d2f6a5f1e083fc300b9b3a6d525ae548eb8e073f
|
[
"MIT"
] | null | null | null |
"""
WSGI config for cool_mammals_web_application project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'cool_mammals_web_application.settings')
application = get_wsgi_application()
| 25.470588
| 88
| 0.806005
|
fa73d9691e05b1095c5893a141aba99b0db0cd1a
| 9,465
|
py
|
Python
|
tutorials/foundations/plot_setup_model.py
|
samtx/pyapprox
|
c926d910e30fbcfed7d0621175d3b0268d59f852
|
[
"MIT"
] | null | null | null |
tutorials/foundations/plot_setup_model.py
|
samtx/pyapprox
|
c926d910e30fbcfed7d0621175d3b0268d59f852
|
[
"MIT"
] | null | null | null |
tutorials/foundations/plot_setup_model.py
|
samtx/pyapprox
|
c926d910e30fbcfed7d0621175d3b0268d59f852
|
[
"MIT"
] | null | null | null |
r"""
Model Definition
----------------
This tutorial describes how to set up a function with random inputs. It also provides examples of how to use model wrappers to time function calls and evaluate a function at multiple samples in parallel.
We start by defining a function of two random variables. We will use the Rosenbrock benchmark. See :func:`pyapprox.benchmarks.benchmarks.setup_rosenbrock_function`
"""
import pyapprox as pya
from pyapprox.benchmarks.benchmarks import setup_benchmark
benchmark = setup_benchmark('rosenbrock',nvars=2)
#%%
#Print the attributes of the benchmark with
print(benchmark.keys())
#%%
#Any of these attributes, e.g. the Rosenbrock function (the attribute ``fun``), can be accessed using benchmark.fun.
#
#Now let's define the inputs to the function of interest. For independent random variables we use SciPy random variables to represent each one-dimensional variable. For documentation refer to the `scipy.stats module <https://docs.scipy.org/doc/scipy/reference/stats.html>`_.
#
#We define multivariate random variables by specifying each 1D variable in a list. Here we will set up a 2D variable which is the tensor product of two independent and identically distributed uniform random variables
from scipy import stats
univariate_variables = [stats.uniform(-2,4),stats.uniform(-2,4)]
variable = pya.IndependentMultivariateRandomVariable(univariate_variables)
#%%
#This variable is also defined in the benchmark.variable attribute. To print a summary of the random variable
print(variable)
#%%
#We can draw random samples from variable and evaluate the function using
nsamples = 10
samples = pya.generate_independent_random_samples(variable,nsamples)
values = benchmark.fun(samples)
#%%
#User defined functions
#^^^^^^^^^^^^^^^^^^^^^^
#Pyapprox can be used with pretty much any function provided an appropriate interface is defined. Here we will show how to set up a simple function.
#
#PyApprox requires all functions to take a 2D np.ndarray with shape (nvars,nsamples) and to return a 2D np.ndarray with shape (nsamples,nqoi), where nqoi==1 for scalar-valued functions and nqoi>1 for vector-valued functions.
#
#Let's define a function which does not match these criteria and use wrappers provided by PyApprox to convert it to the correct format. Specifically we will define a function that only takes a 1D np.ndarray and returns a scalar
import numpy as np
def fun(sample):
assert sample.ndim==1
return np.sum(sample**2)
from pyapprox.models.wrappers import evaluate_1darray_function_on_2d_array
def pyapprox_fun(samples):
values = evaluate_1darray_function_on_2d_array(fun,samples)
return values
values = pyapprox_fun(samples)
#%%
#The function :func:`pyapprox.models.wrappers.evaluate_1darray_function_on_2d_array` avoids the need to write a for loop, and it also does some checking to make sure the returned values have the correct shape. We can nevertheless write the equivalent loop ourselves
values_loop = np.array([np.atleast_1d(fun(s)) for s in samples.T])
assert np.allclose(values,values_loop)
#%%
#Timing function evaluations
#^^^^^^^^^^^^^^^^^^^^^^^^^^^
#It is often useful to be able to track the time needed to evaluate a function. We can track this using the :class:`pyapprox.models.wrappers.TimerModelWrapper` and :class:`pyapprox.models.wrappers.WorkTrackingModel` objects which are designed to work together. The former times each evaluation of a function that returns output of shape (nsamples,nqoi) and appends the time to the quantities of interest returned by the function, i.e. it returns a 2D np.ndarray with shape (nsamples,nqoi+1). The latter extracts the time, removes it from the quantities of interest and returns output with the original shape (nsamples,nqoi) of the user function.
#
#Let's use the class with a function that takes a random amount of time. We will use the previous function but add a random pause between 0 and 0.05 seconds
import time
def fun_pause(sample):
assert sample.ndim==1
time.sleep(np.random.uniform(0,.05))
return np.sum(sample**2)
def pyapprox_fun(samples):
return evaluate_1darray_function_on_2d_array(fun_pause,samples)
from pyapprox.models.wrappers import TimerModelWrapper, WorkTrackingModel
timer_fun = TimerModelWrapper(pyapprox_fun)
worktracking_fun = WorkTrackingModel(timer_fun)
values = worktracking_fun(samples)
#%%
#The :class:`pyapprox.models.wrappers.WorkTrackingModel` has an attribute :class:`pyapprox.models.wrappers.WorkTracker` which stores the execution time of each function evaluation as a dictionary. The key is the model id. For this example the id will always be the same, but the id can vary and this is useful when evaluating multiple models, e.g. when using multi-fidelity methods. To print the dictionary use
costs = worktracking_fun.work_tracker.costs
print(costs)
#%%
#We can also call the work tracker to query the median cost for a model with a given id. The default id is 0.
fun_id=np.atleast_2d([0])
print(worktracking_fun.work_tracker(fun_id))
#%%
#Evaluating multiple models
#^^^^^^^^^^^^^^^^^^^^^^^^^^
#Now let us apply this to an ensemble of models to explore the use of model ids. First create a second function.
def fun_pause_2(sample):
time.sleep(np.random.uniform(.05,.1))
return np.sum(sample**2)
def pyapprox_fun_2(samples):
return evaluate_1darray_function_on_2d_array(fun_pause_2,samples)
#%%
#Now using :class:`pyapprox.control_variate_monte_carlo.ModelEnsemble` we can create a function which takes the random samples plus an additional configure variable which defines which model to evaluate. Let's use half the samples to evaluate the first model and evaluate the second model at the remaining samples
from pyapprox.control_variate_monte_carlo import ModelEnsemble
model_ensemble = ModelEnsemble([pyapprox_fun,pyapprox_fun_2])
timer_fun_ensemble = TimerModelWrapper(model_ensemble)
worktracking_fun_ensemble = WorkTrackingModel(
timer_fun_ensemble,num_config_vars=1)
fun_ids = np.ones(nsamples); fun_ids[:nsamples//2]=0
ensemble_samples = np.vstack([samples,fun_ids])
values = worktracking_fun_ensemble(ensemble_samples)
#%%
#Here we had to pass the number (1) of configure variables to the
#WorkTrackingModel. PyApprox assumes that the configure variables are the last rows of the samples 2D array.
#
#Now check that the new values are the same as when using the individual functions directly
assert np.allclose(values[:nsamples//2],pyapprox_fun(samples[:,:nsamples//2]))
assert np.allclose(values[nsamples//2:],pyapprox_fun_2(samples[:,nsamples//2:]))
#%%
#Again we can query the execution times of each model
costs = worktracking_fun_ensemble.work_tracker.costs
print(costs)
query_fun_ids = np.atleast_2d([0,1])
print(worktracking_fun_ensemble.work_tracker(query_fun_ids))
#%%
#As expected there are 5 samples tracked for each model and the median evaluation time of the second function is about twice as large as for the first function.
#%%
#Evaluating functions at multiple samples in parallel
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#For expensive models it is often useful to be able to evaluate each model concurrently. This can be achieved using :class:`pyapprox.models.wrappers.PoolModel`. Note this class is not intended for use with distributed memory systems, but rather is intended to use all the threads of a personal computer or compute node. See :class:`pyapprox.models.async_model.AsynchronousEvaluationModel` if you are interested in running multiple simulations in parallel on a distributed memory system.
#
#PoolModel cannot be used to wrap WorkTrackingModel. However it can still
#be used with WorkTrackingModel using the sequence of wrappers below.
from pyapprox.models.wrappers import PoolModel
max_eval_concurrency = 1  # set higher to use more threads
pool_model = PoolModel(timer_fun_ensemble,max_eval_concurrency,assert_omp=False)
# clear the WorkTracker counters
worktracking_fun_ensemble.work_tracker.costs = dict()
worktracking_fun_ensemble = WorkTrackingModel(
pool_model,num_config_vars=1)
#create more samples to notice improvement in wall time
nsamples = 10
samples = pya.generate_independent_random_samples(variable,nsamples)
fun_ids = np.ones(nsamples); fun_ids[:nsamples//2]=0
ensemble_samples = np.vstack([samples,fun_ids])
import time
t0 = time.time()
values = worktracking_fun_ensemble(ensemble_samples)
t1 = time.time()
print(f'With {max_eval_concurrency} threads that took {t1-t0} seconds')
import os
if ('OMP_NUM_THREADS' not in os.environ or int(os.environ['OMP_NUM_THREADS'])!=1):
#make sure to set OMP_NUM_THREADS=1 to maximize benefit of pool model
print('Warning set OMP_NUM_THREADS=1 for best performance')
max_eval_concurrency=4
pool_model.set_max_eval_concurrency(max_eval_concurrency)
t0 = time.time()
values = worktracking_fun_ensemble(ensemble_samples)
t1 = time.time()
print(f'With {max_eval_concurrency} threads that took {t1-t0} seconds')
#%%
#Let's print a summary of the costs to make sure individual function evaluation
#costs are still being recorded correctly
print(worktracking_fun_ensemble.work_tracker)
#%%
#Note
#^^^^
#PoolModel cannot be used with lambda functions. You will get an error similar to ``pickle.PicklingError: Can't pickle <function <lambda> at 0x12b4e6440>: attribute lookup <lambda> on __main__ failed``
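#
#A minimal workaround sketch (an illustrative addition, not from the original
#tutorial): replace the lambda with a named, module-level function, which
#pickle can serialize by its qualified name.
def named_sum_fun(samples):
    # equivalent to the lambda: lambda samples: np.sum(samples**2, axis=0)[:, None]
    # takes samples with shape (nvars, nsamples), returns shape (nsamples, 1)
    return np.sum(samples**2, axis=0)[:, None]
#named_sum_fun can be wrapped in PoolModel, unlike the lambda above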
# sphinx_gallery_thumbnail_path = './figures/cantilever-beam.png'
#%%
#.. the gallery thumbnail will show as broken if no plots are made in this
#.. file, so we specify a default image file above
| 50.345745
| 642
| 0.785843
|
27106dccc57b36a78ff94dc7d662d3cf75ee05e0
| 3,574
|
py
|
Python
|
tests/plugins/test_remove_worker_metadata.py
|
ktdreyer/atomic-reactor
|
a3cc6a1a5187c7a178b183bef15faf3a4c9de52a
|
[
"BSD-3-Clause"
] | null | null | null |
tests/plugins/test_remove_worker_metadata.py
|
ktdreyer/atomic-reactor
|
a3cc6a1a5187c7a178b183bef15faf3a4c9de52a
|
[
"BSD-3-Clause"
] | null | null | null |
tests/plugins/test_remove_worker_metadata.py
|
ktdreyer/atomic-reactor
|
a3cc6a1a5187c7a178b183bef15faf3a4c9de52a
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Copyright (c) 2017 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
"""
from __future__ import print_function, unicode_literals
import os
from flexmock import flexmock
from atomic_reactor.core import DockerTasker
from atomic_reactor.constants import PLUGIN_REMOVE_WORKER_METADATA_KEY
from atomic_reactor.inner import DockerBuildWorkflow
from atomic_reactor.plugin import ExitPluginsRunner
from atomic_reactor.util import ImageName
from atomic_reactor.plugins.exit_remove_worker_metadata import (defer_removal)
from osbs.exceptions import OsbsResponseException
from tests.constants import MOCK_SOURCE, TEST_IMAGE, INPUT_IMAGE
from tests.docker_mock import mock_docker
import pytest
class MockConfigMapResponse(object):
def __init__(self, data):
self.data = data
def get_data_by_key(self, key):
return self.data[key]
class MockOSBS(object):
def __init__(self, config_map):
self.config_map = config_map
def delete_config_map(self, name):
if name:
return name in self.config_map
else:
raise OsbsResponseException("Failed to delete config map", 404)
class MockSource(object):
def __init__(self, tmpdir):
tmpdir = str(tmpdir)
self.dockerfile_path = os.path.join(tmpdir, 'Dockerfile')
self.path = tmpdir
def get_dockerfile_path(self):
return self.dockerfile_path, self.path
class MockInsideBuilder(object):
def __init__(self):
mock_docker()
self.tasker = DockerTasker()
self.base_image = ImageName(repo='fedora', tag='25')
self.image_id = 'image_id'
self.image = INPUT_IMAGE
self.df_path = 'df_path'
self.df_dir = 'df_dir'
def simplegen(x, y):
yield "some\u2018".encode('utf-8')
flexmock(self.tasker, build_image_from_path=simplegen)
def get_built_image_info(self):
return {'Id': 'some'}
def inspect_built_image(self):
return None
def ensure_not_built(self):
pass
def mock_workflow(tmpdir):
workflow = DockerBuildWorkflow(MOCK_SOURCE, TEST_IMAGE)
setattr(workflow, 'builder', MockInsideBuilder())
setattr(workflow, 'source', MockSource(tmpdir))
setattr(workflow.builder, 'source', MockSource(tmpdir))
return workflow
@pytest.mark.parametrize('names', [['build-1-x86_64-md'],
['build-1-ppc64le-md'],
['build-1-x86_64-md', 'build-1-ppc64le-md'],
[None]])
@pytest.mark.parametrize('fragment_key', ['metadata.json', None])
def test_remove_worker_plugin(tmpdir, caplog, names, fragment_key):
workflow = mock_workflow(tmpdir)
koji_metadata = {
'foo': 'bar',
'spam': 'bacon',
}
metadata = {'metadata.json': koji_metadata}
for name in names:
osbs = MockOSBS({name: metadata})
defer_removal(workflow, name, osbs)
(flexmock(osbs)
.should_call("delete_config_map")
.with_args(name)
.once()
.and_return(True))
runner = ExitPluginsRunner(
None,
workflow,
[{
'name': PLUGIN_REMOVE_WORKER_METADATA_KEY,
"args": {}
}]
)
runner.run()
for name in names:
if name:
assert "ConfigMap {} deleted".format(name) in caplog.text
else:
assert "Failed to delete ConfigMap None" in caplog.text
| 27.075758
| 79
| 0.654449
|
e113ed63ddf3b69c4713be8892ae27b5d477ac7f
| 18,251
|
py
|
Python
|
Lib/test/libregrtest/main.py
|
DreamerComp/cpython
|
995e31e538d3321959e526d9b7ed4dd60e4eb4bb
|
[
"PSF-2.0"
] | null | null | null |
Lib/test/libregrtest/main.py
|
DreamerComp/cpython
|
995e31e538d3321959e526d9b7ed4dd60e4eb4bb
|
[
"PSF-2.0"
] | null | null | null |
Lib/test/libregrtest/main.py
|
DreamerComp/cpython
|
995e31e538d3321959e526d9b7ed4dd60e4eb4bb
|
[
"PSF-2.0"
] | null | null | null |
import datetime
import faulthandler
import locale
import os
import platform
import random
import re
import sys
import sysconfig
import tempfile
import textwrap
import time
from test.libregrtest.cmdline import _parse_args
from test.libregrtest.runtest import (
findtests, runtest,
STDTESTS, NOTTESTS, PASSED, FAILED, ENV_CHANGED, SKIPPED, RESOURCE_DENIED,
INTERRUPTED, CHILD_ERROR,
PROGRESS_MIN_TIME, format_test_result)
from test.libregrtest.setup import setup_tests
from test import support
try:
import gc
except ImportError:
gc = None
# When tests are run from the Python build directory, it is best practice
# to keep the test files in a subfolder. This eases the cleanup of leftover
# files using the "make distclean" command.
if sysconfig.is_python_build():
TEMPDIR = os.path.join(sysconfig.get_config_var('srcdir'), 'build')
else:
TEMPDIR = tempfile.gettempdir()
TEMPDIR = os.path.abspath(TEMPDIR)
def format_duration(seconds):
if seconds < 1.0:
return '%.0f ms' % (seconds * 1e3)
if seconds < 60.0:
return '%.0f sec' % seconds
minutes, seconds = divmod(seconds, 60.0)
return '%.0f min %.0f sec' % (minutes, seconds)
class Regrtest:
"""Execute a test suite.
This also parses command-line options and modifies its behavior
accordingly.
tests -- a list of strings containing test names (optional)
testdir -- the directory in which to look for tests (optional)
Users other than the Python test suite will certainly want to
specify testdir; if it's omitted, the directory containing the
Python test suite is searched for.
If the tests argument is omitted, the tests listed on the
command-line will be used. If that's empty, too, then all *.py
files beginning with test_ will be used.
The other default arguments (verbose, quiet, exclude,
single, randomize, findleaks, use_resources, trace, coverdir,
print_slow, and random_seed) allow programmers calling main()
directly to set the values that would normally be set by flags
on the command line.
"""
def __init__(self):
# Namespace of command line options
self.ns = None
# tests
self.tests = []
self.selected = []
# test results
self.good = []
self.bad = []
self.skipped = []
self.resource_denieds = []
self.environment_changed = []
self.interrupted = False
# used by --slow
self.test_times = []
# used by --coverage, trace.Trace instance
self.tracer = None
# used by --findleaks, store for gc.garbage
self.found_garbage = []
# used to display the progress bar "[ 3/100]"
self.start_time = time.monotonic()
self.test_count = ''
self.test_count_width = 1
# used by --single
self.next_single_test = None
self.next_single_filename = None
def accumulate_result(self, test, result):
ok, test_time = result
if ok not in (CHILD_ERROR, INTERRUPTED):
self.test_times.append((test_time, test))
if ok == PASSED:
self.good.append(test)
elif ok == FAILED:
self.bad.append(test)
elif ok == ENV_CHANGED:
self.environment_changed.append(test)
elif ok == SKIPPED:
self.skipped.append(test)
elif ok == RESOURCE_DENIED:
self.skipped.append(test)
self.resource_denieds.append(test)
def display_progress(self, test_index, test):
if self.ns.quiet:
return
if self.bad and not self.ns.pgo:
fmt = "{time} [{test_index:{count_width}}{test_count}/{nbad}] {test_name}"
else:
fmt = "{time} [{test_index:{count_width}}{test_count}] {test_name}"
test_time = time.monotonic() - self.start_time
test_time = datetime.timedelta(seconds=int(test_time))
line = fmt.format(count_width=self.test_count_width,
test_index=test_index,
test_count=self.test_count,
nbad=len(self.bad),
test_name=test,
time=test_time)
print(line, flush=True)
def parse_args(self, kwargs):
ns = _parse_args(sys.argv[1:], **kwargs)
if ns.timeout and not hasattr(faulthandler, 'dump_traceback_later'):
print("Warning: The timeout option requires "
"faulthandler.dump_traceback_later", file=sys.stderr)
ns.timeout = None
if ns.threshold is not None and gc is None:
print('No GC available, ignore --threshold.', file=sys.stderr)
ns.threshold = None
if ns.findleaks:
if gc is not None:
# Uncomment the line below to report garbage that is not
# freeable by reference counting alone. By default only
# garbage that is not collectable by the GC is reported.
pass
#gc.set_debug(gc.DEBUG_SAVEALL)
else:
print('No GC available, disabling --findleaks',
file=sys.stderr)
ns.findleaks = False
# Strip .py extensions.
removepy(ns.args)
return ns
def find_tests(self, tests):
self.tests = tests
if self.ns.single:
self.next_single_filename = os.path.join(TEMPDIR, 'pynexttest')
try:
with open(self.next_single_filename, 'r') as fp:
next_test = fp.read().strip()
self.tests = [next_test]
except OSError:
pass
if self.ns.fromfile:
self.tests = []
# regex to match 'test_builtin' in line:
# '0:00:00 [ 4/400] test_builtin -- test_dict took 1 sec'
regex = (r'(?:[0-9]+:[0-9]+:[0-9]+ *)?'
r'(?:\[[0-9/ ]+\] *)?'
r'(test_[a-zA-Z0-9_]+)\b(?:\.py)?')
regex = re.compile(regex)
with open(os.path.join(support.SAVEDCWD, self.ns.fromfile)) as fp:
for line in fp:
line = line.split('#', 1)[0]
line = line.strip()
match = regex.search(line)
if match is not None:
self.tests.append(match.group(1))
removepy(self.tests)
stdtests = STDTESTS[:]
nottests = NOTTESTS.copy()
if self.ns.exclude:
for arg in self.ns.args:
if arg in stdtests:
stdtests.remove(arg)
nottests.add(arg)
self.ns.args = []
# if testdir is set, then we are not running the Python test suite, so
# don't add default tests to be executed or skipped (pass empty values)
if self.ns.testdir:
alltests = findtests(self.ns.testdir, list(), set())
else:
alltests = findtests(self.ns.testdir, stdtests, nottests)
if not self.ns.fromfile:
self.selected = self.tests or self.ns.args or alltests
else:
self.selected = self.tests
if self.ns.single:
self.selected = self.selected[:1]
try:
pos = alltests.index(self.selected[0])
self.next_single_test = alltests[pos + 1]
except IndexError:
pass
# Remove all the selected tests that precede start if it's set.
if self.ns.start:
try:
del self.selected[:self.selected.index(self.ns.start)]
except ValueError:
print("Couldn't find starting test (%s), using all tests"
% self.ns.start, file=sys.stderr)
if self.ns.randomize:
if self.ns.random_seed is None:
self.ns.random_seed = random.randrange(10000000)
random.seed(self.ns.random_seed)
random.shuffle(self.selected)
def list_tests(self):
for name in self.selected:
print(name)
def rerun_failed_tests(self):
self.ns.verbose = True
self.ns.failfast = False
self.ns.verbose3 = False
self.ns.match_tests = None
print("Re-running failed tests in verbose mode")
for test in self.bad[:]:
print("Re-running test %r in verbose mode" % test, flush=True)
try:
self.ns.verbose = True
ok = runtest(self.ns, test)
except KeyboardInterrupt:
self.interrupted = True
# print a newline separate from the ^C
print()
break
else:
if ok[0] in {PASSED, ENV_CHANGED, SKIPPED, RESOURCE_DENIED}:
self.bad.remove(test)
else:
if self.bad:
print(count(len(self.bad), 'test'), "failed again:")
printlist(self.bad)
def display_result(self):
if self.interrupted:
# print a newline after ^C
print()
print("Test suite interrupted by signal SIGINT.")
executed = set(self.good) | set(self.bad) | set(self.skipped)
omitted = set(self.selected) - executed
print(count(len(omitted), "test"), "omitted:")
printlist(omitted)
# If running the test suite for PGO then no one cares about
# results.
if self.ns.pgo:
return
if self.good and not self.ns.quiet:
if (not self.bad
and not self.skipped
and not self.interrupted
and len(self.good) > 1):
print("All", end=' ')
print(count(len(self.good), "test"), "OK.")
if self.ns.print_slow:
self.test_times.sort(reverse=True)
print()
print("10 slowest tests:")
for time, test in self.test_times[:10]:
print("- %s: %s" % (test, format_duration(time)))
if self.bad:
print()
print(count(len(self.bad), "test"), "failed:")
printlist(self.bad)
if self.environment_changed:
print()
print("{} altered the execution environment:".format(
count(len(self.environment_changed), "test")))
printlist(self.environment_changed)
if self.skipped and not self.ns.quiet:
print()
print(count(len(self.skipped), "test"), "skipped:")
printlist(self.skipped)
def run_tests_sequential(self):
if self.ns.trace:
import trace
self.tracer = trace.Trace(trace=False, count=True)
save_modules = sys.modules.keys()
print("Run tests sequentially")
previous_test = None
for test_index, test in enumerate(self.tests, 1):
start_time = time.monotonic()
text = test
if previous_test:
text = '%s -- %s' % (text, previous_test)
self.display_progress(test_index, text)
if self.tracer:
# If we're tracing code coverage, then we don't exit with nonzero
# status on a false return value from main.
cmd = ('result = runtest(self.ns, test); '
'self.accumulate_result(test, result)')
ns = dict(locals())
self.tracer.runctx(cmd, globals=globals(), locals=ns)
result = ns['result']
else:
try:
result = runtest(self.ns, test)
except KeyboardInterrupt:
self.interrupted = True
self.accumulate_result(test, (INTERRUPTED, None))
break
else:
self.accumulate_result(test, result)
previous_test = format_test_result(test, result[0])
test_time = time.monotonic() - start_time
if test_time >= PROGRESS_MIN_TIME:
previous_test = "%s in %s" % (previous_test, format_duration(test_time))
elif result[0] == PASSED:
# be quiet: say nothing if the test passed shortly
previous_test = None
if self.ns.findleaks:
gc.collect()
if gc.garbage:
print("Warning: test created", len(gc.garbage), end=' ')
print("uncollectable object(s).")
# move the uncollectable objects somewhere so we don't see
# them again
self.found_garbage.extend(gc.garbage)
del gc.garbage[:]
# Unload the newly imported modules (best effort finalization)
for module in sys.modules.keys():
if module not in save_modules and module.startswith("test."):
support.unload(module)
if previous_test:
print(previous_test)
def _test_forever(self, tests):
while True:
for test in tests:
yield test
if self.bad:
return
def run_tests(self):
# For a partial run, we do not need to clutter the output.
if (self.ns.verbose
or self.ns.header
or not (self.ns.pgo or self.ns.quiet or self.ns.single
or self.tests or self.ns.args)):
# Print basic platform information
print("==", platform.python_implementation(), *sys.version.split())
print("== ", platform.platform(aliased=True),
"%s-endian" % sys.byteorder)
print("== ", "hash algorithm:", sys.hash_info.algorithm,
"64bit" if sys.maxsize > 2**32 else "32bit")
print("== cwd:", os.getcwd())
print("== encodings: locale=%s, FS=%s"
% (locale.getpreferredencoding(False),
sys.getfilesystemencoding()))
print("Testing with flags:", sys.flags)
if self.ns.randomize:
print("Using random seed", self.ns.random_seed)
if self.ns.forever:
self.tests = self._test_forever(list(self.selected))
self.test_count = ''
self.test_count_width = 3
else:
self.tests = iter(self.selected)
self.test_count = '/{}'.format(len(self.selected))
self.test_count_width = len(self.test_count) - 1
if self.ns.use_mp:
from test.libregrtest.runtest_mp import run_tests_multiprocess
run_tests_multiprocess(self)
else:
self.run_tests_sequential()
def finalize(self):
if self.next_single_filename:
if self.next_single_test:
with open(self.next_single_filename, 'w') as fp:
fp.write(self.next_single_test + '\n')
else:
os.unlink(self.next_single_filename)
if self.tracer:
r = self.tracer.results()
r.write_results(show_missing=True, summary=True,
coverdir=self.ns.coverdir)
print()
duration = time.monotonic() - self.start_time
print("Total duration: %s" % format_duration(duration))
if self.bad:
result = "FAILURE"
elif self.interrupted:
result = "INTERRUPTED"
else:
result = "SUCCESS"
print("Tests result: %s" % result)
if self.ns.runleaks:
os.system("leaks %d" % os.getpid())
def main(self, tests=None, **kwargs):
global TEMPDIR
if sysconfig.is_python_build():
try:
os.mkdir(TEMPDIR)
except FileExistsError:
pass
# Define a writable temp dir that will be used as cwd while running
# the tests. The name of the dir includes the pid to allow parallel
# testing (see the -j option).
test_cwd = 'test_python_{}'.format(os.getpid())
test_cwd = os.path.join(TEMPDIR, test_cwd)
# Run the tests in a context manager that temporarily changes the CWD to a
# temporary and writable directory. If it's not possible to create or
# change the CWD, the original CWD will be used. The original CWD is
# available from support.SAVEDCWD.
with support.temp_cwd(test_cwd, quiet=True):
self._main(tests, kwargs)
def _main(self, tests, kwargs):
self.ns = self.parse_args(kwargs)
if self.ns.slaveargs is not None:
from test.libregrtest.runtest_mp import run_tests_slave
run_tests_slave(self.ns.slaveargs)
if self.ns.wait:
input("Press any key to continue...")
support.PGO = self.ns.pgo
setup_tests(self.ns)
self.find_tests(tests)
if self.ns.list_tests:
self.list_tests()
sys.exit(0)
self.run_tests()
self.display_result()
if self.ns.verbose2 and self.bad:
self.rerun_failed_tests()
self.finalize()
sys.exit(len(self.bad) > 0 or self.interrupted)
def removepy(names):
if not names:
return
for idx, name in enumerate(names):
basename, ext = os.path.splitext(name)
if ext == '.py':
names[idx] = basename
def count(n, word):
if n == 1:
return "%d %s" % (n, word)
else:
return "%d %ss" % (n, word)
def printlist(x, width=70, indent=4):
"""Print the elements of iterable x to stdout.
Optional arg width (default 70) is the maximum line length.
Optional arg indent (default 4) is the number of blanks with which to
begin each line.
"""
blanks = ' ' * indent
# Print the sorted list: 'x' may be a '--random' list or a set()
print(textwrap.fill(' '.join(str(elt) for elt in sorted(x)), width,
initial_indent=blanks, subsequent_indent=blanks))
def main(tests=None, **kwargs):
"""Run the Python suite."""
Regrtest().main(tests=tests, **kwargs)
| 34.370998
| 88
| 0.559257
|
2e9de64a4011fcebd148db5b7ccf1f9a27c26956
| 314
|
py
|
Python
|
76-100/80.py
|
tonyyzy/ProjectEuler
|
f52de2f931ebd4df2020e32d12062866b1586e72
|
[
"MIT"
] | null | null | null |
76-100/80.py
|
tonyyzy/ProjectEuler
|
f52de2f931ebd4df2020e32d12062866b1586e72
|
[
"MIT"
] | null | null | null |
76-100/80.py
|
tonyyzy/ProjectEuler
|
f52de2f931ebd4df2020e32d12062866b1586e72
|
[
"MIT"
] | null | null | null |
# Project Euler 80: sum the first one hundred decimal digits of the
# irrational square roots of the natural numbers up to one hundred.
from decimal import getcontext, Decimal
# 102 digits of precision guard against rounding errors in the last digits
getcontext().prec = 102
# collect the numbers whose square roots are irrational (non-perfect squares)
non_natural = []
for i in range(1, 101):
    if i ** 0.5 % 1 != 0:
        non_natural.append(i)
result = 0
for i in non_natural:
    a = Decimal(i).sqrt()
    # multiplying by 10**99 moves at least the first 100 significant digits
    # in front of the decimal point, so the string slice contains only digits
    result += sum(int(c) for c in str(a * (10 ** 99))[:100])
print(result)
| 19.625
| 61
| 0.573248
|
b05532f3ba3f67630eb126893b478a7b21d36941
| 18,440
|
py
|
Python
|
flask_restx/inputs.py
|
james-powis/flask-restx
|
ee1fc4ceb93887065b4785f14dadd70d7bc2efe7
|
[
"BSD-3-Clause"
] | 1
|
2021-05-24T14:33:40.000Z
|
2021-05-24T14:33:40.000Z
|
flask_restx/inputs.py
|
james-powis/flask-restx
|
ee1fc4ceb93887065b4785f14dadd70d7bc2efe7
|
[
"BSD-3-Clause"
] | 1
|
2021-06-15T09:49:50.000Z
|
2021-06-15T09:49:50.000Z
|
flask_restx/inputs.py
|
james-powis/flask-restx
|
ee1fc4ceb93887065b4785f14dadd70d7bc2efe7
|
[
"BSD-3-Clause"
] | 1
|
2020-08-16T22:20:31.000Z
|
2020-08-16T22:20:31.000Z
|
# -*- coding: utf-8 -*-
"""
This module provide some helpers for advanced types parsing.
You can define your own parser using the same pattern:
.. code-block:: python
def my_type(value):
if not condition:
raise ValueError('This is not my type')
return parse(value)
# Swagger documentation
my_type.__schema__ = {'type': 'string', 'format': 'my-custom-format'}
The last line allows you to properly document the type in the Swagger documentation.
"""
from __future__ import unicode_literals
import re
import socket
from datetime import datetime, time, timedelta
from email.utils import parsedate_tz, mktime_tz
from six.moves.urllib.parse import urlparse
import aniso8601
import pytz
# Constants for upgrading date-based intervals to full datetimes.
START_OF_DAY = time(0, 0, 0, tzinfo=pytz.UTC)
END_OF_DAY = time(23, 59, 59, 999999, tzinfo=pytz.UTC)
netloc_regex = re.compile(
r"(?:(?P<auth>[^:@]+?(?::[^:@]*?)?)@)?" # basic auth
r"(?:"
r"(?P<localhost>localhost)|" # localhost...
r"(?P<ipv4>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})|" # ...or ipv4
r"(?:\[?(?P<ipv6>[A-F0-9]*:[A-F0-9:]+)\]?)|" # ...or ipv6
r"(?P<domain>(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?))" # domain...
r")"
r"(?::(?P<port>\d+))?" # optional port
r"$",
re.IGNORECASE,
)
email_regex = re.compile(
r"^" "(?P<local>[^@]*[^@.])" r"@" r"(?P<server>[^@]+(?:\.[^@]+)*)" r"$",
re.IGNORECASE,
)
time_regex = re.compile(r"\d{2}:\d{2}")
def ipv4(value):
"""Validate an IPv4 address"""
try:
socket.inet_aton(value)
if value.count(".") == 3:
return value
except socket.error:
pass
raise ValueError("{0} is not a valid ipv4 address".format(value))
ipv4.__schema__ = {"type": "string", "format": "ipv4"}
def ipv6(value):
"""Validate an IPv6 address"""
try:
socket.inet_pton(socket.AF_INET6, value)
return value
except socket.error:
raise ValueError("{0} is not a valid ipv4 address".format(value))
ipv6.__schema__ = {"type": "string", "format": "ipv6"}
def ip(value):
"""Validate an IP address (both IPv4 and IPv6)"""
try:
return ipv4(value)
except ValueError:
pass
try:
return ipv6(value)
except ValueError:
raise ValueError("{0} is not a valid ip".format(value))
ip.__schema__ = {"type": "string", "format": "ip"}
class URL(object):
"""
Validate a URL.
Example::
parser = reqparse.RequestParser()
parser.add_argument('url', type=inputs.URL(schemes=['http', 'https']))
Input to the ``URL`` argument will be rejected
if it does not match a URL with the specified constraints.
If ``check`` is True it will also be rejected if the domain does not exist.
:param bool check: Check the domain exists (perform a DNS resolution)
:param bool ip: Allow IP (both ipv4/ipv6) as domain
:param bool local: Allow localhost (both string or ip) as domain
:param bool port: Allow a port to be present
:param bool auth: Allow authentication to be present
:param list|tuple schemes: Restrict valid schemes to this list
:param list|tuple domains: Restrict valid domains to this list
:param list|tuple exclude: Exclude some domains
"""
def __init__(
self,
check=False,
ip=False,
local=False,
port=False,
auth=False,
schemes=None,
domains=None,
exclude=None,
):
self.check = check
self.ip = ip
self.local = local
self.port = port
self.auth = auth
self.schemes = schemes
self.domains = domains
self.exclude = exclude
def error(self, value, details=None):
msg = "{0} is not a valid URL"
if details:
msg = ". ".join((msg, details))
raise ValueError(msg.format(value))
def __call__(self, value):
parsed = urlparse(value)
netloc_match = netloc_regex.match(parsed.netloc)
if not all((parsed.scheme, parsed.netloc)):
if netloc_regex.match(
parsed.netloc or parsed.path.split("/", 1)[0].split("?", 1)[0]
):
self.error(value, "Did you mean: http://{0}")
self.error(value)
if parsed.scheme and self.schemes and parsed.scheme not in self.schemes:
self.error(value, "Protocol is not allowed")
if not netloc_match:
self.error(value)
data = netloc_match.groupdict()
if data["ipv4"] or data["ipv6"]:
if not self.ip:
self.error(value, "IP is not allowed")
else:
try:
ip(data["ipv4"] or data["ipv6"])
except ValueError as e:
self.error(value, str(e))
if not self.local:
if data["ipv4"] and data["ipv4"].startswith("127."):
self.error(value, "Localhost is not allowed")
elif data["ipv6"] == "::1":
self.error(value, "Localhost is not allowed")
if self.check:
pass
if data["auth"] and not self.auth:
self.error(value, "Authentication is not allowed")
if data["localhost"] and not self.local:
self.error(value, "Localhost is not allowed")
if data["port"]:
if not self.port:
self.error(value, "Custom port is not allowed")
else:
port = int(data["port"])
if not 0 < port < 65535:
self.error(value, "Port is out of range")
if data["domain"]:
if self.domains and data["domain"] not in self.domains:
self.error(value, "Domain is not allowed")
elif self.exclude and data["domain"] in self.exclude:
self.error(value, "Domain is not allowed")
if self.check:
try:
socket.getaddrinfo(data["domain"], None)
except socket.error:
self.error(value, "Domain does not exists")
return value
@property
def __schema__(self):
return {
"type": "string",
"format": "url",
}
#: Validate an URL
#:
#: Legacy validator; allows auth, port, ip and local
#: Only allows schemes 'http', 'https', 'ftp' and 'ftps'
url = URL(
ip=True, auth=True, port=True, local=True, schemes=("http", "https", "ftp", "ftps")
)
class email(object):
"""
Validate an email.
Example::
parser = reqparse.RequestParser()
parser.add_argument('email', type=inputs.email(dns=True))
Input to the ``email`` argument will be rejected if it does not match an email
address or, when ``check`` is enabled, if the domain does not exist.
:param bool check: Check the domain exists (perform a DNS resolution)
:param bool ip: Allow IP (both ipv4/ipv6) as domain
:param bool local: Allow localhost (both string or ip) as domain
:param list|tuple domains: Restrict valid domains to this list
:param list|tuple exclude: Exclude some domains
"""
def __init__(self, check=False, ip=False, local=False, domains=None, exclude=None):
self.check = check
self.ip = ip
self.local = local
self.domains = domains
self.exclude = exclude
def error(self, value, msg=None):
msg = msg or "{0} is not a valid email"
raise ValueError(msg.format(value))
def is_ip(self, value):
try:
ip(value)
return True
except ValueError:
return False
def __call__(self, value):
match = email_regex.match(value)
if not match or ".." in value:
self.error(value)
server = match.group("server")
if self.check:
try:
socket.getaddrinfo(server, None)
except socket.error:
self.error(value)
if self.domains and server not in self.domains:
self.error(value, "{0} does not belong to the authorized domains")
if self.exclude and server in self.exclude:
self.error(value, "{0} belongs to a forbidden domain")
if not self.local and (
server in ("localhost", "::1") or server.startswith("127.")
):
self.error(value)
if self.is_ip(server) and not self.ip:
self.error(value)
return value
@property
def __schema__(self):
return {
"type": "string",
"format": "email",
}
class regex(object):
"""
Validate a string based on a regular expression.
Example::
parser = reqparse.RequestParser()
parser.add_argument('example', type=inputs.regex('^[0-9]+$'))
Input to the ``example`` argument will be rejected if it contains anything
but numbers.
:param str pattern: The regular expression the input must match
"""
def __init__(self, pattern):
self.pattern = pattern
self.re = re.compile(pattern)
def __call__(self, value):
if not self.re.search(value):
message = 'Value does not match pattern: "{0}"'.format(self.pattern)
raise ValueError(message)
return value
def __deepcopy__(self, memo):
return regex(self.pattern)
@property
def __schema__(self):
return {
"type": "string",
"pattern": self.pattern,
}
def _normalize_interval(start, end, value):
"""
Normalize datetime intervals.
Given a pair of datetime.date or datetime.datetime objects,
returns a 2-tuple of tz-aware UTC datetimes spanning the same interval.
For datetime.date objects, the returned interval starts at 00:00:00.0
on the first date and ends at 00:00:00.0 on the second.
Naive datetimes are upgraded to UTC.
Timezone-aware datetimes are normalized to UTC.
Params:
- start: A date or datetime
- end: A date or datetime
"""
if not isinstance(start, datetime):
start = datetime.combine(start, START_OF_DAY)
end = datetime.combine(end, START_OF_DAY)
if start.tzinfo is None:
start = pytz.UTC.localize(start)
end = pytz.UTC.localize(end)
else:
start = start.astimezone(pytz.UTC)
end = end.astimezone(pytz.UTC)
return start, end
def _expand_datetime(start, value):
if not isinstance(start, datetime):
# Expand a single date object to be the interval spanning
# that entire day.
end = start + timedelta(days=1)
else:
# Expand a datetime based on the finest resolution provided
# in the original input string.
time = value.split("T")[1]
time_without_offset = re.sub("[+-].+", "", time)
num_separators = time_without_offset.count(":")
if num_separators == 0:
# Hour resolution
end = start + timedelta(hours=1)
elif num_separators == 1:
# Minute resolution:
end = start + timedelta(minutes=1)
else:
# Second resolution
end = start + timedelta(seconds=1)
return end
def _parse_interval(value):
"""
Do some nasty try/except voodoo to get some sort of datetime
object(s) out of the string.
"""
try:
return sorted(aniso8601.parse_interval(value))
except ValueError:
try:
return aniso8601.parse_datetime(value), None
except ValueError:
return aniso8601.parse_date(value), None
def iso8601interval(value, argument="argument"):
"""
Parses ISO 8601-formatted datetime intervals into tuples of datetimes.
Accepts both a single date(time) or a full interval using either start/end
or start/duration notation, with the following behavior:
- Intervals are defined as inclusive start, exclusive end
- Single datetimes are translated into the interval spanning the
largest resolution not specified in the input value, up to the day.
- The smallest accepted resolution is 1 second.
- All timezones are accepted as values; returned datetimes are
localized to UTC. Naive inputs and date inputs are assumed to be UTC.
Examples::
"2013-01-01" -> datetime(2013, 1, 1), datetime(2013, 1, 2)
"2013-01-01T12" -> datetime(2013, 1, 1, 12), datetime(2013, 1, 1, 13)
"2013-01-01/2013-02-28" -> datetime(2013, 1, 1), datetime(2013, 2, 28)
"2013-01-01/P3D" -> datetime(2013, 1, 1), datetime(2013, 1, 4)
"2013-01-01T12:00/PT30M" -> datetime(2013, 1, 1, 12), datetime(2013, 1, 1, 12, 30)
"2013-01-01T06:00/2013-01-01T12:00" -> datetime(2013, 1, 1, 6), datetime(2013, 1, 1, 12)
:param str value: The ISO8601 date time as a string
:return: Two UTC datetimes, the start and the end of the specified interval
:rtype: A tuple (datetime, datetime)
:raises ValueError: if the interval is invalid.
"""
if not value:
raise ValueError("Expected a valid ISO8601 date/time interval.")
try:
start, end = _parse_interval(value)
if end is None:
end = _expand_datetime(start, value)
start, end = _normalize_interval(start, end, value)
except ValueError:
msg = (
"Invalid {arg}: {value}. {arg} must be a valid ISO8601 date/time interval."
)
raise ValueError(msg.format(arg=argument, value=value))
return start, end
iso8601interval.__schema__ = {"type": "string", "format": "iso8601-interval"}
def date(value):
"""Parse a valid looking date in the format YYYY-mm-dd"""
date = datetime.strptime(value, "%Y-%m-%d")
return date
date.__schema__ = {"type": "string", "format": "date"}
def _get_integer(value):
try:
return int(value)
except (TypeError, ValueError):
raise ValueError("{0} is not a valid integer".format(value))
def natural(value, argument="argument"):
"""Restrict input type to the natural numbers (0, 1, 2, 3...)"""
value = _get_integer(value)
if value < 0:
msg = "Invalid {arg}: {value}. {arg} must be a non-negative integer"
raise ValueError(msg.format(arg=argument, value=value))
return value
natural.__schema__ = {"type": "integer", "minimum": 0}
def positive(value, argument="argument"):
"""Restrict input type to the positive integers (1, 2, 3...)"""
value = _get_integer(value)
if value < 1:
msg = "Invalid {arg}: {value}. {arg} must be a positive integer"
raise ValueError(msg.format(arg=argument, value=value))
return value
positive.__schema__ = {"type": "integer", "minimum": 0, "exclusiveMinimum": True}
class int_range(object):
"""Restrict input to an integer in a range (inclusive)"""
def __init__(self, low, high, argument="argument"):
self.low = low
self.high = high
self.argument = argument
def __call__(self, value):
value = _get_integer(value)
if value < self.low or value > self.high:
msg = "Invalid {arg}: {val}. {arg} must be within the range {lo} - {hi}"
raise ValueError(
msg.format(arg=self.argument, val=value, lo=self.low, hi=self.high)
)
return value
@property
def __schema__(self):
return {
"type": "integer",
"minimum": self.low,
"maximum": self.high,
}
def boolean(value):
"""
Parse the string ``"true"`` or ``"false"`` as a boolean (case insensitive).
Also accepts ``"1"`` and ``"0"`` as ``True``/``False`` (respectively).
If the input is from the request JSON body, the type is already a native python boolean,
and will be passed through without further parsing.
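Example::
inputs.boolean('true')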
:raises ValueError: if the boolean value is invalid
"""
if isinstance(value, bool):
return value
if value is None:
raise ValueError("boolean type must be non-null")
elif not value:
return False
value = str(value).lower()
if value in ("true", "1", "on",):
return True
if value in ("false", "0",):
return False
raise ValueError("Invalid literal for boolean(): {0}".format(value))
boolean.__schema__ = {"type": "boolean"}
def datetime_from_rfc822(value):
"""
Turns an RFC822 formatted date into a datetime object.
Example::
inputs.datetime_from_rfc822('Wed, 02 Oct 2002 08:00:00 EST')
:param str value: The RFC822-complying string to transform
:return: The parsed datetime
:rtype: datetime
:raises ValueError: if value is an invalid date literal
"""
raw = value
if not time_regex.search(value):
value = " ".join((value, "00:00:00"))
try:
timetuple = parsedate_tz(value)
timestamp = mktime_tz(timetuple)
if timetuple[-1] is None:
return datetime.fromtimestamp(timestamp).replace(tzinfo=pytz.utc)
else:
return datetime.fromtimestamp(timestamp, pytz.utc)
except Exception:
raise ValueError('Invalid date literal "{0}"'.format(raw))
def datetime_from_iso8601(value):
"""
Turns an ISO8601 formatted date into a datetime object.
Example::
inputs.datetime_from_iso8601("2012-01-01T23:30:00+02:00")
:param str value: The ISO8601-complying string to transform
:return: A datetime
:rtype: datetime
:raises ValueError: if value is an invalid date literal
"""
try:
try:
return aniso8601.parse_datetime(value)
except ValueError:
date = aniso8601.parse_date(value)
return datetime(date.year, date.month, date.day)
except Exception:
raise ValueError('Invalid date literal "{0}"'.format(value))
datetime_from_iso8601.__schema__ = {"type": "string", "format": "date-time"}
def date_from_iso8601(value):
"""
Turns an ISO8601 formatted date into a date object.
Example::
inputs.date_from_iso8601("2012-01-01")
:param str value: The ISO8601-complying string to transform
:return: A date
:rtype: date
:raises ValueError: if value is an invalid date literal
"""
return datetime_from_iso8601(value).date()
date_from_iso8601.__schema__ = {"type": "string", "format": "date"}
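# A minimal sketch of the custom-parser pattern documented in the module
# docstring above (illustrative only; ``hex_color`` is a hypothetical parser,
# not part of flask-restx):
#
#     hex_color_re = re.compile(r"^#[0-9a-fA-F]{6}$")
#
#     def hex_color(value):
#         """Validate a CSS-style hex color such as #1a2b3c"""
#         if not hex_color_re.match(value):
#             raise ValueError("{0} is not a valid hex color".format(value))
#         return value.lower()
#
#     # Swagger documentation
#     hex_color.__schema__ = {"type": "string", "format": "hex-color"}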
| 30.180033
| 109
| 0.601952
|
2e29cb97468a9cf237e3a88b9d446a6f59099f9c
| 1,304
|
py
|
Python
|
loginpass/slack.py
|
authlib/socialism
|
635823a78a2a92cf8630f9935aebb9afcccb8656
|
[
"BSD-3-Clause"
] | 223
|
2018-04-23T14:48:17.000Z
|
2022-03-19T09:27:36.000Z
|
loginpass/slack.py
|
authlib/socialism
|
635823a78a2a92cf8630f9935aebb9afcccb8656
|
[
"BSD-3-Clause"
] | 68
|
2018-04-22T13:55:25.000Z
|
2022-03-15T15:28:42.000Z
|
loginpass/slack.py
|
authlib/socialism
|
635823a78a2a92cf8630f9935aebb9afcccb8656
|
[
"BSD-3-Clause"
] | 86
|
2018-04-24T21:09:26.000Z
|
2022-03-17T08:55:34.000Z
|
"""
loginpass.slack
~~~~~~~~~~~~~~~
Loginpass Backend of Slack (https://slack.com).
Useful Links:
- Create App: https://api.slack.com/apps
- API documentation: https://api.slack.com/
- Sign In: https://api.slack.com/docs/sign-in-with-slack
:copyright: (c) 2018 by Hsiaoming Yang
:license: BSD, see LICENSE for more details.
"""
def normalize_userinfo(client, data):
user = data['user']
picture = None
for s in ['512', '192', '72', '48', '32', '24']:
src = user.get('image_' + s)
if src:
picture = src
break
params = {
'sub': user['id'],
'email': user['email'],
'name': user['name'],
}
if picture:
params['picture'] = picture
return params
class Slack(object):
NAME = 'slack'
OAUTH_CONFIG = {
'api_base_url': 'https://slack.com/api/',
'access_token_url': 'https://slack.com/api/oauth.access',
'authorize_url': 'https://slack.com/oauth/authorize',
'client_kwargs': {
'token_endpoint_auth_method': 'client_secret_post',
'scope': 'identity.basic identity.avatar identity.email',
},
'userinfo_endpoint': 'users.identity',
'userinfo_compliance_fix': normalize_userinfo,
}
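# A usage sketch (an assumption based on the loginpass README; the exact
# ``create_flask_blueprint`` signature has varied between loginpass versions):
#
#     from flask import Flask
#     from authlib.integrations.flask_client import OAuth
#     from loginpass import create_flask_blueprint
#
#     app = Flask(__name__)
#     oauth = OAuth(app)
#
#     def handle_authorize(remote, token, user_info):
#         # user_info here is the dict built by normalize_userinfo above
#         return user_info
#
#     bp = create_flask_blueprint([Slack], oauth, handle_authorize)
#     app.register_blueprint(bp, url_prefix='/account')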
| 25.568627
| 69
| 0.567485
|
1b215bc4626976da0ddda0b82575e037040177bc
| 55,053
|
py
|
Python
|
aloha/create_aloha.py
|
mfasDa/MadGraph5
|
9b90feda56d6209b81f74e61dc353a729886a4a5
|
[
"NCSA"
] | null | null | null |
aloha/create_aloha.py
|
mfasDa/MadGraph5
|
9b90feda56d6209b81f74e61dc353a729886a4a5
|
[
"NCSA"
] | null | null | null |
aloha/create_aloha.py
|
mfasDa/MadGraph5
|
9b90feda56d6209b81f74e61dc353a729886a4a5
|
[
"NCSA"
] | 1
|
2021-07-06T08:19:27.000Z
|
2021-07-06T08:19:27.000Z
|
################################################################################
#
# Copyright (c) 2010 The MadGraph5_aMC@NLO Development team and Contributors
#
# This file is a part of the MadGraph5_aMC@NLO project, an application which
# automatically generates Feynman diagrams and matrix elements for arbitrary
# high-energy processes in the Standard Model and beyond.
#
# It is subject to the MadGraph5_aMC@NLO license which should accompany this
# distribution.
#
# For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch
#
################################################################################
from __future__ import division
from __future__ import absolute_import
import cmath
import copy
import operator
import six.moves.cPickle
import glob
import logging
import numbers
import os
import re
import shutil
import sys
import time
from madgraph.interface.tutorial_text import output
from six.moves import range
from six.moves import zip
root_path = os.path.split(os.path.dirname(os.path.realpath( __file__ )))[0]
sys.path.append(root_path)
from aloha.aloha_object import *
import aloha
import aloha.aloha_writers as aloha_writers
import aloha.aloha_lib as aloha_lib
import aloha.aloha_object as aloha_object
import aloha.aloha_parsers as aloha_parsers
import aloha.aloha_fct as aloha_fct
import models
try:
import madgraph.iolibs.files as files
import madgraph.various.misc as misc
except Exception:
import aloha.files as files
import aloha.misc as misc
aloha_path = os.path.dirname(os.path.realpath(__file__))
logger = logging.getLogger('ALOHA')
_conjugate_gap = 50
_spin2_mult = 1000
pjoin = os.path.join
ALOHAERROR = aloha.ALOHAERROR
class AbstractRoutine(object):
""" store the result of the computation of Helicity Routine
this is use for storing and passing to writer """
def __init__(self, expr, outgoing, spins, name, infostr, denom=None):
""" store the information """
self.spins = spins
self.expr = expr
self.denominator = denom
self.name = name
self.outgoing = outgoing
self.infostr = infostr
self.symmetries = []
self.combined = []
self.tag = []
self.contracted = {}
def add_symmetry(self, outgoing):
""" add an outgoing """
if not outgoing in self.symmetries:
self.symmetries.append(outgoing)
def add_combine(self, lor_list):
"""add a combine rule """
if lor_list not in self.combined:
self.combined.append(lor_list)
def write(self, output_dir, language='Fortran', mode='self', combine=True,**opt):
""" write the content of the object """
writer = aloha_writers.WriterFactory(self, language, output_dir, self.tag)
text = writer.write(mode=mode, **opt)
if combine:
for grouped in self.combined:
if isinstance(text, tuple):
text = tuple([old.__add__(new) for old, new in zip(text,
writer.write_combined(grouped, mode=mode+'no_include', **opt))])
else:
text += writer.write_combined(grouped, mode=mode+'no_include', **opt)
if aloha.mp_precision and 'MP' not in self.tag:
self.tag.append('MP')
text += self.write(output_dir, language, mode, **opt)
return text
def get_info(self, info):
"""return some information on the routine
"""
if info == "rank":
assert isinstance(self.expr, aloha_lib.SplitCoefficient)
rank= 1
for coeff in self.expr:
rank = max(sum(coeff), rank)
return rank - 1 # due to the coefficient associated with the wavefunctions
else:
raise ALOHAERROR('%s is not a valid information that can be computed' % info)
class AbstractRoutineBuilder(object):
""" Launch the creation of the Helicity Routine"""
prop_lib = {} # Store computation for the propagator
counter = 0 # counter for statistic only
class AbstractALOHAError(Exception):
""" An error class for ALOHA"""
def __init__(self, lorentz, model=None):
""" initialize the run
lorentz: the lorentz information analyzed (UFO format)
language: define in which language we write the output
modes: 0 for all incoming particles
>0 defines the outgoing part (start to count at 1)
"""
self.spins = [s for s in lorentz.spins]
self.name = lorentz.name
self.conjg = []
self.tag = []
self.outgoing = None
self.lorentz_expr = lorentz.structure
self.routine_kernel = None
self.spin2_massless = False
self.spin32_massless = False
self.contracted = {}
self.fct = {}
self.model = model
self.denominator = None
# assert model
self.lastprint = 0 # to avoid ALOHA making too many printouts
if hasattr(lorentz, 'formfactors') and lorentz.formfactors:
for formf in lorentz.formfactors:
pat = re.compile(r'\b%s\b' % formf.name)
self.lorentz_expr = pat.sub('(%s)' % formf.value, self.lorentz_expr)
def compute_routine(self, mode, tag=[], factorize=True):
"""compute the expression and return it"""
self.outgoing = mode
self.tag = tag
if __debug__:
if mode == 0:
assert not any(t.startswith('L') for t in tag)
self.expr = self.compute_aloha_high_kernel(mode, factorize)
return self.define_simple_output()
def define_all_conjugate_builder(self, pair_list):
""" return the full set of AbstractRoutineBuilder linked to fermion
clash"""
solution = []
for i, pair in enumerate(pair_list):
new_builder = self.define_conjugate_builder(pair)
solution.append(new_builder)
solution += new_builder.define_all_conjugate_builder(pair_list[i+1:])
return solution
def define_conjugate_builder(self, pairs=1):
""" return a AbstractRoutineBuilder for the conjugate operation.
If they are more than one pair of fermion. Then use pair to claim which
one is conjugated"""
new_builder = copy.copy(self)
new_builder.conjg = self.conjg[:]
try:
for index in pairs:
new_builder.apply_conjugation(index)
except TypeError:
new_builder.apply_conjugation(pairs)
return new_builder
def apply_conjugation(self, pair=1):
""" apply conjugation on self object"""
nb_fermion = len([1 for s in self.spins if s % 2 == 0])
if isinstance(pair, tuple):
if len(pair) ==1 :
pair = pair[0]
else:
raise Exception
if (pair > 1 or nb_fermion >2) and not self.conjg:
# self.conjg avoids multiple checks
data = aloha_fct.get_fermion_flow(self.lorentz_expr, nb_fermion)
target = dict([(2*i+1,2*i+2) for i in range(nb_fermion//2)])
if not data == target:
text = """Unable to deal with 4(or more) point interactions
in presence of majorana particle/flow violation"""
raise ALOHAERROR(text)
old_id = 2 * pair - 1
new_id = _conjugate_gap + old_id
self.kernel_tag = set()
aloha_lib.KERNEL.use_tag = set()
if not self.routine_kernel or isinstance(self.routine_kernel, str):
self.routine_kernel = eval(self.parse_expression(self.lorentz_expr))
self.kernel_tag = aloha_lib.KERNEL.use_tag
# We need to compute C Gamma^T C^-1 = C_ab G_cb (-1) C_cd
# = C_ac G_bc (-1) C_bd = C_ac G_bc C_db
self.routine_kernel = \
C(new_id, old_id + 1) * self.routine_kernel * C(new_id + 1, old_id)
self.lorentz_expr = '('+self.lorentz_expr+') * C(%s,%s) * C(%s,%s)' % \
(new_id, old_id + 1, new_id + 1, old_id )
self.conjg.append(pair)
def define_simple_output(self):
""" define a simple output for this AbstractRoutine """
infostr = str(self.lorentz_expr)
output = AbstractRoutine(self.expr, self.outgoing, self.spins, self.name, \
infostr, self.denominator)
output.contracted = dict([(name, aloha_lib.KERNEL.reduced_expr2[name])
for name in aloha_lib.KERNEL.use_tag
if name.startswith('TMP')])
output.fct = dict([(name, aloha_lib.KERNEL.reduced_expr2[name])
for name in aloha_lib.KERNEL.use_tag
if name.startswith('FCT')])
output.tag = [t for t in self.tag if not t.startswith('C')]
output.tag += ['C%s' % pair for pair in self.conjg]
return output
def parse_expression(self, expr=None, need_P_sign=False):
"""change the sign of P for outcoming fermion in order to
correct the mismatch convention between HELAS and FR"""
if not expr:
expr = self.lorentz_expr
if need_P_sign:
expr = re.sub(r'\b(P|PSlash)\(', r'-\1(', expr)
calc = aloha_parsers.ALOHAExpressionParser()
lorentz_expr = calc.parse(expr)
return lorentz_expr
def compute_aloha_high_kernel(self, mode, factorize=True):
"""compute the abstract routine associate to this mode """
# reset tag for particles
aloha_lib.KERNEL.use_tag=set()
#multiply by the wave functions
nb_spinor = 0
outgoing = self.outgoing
if (outgoing + 1) // 2 in self.conjg:
#flip the outgoing tag if in conjugate
outgoing = outgoing + outgoing % 2 - (outgoing +1) % 2
if not self.routine_kernel:
AbstractRoutineBuilder.counter += 1
if self.tag == []:
logger.info('aloha creates %s routines' % self.name)
elif AbstractALOHAModel.lastprint < time.time() - 1:
AbstractALOHAModel.lastprint = time.time()
logger.info('aloha creates %s set of routines with options: %s' \
% (self.name, ','.join(self.tag)) )
try:
lorentz = self.parse_expression()
self.routine_kernel = lorentz
lorentz = eval(lorentz)
except NameError as error:
logger.error('unknown type in Lorentz evaluation:%s'%str(error))
raise ALOHAERROR('unknown type in Lorentz evaluation: %s ' % str(error))
else:
self.kernel_tag = set(aloha_lib.KERNEL.use_tag)
elif isinstance(self.routine_kernel,str):
lorentz = eval(self.routine_kernel)
aloha_lib.KERNEL.use_tag = set(self.kernel_tag)
else:
lorentz = copy.copy(self.routine_kernel)
aloha_lib.KERNEL.use_tag = set(self.kernel_tag)
for (i, spin ) in enumerate(self.spins):
id = i + 1
#Check if this is the outgoing particle
if id == outgoing:
# check if we need a special propagator
propa = [t[1:] for t in self.tag if t.startswith('P')]
if propa == ['0']:
if spin == 3 and aloha.unitary_gauge == 2:
misc.sprint(spin)
lorentz *= complex(0,1) * self.get_custom_propa('1PS', spin, id)
continue
else:
massless = True
self.denominator = None
elif propa == []:
massless = False
self.denominator = None
else:
lorentz *= complex(0,1) * self.get_custom_propa(propa[0], spin, id)
continue
if spin in [1,-1]:
lorentz *= complex(0,1)
elif spin == 2:
# shift and flip the tag if we multiply by C matrices
if (id + 1) // 2 in self.conjg:
id += _conjugate_gap + id % 2 - (id +1) % 2
if (id % 2):
#propagator outgoing
lorentz *= complex(0,1) * SpinorPropagatorout(id, 'I2', outgoing)
else:
#propagator incoming
lorentz *= complex(0,1) * SpinorPropagatorin('I2', id, outgoing)
elif spin == 3 :
if massless or not aloha.unitary_gauge:
lorentz *= VectorPropagatorMassless(id, 'I2', id)
else:
lorentz *= VectorPropagator(id, 'I2', id)
elif spin == 4:
# shift and flip the tag if we multiply by C matrices
if (id + 1) // 2 in self.conjg:
spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
else:
spin_id = id
nb_spinor += 1
if not massless and (spin_id % 2):
lorentz *= complex(0,1) * Spin3halfPropagatorout(id, 'I2', spin_id,'I3', outgoing)
elif not massless and not (spin_id % 2):
lorentz *= complex(0,1) * Spin3halfPropagatorin('I2', id , 'I3', spin_id, outgoing)
elif spin_id %2:
lorentz *= complex(0,1) * Spin3halfPropagatorMasslessOut(id, 'I2', spin_id,'I3', outgoing)
else :
lorentz *= complex(0,1) * Spin3halfPropagatorMasslessIn('I2', id, 'I3', spin_id, outgoing)
elif spin == 5 :
#lorentz *= 1 # delayed evaluation (speeds up the code)
if massless:
lorentz *= complex(0,1) * Spin2masslessPropagator(_spin2_mult + id, \
2 * _spin2_mult + id,'I2','I3')
else:
lorentz *= complex(0,1) * Spin2Propagator(_spin2_mult + id, \
2 * _spin2_mult + id,'I2','I3', id)
else:
raise self.AbstractALOHAError(
'The spin value %s (2s+1) is not supported yet' % spin)
else:
# This is an incoming particle
if spin in [1,-1]:
lorentz *= Scalar(id)
elif spin == 2:
# shift the tag if we multiply by C matrices
if (id+1) // 2 in self.conjg:
spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
else:
spin_id = id
lorentz *= Spinor(spin_id, id)
elif spin == 3:
lorentz *= Vector(id, id)
elif spin == 4:
# shift the tag if we multiply by C matrices
if (id+1) // 2 in self.conjg:
spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
else:
spin_id = id
nb_spinor += 1
lorentz *= Spin3Half(id, spin_id, id)
elif spin == 5:
lorentz *= Spin2(1 * _spin2_mult + id, 2 * _spin2_mult + id, id)
else:
raise self.AbstractALOHAError(
'The spin value %s (2s+1) is not supported yet' % spin)
# If no particle OffShell
if not outgoing:
lorentz *= complex(0,-1)
# Propagator are taken care separately
lorentz = lorentz.simplify()
# Modify the expression in case of loop-pozzorini
if any((tag.startswith('L') for tag in self.tag if len(tag)>1)):
return self.compute_loop_coefficient(lorentz, outgoing)
lorentz = lorentz.expand()
lorentz = lorentz.simplify()
if factorize:
lorentz = lorentz.factorize()
lorentz.tag = set(aloha_lib.KERNEL.use_tag)
return lorentz
@staticmethod
def mod_propagator_expression(tag, text):
"""Change the index of the propagator to match the current need"""
data = re.split(r'(\b[a-zA-Z]\w*?)\(([\'\w,\s\"\+\-]*?)\)',text)
to_change = {}
for old, new in tag.items():
if isinstance(new, str):
new='\'%s\'' % new
else:
new = str(new)
to_change[r'%s' % old] = new
pos=-2
while pos +3 < len(data):
pos = pos+3
ltype = data[pos]
if ltype != 'complex':
data[pos+1] = re.sub(r'\b(?<!-)(%s)\b' % '|'.join(to_change),
lambda x: to_change[x.group()], data[pos+1])
data[pos+1] = '(%s)' % data[pos+1]
text=''.join(data)
return text
def get_custom_propa(self, propa, spin, id):
"""Return the ALOHA object associated to the user define propagator"""
if not propa.startswith('1'):
propagator = getattr(self.model.propagators, propa)
numerator = propagator.numerator
denominator = propagator.denominator
elif propa == "1L":
numerator = "EPSL(1,id) * EPSL(2,id)"
denominator = "-1*PVec(-2,id)*PVec(-2,id)*P(-3,id)*P(-3,id) * (P(-1,id)**2 - Mass(id) * Mass(id) + complex(0,1) * Mass(id) * Width(id))"
elif propa == "1T":
numerator = "-1*PVec(-2,id)*PVec(-2,id) * EPST2(1,id)*EPST2(2,id) + EPST1(1,id)*EPST1(2,id)"
denominator = "PVec(-2,id)*PVec(-2,id) * PT(-3,id)*PT(-3,id) * (P(-1,id)**2 - Mass(id) * Mass(id) + complex(0,1) * Mass(id) * Width(id))"
elif propa == "1A":
numerator = "(P(-2,id)**2 - Mass(id)**2) * P(1,id) * P(2,id)"
denominator = "P(-2,id)**2 * Mass(id)**2 * (P(-1,id)**2 - Mass(id) * Mass(id) + complex(0,1) * Mass(id) * Width(id))"
elif propa in ["1P"]:
# shift and flip the tag if we multiply by C matrices
spin_id = id
if (id + 1) // 2 in self.conjg:
spin_id += _conjugate_gap + id % 2 - (id +1) % 2
if (spin_id % 2):
numerator = "UFP(1,id)*UFPC(2,id)"
else:
numerator = "VFP(1,id)*VFPC(2,id)"
denominator = "(2*Tnorm(id)*TnormZ(id))*(P(-1,id)*P(-1,id) - Mass(id) * Mass(id) + complex(0,1) * Mass(id) * Width(id))"
elif propa == "1M":
# shift and flip the tag if we multiply by C matrices
spin_id = id
if (id + 1) // 2 in self.conjg:
spin_id += _conjugate_gap + id % 2 - (id +1) % 2
if (spin_id % 2):
numerator = "UFM(1,id)*UFMC(2,id)"
else:
numerator = "VFM(1,id)*VFMC(2,id)"
denominator = "(2*Tnorm(id)*TnormZ(id))*(P(-1,id)*P(-1,id) - Mass(id) * Mass(id) + complex(0,1) * Mass(id) * Width(id))"
elif propa == "1PS":
numerator = "(-1*(P(-1,id)*PBar(-1,id)) * Metric(1, 2) + P(1,id)*PBar(2,id) + PBar(1,id)*P(2,id))"
denominator = "(P(-3,id)*PBar(-3,id))*P(-2,id)**2"
else:
raise Exception('Unknown custom propagator: %s' % propa)
# Find how to make the replacement for the various tags in the propagator expression
needPflipping = False
if spin in [1,-1]:
tag = {'id': id}
elif spin == 2:
# shift and flip the tag if we multiply by C matrices
if (id + 1) // 2 in self.conjg:
spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
else:
spin_id = id
if (spin_id % 2):
# outgoing propagator
needPflipping = True
tag ={'1': spin_id, '2': 'I2', 'id': id}
else:
tag ={'1': 'I2', '2': spin_id, 'id': id}
elif spin == 3 :
tag ={'1': id, '2': 'I2', 'id': id}
elif spin == 4:
delta = lambda i,j: aloha_object.Identity(i,j)
deltaL = lambda i,j: aloha_object.IdentityL(i,j)
# shift and flip the tag if we multiply by C matrices
if (id + 1) // 2 in self.conjg:
spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
else:
spin_id = id
tag = {'1': 'pr1', '2': 'pr2', 'id':id}
if spin_id % 2:
needPflipping = True
# propaR is needed to do the correct contraction since we need to distinguish the spin index from the lorentz index
propaR = deltaL('pr1',id) * deltaL('pr2', 'I2') * delta('pr1', spin_id) * delta('pr2', 'I3')
else:
propaR = deltaL('pr1',id) * deltaL('pr2', 'I2') * delta('pr2', spin_id) * delta('pr1', 'I3')
#numerator += "*deltaL('pr_1',id) * deltaL('pr_2', 'I2') * delta('pr_1', spin_id) * delta('pr_2', 'I3')"
elif spin == 5 :
tag = {'1': _spin2_mult + id, '2': 'I2',
'51': 2 * _spin2_mult + id, '52': 'I3', 'id':id}
numerator = self.mod_propagator_expression(tag, numerator)
if denominator:
denominator = self.mod_propagator_expression(tag, denominator)
numerator = self.parse_expression(numerator, needPflipping)
if denominator:
self.denominator = self.parse_expression(denominator, needPflipping)
self.denominator = eval(self.denominator)
if not isinstance(self.denominator, numbers.Number):
self.denominator = self.denominator.simplify().expand().simplify().get((0,))
needPflipping = False
if spin ==4:
return eval(numerator) * propaR
else:
return eval(numerator)
def compute_loop_coefficient(self, lorentz, outgoing):
l_in = [int(tag[1:]) for tag in self.tag if tag.startswith('L')][0]
if (l_in + 1) // 2 in self.conjg:
#flip the outgoing tag if in conjugate
l_in = l_in + l_in % 2 - (l_in +1) % 2
assert l_in != outgoing, 'incoming open loop cannot be the outgoing one'
# modify the expression for the momenta
# P_i -> P_i + P_L and P_o -> -P_o - P_L
Pdep = [aloha_lib.KERNEL.get(P) for P in lorentz.get_all_var_names()
if P.startswith('_P')]
Pdep = set([P for P in Pdep if P.particle in [outgoing, l_in]])
for P in Pdep:
if P.particle == l_in:
sign = 1
else:
sign = -1
id = P.id
lorentz_ind = P.lorentz_ind[0]
P_Lid = aloha_object.P(lorentz_ind, 'L')
P_obj = aloha_object.P(lorentz_ind, P.particle)
new_expr = sign*(P_Lid + P_obj)
lorentz = lorentz.replace(id, new_expr)
# Compute the variable from which we need to split the expression
var_veto = ['PL_0', 'PL_1', 'PL_2', 'PL_3']
spin = aloha_writers.WriteALOHA.type_to_variable[abs(self.spins[l_in-1])]
size = aloha_writers.WriteALOHA.type_to_size[spin]-1
var_veto += ['%s%s_%s' % (spin,l_in,i) for i in range(1,size)]
# compute their unique identifiers
veto_ids = aloha_lib.KERNEL.get_ids(var_veto)
lorentz = lorentz.expand(veto = veto_ids)
lorentz = lorentz.simplify()
coeff_expr = lorentz.split(veto_ids)
for key, expr in coeff_expr.items():
expr = expr.simplify()
coeff_expr[key] = expr.factorize()
coeff_expr.tag = set(aloha_lib.KERNEL.use_tag)
return coeff_expr
def define_lorentz_expr(self, lorentz_expr):
"""Define the expression"""
self.expr = lorentz_expr
def define_routine_kernel(self, lorentz=None):
"""Define the kernel at low level"""
if not lorentz:
logger.info('compute kernel %s' % self.counter)
AbstractRoutineBuilder.counter += 1
lorentz = eval(self.lorentz_expr)
if isinstance(lorentz, numbers.Number):
self.routine_kernel = lorentz
return lorentz
lorentz = lorentz.simplify()
lorentz = lorentz.expand()
lorentz = lorentz.simplify()
self.routine_kernel = lorentz
return lorentz
@staticmethod
def get_routine_name(name, outgoing):
"""return the name of the """
name = '%s_%s' % (name, outgoing)
return name
@classmethod
def load_library(cls, tag):
# load the library
if tag in cls.prop_lib:
return
else:
cls.prop_lib = create_prop_library(tag, cls.aloha_lib)
class CombineRoutineBuilder(AbstractRoutineBuilder):
"""A special builder for combine routine if needed to write those
explicitely.
"""
def __init__(self, l_lorentz, model=None):
""" initialize the run
l_lorentz: list of lorentz information analyzed (UFO format)
language: define in which language we write the output
modes: 0 for all incoming particles
>0 defines the outgoing part (start to count at 1)
"""
AbstractRoutineBuilder.__init__(self,l_lorentz[0], model)
lorentz = l_lorentz[0]
self.spins = lorentz.spins
l_name = [l.name for l in l_lorentz]
self.name = aloha_writers.combine_name(l_name[0], l_name[1:], None)
self.conjg = []
self.tag = []
self.outgoing = None
self.lorentz_expr = []
for i, lor in enumerate(l_lorentz):
self.lorentz_expr.append( 'Coup(%s) * (%s)' % (i+1, lor.structure))
self.lorentz_expr = ' + '.join(self.lorentz_expr)
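# Schematically, for two lorentz structures this builds:
#   'Coup(1) * (structure_1) + Coup(2) * (structure_2)'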
self.routine_kernel = None
self.contracted = {}
self.fct = {}
class AbstractALOHAModel(dict):
""" A class to build and store the full set of Abstract ALOHA Routine"""
lastprint = 0
def __init__(self, model_name, write_dir=None, format='Fortran',
explicit_combine=False):
""" load the UFO model and init the dictionary """
# Option
self.explicit_combine = explicit_combine
# Extract the model name if combined with restriction
model_name_pattern = re.compile(r"^(?P<name>.+)-(?P<rest>[\w\d_]+)$")
model_name_re = model_name_pattern.match(model_name)
if model_name_re:
name = model_name_re.group('name')
rest = model_name_re.group("rest")
if rest == 'full' or \
os.path.isfile(os.path.join(root_path, "models", name,
"restrict_%s.dat" % rest)):
model_name = model_name_re.group("name")
# load the UFO model
self.model = models.load_model(model_name)
#
# try:
# python_pos = model_name
# __import__(python_pos)
# except Exception:
# python_pos = 'models.%s' % model_name
# __import__(python_pos)
# self.model = sys.modules[python_pos]
# find the position on the disk
self.model_pos = os.path.dirname(self.model.__file__)
# list the external routine
self.external_routines = []
# init the dictionary
dict.__init__(self)
self.symmetries = {}
self.multiple_lor = {}
if write_dir:
self.main(write_dir,format=format)
def main(self, output_dir, format='Fortran'):
""" Compute if not already compute.
Write file in models/MY_MODEL/MY_FORMAT.
copy the file to output_dir
"""
ext = {'Fortran':'f','Python':'py','CPP':'h'}
# Check if a pickle file exists
if not self.load():
self.compute_all()
logger.info(' %s aloha routines' % len(self))
# Check that output directory exists
if not output_dir:
output_dir = os.path.join(self.model_pos, format.lower())
logger.debug('aloha output dir is %s' % output_dir)
if not os.path.exists(output_dir):
os.mkdir(output_dir)
# Check that all routines are generated at the default places:
for (name, outgoing), abstract in self.items():
routine_name = AbstractRoutineBuilder.get_routine_name(name, outgoing)
if not os.path.exists(os.path.join(output_dir, routine_name) + '.' + ext[format]):
abstract.write(output_dir, format)
else:
logger.info('File for %s already present, skip the writing of this file' % routine_name)
def save(self, filepos=None):
""" save the current model in a pkl file """
logger.info('save the aloha abstract routine in a pickle file')
if not filepos:
filepos = os.path.join(self.model_pos,'aloha.pkl')
fsock = open(filepos, 'wb')
try:
six.moves.cPickle.dump(dict(self), fsock)
except Exception:
logger.info('aloha not saved')
def load(self, filepos=None):
""" reload the pickle file """
return False  # pickle reloading is currently disabled; the code below is kept for reference
if not filepos:
filepos = os.path.join(self.model_pos,'aloha.pkl')
if os.path.exists(filepos):
fsock = open(filepos, 'rb')
self.update(six.moves.cPickle.load(fsock))
return True
else:
return False
def get(self, lorentzname, outgoing):
""" return the AbstractRoutine with a given lorentz name, and for a given
outgoing particle """
try:
return self[(lorentzname, outgoing)]
except Exception:
logger.warning('(%s, %s) is not a valid key' %
(lorentzname, outgoing) )
return None
def get_info(self, info, lorentzname, outgoing, tag, cached=False):
"""return some information about the aloha routine
- "rank": return the rank of the loop function
If the cached option is set to true, then the result is stored and
recycled if possible.
"""
if not aloha.loop_mode and any(t.startswith('L') for t in tag):
aloha.loop_mode = True
returned_dict = {}
# Make sure the input argument is a list
if isinstance(info, str):
infos = [info]
else:
infos = info
# First deal with the caching of infos
if hasattr(self, 'cached_interaction_infos'):
# Now try to recover it
for info_key in infos:
try:
returned_dict[info_key] = self.cached_interaction_infos[\
(lorentzname,outgoing,tuple(tag),info_key)]
except KeyError:
# Some information has never been computed before, so they
# will be computed later.
pass
elif cached:
self.cached_interaction_infos = {}
init = False
for info_key in infos:
if info_key in returned_dict:
continue
elif not init:
# need to create the aloha object
lorentz = eval('self.model.lorentz.%s' % lorentzname)
abstract = AbstractRoutineBuilder(lorentz)
routine = abstract.compute_routine(outgoing, tag, factorize=False)
init = True
assert 'routine' in locals()
returned_dict[info_key] = routine.get_info(info_key)
if cached:
# Cache the information computed
self.cached_interaction_infos[\
(lorentzname,outgoing,tuple(tag),info_key)]=returned_dict[info_key]
if isinstance(info, str):
return returned_dict[info]
else:
return returned_dict
def set(self, lorentzname, outgoing, abstract_routine):
""" add in the dictionary """
self[(lorentzname, outgoing)] = abstract_routine
def compute_all(self, save=True, wanted_lorentz = [], custom_propa=False):
""" define all the AbstractRoutine linked to a model """
# Search for identical particles in the vertices in order to avoid
# computing identical contributions
self.look_for_symmetries()
conjugate_list = self.look_for_conjugate()
self.look_for_multiple_lorentz_interactions()
if not wanted_lorentz:
wanted_lorentz = [l.name for l in self.model.all_lorentz]
for lorentz in self.model.all_lorentz:
if not lorentz.name in wanted_lorentz:
# Only include the routines we ask for
continue
if -1 in lorentz.spins:
# No Ghost in ALOHA
continue
if lorentz.structure == 'external':
for i in range(len(lorentz.spins)):
self.external_routines.append('%s_%s' % (lorentz.name, i))
continue
#standard routines
routines = [(i,[]) for i in range(len(lorentz.spins)+1)]
# search for special propagators
if custom_propa:
for vertex in self.model.all_vertices:
if lorentz in vertex.lorentz:
for i,part in enumerate(vertex.particles):
new_prop = False
if hasattr(part, 'propagator') and part.propagator:
new_prop = ['P%s' % part.propagator.name]
elif part.mass.name.lower() == 'zero':
new_prop = ['P0']
if new_prop and (i+1, new_prop) not in routines:
routines.append((i+1, new_prop))
builder = AbstractRoutineBuilder(lorentz, self.model)
self.compute_aloha(builder, routines=routines)
if lorentz.name in self.multiple_lor:
for m in self.multiple_lor[lorentz.name]:
for outgoing in range(len(lorentz.spins)+1):
try:
self[(lorentz.name, outgoing)].add_combine(m)
except Exception:
pass # this routine is a symmetric one, so it
# already has the combination.
if lorentz.name in conjugate_list:
conjg_builder_list= builder.define_all_conjugate_builder(\
conjugate_list[lorentz.name])
for conjg_builder in conjg_builder_list:
# No duplication of conjugation:
assert conjg_builder_list.count(conjg_builder) == 1
self.compute_aloha(conjg_builder, lorentz.name)
if lorentz.name in self.multiple_lor:
for m in self.multiple_lor[lorentz.name]:
for outgoing in range(len(lorentz.spins)+1):
realname = conjg_builder.name + ''.join(['C%s' % pair for pair in conjg_builder.conjg])
try:
self[(realname, outgoing)].add_combine(m)
except Exception as error:
self[(realname, self.symmetries[lorentz.name][outgoing])].add_combine(m)
if save:
self.save()
def add_Lorentz_object(self, lorentzlist):
"""add a series of Lorentz structure created dynamically"""
for lor in lorentzlist:
if not hasattr(self.model.lorentz, lor.name):
setattr(self.model.lorentz, lor.name, lor)
def compute_subset(self, data):
""" create the requested ALOHA routine.
data should be a list of tuple (lorentz, tag, outgoing)
tag should be the list of special tag (like conjugation on pair)
to apply on the object """
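# Illustrative (hypothetical) request: one routine with the first fermion
# pair conjugated, for outgoing leg 0 (the amplitude):
#   data = [(('FFV1',), ['C1'], 0)]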
# Search for identical particles in the vertices in order to avoid
# computing identical contributions
self.look_for_symmetries()
# reorganize the data (in order to use optimizations for a given
# lorentz structure)
aloha.loop_mode = False
# self.explicit_combine = False
request = {}
for list_l_name, tag, outgoing in data:
# allow tag to contain integers for backward compatibility
all_tag = tag[:]
conjugate = [i for i in tag if isinstance(i, int)]
tag = [i for i in tag if isinstance(i, str) and not i.startswith('P')]
tag = tag + ['C%s'%i for i in conjugate]
tag = tag + [i for i in all_tag if isinstance(i, str) and i.startswith('P')]
conjugate = tuple([int(float(c[1:])) for c in tag if c.startswith('C')])
loop = any((t.startswith('L') for t in tag))
if loop:
aloha.loop_mode = True
self.explicit_combine = True
for l_name in list_l_name:
try:
request[l_name][conjugate].append((outgoing,tag))
except Exception:
try:
request[l_name][conjugate] = [(outgoing,tag)]
except Exception:
request[l_name] = {conjugate: [(outgoing,tag)]}
# Loop on the structures to build exactly what is requested
for l_name in request:
lorentz = eval('self.model.lorentz.%s' % l_name)
if lorentz.structure == 'external':
for tmp in request[l_name]:
for outgoing, tag in request[l_name][tmp]:
name = aloha_writers.get_routine_name(lorentz.name,outgoing=outgoing,tag=tag)
if name not in self.external_routines:
self.external_routines.append(name)
continue
builder = AbstractRoutineBuilder(lorentz, self.model)
for conjg in request[l_name]:
# ensure that routines are in ascending order (for symmetries)
def sorting(a,b):
if a[0] < b[0]: return -1
else: return 1
routines = request[l_name][conjg]
routines.sort(key=misc.cmp_to_key(sorting))
if not conjg:
# No need to conjugate -> compute directly
self.compute_aloha(builder, routines=routines)
else:
# Define the high level conjugate routine
conjg_builder = builder.define_conjugate_builder(conjg)
# Compute routines
self.compute_aloha(conjg_builder, symmetry=lorentz.name,
routines=routines)
# Build multiple lorentz calls
for list_l_name, tag, outgoing in data:
if len(list_l_name) ==1:
continue
# allow tag to contain integers for backward compatibility
conjugate = [i for i in tag if isinstance(i, int)]
all_tag = tag[:]
tag = [i for i in tag if isinstance(i, str) and not i.startswith('P')]
tag = tag + ['C%s'%i for i in conjugate]
tag = tag + [i for i in all_tag if isinstance(i, str) and i.startswith('P')]
if not self.explicit_combine:
lorentzname = list_l_name[0]
lorentzname += ''.join(tag)
if (lorentzname, outgoing) in self:
self[(lorentzname, outgoing)].add_combine(list_l_name[1:])
else:
lorentz = eval('self.model.lorentz.%s' % list_l_name[0])
assert lorentz.structure == 'external'
else:
l_lorentz = []
for l_name in list_l_name:
l_lorentz.append(eval('self.model.lorentz.%s' % l_name))
builder = CombineRoutineBuilder(l_lorentz)
for conjg in request[list_l_name[0]]:
# ensure that routines are in ascending order (for symmetries)
routines = request[list_l_name[0]][conjg]
routines.sort(key=operator.itemgetter(0))
if not conjg:
# No need to conjugate -> compute directly
self.compute_aloha(builder, routines=routines)
else:
# Define the high level conjugate routine
conjg_builder = builder.define_conjugate_builder(conjg)
# Compute routines
self.compute_aloha(conjg_builder, symmetry=lorentz.name,
routines=routines)
def compute_aloha(self, builder, symmetry=None, routines=None, tag=[]):
""" define all the AbstractRoutine linked to a given lorentz structure
symmetry authorizes to use the symmetry of anoter lorentz structure.
routines to define only a subset of the routines."""
name = builder.name
if not symmetry:
symmetry = name
if not routines:
if not tag:
tag = ['C%s' % i for i in builder.conjg]
else:
addon = ['C%s' % i for i in builder.conjg]
tag = [(i,addon +onetag) for i,onetag in tag]
routines = [ tuple([i,tag]) for i in range(len(builder.spins) + 1 )]
# Create the routines
for outgoing, tag in routines:
symmetric = self.has_symmetries(symmetry, outgoing, valid_output=routines)
realname = name + ''.join(tag)
if (realname, outgoing) in self:
continue # already computed
if symmetric:
self.get(realname, symmetric).add_symmetry(outgoing)
else:
wavefunction = builder.compute_routine(outgoing, tag)
#Store the information
self.set(realname, outgoing, wavefunction)
def compute_aloha_without_kernel(self, builder, symmetry=None, routines=None):
"""define all the AbstractRoutine linked to a given lorentz structure
symmetry authorizes to use the symmetry of anoter lorentz structure.
routines to define only a subset of the routines.
Compare to compute_aloha, each routines are computed independently.
"""
name = builder.name
if not routines:
routines = [ tuple([i,[]]) for i in range(len(builder.spins) + 1 )]
for outgoing, tag in routines:
builder.routine_kernel = None
wavefunction = builder.compute_routine(outgoing, tag)
self.set(name, outgoing, wavefunction)
def write(self, output_dir, language):
""" write the full set of Helicity Routine in output_dir"""
for abstract_routine in self.values():
abstract_routine.write(output_dir, language)
for routine in self.external_routines:
self.locate_external(routine, language, output_dir)
# if aloha_lib.KERNEL.unknow_fct:
# if language == 'Fortran':
# logger.warning('''Some function present in the lorentz structure are not
# recognized. A Template file has been created:
# %s
# Please edit this file to include the associated definition.''' % \
# pjoin(output_dir, 'additional_aloha_function.f') )
# else:
# logger.warning('''Some function present in the lorentz structure are
# not recognized. Please edit the code to add the defnition of such function.''')
# logger.info('list of missing fct: %s .' % \
# ','.join([a[0] for a in aloha_lib.KERNEL.unknow_fct]))
#
# for fct_name, nb_arg in aloha_lib.KERNEL.unknow_fct:
# if language == 'Fortran':
# aloha_writers.write_template_fct(fct_name, nb_arg, output_dir)
#self.write_aloha_file_inc(output_dir)
def locate_external(self, name, language, output_dir=None):
"""search a valid external file and copy it to output_dir directory"""
language_to_ext = {'Python': 'py',
'Fortran' : 'f',
'CPP': 'C'}
ext = language_to_ext[language]
paths = [os.path.join(self.model_pos, language), self.model_pos,
os.path.join(root_path, 'aloha', 'template_files', )]
ext_files = []
for path in paths:
ext_files = misc.glob('%s.%s' % (name, ext), path)
if ext_files:
break
else:
raise ALOHAERROR('No external routine \"%s.%s\" in directories\n %s' % \
(name, ext, '\n'.join(paths)))
if output_dir:
for filepath in ext_files:
files.cp(filepath, output_dir)
return ext_files
def look_for_symmetries(self):
"""Search some symmetries in the vertices.
We search if some identical particles are in a vertices in order
to avoid to compute symmetrical contributions"""
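# Hypothetical example: a vertex with three identical colorless scalars
# yields self.symmetries[lorentz.name] = {2: 1, 3: 2}, i.e. leg 2 can
# reuse the routine of leg 1 and leg 3 that of leg 2.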
for vertex in self.model.all_vertices:
for i, part1 in enumerate(vertex.particles):
for j in range(i-1,-1,-1):
part2 = vertex.particles[j]
if part1.pdg_code == part2.pdg_code and part1.color == 1:
if part1.spin == 2 and (i % 2 != j % 2 ):
continue
for lorentz in vertex.lorentz:
if lorentz.name in self.symmetries:
if i+1 in self.symmetries[lorentz.name]:
self.symmetries[lorentz.name][i+1] = max(self.symmetries[lorentz.name][i+1], j+1)
else:
self.symmetries[lorentz.name][i+1] = j+1
else:
self.symmetries[lorentz.name] = {i+1:j+1}
break
def look_for_multiple_lorentz_interactions(self):
"""Search the interaction associate with more than one lorentz structure.
If those lorentz structure have the same order and the same color then
associate a multiple lorentz routines to ALOHA """
orders = {}
for coup in self.model.all_couplings:
orders[coup.name] = str(coup.order)
for vertex in self.model.all_vertices:
if len(vertex.lorentz) == 1:
continue
#remove ghost
#if -1 in vertex.lorentz[0].spins:
# continue
# assign each order/color to a set of lorentz routine
combine = {}
for (id_col, id_lor), coups in vertex.couplings.items():
if not isinstance(coups, list):
coups = [coups]
for coup in coups:
order = orders[coup.name]
key = (id_col, order)
if key in combine:
combine[key].append(id_lor)
else:
combine[key] = [id_lor]
# Check if more than one routine are associated
for list_lor in combine.values():
if len(list_lor) == 1:
continue
list_lor.sort()
main = vertex.lorentz[list_lor[0]].name
if main not in self.multiple_lor:
self.multiple_lor[main] = []
info = tuple([vertex.lorentz[id].name for id in list_lor[1:]])
if info not in self.multiple_lor[main]:
self.multiple_lor[main].append(info)
def has_symmetries(self, l_name, outgoing, out=None, valid_output=None):
""" This returns out if no symmetries are available, otherwise it finds
the lowest equivalent outgoing by recursivally calling this function.
auth is a list of authorize output, if define"""
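# Sketch: with self.symmetries == {'FFV1': {3: 2, 2: 1}} (hypothetical),
# has_symmetries('FFV1', 3) follows 3 -> 2 -> 1 and returns 1.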
try:
equiv = self.symmetries[l_name][outgoing]
except Exception:
return out
else:
if not valid_output or equiv in valid_output:
return self.has_symmetries(l_name, equiv, out=equiv,
valid_output=valid_output)
else:
return self.has_symmetries(l_name, equiv, out=out,
valid_output=valid_output)
def look_for_conjugate(self):
""" create a list for the routine needing to be conjugate """
# Check if they are majorana in the model.
need = False
for particle in self.model.all_particles:
if particle.spin == 2 and particle.selfconjugate:
need = True
break
if not need:
for interaction in self.model.all_vertices:
fermions = [p for p in interaction.particles if p.spin == 2]
for i in range(0, len(fermions), 2):
if fermions[i].pdg_code * fermions[i+1].pdg_code > 0:
# This is a fermion flow violating interaction
need = True
break
# No majorana particles
if not need:
return {}
conjugate_request = {}
# Check each vertex for fermions and/or majoranas
for vertex in self.model.all_vertices:
for i in range(0, len(vertex.particles), 2):
part1 = vertex.particles[i]
if part1.spin !=2:
# deal only with fermion
break
# check if this pair contains a majorana
if part1.selfconjugate:
continue
part2 = vertex.particles[i + 1]
if part2.selfconjugate:
continue
# No majorana => add the associate lorentz structure
for lorentz in vertex.lorentz:
try:
conjugate_request[lorentz.name].add(i//2+1)
except Exception:
conjugate_request[lorentz.name] = set([i//2+1])
for elem in conjugate_request:
conjugate_request[elem] = list(conjugate_request[elem])
return conjugate_request
def write_aloha_file_inc(aloha_dir,file_ext, comp_ext):
"""find the list of Helicity routine in the directory and create a list
of those files (but with compile extension)"""
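# Sketch (hypothetical directory content, assuming file_ext='.f' and
# comp_ext='.o'): files FFV1_0.f and FFV2_3.f produce an aloha_file.inc
# containing "ALOHARoutine = FFV1_0.o FFV2_3.o".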
aloha_files = []
# Identify the valid files
alohafile_pattern = re.compile(r'''_\d%s''' % file_ext)
for filename in os.listdir(aloha_dir):
if os.path.isfile(os.path.join(aloha_dir, filename)):
if alohafile_pattern.search(filename):
aloha_files.append(filename.replace(file_ext, comp_ext))
if os.path.exists(pjoin(aloha_dir, 'additional_aloha_function.f')):
aloha_files.append('additional_aloha_function.o')
text="ALOHARoutine = "
text += ' '.join(aloha_files)
text +='\n'
open(os.path.join(aloha_dir, 'aloha_file.inc'), 'w').write(text)
def create_prop_library(tag, lib={}):
def create(obj):
""" """
obj= obj.simplify()
obj = obj.expand()
obj = obj.simplify()
return obj
# avoid adding tags to the global state
old_tag = set(aloha_lib.KERNEL.use_tag)
name, i = tag
if name == "Spin2Prop":
lib[('Spin2Prop',i)] = create( Spin2Propagator(_spin2_mult + i, \
2 * _spin2_mult + i,'I2','I3', i) )
elif name == "Spin2PropMassless":
lib[('Spin2PropMassless',i)] = create( Spin2masslessPropagator(
_spin2_mult + i, 2 * _spin2_mult + i,'I2','I3'))
aloha_lib.KERNEL.use_tag = old_tag
return lib
if '__main__' == __name__:
logging.basicConfig(level=0)
#create_library()
import profile
#model
start = time.time()
def main():
alohagenerator = AbstractALOHAModel('sm')
alohagenerator.compute_all(save=False)
return alohagenerator
def write(alohagenerator):
alohagenerator.write('/tmp/', 'Python')
alohagenerator = main()
logger.info('done in %s s' % (time.time()-start))
write(alohagenerator)
#profile.run('main()')
#profile.run('write(alohagenerator)')
stop = time.time()
logger.info('done in %s s' % (stop-start))
| 41.14574
| 187
| 0.52553
|
3157d39e92ee8f5242c5ce42d5c532828c792063
| 10,736
|
py
|
Python
|
sdk/python/pulumi_azure_native/recoveryservices/v20180110/replicationv_center.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/recoveryservices/v20180110/replicationv_center.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/recoveryservices/v20180110/replicationv_center.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._inputs import *
__all__ = ['ReplicationvCenterArgs', 'ReplicationvCenter']
@pulumi.input_type
class ReplicationvCenterArgs:
def __init__(__self__, *,
fabric_name: pulumi.Input[str],
resource_group_name: pulumi.Input[str],
resource_name: pulumi.Input[str],
properties: Optional[pulumi.Input['AddVCenterRequestPropertiesArgs']] = None,
v_center_name: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a ReplicationvCenter resource.
:param pulumi.Input[str] fabric_name: Fabric name.
:param pulumi.Input[str] resource_group_name: The name of the resource group where the recovery services vault is present.
:param pulumi.Input[str] resource_name: The name of the recovery services vault.
:param pulumi.Input['AddVCenterRequestPropertiesArgs'] properties: The properties of an add vCenter request.
:param pulumi.Input[str] v_center_name: vCenter name.
"""
pulumi.set(__self__, "fabric_name", fabric_name)
pulumi.set(__self__, "resource_group_name", resource_group_name)
pulumi.set(__self__, "resource_name", resource_name)
if properties is not None:
pulumi.set(__self__, "properties", properties)
if v_center_name is not None:
pulumi.set(__self__, "v_center_name", v_center_name)
@property
@pulumi.getter(name="fabricName")
def fabric_name(self) -> pulumi.Input[str]:
"""
Fabric name.
"""
return pulumi.get(self, "fabric_name")
@fabric_name.setter
def fabric_name(self, value: pulumi.Input[str]):
pulumi.set(self, "fabric_name", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group where the recovery services vault is present.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="resourceName")
def resource_name(self) -> pulumi.Input[str]:
"""
The name of the recovery services vault.
"""
return pulumi.get(self, "resource_name")
@resource_name.setter
def resource_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_name", value)
@property
@pulumi.getter
def properties(self) -> Optional[pulumi.Input['AddVCenterRequestPropertiesArgs']]:
"""
The properties of an add vCenter request.
"""
return pulumi.get(self, "properties")
@properties.setter
def properties(self, value: Optional[pulumi.Input['AddVCenterRequestPropertiesArgs']]):
pulumi.set(self, "properties", value)
@property
@pulumi.getter(name="vCenterName")
def v_center_name(self) -> Optional[pulumi.Input[str]]:
"""
vCenter name.
"""
return pulumi.get(self, "v_center_name")
@v_center_name.setter
def v_center_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "v_center_name", value)
class ReplicationvCenter(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
fabric_name: Optional[pulumi.Input[str]] = None,
properties: Optional[pulumi.Input[pulumi.InputType['AddVCenterRequestPropertiesArgs']]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
resource_name_: Optional[pulumi.Input[str]] = None,
v_center_name: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
vCenter definition.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] fabric_name: Fabric name.
:param pulumi.Input[pulumi.InputType['AddVCenterRequestPropertiesArgs']] properties: The properties of an add vCenter request.
:param pulumi.Input[str] resource_group_name: The name of the resource group where the recovery services vault is present.
:param pulumi.Input[str] resource_name_: The name of the recovery services vault.
:param pulumi.Input[str] v_center_name: vCenter name.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: ReplicationvCenterArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
vCenter definition.
:param str resource_name: The name of the resource.
:param ReplicationvCenterArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(ReplicationvCenterArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
fabric_name: Optional[pulumi.Input[str]] = None,
properties: Optional[pulumi.Input[pulumi.InputType['AddVCenterRequestPropertiesArgs']]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
resource_name_: Optional[pulumi.Input[str]] = None,
v_center_name: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = ReplicationvCenterArgs.__new__(ReplicationvCenterArgs)
if fabric_name is None and not opts.urn:
raise TypeError("Missing required property 'fabric_name'")
__props__.__dict__["fabric_name"] = fabric_name
__props__.__dict__["properties"] = properties
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
if resource_name_ is None and not opts.urn:
raise TypeError("Missing required property 'resource_name_'")
__props__.__dict__["resource_name"] = resource_name_
__props__.__dict__["v_center_name"] = v_center_name
__props__.__dict__["location"] = None
__props__.__dict__["name"] = None
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:recoveryservices/v20180110:ReplicationvCenter"), pulumi.Alias(type_="azure-native:recoveryservices:ReplicationvCenter"), pulumi.Alias(type_="azure-nextgen:recoveryservices:ReplicationvCenter"), pulumi.Alias(type_="azure-native:recoveryservices/v20160810:ReplicationvCenter"), pulumi.Alias(type_="azure-nextgen:recoveryservices/v20160810:ReplicationvCenter"), pulumi.Alias(type_="azure-native:recoveryservices/v20180710:ReplicationvCenter"), pulumi.Alias(type_="azure-nextgen:recoveryservices/v20180710:ReplicationvCenter"), pulumi.Alias(type_="azure-native:recoveryservices/v20210210:ReplicationvCenter"), pulumi.Alias(type_="azure-nextgen:recoveryservices/v20210210:ReplicationvCenter"), pulumi.Alias(type_="azure-native:recoveryservices/v20210301:ReplicationvCenter"), pulumi.Alias(type_="azure-nextgen:recoveryservices/v20210301:ReplicationvCenter")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(ReplicationvCenter, __self__).__init__(
'azure-native:recoveryservices/v20180110:ReplicationvCenter',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'ReplicationvCenter':
"""
Get an existing ReplicationvCenter resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = ReplicationvCenterArgs.__new__(ReplicationvCenterArgs)
__props__.__dict__["location"] = None
__props__.__dict__["name"] = None
__props__.__dict__["properties"] = None
__props__.__dict__["type"] = None
return ReplicationvCenter(resource_name, opts=opts, __props__=__props__)
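# A minimal usage sketch (hypothetical names, not part of the generated SDK):
#
#   vcenter = ReplicationvCenter("example",
#       fabric_name="example-fabric",
#       resource_group_name="example-rg",
#       resource_name_="example-vault")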
@property
@pulumi.getter
def location(self) -> pulumi.Output[Optional[str]]:
"""
Resource Location
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource Name
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def properties(self) -> pulumi.Output['outputs.VCenterPropertiesResponse']:
"""
VCenter related data.
"""
return pulumi.get(self, "properties")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Resource Type
"""
return pulumi.get(self, "type")
| 44.920502
| 941
| 0.663934
|
97bcd2a07a3d6d7c99e27f61c0956675eb489773
| 849
|
py
|
Python
|
multivariate_time_series_rnn/scripts/train.py
|
xdr940/huawei2021
|
4d51f4b15bf152a6a7c75c9724d9414df3b9b636
|
[
"MIT"
] | 1
|
2021-12-10T10:14:55.000Z
|
2021-12-10T10:14:55.000Z
|
multivariate_time_series_rnn/scripts/train.py
|
xdr940/huawei2021
|
4d51f4b15bf152a6a7c75c9724d9414df3b9b636
|
[
"MIT"
] | null | null | null |
multivariate_time_series_rnn/scripts/train.py
|
xdr940/huawei2021
|
4d51f4b15bf152a6a7c75c9724d9414df3b9b636
|
[
"MIT"
] | null | null | null |
# Copyright Niantic 2019. Patent Pending. All rights reserved.
#
# This software is licensed under the terms of the Monodepth2 licence
# which allows for non-commercial use only, the full terms of which are made
# available in the LICENSE file.
from __future__ import absolute_import, division, print_function
from utils.yaml_wrapper import YamlHandler
import argparse
from my_trainer import Trainer
def main(args):
opts = YamlHandler(args.settings).read_yaml()
trainer = Trainer(opts, settings=args.settings)  # after decoding the yaml file, pass the filename to the trainer for checkpoint saving
trainer(opts)
print('training over')
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="DeepSfM")
parser.add_argument("--settings", default='./opts/ctmnt.yaml')
args = parser.parse_args()
main(args)
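# Example invocation (assuming the default settings file exists):
#   python train.py --settings ./opts/ctmnt.yaml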
| 32.653846
| 131
| 0.760895
|
d801cf25f20b2f00b13123568eb75064077da80e
| 887
|
py
|
Python
|
nilearn/plotting/tests/test_img_plotting/test_plot_prob_atlas.py
|
ctw/nilearn
|
932eee9c69cd8fbf40ee6af5cee77f8f93b25da3
|
[
"BSD-2-Clause"
] | 827
|
2015-01-30T23:11:42.000Z
|
2022-03-29T21:21:05.000Z
|
nilearn/plotting/tests/test_img_plotting/test_plot_prob_atlas.py
|
ctw/nilearn
|
932eee9c69cd8fbf40ee6af5cee77f8f93b25da3
|
[
"BSD-2-Clause"
] | 2,845
|
2015-01-04T22:14:41.000Z
|
2022-03-31T20:28:09.000Z
|
nilearn/plotting/tests/test_img_plotting/test_plot_prob_atlas.py
|
ctw/nilearn
|
932eee9c69cd8fbf40ee6af5cee77f8f93b25da3
|
[
"BSD-2-Clause"
] | 484
|
2015-02-03T10:58:19.000Z
|
2022-03-29T21:57:16.000Z
|
"""Tests for :func:`nilearn.plotting.plot_prob_atlas`."""
import pytest
import numpy as np
import matplotlib.pyplot as plt
from nibabel import Nifti1Image
from nilearn.plotting import plot_prob_atlas
@pytest.mark.parametrize("params",
[{"view_type": 'contours'},
{"view_type": 'filled_contours', "threshold": .2},
{"view_type": "continuous"},
{"view_type": 'filled_contours', "colorbar": True},
{"threshold": None}])
def test_plot_prob_atlas(params):
"""Smoke tests for plot_prob_atlas.
Tests different combinations of parameters `view_type`, `threshold`,
and `colorbar`.
"""
rng = np.random.RandomState(42)
data_rng = rng.normal(size=(6, 8, 10, 5))
plot_prob_atlas(Nifti1Image(data_rng, np.eye(4)), **params)
plt.close()
| 34.115385
| 77
| 0.606539
|
a3314d05ea25c97785e9a3010cf16ee691f69487
| 11,310
|
py
|
Python
|
tests/jinja_context_tests.py
|
hikaya-io/incubator-superset
|
3dac81c89613f04dc9e4424dda043821c7557323
|
[
"Apache-2.0"
] | 44
|
2021-04-14T10:53:36.000Z
|
2021-09-11T00:29:50.000Z
|
tests/jinja_context_tests.py
|
hikaya-io/incubator-superset
|
3dac81c89613f04dc9e4424dda043821c7557323
|
[
"Apache-2.0"
] | 77
|
2020-02-02T07:54:13.000Z
|
2022-03-23T18:22:04.000Z
|
tests/jinja_context_tests.py
|
hikaya-io/incubator-superset
|
3dac81c89613f04dc9e4424dda043821c7557323
|
[
"Apache-2.0"
] | 11
|
2021-06-09T08:30:57.000Z
|
2021-11-30T03:16:14.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import json
from datetime import datetime
from typing import Any
from unittest import mock
import pytest
import tests.test_app
from superset import app
from superset.exceptions import SupersetTemplateException
from superset.jinja_context import (
ExtraCache,
filter_values,
get_template_processor,
safe_proxy,
)
from superset.utils import core as utils
from tests.base_tests import SupersetTestCase
class TestJinja2Context(SupersetTestCase):
def test_filter_values_default(self) -> None:
with app.test_request_context():
self.assertEqual(filter_values("name", "foo"), ["foo"])
def test_filter_values_no_default(self) -> None:
with app.test_request_context():
self.assertEqual(filter_values("name"), [])
def test_filter_values_adhoc_filters(self) -> None:
with app.test_request_context(
data={
"form_data": json.dumps(
{
"adhoc_filters": [
{
"clause": "WHERE",
"comparator": "foo",
"expressionType": "SIMPLE",
"operator": "in",
"subject": "name",
}
],
}
)
}
):
self.assertEqual(filter_values("name"), ["foo"])
with app.test_request_context(
data={
"form_data": json.dumps(
{
"adhoc_filters": [
{
"clause": "WHERE",
"comparator": ["foo", "bar"],
"expressionType": "SIMPLE",
"operator": "in",
"subject": "name",
}
],
}
)
}
):
self.assertEqual(filter_values("name"), ["foo", "bar"])
def test_filter_values_extra_filters(self) -> None:
with app.test_request_context(
data={
"form_data": json.dumps(
{"extra_filters": [{"col": "name", "op": "in", "val": "foo"}]}
)
}
):
self.assertEqual(filter_values("name"), ["foo"])
def test_url_param_default(self) -> None:
with app.test_request_context():
self.assertEqual(ExtraCache().url_param("foo", "bar"), "bar")
def test_url_param_no_default(self) -> None:
with app.test_request_context():
self.assertEqual(ExtraCache().url_param("foo"), None)
def test_url_param_query(self) -> None:
with app.test_request_context(query_string={"foo": "bar"}):
self.assertEqual(ExtraCache().url_param("foo"), "bar")
def test_url_param_form_data(self) -> None:
with app.test_request_context(
query_string={"form_data": json.dumps({"url_params": {"foo": "bar"}})}
):
self.assertEqual(ExtraCache().url_param("foo"), "bar")
def test_safe_proxy_primitive(self) -> None:
def func(input: Any) -> Any:
return input
return_value = safe_proxy(func, "foo")
self.assertEqual("foo", return_value)
def test_safe_proxy_dict(self) -> None:
def func(input: Any) -> Any:
return input
return_value = safe_proxy(func, {"foo": "bar"})
self.assertEqual({"foo": "bar"}, return_value)
def test_safe_proxy_lambda(self) -> None:
def func(input: Any) -> Any:
return input
with pytest.raises(SupersetTemplateException):
safe_proxy(func, lambda: "bar")
def test_safe_proxy_nested_lambda(self) -> None:
def func(input: Any) -> Any:
return input
with pytest.raises(SupersetTemplateException):
safe_proxy(func, {"foo": lambda: "bar"})
def test_process_template(self) -> None:
maindb = utils.get_example_database()
sql = "SELECT '{{ 1+1 }}'"
tp = get_template_processor(database=maindb)
rendered = tp.process_template(sql)
self.assertEqual("SELECT '2'", rendered)
def test_get_template_kwarg(self) -> None:
maindb = utils.get_example_database()
s = "{{ foo }}"
tp = get_template_processor(database=maindb, foo="bar")
rendered = tp.process_template(s)
self.assertEqual("bar", rendered)
def test_template_kwarg(self) -> None:
maindb = utils.get_example_database()
s = "{{ foo }}"
tp = get_template_processor(database=maindb)
rendered = tp.process_template(s, foo="bar")
self.assertEqual("bar", rendered)
def test_get_template_kwarg_dict(self) -> None:
maindb = utils.get_example_database()
s = "{{ foo.bar }}"
tp = get_template_processor(database=maindb, foo={"bar": "baz"})
rendered = tp.process_template(s)
self.assertEqual("baz", rendered)
def test_template_kwarg_dict(self) -> None:
maindb = utils.get_example_database()
s = "{{ foo.bar }}"
tp = get_template_processor(database=maindb)
rendered = tp.process_template(s, foo={"bar": "baz"})
self.assertEqual("baz", rendered)
def test_get_template_kwarg_lambda(self) -> None:
maindb = utils.get_example_database()
s = "{{ foo() }}"
tp = get_template_processor(database=maindb, foo=lambda: "bar")
with pytest.raises(SupersetTemplateException):
tp.process_template(s)
def test_template_kwarg_lambda(self) -> None:
maindb = utils.get_example_database()
s = "{{ foo() }}"
tp = get_template_processor(database=maindb)
with pytest.raises(SupersetTemplateException):
tp.process_template(s, foo=lambda: "bar")
def test_get_template_kwarg_module(self) -> None:
maindb = utils.get_example_database()
s = "{{ dt(2017, 1, 1).isoformat() }}"
tp = get_template_processor(database=maindb, dt=datetime)
with pytest.raises(SupersetTemplateException):
tp.process_template(s)
def test_template_kwarg_module(self) -> None:
maindb = utils.get_example_database()
s = "{{ dt(2017, 1, 1).isoformat() }}"
tp = get_template_processor(database=maindb)
with pytest.raises(SupersetTemplateException):
tp.process_template(s, dt=datetime)
def test_get_template_kwarg_nested_module(self) -> None:
maindb = utils.get_example_database()
s = "{{ foo.dt }}"
tp = get_template_processor(database=maindb, foo={"dt": datetime})
with pytest.raises(SupersetTemplateException):
tp.process_template(s)
def test_template_kwarg_nested_module(self) -> None:
maindb = utils.get_example_database()
s = "{{ foo.dt }}"
tp = get_template_processor(database=maindb)
with pytest.raises(SupersetTemplateException):
tp.process_template(s, foo={"bar": datetime})
@mock.patch("superset.jinja_context.HiveTemplateProcessor.latest_partition")
def test_template_hive(self, lp_mock) -> None:
lp_mock.return_value = "the_latest"
db = mock.Mock()
db.backend = "hive"
s = "{{ hive.latest_partition('my_table') }}"
tp = get_template_processor(database=db)
rendered = tp.process_template(s)
self.assertEqual("the_latest", rendered)
@mock.patch("superset.jinja_context.context_addons")
def test_template_context_addons(self, addons_mock) -> None:
addons_mock.return_value = {"datetime": datetime}
maindb = utils.get_example_database()
s = "SELECT '{{ datetime(2017, 1, 1).isoformat() }}'"
tp = get_template_processor(database=maindb)
rendered = tp.process_template(s)
self.assertEqual("SELECT '2017-01-01T00:00:00'", rendered)
@mock.patch("tests.superset_test_custom_template_processors.datetime")
def test_custom_process_template(self, mock_dt) -> None:
"""Test macro defined in custom template processor works."""
mock_dt.utcnow = mock.Mock(return_value=datetime(1970, 1, 1))
db = mock.Mock()
db.backend = "db_for_macros_testing"
tp = get_template_processor(database=db)
sql = "SELECT '$DATE()'"
rendered = tp.process_template(sql)
self.assertEqual("SELECT '{}'".format("1970-01-01"), rendered)
sql = "SELECT '$DATE(1, 2)'"
rendered = tp.process_template(sql)
self.assertEqual("SELECT '{}'".format("1970-01-02"), rendered)
def test_custom_get_template_kwarg(self) -> None:
"""Test macro passed as kwargs when getting template processor
works in custom template processor."""
db = mock.Mock()
db.backend = "db_for_macros_testing"
s = "$foo()"
tp = get_template_processor(database=db, foo=lambda: "bar")
rendered = tp.process_template(s)
self.assertEqual("bar", rendered)
def test_custom_template_kwarg(self) -> None:
"""Test macro passed as kwargs when processing template
works in custom template processor."""
db = mock.Mock()
db.backend = "db_for_macros_testing"
s = "$foo()"
tp = get_template_processor(database=db)
rendered = tp.process_template(s, foo=lambda: "bar")
self.assertEqual("bar", rendered)
def test_custom_template_processors_overwrite(self) -> None:
"""Test template processor for presto gets overwritten by custom one."""
db = mock.Mock()
db.backend = "db_for_macros_testing"
tp = get_template_processor(database=db)
sql = "SELECT '{{ datetime(2017, 1, 1).isoformat() }}'"
rendered = tp.process_template(sql)
self.assertEqual(sql, rendered)
sql = "SELECT '{{ DATE(1, 2) }}'"
rendered = tp.process_template(sql)
self.assertEqual(sql, rendered)
def test_custom_template_processors_ignored(self) -> None:
"""Test custom template processor is ignored for a difference backend
database."""
maindb = utils.get_example_database()
sql = "SELECT '$DATE()'"
tp = get_template_processor(database=maindb)
rendered = tp.process_template(sql)
assert sql == rendered
| 38.338983
| 82
| 0.603802
|
353cfc8853ec5666c7e9408573c9711273ce61b9
| 2,334
|
py
|
Python
|
tests/api_resources/test_webhook_endpoint.py
|
photocrowd/stripe-python
|
7c705e3d41f38f8524e419eb7ea18c1425a4ad89
|
[
"MIT"
] | null | null | null |
tests/api_resources/test_webhook_endpoint.py
|
photocrowd/stripe-python
|
7c705e3d41f38f8524e419eb7ea18c1425a4ad89
|
[
"MIT"
] | null | null | null |
tests/api_resources/test_webhook_endpoint.py
|
photocrowd/stripe-python
|
7c705e3d41f38f8524e419eb7ea18c1425a4ad89
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import, division, print_function
import stripe_modern as stripe
TEST_RESOURCE_ID = "we_123"
class TestWebhookEndpoint(object):
def test_is_listable(self, request_mock):
resources = stripe.WebhookEndpoint.list()
request_mock.assert_requested("get", "/v1/webhook_endpoints")
assert isinstance(resources.data, list)
assert isinstance(resources.data[0], stripe.WebhookEndpoint)
def test_is_retrievable(self, request_mock):
resource = stripe.WebhookEndpoint.retrieve(TEST_RESOURCE_ID)
request_mock.assert_requested(
"get", "/v1/webhook_endpoints/%s" % TEST_RESOURCE_ID
)
assert isinstance(resource, stripe.WebhookEndpoint)
def test_is_creatable(self, request_mock):
resource = stripe.WebhookEndpoint.create(
enabled_events=["charge.succeeded"], url="https://stripe.com"
)
request_mock.assert_requested("post", "/v1/webhook_endpoints")
assert isinstance(resource, stripe.WebhookEndpoint)
def test_is_saveable(self, request_mock):
resource = stripe.WebhookEndpoint.retrieve(TEST_RESOURCE_ID)
resource.enabled_events = ["charge.succeeded"]
resource.save()
request_mock.assert_requested(
"post", "/v1/webhook_endpoints/%s" % TEST_RESOURCE_ID
)
def test_is_modifiable(self, request_mock):
resource = stripe.WebhookEndpoint.modify(
TEST_RESOURCE_ID,
enabled_events=["charge.succeeded"],
url="https://stripe.com",
)
request_mock.assert_requested(
"post", "/v1/webhook_endpoints/%s" % TEST_RESOURCE_ID
)
assert isinstance(resource, stripe.WebhookEndpoint)
def test_is_deletable(self, request_mock):
resource = stripe.WebhookEndpoint.retrieve(TEST_RESOURCE_ID)
resource.delete()
request_mock.assert_requested(
"delete", "/v1/webhook_endpoints/%s" % TEST_RESOURCE_ID
)
assert resource.deleted is True
def test_can_delete(self, request_mock):
resource = stripe.WebhookEndpoint.delete(TEST_RESOURCE_ID)
request_mock.assert_requested(
"delete", "/v1/webhook_endpoints/%s" % TEST_RESOURCE_ID
)
assert resource.deleted is True
| 37.047619
| 73
| 0.680805
|
22ef56785069f58a46cb7a6989fc445b4189af15
| 3,527
|
py
|
Python
|
sdk/lusid/models/operator.py
|
finbourne/lusid-sdk-python-generated-preview
|
9c36c953e8149443a4390ed7f0c04d01211401b6
|
[
"MIT"
] | null | null | null |
sdk/lusid/models/operator.py
|
finbourne/lusid-sdk-python-generated-preview
|
9c36c953e8149443a4390ed7f0c04d01211401b6
|
[
"MIT"
] | null | null | null |
sdk/lusid/models/operator.py
|
finbourne/lusid-sdk-python-generated-preview
|
9c36c953e8149443a4390ed7f0c04d01211401b6
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
LUSID API
FINBOURNE Technology # noqa: E501
The version of the OpenAPI document: 0.11.4425
Contact: info@finbourne.com
Generated by: https://openapi-generator.tech
"""
try:
from inspect import getfullargspec
except ImportError:
from inspect import getargspec as getfullargspec
import pprint
import re # noqa: F401
import six
from lusid.configuration import Configuration
class Operator(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
allowed enum values
"""
EQUALS = "Equals"
NOTEQUALS = "NotEquals"
GREATERTHAN = "GreaterThan"
GREATERTHANOREQUALTO = "GreaterThanOrEqualTo"
LESSTHAN = "LessThan"
LESSTHANOREQUALTO = "LessThanOrEqualTo"
IN = "In"
allowable_values = [EQUALS, NOTEQUALS, GREATERTHAN, GREATERTHANOREQUALTO, LESSTHAN, LESSTHANOREQUALTO, IN] # noqa: E501
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
required_map (dict): The key is attribute name
and the value is whether it is 'required' or 'optional'.
"""
openapi_types = {
}
attribute_map = {
}
required_map = {
}
def __init__(self, local_vars_configuration=None): # noqa: E501
"""Operator - a model defined in OpenAPI"
""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration.get_default_copy()
self.local_vars_configuration = local_vars_configuration
self.discriminator = None
def to_dict(self, serialize=False):
"""Returns the model properties as a dict"""
result = {}
def convert(x):
if hasattr(x, "to_dict"):
args = getfullargspec(x.to_dict).args
if len(args) == 1:
return x.to_dict()
else:
return x.to_dict(serialize)
else:
return x
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
attr = self.attribute_map.get(attr, attr) if serialize else attr
if isinstance(value, list):
result[attr] = list(map(
lambda x: convert(x),
value
))
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], convert(item[1])),
value.items()
))
else:
result[attr] = convert(value)
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, Operator):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, Operator):
return True
return self.to_dict() != other.to_dict()
| 27.992063
| 124
| 0.577545
|
b3767ff5584a2aa19ddddeaed877e4b9847f822b
| 65,070
|
py
|
Python
|
fusionless/core.py
|
Colorbleed/fusionless
|
4ff726c64e40f48383ddb032267587722712aaff
|
[
"BSD-3-Clause"
] | 10
|
2017-08-14T05:07:25.000Z
|
2021-07-01T23:33:36.000Z
|
fusionless/core.py
|
BigRoy/fusionless
|
4ff726c64e40f48383ddb032267587722712aaff
|
[
"BSD-3-Clause"
] | 5
|
2016-09-05T14:04:19.000Z
|
2016-09-14T07:26:00.000Z
|
fusionless/core.py
|
Colorbleed/fusionless
|
4ff726c64e40f48383ddb032267587722712aaff
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Contains the main `PyNode` base class and its derived classes.
### Reference
For a community-built class list of Fusion's built-in classes:
http://www.steakunderwater.com/VFXPedia/96.0.243.189/index8c76.html?title=Eyeon:Script/Reference/Applications/Fusion/Classes
"""
import sys
class PyObject(object):
"""This is the base class for all classes referencing Fusion's classes.
Upon initialization of any PyObject class it checks whether the referenced
data is of the correct type usingbPython's special `__new__` method.
This way we convert the instance to correct class type based on the
internal Fusion type.
The `PyObject` is *fusionless*'s class representation of Fusion's
internal Object class. All the other classes representing Fusion objects
are derived from PyObject.
Example
>>> node = PyObject(comp)
>>> print type(node)
>>> # Comp()
At any time you can access Fusion's python object from the instance using
the `_reference` attribute.
Example
>>> node = PyObject()
>>> reference = node._reference
>>> print reference
"""
_reference = None # reference to PyRemoteObject
_default_reference = None
def __new__(cls, *args, **kwargs):
"""Convert the class instantiation to the correct type.
This is where the magic happens that automatically maps any of the
PyNode objects to the correct class type.
"""
reference = args[0] if args else None
# if argument provided is already of correct class type return it
if isinstance(reference, cls):
return reference
# if no arguments assume reference to default type for cls (if any)
elif reference is None:
if cls._default_reference is not None:
reference = cls._default_reference()
if reference is None:
raise RuntimeError("No default reference for: "
"{0}".format(type(reference).__name__))
else:
raise ValueError("Can't instantiate a PyObject with a "
"reference to None")
# Python crashes whenever you perform `type()` or `dir()` on the
# applications retrieved through PeyeonScript.scriptapp(). As such we
# try to get the attributes first and fall back to type-checking only
# in case of errors.
attrs = None
try:
attrs = reference.GetAttrs()
except AttributeError:
# Check if the reference is a PyRemoteObject.
# Since we don't have access to the class type that fusion returns
# outside of Fusion we use a hack based on its name
if type(reference).__name__ != 'PyRemoteObject':
raise TypeError("Reference is not of type PyRemoteObject "
"but {0}".format(type(reference).__name__))
newcls = None
if attrs:
# Acquire an attribute to check for type (start prefix)
# Comp, Tool, Input, Output, View, etc. all return attributes
# that define its type
data_type = next(iter(attrs))
# Define the new class type
if data_type.startswith("COMP"):
newcls = Comp
elif data_type.startswith("TOOL"):
newcls = Tool
elif data_type.startswith("INP"):
newcls = Input
elif data_type.startswith("OUT"):
newcls = Output
elif data_type.startswith("VIEW"):
newcls = Flow
elif data_type.startswith("FUSION"):
newcls = Fusion
else:
# Image (output value) does not return attributes from GetAttrs()
# so we use some data from the PyRemoteObject
str_data_type = str(reference).split(' ', 1)[0]
if str_data_type == "Image":
newcls = Image
# Ensure we convert to a type preferred by the user
# eg. Tool() would come out as Comp() since no arguments are provided.
# so instead we raise a TypeError() to be clear in those cases.
if cls is not PyObject:
            if newcls is not None and not issubclass(newcls, cls):
raise TypeError("PyObject did not convert to preferred "
"type. '{0}' is not an instance "
"of '{1}'".format(newcls, cls))
# Instantiate class and return
if newcls:
klass = super(PyObject, cls).__new__(newcls)
klass._reference = reference
return klass
return None
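    # A minimal usage sketch of the `__new__` mapping above; it assumes an
    # active Fusion session where "comp" is the injected composition handle
    # and a tool is currently active:
    #
    #   >>> obj = PyObject(comp)              # COMP* attributes detected
    #   >>> isinstance(obj, Comp)
    #   True
    #   >>> tool = PyObject(comp.ActiveTool)  # TOOL* attributes detected
    #   >>> isinstance(tool, Tool)
    #   True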
    def set_attr(self, key, value):
        """Set a single attribute on this object (see `set_attrs`)."""
        self.set_attrs({key: value})
    def get_attr(self, key):
        """Return a single attribute of this object by key."""
        attrs = self.get_attrs()
        return attrs[key]
    def set_attrs(self, attr_values):
        """Set multiple attributes at once from a dict of key-value pairs."""
        self._reference.SetAttrs(attr_values)
    def get_attrs(self):
        """Return all attributes of this object as a dict."""
        return self._reference.GetAttrs()
def set_data(self, name, value):
""" Set persistent data on this object.
Persistent data is a very useful way to store names, dates, filenames,
notes, flags, or anything else, in such a way that they are permanently
associated with this instance of the object, and are stored along with
the object. This data can be retrieved at any time by using
`get_data()`.
The method of storage varies by object:
- Fusion application:
SetData() called on the Fusion app itself will save its data in the
Fusion.prefs file, and will be available whenever that copy of
Fusion is running.
- Objects associated with a composition:
Calling SetData() on any object associated with a Composition will
cause the data to be saved in the .comp file, or in any settings
files that may be saved directly from that object.
- Ephemeral objects not associated with composition:
Some ephemeral objects that are not associated with any
composition and are not otherwise saved in any way, may not have
their data permanently stored at all, and the data will only
persist as long as the object itself does.
.. note::
You can use SetData to add a key called HelpPage to any tool.
Its value can be a URL to a web page (for example a link to a
page on Vfxpedia) and will override this tool's default help when
the user presses F1 (requires Fusion 6.31 or later).
It's most useful for macros.
Example
>>> Comp().set_data('HelpPage',
>>> 'https://github.com/BigRoy/fusionless')
Args:
            name (str): The name of the attribute to set.
As of 5.1, this name can be in "table.subtable" format,
to allow setting persistent data within subtables.
value: This is the value to be recorded in the object's persistent
data. It can be of almost any type.
Returns:
None
"""
self._reference.SetData(name, value)
def get_data(self, name):
""" Get persistent data from this object.
Args:
name (str): The name of the attribute to fetch.
Returns:
The value fetched from the object's persistent data.
It can be of almost any type.
"""
return self._reference.GetData(name)
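    # A minimal round-trip sketch for the persistent data methods above;
    # the key "myproject.shot" is a hypothetical example name:
    #
    #   >>> comp = Comp()
    #   >>> comp.set_data("myproject.shot", "sh010")
    #   >>> comp.get_data("myproject.shot")
    #   'sh010'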
def name(self):
"""The internal Fusion Name as string of the node this PyObject
references.
Returns:
str: A string value containing the internal Name of this node.
"""
return self._reference.Name
def id(self):
"""Returns the ID key as string.
For example the id label of an Input is used to retrieve it from the
Tool.
Returns:
str: Internal ID of the referenced Fusion object
"""
return self._reference.ID
def get_help(self):
"""Returns a formatted string of internal help information to Fusion.
Returns:
str: internal Fusion information
"""
return self._reference.GetHelp()
def get_reg(self):
""" Returns the related Registry instance for this PyObject.
Returns:
Registry: The registry related to this object.
"""
return self._reference.GetReg()
def get_id(self):
"""Returns the internal Fusion ID as string.
.. note:: This uses the internal `GetID()` method on the object
instance that this PyObject references.
Returns:
str: Internal ID of the referenced Fusion object
"""
return self._reference.GetID()
def comp(self):
""" Return the Comp this instance belongs to.
Returns:
Comp: The composition from this instance.
"""
return Comp(self._reference.Comp())
def __getattr__(self, attr):
"""Allow access to Fusion's built-in methods on the reference directly.
.. note::
            The normal behaviour is to raise a very confusing TypeError
            whenever an unknown method is called on the PyRemoteObject, so we
            raise a more explanatory AttributeError when unknown data is
            retrieved.
"""
result = getattr(self._reference, attr)
if result is None:
raise AttributeError("{0} object has no attribute "
"'{1}'".format(self, attr))
return result
def __repr__(self):
return '{0}("{1}")'.format(self.__class__.__name__,
str(self._reference.Name))
# TODO: Implement PyObject.GetApp
# TODO: Implement PyObject.TriggerEvent
class Comp(PyObject):
""" A Comp instance refers to a Fusion composition.
Here you can perform the global changes to the current composition.
"""
# TODO: Implement the rest of the `Comp` methods and its documentations.
@staticmethod
def _default_reference():
"""Fallback for the default reference"""
# this would be accessible within Fusion scripts as "comp"
ref = getattr(sys.modules["__main__"], "comp", None)
if ref is not None:
return ref
# this would be accessible within Fusion's console and is not always
        # present in the locals() or globals(). It seems to be magically
        # served up when requested, so we try to get it that way.
# Note: this doesn't seem to work in Fusion 8+ but worked in older
# versions
try:
return comp
except NameError:
pass
# this would be accessible within Fusion's console (if in globals)
# haven't seen this happen yet.
ref = globals().get("comp", None)
if ref is not None:
return ref
# if the above fails we can try to find an active "fusion" and get
# the currently active composition (fallback method)
try:
fusion = Fusion()
comp = fusion.get_current_comp()
return comp._reference
except RuntimeError:
pass
def get_current_time(self):
""" Returns the current time in this composition.
:return: Current time.
:rtype: int
"""
return self._reference.CurrentTime
def get_tool_list(self, selected=False, node_type=None):
""" Returns the tool list of this composition.
Args:
selected (bool): Whether to return only tools from the current
selection. When False all tools in the composition will be
considered.
node_type (str): If provided filter to only tools of this type.
Returns:
list: A list of Tool instances
"""
args = (node_type,) if node_type is not None else tuple()
return [Tool(x) for x in self._reference.GetToolList(selected,
*args).values()]
def get_selected_tools(self, node_type=None):
"""Returns the currently selected tools.
.. warning::
Fusion does NOT return the selected tool list in the order of
selection!
Returns:
list: A list of selected Tool instances
"""
return self.get_tool_list(True, node_type=node_type)
def current_frame(self):
""" Returns the currently active ChildFrame for this composition.
        .. note::
This does not return the current time, but a UI element.
To get the current time use `get_current_time()`
Returns:
The currently active ChildFrame for this Composition.
"""
        return self._reference.CurrentFrame
def get_active_tool(self):
""" Return active tool.
Returns:
Tool or None: Currently active tool on this comp
"""
tool = self._reference.ActiveTool
return Tool(tool) if tool else None
def set_active_tool(self, tool):
""" Set the current active tool in the composition to the given tool.
If tool is None it ensure nothing is active.
Args:
tool (Tool): The tool instance to make active.
If None provided active tool will be deselected.
Returns:
None
"""
if tool is None: # deselect if None
self._reference.SetActiveTool(None)
return
if not isinstance(tool, Tool):
tool = Tool(tool)
self._reference.SetActiveTool(tool._reference)
def create_tool(self, node_type, attrs=None, insert=False, name=None):
""" Creates a new node in the composition based on the node type.
Args:
node_type (str): The type id of the node to create.
attrs (dict): A dictionary of input values to set.
insert (bool): When True the node gets created and automatically
inserted/connected to the active tool.
name (str): When provided the created node is automatically
renamed to the provided name.
Returns:
Tool: The created Tool instance.
"""
# Fusion internally uses the magic 'position' (-32768, -32768) to trigger an automatic connection and insert
# when creating a new node. So we use that internal functionality when `insert` parameter is True.
args = (-32768, -32768) if insert else tuple()
tool = Tool(self._reference.AddTool(node_type, *args))
if attrs: # Directly set attributes if any provided
tool.set_attrs(attrs)
if name: # Directly set a name if any provided
tool.rename(name)
return tool
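    # A minimal sketch of `create_tool`, assuming an active comp; "Blur" is
    # a built-in Fusion tool id and the name is an arbitrary example:
    #
    #   >>> c = Comp()
    #   >>> blur = c.create_tool("Blur", insert=True, name="MyBlur")
    #   >>> blur.name()
    #   'MyBlur'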
def copy(self, tools):
"""Copy a list of tools to the Clipboard.
The copied Tools can be pasted into the Composition by using its
corresponding `paste` method.
Args:
tools (list): The Tools list to be copied to the clipboard
"""
return self._reference.Copy([tool._reference for tool in tools])
def paste(self, settings=None):
"""Pastes a tool from the Clipboard or a settings table.
Args:
settings (dict or None): If settings dictionary provided it will be
used as the settings table to be copied, instead of using the
Comp's current clipboard.
Returns:
None
"""
args = tuple() if settings is None else (settings,)
return self._reference.Paste(*args)
def lock(self):
"""Sets the composition to a locked state.
Sets a composition to non-interactive ("batch", or locked) mode.
This makes Fusion suppress any dialog boxes which may appear, and
additionally prevents any re-rendering in response to changes to the
controls. A locked composition can be unlocked with the unlock()
function, which returns the composition to interactive mode.
It is often useful to surround a script with Lock() and Unlock(),
especially when adding tools or modifying a composition. Doing this
ensures Fusion won't pop up a dialog to ask for user input, e.g. when
adding a Loader, and can also speed up the operation of the script
since no time will be spent rendering until the comp is unlocked.
For convenience this is also available as a Context Manager as
`fusionless.context.LockComp`.
"""
self._reference.Lock()
def unlock(self):
"""Sets the composition to an unlocked state."""
self._reference.Unlock()
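    # A minimal sketch of the lock/unlock pairing described above, so that
    # no dialogs pop up (e.g. when adding a Loader) while the script runs:
    #
    #   >>> c = Comp()
    #   >>> c.lock()
    #   >>> try:
    #   ...     loader = c.create_tool("Loader")
    #   ... finally:
    #   ...     c.unlock()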
def redo(self, num=1):
"""Redo one or more changes to the composition.
Args:
num (int): Amount of redo changes to perform.
"""
self._reference.Redo(num)
def undo(self, num):
""" Undo one or more changes to the composition.
Args:
num (int): Amount of undo changes to perform.
"""
self._reference.Undo(num)
def start_undo(self, name):
"""Start an undo block.
This should always be paired with an end_undo call.
The StartUndo() function is always paired with an EndUndo() function.
Any changes made to the composition by the lines of script between
StartUndo() and EndUndo() are stored as a single Undo event.
Changes captured in the undo event can be undone from the GUI using
CTRL-Z, or the Edit menu. They can also be undone from script, by
calling the `undo()` method.
.. note::
If the script exits before `end_undo()` is called Fusion will
automatically close the undo event.
Args:
name (str): Specifies the name displayed in the Edit/Undo menu of
the Fusion GUI a string containing the complete path and name
of the composition to be saved.
"""
self._reference.StartUndo(name)
def end_undo(self, keep=True):
"""Close an undo block.
This should always be paired with a start_undo call.
The `start_undo()` is always paired with an `end_undo()` call.
Any changes made to the composition by the lines of script between
`start_undo()` and `end_undo()` are stored as a single Undo event.
Changes captured in the undo event can be undone from the GUI using
CTRL-Z, or the Edit menu. They can also be undone from script, by
calling the `undo()` method.
Specifying 'True' results in the undo event being added to the undo
        stack, and appearing in the appropriate menu. Specifying 'False' will
result in no undo event being created. This should be used sparingly,
as the user (or script) will have no way to undo the preceding
commands.
.. note::
If the script exits before `end_undo()` is called Fusion will
automatically close the undo event.
Args:
keep (bool): Determines whether the captured undo event is to kept
or discarded.
Returns:
None
"""
self._reference.EndUndo(keep)
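    # A minimal sketch of a paired undo block; the event name and the
    # renaming are arbitrary examples:
    #
    #   >>> c = Comp()
    #   >>> c.start_undo("Rename tools")
    #   >>> try:
    #   ...     for tool in c.get_selected_tools():
    #   ...         tool.rename(tool.name() + "_old")
    #   ... finally:
    #   ...     c.end_undo(True)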
def clear_undo(self):
"""Use this function to clear the undo/redo history for the
composition."""
self._reference.ClearUndo()
def save(self, filename=None):
"""Save the composition.
        This function causes the composition to be saved to disk. The filename
        argument must specify a path relative to the filesystem of the Fusion
which is saving the composition. In other words - if system `a` is
using the Save() function to instruct a Fusion on system `b` to save a
composition, the path provided must be valid from the perspective of
system `b`.
Arguments:
filename (str): Full path to save to. When None it will save over
                the current comp path (if already available).
Returns:
bool: Whether saving succeeded
"""
if filename is None and not self.filename():
# When not saved yet we raise an error instead of
# silently failing without explanation
raise ValueError("Can't save comp without filename.")
return self._reference.Save(filename)
def play(self):
"""This function is used to turn on the play control in the playback
controls of the composition.
"""
self._reference.Play()
def stop(self):
"""This function is used to turn off the play control in the playback
controls of the composition.
"""
self._reference.Stop()
def loop(self, mode):
"""This function is used to turn on the loop control in the playback
controls of the composition.
Args:
mode (bool): Enables looping interactive playback.
Returns:
None
"""
self._reference.Loop(mode)
def render(self, wait_for_render, **kwargs):
""" Renders the composition.
Args:
wait_for_render (bool): Whether the script should wait for the
render to complete or continue processing once the render has
begun. Defaults to False.
Kwargs:
start (int): The frame to start rendering at.
Default: Comp's render start settings.
end (int): The frame to stop rendering at.
Default: Comp's render end settings.
high_quality (bool): Render in High Quality (HiQ).
Default True.
render_all (bool): Render all tools, even if not required by a
saver. Default False.
motion_blur (bool): Do motion blur in render, where specified in
tools. Default true.
size_type (int): Resize the output:
-1. Custom (only used by PreviewSavers during
preview render)
0. Use prefs setting
1. Full Size (default)
2. Half Size
3. Third Size
4. Quarter Size
width (int): Width of result when doing a Custom preview
(defaults to pref)
height (int): Height of result when doing a Custom preview
(defaults to pref)
keep_aspect (bool): Maintains the frame aspect when doing a Custom
preview. Defaults to Preview prefs setting.
step_render (bool): Render only 1 out of every X frames ("shoot on
X frames") or render every frame. Defaults to False.
steps (int): If step rendering, how many to step. Default 5.
use_network (bool): Enables rendering with the network.
Defaults to False.
groups (str): Use these network slave groups to render on (when
net rendering). Default "all".
flags (number): Number specifying render flags, usually 0
(the default). Most flags are specified by other means, but a
value of 262144 is used for preview renders.
tool (Tool): A tool to render up to. If this is specified only
sections of the comp up to this tool will be rendered. eg you
could specify comp.Saver1 to only render *up to* Saver1,
ignoring any tools (including savers) after it.
frame_range (str): Describes which frames to render.
(eg "1..100,150..180"). Defaults to "Start".."End"
Returns:
True if the composition rendered successfully, None if it failed
to start or complete.
"""
# convert our 'Pythonic' keyword arguments to Fusion's internal ones.
conversion = {'start': 'Start',
'end': 'End',
'high_quality': 'HiQ',
'render_all': 'RenderAll',
'motion_blur': 'MotionBlur',
'size_type': 'SizeType',
'width': 'Width',
'height': 'Height',
'keep_aspect': 'KeepAspect',
'step_render': 'StepRender',
'steps': 'Steps',
'use_network': 'UseNetwork',
'groups': 'Groups',
                      'flags': 'Flags',
                      'tool': 'Tool',
'frame_range': 'FrameRange'}
for key, new_key in conversion.iteritems():
if key in kwargs:
value = kwargs.pop(key)
kwargs[new_key] = value
# use our required argument
required_kwargs = {'Wait': wait_for_render}
kwargs.update(required_kwargs)
return self._reference.Render(**kwargs)
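    # A minimal sketch of the keyword conversion above; the frame range and
    # quality flag are arbitrary example values:
    #
    #   >>> c = Comp()
    #   >>> c.render(wait_for_render=True, start=1, end=100,
    #   ...          high_quality=False)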
    def render_range(self, wait_for_render, start, end, steps=1, **kwargs):
""" A render that specifies an explicit render range.
Args:
wait_for_render (bool): Whether the script should wait for the render to complete or continue processing
once the render has begun. Defaults to False
start (int): The frame to start rendering at.
end (int): The frame to stop rendering at.
steps (int): If step rendering, how many to step. Default 1.
Kwargs:
See `Comp.render()` method.
Returns:
True if the composition rendered successfully, None if it failed to start or complete.
"""
range_kwargs = {'start': start,
'end': end,
'steps': steps}
kwargs.update(range_kwargs)
        return self.render(wait_for_render, **kwargs)
def run_script(self, filename):
""" Run a script within the composition's script context
Use this function to run a script in the composition environment.
This is similar to launching a script from the comp's Scripts menu.
The script will be started with 'fusion' and 'composition' variables set to the Fusion and currently active
Composition objects. The filename given may be fully specified, or may be relative to the comp's Scripts: path.
Since version 6.3.2 you can run Python and eyeonScripts.
Fusion supports .py .py2 and .py3 extensions to differentiate python script versions.
Arguments:
filename (str): The filename of a script to be run in the
composition environment.
"""
self._reference.RunScript(filename)
def is_rendering(self):
""" Returns True if the comp is busy rendering.
Use this method to determine whether a composition object is currently rendering.
It will return True if it is playing, rendering, or just rendering a tool after trying to view it.
Returns:
bool: Whether composition is currently rendering
"""
return self._reference.IsRendering()
def is_playing(self):
""" Returns True if the comp is being played.
Use this method to determine whether a composition object is currently playing.
Returns:
bool: Whether comp is currently being played.
"""
return self._reference.IsPlaying()
def is_locked(self):
""" Returns True if the comp is locked.
Use this method to see whether a composition is locked or not.
Returns:
bool: Whether comp is currently locked.
"""
        return self._reference.GetAttrs()['COMPB_Locked']
def filename(self):
"""Return the current file path of the composition.
Returns:
str: Full path to current comp. (empty string if not saved yet)
"""
return self._reference.GetAttrs()['COMPS_FileName']
def __repr__(self):
return '{0}("{1}")'.format(self.__class__.__name__, self.filename())
class Tool(PyObject):
"""A Tool is a single operator/node in your composition.
You can use this object to perform changes to a single tool, make
connections with another or query information. For example renaming,
deleting, connecting and retrieving its inputs/outputs.
"""
def get_pos(self):
"""Return the X and Y position of this tool in the FlowView.
Returns:
list(float, float): The X and Y coordinate of the tool.
"""
flow = self.comp().current_frame().FlowView
# possible optimization: self._reference.Comp.CurrentFrame.FlowView
return flow.GetPosTable(self._reference).values()
def set_pos(self, pos):
"""Reposition this tool.
Arguments:
pos (list(float, float)): The X and Y coordinate to apply.
Returns:
None
"""
flow = self.comp().current_frame().FlowView
# possible optimization: self._reference.Comp.CurrentFrame.FlowView
flow.SetPos(self._reference, *pos)
# region inputs
def main_input(self, index):
"""Returns the main (visible) Input knob of the tool, by index.
Note:
The index starts at 1!
Arguments:
index (int): numerical index of the knob (starts at 1)
Returns:
            Input: input at the given index.
"""
return Input(self._reference.FindMainInput(index))
def input(self, id):
"""Returns an Input by ID.
Arguments:
id (str): ID name of the input.
Returns:
Input: input at the given index.
"""
return Input(self._reference[id])
def inputs(self):
"""Return all Inputs of this Tools
Returns:
list: inputs of the tool.
"""
return [Input(x) for x in self._reference.GetInputList().values()]
# endregion
# region outputs
def main_output(self, index):
""" Returns the main (visible) Output knob of the tool, by index.
Note:
The index starts at 1!
Arguments:
index (int): numerical index of the knob (starts at 1)
Returns:
            Output: output at the given index.
"""
return Output(self._reference.FindMainOutput(index))
def output(self, id):
""" Returns the Output knob by ID.
Arguments:
id (str): ID name of the output.
Returns:
            Output: The resulting output, or None when not found.
"""
# TODO: Optimize: there must be a more optimized way than this.
        return next((x for x in self.outputs() if x.id() == id), None)
def outputs(self):
""" Return all Outputs of this Tools """
return [Output(x) for x in self._reference.GetOutputList().values()]
# endregion
# region connections
def connect_main(self, tool, from_index=1, to_index=1):
"""Helper function that quickly connects main outputs to another
tool's main inputs.
Arguments:
tool (Tool): The other tool to connect to.
from_index (int): Index of main output on this instance.
(index start at 1)
to_index (int): Index of main input on the other tool.
(index start at 1)
Returns:
None
"""
if not isinstance(tool, Tool):
tool = Tool(tool)
        id = tool._reference.FindMainInput(to_index).ID
        tool._reference[id] = self._reference.FindMainOutput(from_index)  # connect
def disconnect(self, inputs=True, outputs=True):
"""Disconnect all inputs and outputs of this tool.
Arguments:
inputs (bool): Whether to disconnect all inputs
outputs (bool): Whether to disconnect all outputs
Returns:
None
"""
if inputs:
for input in self.inputs():
input.disconnect()
if outputs:
for output in self.outputs():
output.disconnect()
def connections_iter(self, inputs=True, outputs=True):
"""Yield each Input and Output connection for this Tool instance.
Each individual connection is yielded in the format: `(Output, Input)`
Arguments:
inputs (bool): Whether to include inputs of this Tool instance.
outputs (bool): Whether to include outputs of this Tool instance.
Yields:
(Output, Input): representing a connection to or from this Tool.
"""
if inputs:
for input in self.inputs():
connected_output = input.get_connected_output()
if connected_output:
yield (connected_output, input)
if outputs:
for output in self.outputs():
connected_inputs = output.get_connected_inputs()
if connected_inputs:
for connected_input in connected_inputs:
yield (output, connected_input)
def connections(self, inputs=True, outputs=True):
"""Return all Input and Output connections of this Tools.
Each individual connection is a 2-tuple in the list in the format:
`(Output, Input)`
For example:
`[(Output, Input), (Output, Input), (Output, Input)]`
Args:
inputs (bool): If True include the inputs of this Tool, else they
are excluded.
outputs (bool): If True include the outputs of this Tool, else they
are excluded.
Returns:
A list of 2-tuples (Output, Input) representing each connection to
or from this Tool.
"""
return list(self.connections_iter(inputs=inputs, outputs=outputs))
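    # A minimal sketch that walks the connections of the active tool; it
    # assumes a comp with a connected, active tool (Python 2 print to match
    # this module):
    #
    #   >>> tool = Comp().get_active_tool()
    #   >>> for output, input in tool.connections():
    #   ...     print output.tool().name(), "->", input.tool().name()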
# endregion
def rename(self, name):
"""Sets the name for this Tool to `name`.
Args:
name (str): The new name to change to.
"""
self._reference.SetAttrs({'TOOLB_NameSet': True, 'TOOLS_Name': name})
def clear_name(self):
"""Clears user-defined name reverting to automated internal name."""
self._reference.SetAttrs({'TOOLB_NameSet': False, 'TOOLS_Name': ''})
def delete(self):
"""Removes the tool from the composition.
.. note::
This also releases the handle to the Fusion Tool object,
setting it to None. As such it invalidates this Tool instance.
"""
self._reference.Delete()
def refresh(self):
"""Refreshes the tool, showing updated user controls.
.. note::
Internally calling Refresh in Fusion will invalidate the handle to
internal object this tool references. You'd have to save the new
handle that is returned (even though the documentation says nothing
is returned). Calling this function on this Tool will invalidate
other Tool instances referencing this same object. But it will
update the reference in this instance on which the function call
is made.
"""
new_ref = self._reference.Refresh()
self._reference = new_ref
    def parent(self):
        """Return the parent Group this Tool belongs to, if any."""
        parent = self._reference.ParentTool
        return Tool(parent) if parent else None
def save_settings(self, path=None):
"""Saves the tool's settings to a dict, or to a .setting file
specified by the path argument.
Arguments:
path (str): The path to save the .setting file.
Returns:
bool/dict: If a path is given, the tool's settings will be saved
to that file, and a boolean is returned to indicate success.
If no path is given, SaveSettings() will return a table of the
tool's settings instead.
"""
args = tuple() if path is None else (path,)
return self._reference.SaveSettings(*args)
def load_settings(self, settings):
"""Loads .setting files or settings dict into the tool.
This is potentially useful for any number of applications, such as
loading curve data into fusion, for which there is currently no simple
way to script interactively in Fusion. Beyond that, it could possibly
be used to sync updates to tools over project management systems.
Args:
settings (str, dict): The path to a valid .setting file or a
settings dict. A valid dict of settings, such as produced by
SaveSettings() or read from a .setting file.
Returns:
None
"""
self._reference.LoadSettings(settings)
def comp(self):
""" Return the Comp this Tool associated with. """
return Comp(self._reference.Composition)
def get_text_color(self):
"""Gets the Tool's text color.
Returns:
dict: The Tool's current text color.
"""
return self._reference.TextColor
def set_text_color(self, color):
""" Sets the Tool's text color.
Color should be assigned as a dictionary holding the RGB values between 0-1, like:
{"R": 1, "G": 0, "B": 0}
Example
>>> tool.set_text_color({'R':0.5, 'G':0.1, 'B': 0.0})
>>> tool.set_text_color(None)
"""
self._reference.TextColor = color
def get_tile_color(self):
""" Gets the Tool's tile color.
Returns:
dict: The Tool's current tile color.
"""
return self._reference.TileColor
def set_tile_color(self, color):
""" Sets the Tool's tile color.
Example
>>> tool.set_tile_color({'R':0.5, 'G':0.1, 'B': 0.0})
>>> tool.set_tile_color(None) # reset
"""
self._reference.TileColor = color
def get_keyframes(self):
"""Return a list of keyframe times, in order, for the tool only.
These are NOT the keyframes on Inputs of this tool!
Any animation splines or modifiers attached to the tool's inputs are
not considered.
.. note::
Most Tools will return only the start and end of their valid
region. Certain types of tools and modifiers such as BezierSplines
may return a longer list of keyframes.
Returns:
list: List of int values indicating frames.
"""
keyframes = self._reference.GetKeyFrames()
if keyframes:
return keyframes.values()
else:
return None
def __eq__(self, other):
if isinstance(other, Tool):
            return self.name() == other.name()
return False
def __hash__(self):
return hash(self.name())
class Flow(PyObject):
"""The Flow is the node-based overview of you Composition.
Fusion's internal name: `FlowView`
"""
def set_pos(self, tool, pos):
"""Reposition the given Tool to the position in the FlowView.
Args:
tool (Tool): The tool to reposition in the FlowView.
pos (tuple): The x and y co-ordinates to apply.
Returns:
None
"""
if not isinstance(tool, Tool):
tool = Tool(tool)
self._reference.SetPos(tool._reference, pos[0], pos[1])
def get_pos(self, tool):
"""return the X and Y position of a tool's tile in the FlowView.
Args:
tool (Tool): The tool to return the position of.
Returns:
tuple: The x and y co-ordinates of the tool.
"""
if not isinstance(tool, Tool):
tool = Tool(tool)
return self._reference.GetPos(tool._reference)
def queue_set_pos(self, tool, pos):
""" Queues the moving of a tool to a new position.
This function improves performance if you want to move a lot of tools
at the same time. For big graphs and loops this is preferred over
`set_pos` and `get_pos`.
Added in Fusion 6.1: FlowView::QueueSetPos()
Example
>>> c = Comp()
>>> tools = c.get_selected_tools()
>>> flow = c.flow()
>>> for i, tool in enumerate(tools):
>>> pos = [i, 0]
>>> flow.queue_set_pos(tool, pos)
        >>> flow.flush_set_pos_queue()  # here the tools are actually moved
"""
return self._reference.QueueSetPos(tool._reference, pos[0], pos[1])
def flush_set_pos_queue(self):
"""Moves all tools queued with `queue_set_pos`.
This function improves performance if you want to move a lot of tools
at the same time. For big graphs and loops this is preferred over
`set_pos` and `get_pos`.
Added in Fusion 6.1: FlowView::FlushSetPosQueue()
Returns:
None
"""
return self._reference.FlushSetPosQueue()
def get_scale(self):
"""Returns the current scale of the FlowView.
Returns:
float: value indicating the current scale of the FlowView.
"""
return self._reference.GetScale()
def set_scale(self, scale):
""" Rescales the FlowView to the amount specified.
A value of 1 for the scale argument would set the FlowView to 100%.
While a value of 0.1 would set it to 10% of the default scale.
Args:
scale (float): The scaling to apply to this FlowView.
"""
return self._reference.SetScale(scale)
def frame_all(self):
"""Frames all tools so they fit in the view.
This function will rescale and reposition the FlowView to contain all
tools.
Returns:
None
"""
self._reference.FrameAll()
def select(self, tool=None, state=True):
"""Select, deselect or clear the selection of Tools in this Flow.
        This function will add or remove the tool specified in its first
        argument from the current tool selection set. The second argument
should be set to False to remove the tool from the selection, or to
True to add it. If called with no arguments, the function will clear
all tools from the current selection.
Args:
tool (Tool): The tool to add or remove.
state (bool): When False the tools will be removed from selection,
                otherwise the tools will be added to the current selection.
Returns:
None
"""
if tool is None:
return self._reference.Select() # clear selection
elif not isinstance(tool, Tool):
tool = Tool(tool)
self._reference.Select(tool._reference, state)
class Link(PyObject):
"""The Link is the base class for Fusion's Input and Output types"""
def tool(self):
""" Return the Tool this Link belongs to """
return Tool(self._reference.GetTool())
class Input(Link):
"""An Input is any attribute that can be set or connected to by the user
on the incoming side of a tool.
.. note::
These are the input knobs in the Flow view, but also the input values
inside the Control view for a Tool.
Because of the way node-graphs work any value that goes into a Tool
required to process the information should result (in most scenarios) in a
reproducible output under the same conditions.
"""
def __current_time(self):
# optimize over going through PyNodes (??)
# instead of: time = self.tool().comp().get_current_time()
return self._reference.GetTool().Composition.CurrentTime
def get_value(self, time=None):
"""Get the value of this Input at the given time.
Arguments:
time (float): The time to set the value at. If None provided the
current time is used.
Returns:
A value directly from the internal input object.
"""
if time is None:
time = self.__current_time()
return self._reference[time]
def set_value(self, value, time=None):
"""Set the value of the input at the given time.
When an attribute is an enum type it will try to perform a correct
conversion when the Input requires a float value and a string was
given. Similarly when a float was given and a string id would be
        required it will perform a correct conversion.
        This also allows setting checkboxes and alike using a boolean value
instead of requiring an integer or float input value. (This will
convert it as required by the input.)
Arguments:
time (float): The time to set the value at. If None provided the
                current time is used.
"""
if time is None:
time = self.__current_time()
attrs = self.get_attrs()
data_type = attrs['INPS_DataType']
        # Setting boolean values doesn't work, so instead set an integer
        # value. This allows setting checkboxes with True/False.
if isinstance(value, bool):
value = int(value)
# Convert float/integer to enum if datatype == "FuID"
elif isinstance(value, (int, float)) and data_type == "FuID":
# We must compare it with a float value. We add 1 to interpret
# as zero based indices. (Zero would be 1.0 in the fusion id
# dictionary, etc.)
v = float(value) + 1.0
enum_keys = ("INPIDT_MultiButtonControl_ID",
"INPIDT_ComboControl_ID")
for enum_key in enum_keys:
if enum_key in attrs:
enum = attrs[enum_key]
if v in enum:
value = enum[v]
break
# Convert enum string value to its corresponding integer value
elif (isinstance(value, basestring) and
data_type != "Text" and
data_type != "FuID"):
enum_keys = ("INPST_MultiButtonControl_String",
"INPIDT_MultiButtonControl_ID",
"INPIDT_ComboControl_ID")
for enum_key in enum_keys:
if enum_key in attrs:
enum = dict((str(key), value) for value, key in
attrs[enum_key].items())
if value in enum:
value = enum[str(value)] - 1.0
break
self._reference[time] = value
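    # A minimal sketch of the conversions above, assuming an active tool;
    # the input ids and values are illustrative:
    #
    #   >>> tool = Comp().get_active_tool()
    #   >>> tool.input("Blend").set_value(0.5)        # plain float
    #   >>> tool.input("ProcessRed").set_value(True)  # bool converted to int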
def connect_to(self, output):
"""Connect an Output as incoming connection to this Input.
.. note::
This function behaves similarly to right clicking on a property,
selecting Connect To, and selecting the property you wish to
connect the input to. In that respect, if you try to connect
non-similar data types (a path's value to a polygon's level, for
instance) it will not connect the values. Such an action will
yield NO error message.
Args:
output (Output): The output that should act as incoming connection.
Returns:
None
"""
# disconnect
if output is None:
self._reference.ConnectTo(None)
return
# or connect
if not isinstance(output, Output):
output = Output(output)
self._reference.ConnectTo(output._reference)
def disconnect(self):
"""Disconnect the Output this Input is connected to, if any."""
self.connect_to(None)
def get_connected_output(self):
""" Returns the output that is connected to a given input.
Returns:
Output: The Output this Input is connected to if any, else None.
"""
other = self._reference.GetConnectedOutput()
if other:
return Output(other)
def get_expression(self):
"""Return the expression string shown in the Input's Expression field.
Returns:
str: the simple expression string from a given input if any else
an empty string is returned.
"""
return self._reference.GetExpression()
def set_expression(self, expression):
"""Set the Expression field for the Input to the given string.
Args:
expression (str): An expression string.
"""
self._reference.SetExpression(expression)
def get_keyframes(self):
"""Return the times at which this Input has keys.
Returns:
list: List of int values indicating frames.
"""
keyframes = self._reference.GetKeyFrames()
if keyframes:
return keyframes.values()
else:
return None
def remove_keyframes(self, time=None, index=None):
"""Remove the keyframes on this Input (if any)
"""
# TODO: Implement Input.remove_keyframes()
raise NotImplementedError()
def is_connected(self):
"""Return whether the Input is an incoming connection from an Output
Returns:
bool: True if connected, otherwise False
"""
return bool(self._reference.GetConnectedOutput())
def data_type(self):
"""Returns the type of Parameter
For example the (Number, Point, Text, Image) types this Input accepts.
Returns:
str: Type of parameter.
"""
        return self._reference.GetAttrs()['INPS_DataType']
# TODO: implement `Input.WindowControlsVisible`
# TODO: implement `Input.HideWindowControls`
# TODO: implement `Input.ViewControlsVisible`
# TODO: implement `Input.HideViewControls`
class Output(Link):
"""Output of a Tool.
An Output is any attributes that is a result from a Tool that can be
connected as input to another Tool.
.. note:: These are the output knobs in the Flow view.
"""
def get_value(self, time=None):
"""Return the value of this Output at the given time.
If time is provided the value is evaluated at that specific time,
otherwise current time is used.
Args:
time (float): Time at which to evaluate the Output.
If None provided current time will be used.
Returns:
The value of the output at the given time.
"""
return self.get_value_attrs(time=time)[0]
def get_value_attrs(self, time=None):
"""Returns a tuple of value and attrs for this Output.
`value` may be None, or a variety of different types:
Number - returns a number
Point - returns a table with X and Y members
Text - returns a string
Clip - returns the filename string
Image - returns an Image object
`attrs` is a dictionary with the following entries:
Valid - table with numeric Start and End entries
DataType - string ID for the parameter type
TimeCost - time taken to render this parameter
Args:
time (float): Time at which to evaluate the Output.
If None provided current time will be used.
Returns:
tuple: Value and attributes of this output at the given time.
"""
if time is None:
# optimize over going through PyNodes (??)
time = self._reference.GetTool().Composition.CurrentTime
# time = self.tool().comp().get_current_time()
return self._reference.GetValue(time)
def get_time_cost(self, time=None):
""" Return the time taken to render this parameter at the given time.
.. note:: This will evaluate the output and could be computationally
expensive.
Args:
time (float): Time at which to evaluate the Output.
If None provided current time will be used.
Returns:
float: Time taken to render this Output.
"""
return self.get_value_attrs(time=time)[1]['TimeCost']
def disconnect(self, inputs=None):
"""Disconnect Inputs this Output is connected to.
Args:
            inputs (list or None): The inputs to disconnect, or all of the
                current connections if None is provided.
"""
if inputs is None: # disconnect all (if any)
inputs = self.get_connected_inputs()
else: # disconnect a subset of the connections (if valid)
# ensure iterable
if not isinstance(inputs, (list, tuple)):
inputs = [inputs]
# ensure all are Inputs
inputs = [Input(input) for input in inputs]
# ensure Inputs are connected to this output
connected_inputs = set(self._reference.GetConnectedInputs().values())
inputs = [input for input in inputs if input._reference in connected_inputs]
for input in inputs:
input._reference.ConnectTo(None)
def get_connected_inputs(self):
""" Returns a list of all Inputs that are connected to this Output.
:return: List of Inputs connected to this Output.
:rtype: list
"""
return [Input(x) for x in self._reference.GetConnectedInputs().values()]
def get_dod(self):
"""Returns the Domain of Definition for this output.
Returns:
[int, int, int, int]: The domain of definition for this output in
the as list of integers ordered: left, bottom, right, top.
"""
return self._reference.GetDoD()
# region connections
def connect_to(self, input):
"""Connect this Output to another Input.
This connection gains an outgoing connection for this tool.
.. note::
This function behaves similarly to right clicking on a
property, selecting Connect To, and selecting
the property you wish to connect the input to. In that
respect, if you try to connect non-similar
data types (a path's value to a polygon's level,
for instance) it will not connect the values.
Such an action will yield NO error message.
Args:
input (Input): The input to connect to.
"""
if not isinstance(input, Input):
input = Input(input)
input.connect_to(self)
def is_connected(self):
"""Return whether the Output has any outgoing connection to any Inputs.
Returns:
bool: True if connected, otherwise False
"""
return any(self._reference.GetConnectedInputs().values())
def data_type(self):
"""Returns the type of Parameter (e.g. Number, Point, Text, Image)
this Output accepts.
Returns:
str: Type of parameter.
"""
        return self._reference.GetAttrs()['OUTS_DataType']
# TODO: implement `Output.GetValueMemBlock` Retrieve the Output's value as a MemBlock
# TODO: implement `Output.EnableDiskCache` Controls disk-based caching
# TODO: implement `Output.ClearDiskCache` Clears frames from the disk cache
# TODO: implement `Output.ShowDiskCacheDlg` Displays the Cache-To-Disk dialog for user interaction
class Parameter(PyObject):
""" Base class for all parameter (values) types """
pass
class Image(Parameter):
""" An Image parameter object.
For example the Image output from a Tool.
"""
def width(self):
""" Return the width in pixels for the current output, this could be for the current proxy resolution.
:return: Actual horizontal size, in pixels
"""
return self._reference.Width
def height(self):
""" Return the height in pixels for the current output, this could be for the current proxy resolution.
:return: Actual horizontal size, in pixels
"""
return self._reference.Height
def original_width(self):
"""
:return: Unproxied horizontal size, in pixels.
"""
return self._reference.OriginalWidth
def original_height(self):
"""
:return: Unproxied vertical size, in pixels.
"""
return self._reference.OriginalHeight
def depth(self):
""" Image depth indicator (not in bits)
:return: Image depth
"""
return self._reference.Depth
def x_scale(self):
"""
:return: Pixel X Aspect
"""
return self._reference.XScale
def y_scale(self):
"""
:return: Pixel Y Aspect
"""
return self._reference.YScale
def x_offset(self):
"""Returns x-offset in pixels
Returns:
int: X Offset, in pixels
"""
return self._reference.XOffset
def y_offset(self):
"""Returns y-offset in pixels
Returns:
int: Y Offset, in pixels
"""
return self._reference.YOffset
def field(self):
"""Returns field indicator.
Returns:
int: Field indicator
"""
return self._reference.Field
def proxy_scale(self):
"""Returns image proxy scale multiplier.
Returns:
float: Image proxy scale multiplier.
"""
return self._reference.ProxyScale
class TransformMatrix(Parameter):
pass
class Fusion(PyObject):
"""The Fusion application.
Contains all functionality to interact with the global Fusion sessions.
For example this would allow you to retrieve the available compositions
that are currently open or open a new one.
"""
# TODO: Implement Fusion methods: http://www.steakunderwater.com/VFXPedia/96.0.243.189/index5522.html?title=Eyeon:Script/Reference/Applications/Fusion/Classes/Fusion
@staticmethod
def _default_reference():
"""Fallback for the default reference"""
# this would be accessible within Fusion as "fusion" in a script
ref = getattr(sys.modules["__main__"], "fusion", None)
if ref is not None:
return ref
# this would be accessible within Fusion's console
ref = globals().get("fusion", None)
if ref is not None:
return ref
def new_comp(self):
"""Creates a new composition and sets it as the currently active one"""
# TODO: Need fix: During NewComp() Fusion seems to be temporarily unavailable
self._reference.NewComp()
comp = self._reference.GetCurrentComp()
return Comp(comp)
def get_current_comp(self):
"""Return the currently active comp in this Fusion instance"""
comp = self._reference.GetCurrentComp()
return Comp(comp)
@property
def build(self):
"""Returns the build number of the current Fusion instance.
Returns:
float: Build number
"""
return self._reference.Build
@property
def version(self):
"""Returns the version of the current Fusion instance.
Returns:
float: Version number
"""
return self._reference.Version
def __repr__(self):
return '{0}("{1}")'.format(self.__class__.__name__,
str(self._reference))
class AskUserDialog(object):
"""Dialog to ask users a question through a prompt gui.
Example
>>> dialog = AskUserDialog("Question")
    >>> dialog.add_text("Simple message")
>>> dialog.add_position("Center", default=(0.2, 0.8))
>>> dialog.add_position("Size")
>>> result = dialog.show()
"""
# Conversion from nice name attributes to Fusion named attributes
CONVERSION = {
"name": "Name", # (str) All controls
"default": "Default", # (numeric) Checkbox, Dropdown, Multibutton
"min": "Min", # (numeric) Numeric controls
"max": "Max", # (numeric) Numeric controls
"precision": "DisplayedPrecision", # (int) Numeric controls
"integer": "Integer", # (bool) Numeric controls
"options": "Options", # (dict) Options table
"linear": "Linear", # (int) Text
"wrap": "Wrap", # (bool) Text
"read_only": "ReadOnly", # (bool) Text
"font_name": "FontName", # (str) Text
"font_size": "FontSize", # (float) Text
"save": "Save", # (bool) FileBrowse, PathBrowse, ClipBrowse
"low_name": "LowName", # (str) Slider
"high_name": "HighName", # (str) Slider
"num_across": "NumAcross" # (int) Checkbox
}
def __init__(self, title=""):
self._items = list()
self.title = title
def set_title(self, title):
self.title = title
def _add(self,
type,
label,
**kwargs):
"""Utility method for adding any type of control to the dialog"""
item = {
1: label,
2: type
}
# Add the optional keys (kwargs) and convert to the original Fusion
# names for the variables
for key, value in kwargs.items():
if key in self.CONVERSION:
item[self.CONVERSION[key]] = value
else:
raise TypeError("Invalid argument for a Dialog control: "
"{0}".format(key))
self._items.append(item)
def add_text(self, label, **kwargs):
self._add("Text", label, **kwargs)
def add_file_browse(self, label, **kwargs):
self._add("FileBrowse", label, **kwargs)
def add_path_browse(self, label, **kwargs):
self._add("PathBrowse", label, **kwargs)
def add_clip_browse(self, label, **kwargs):
self._add("ClipBrowse", label, **kwargs)
def add_slider(self, label, **kwargs):
self._add("Slider", label, **kwargs)
def add_checkbox(self, label, **kwargs):
self._add("Checkbox", label, **kwargs)
def add_position(self, label, **kwargs):
"""Add a X & Y coordinaties control
Displays a pair of edit boxes used to enter X & Y coordinates
for a center control or other position value. The default value
of this control is a table with two values, one for the X value and
one for the Y. The control returns a table of values.
"""
if "default" in kwargs:
default = kwargs["default"]
# A tuple does not work as a default value for a position control
# so we convert it to a list to fix it.
if isinstance(default, tuple):
kwargs["default"] = list(default)
self._add("Position", label, **kwargs)
def add_screw(self, label, **kwargs):
"""Add the standard Fusion thumbnail or screw control.
This control is almost identical to a slider in almost all respects
except that its range is infinite, and so it is well suited for
angle controls and other values without practical limits.
"""
self._add("Screw", label, **kwargs)
def add_dropdown(self, label, **kwargs):
"""Add a dropdown combobox.
Displays the standard Fusion drop down menu for selecting from a
        list of options. This control exposes an option called Options,
        which takes a table containing the values for the drop down menu.
Note that the index for the Options table starts at 0, not 1 like is
common in most FusionScript tables. So, if you wish to set a default
for the first entry in a list, you would use Default=0, for the
second Default=1, and so on.
"""
self._add("Dropdown", label, **kwargs)
def add_multibutton(self, label, **kwargs):
"""Add a multibutton.
Displays a Multibutton, where each option is drawn as a button.
        The same Options table is used as in a Dropdown:
        a set of buttons acting like a combobox (choice).
"""
self._add("Multibutton", label, **kwargs)
def show(self):
"""Show the dialog
Returns:
dict: The state of the controls in the UI
"""
data = dict((i, value) for i, value in enumerate(self._items))
return Comp()._reference.AskUser(self.title, data)
class Registry(PyObject):
"""Represents a Registry type of object within Fusion"""
pass
| 33.165138
| 169
| 0.594206
|
19fafb3626d183034ab96b1a19ae8e7a0d021950
| 16,530
|
py
|
Python
|
ombott/ombott.py
|
macneiln/ombott
|
f18f6e0e639f20efb63b137edbab8c8b3871d354
|
[
"MIT"
] | null | null | null |
ombott/ombott.py
|
macneiln/ombott
|
f18f6e0e639f20efb63b137edbab8c8b3871d354
|
[
"MIT"
] | null | null | null |
ombott/ombott.py
|
macneiln/ombott
|
f18f6e0e639f20efb63b137edbab8c8b3871d354
|
[
"MIT"
] | null | null | null |
import sys
import json
import functools
from traceback import format_exc
import itertools
from urllib.parse import urljoin
from .common_helpers import (
html_escape,
tob,
cached_property,
WSGIFileWrapper,
SimpleConfig,
)
from .router import HookTypes, RadiRouter
from .request import Request, errors as request_errors
from .response import Response, HTTPResponse, HTTPError
from . import server_adapters
from . import error_render
__version__ = "0.0.13"
HTTP_METHODS = 'DELETE GET HEAD OPTIONS PATCH POST PUT'.split()
@SimpleConfig.keys_holder
class DefaultConfig(SimpleConfig):
catchall = True
debug = False
domain_map = {}
# request specific
app_name_header = ''
errors_map = {
request_errors.RequestError: HTTPError(400, 'Bad request'),
request_errors.BodySizeError: HTTPError(413, 'Request entity too large'),
request_errors.BodyParsingError: HTTPError(400, 'Error while parsing chunked transfer body'),
}
max_body_size = None
max_memfile_size = 100 * 1024
allow_x_script_name = False
class _closeiter:
''' This only exists to be able to attach a .close method to iterators that
do not support attribute assignment (most of itertools). '''
def __init__(self, iterator, close=None):
self.iterator = iterator
self.close_callbacks = close if isinstance(close, (list, tuple)) else [close]
def __iter__(self):
return iter(self.iterator)
def close(self):
[cb() for cb in self.close_callbacks]
def run(app=None, server='wsgiref', host='127.0.0.1', port=8080,
quiet=False, **kwargs):
_stderr = sys.stderr.write
try:
app = app or default_app()
if not callable(app):
raise ValueError("Application is not callable: %r" % app)
server_names = server_adapters.server_names
if server in server_names:
server = server_names.get(server)
server = server(host=host, port=port, **kwargs)
server.quiet = server.quiet or quiet
if not server.quiet:
_stderr("Ombott v%s server starting up (using %s)...\n" % (__version__, repr(server)))
_stderr(f"Listening on http{'s' if kwargs.get('certfile', None) else ''}://{server.host}:{server.port}/\n")
_stderr("Hit Ctrl-C to quit.\n\n")
server.run(app)
except KeyboardInterrupt:
pass
except (SystemExit, MemoryError):
raise
except: # noqa
raise
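# A minimal usage sketch for `run` above; the server name, host and port are
# arbitrary example values:
#
#   app = Ombott()
#   run(app, server='wsgiref', host='127.0.0.1', port=8080)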
def with_method_shortcuts(methods):
def injector(cls):
for m in methods:
setattr(cls, m.lower(), functools.partialmethod(cls.route, method=m))
return cls
return injector
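def _method_shortcuts_demo():
    # A minimal, self-contained sketch (not part of the public API): the
    # injector above turns each HTTP verb into a `functools.partialmethod`
    # of `route`, so `obj.get(rule)` equals `obj.route(rule, method='GET')`.
    @with_method_shortcuts(['GET'])
    class _Demo:
        def route(self, rule=None, method='GET', callback=None):
            return (rule, method)
    assert _Demo().get('/ping') == ('/ping', 'GET')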
###############################################################################
# Application Object ###########################################################
###############################################################################
@with_method_shortcuts(HTTP_METHODS)
class Ombott:
__slots__ = ('config', 'router', 'request', 'response', '_route_hooks', 'error_handlers', '__dict__')
def __init__(self, config=None):
self.config = config = DefaultConfig(config)
self.router = RadiRouter()
self.request = Request(config=config)
self.response = Response()
self._route_hooks = {}
self.error_handlers = {'404-hooks': {}}
def setup(self, config=None):
self.config = config = DefaultConfig(config)
self.request.setup(config)
def run(self, **kwargs):
''' Calls :func:`run` with the same parameters. '''
run(self, **kwargs)
def to_route(self, path, verb):
if verb == 'HEAD':
methods = [verb, 'GET', 'ANY']
else:
methods = [verb, 'ANY']
end_point, error404_405 = self.router.resolve(path, methods)
return (end_point, error404_405)
def add_route(self, rule, method, handler, name=None, *, overwrite=False):
return self.router.add(rule, method, handler, name, overwrite=overwrite)
def remove_route(self, rule=None, *, route_pattern=None, name=None):
self.router.remove(rule, route_pattern=route_pattern, name=name)
def route(self, rule=None, method='GET', callback=None,
*, name=None, overwrite=False):
def decorator(callback):
self.add_route(rule, method, callback, name, overwrite=overwrite)
return callback
return decorator(callback) if callback else decorator
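    # A minimal sketch of the decorator above; the rule string and handler
    # body are illustrative:
    #
    #   app = Ombott()
    #
    #   @app.route('/hello')
    #   def hello():
    #       return 'Hello world'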
    # overwritten by @with_method_shortcuts()
def delete(self, rule=None, callback=None, *, name=None, overwrite=False): pass
def get(self, rule=None, callback=None, *, name=None, overwrite=False): pass
def head(self, rule=None, callback=None, *, name=None, overwrite=False): pass
def options(self, rule=None, callback=None, *, name=None, overwrite=False): pass
def patch(self, rule=None, callback=None, *, name=None, overwrite=False): pass
def post(self, rule=None, callback=None, *, name=None, overwrite=False): pass
def put(self, rule=None, callback=None, *, name=None, overwrite=False): pass
@property
def routes(self):
return self.router.routes
__hook_names = ('before_request', 'after_request')
__hook_reversed = {'after_request'}
@cached_property
def _hooks(self):
return {name: [] for name in self.__hook_names}
def add_hook(self, name, func):
"""Attach a callback to a hook.
        Two hooks are currently implemented:
`before_request`
Executed once before each request. The request context is
available, but no routing has happened yet.
`after_request`
Executed once after each request regardless of its outcome.
"""
if name in self.__hook_reversed:
self._hooks[name].insert(0, func)
else:
self._hooks[name].append(func)
def on(self, name, func=None):
if not func: # used as decorator
def decorator(func):
self.add_hook(name, func)
return func
return decorator
else:
self.add_hook(name, func)
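    # A minimal sketch of `on` used as a decorator; the hook body is
    # illustrative:
    #
    #   app = Ombott()
    #
    #   @app.on('before_request')
    #   def _log():
    #       print(app.request.path)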
def remove_hook(self, name, func):
if func in self._hooks[name]:
self._hooks[name].remove(func)
return True
def emit(self, name, *args, **kwargs):
[hook(*args, **kwargs) for hook in self._hooks[name][:]]
def on_route(self, rule, func=None):
if not func: # used as decorator
def decorator(func):
self.router.add_hook(rule, func)
return func
return decorator
else:
self.router.add_hook(rule, func)
def remove_route_hook(self, rule):
self.router.remove_hook(rule)
def error(self, code=500, rule=None):
""" Decorator: Register an output handler for a HTTP error code"""
code = int(code)
def wrapper(handler):
if code == 404 and rule:
route_pattern = self.router.add_hook(
rule, handler, hook_type=HookTypes.PARTIAL
)
self.error_handlers['404-hooks'][route_pattern] = handler
else:
self.error_handlers[code] = handler
return handler
return wrapper
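    # A minimal sketch registering a custom 404 handler via the decorator
    # above; the handler body is illustrative:
    #
    #   app = Ombott()
    #
    #   @app.error(404)
    #   def not_found(res):
    #       return f'Nothing here: {app.request.path}'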
def default_error_handler(self, res):
if self.request.is_json_requested:
ret = json.dumps(dict(
body = res.body,
exception = repr(res.exception),
traceback = res.traceback
))
self.response.headers['Content-Type'] = 'application/json'
else:
ret = error_render.render(res, self.request.url, self.config.debug)
return ret
@staticmethod
def handler(app: 'Ombott', route, kwargs, route_hooks, error404_405):
if error404_405:
status, body, extra = error404_405
if status == 405:
raise HTTPError(status, body, Allow=extra)
else: # not found
hooks_collected = extra['hooks']
if hooks_collected:
route_pos, hooks = hooks_collected[-1]
partial_hook = hooks[HookTypes.PARTIAL]
if partial_hook:
hook_route = app.request.path[:1 + route_pos]
return partial_hook(hook_route, extra['param_values'])
raise HTTPError(status, body)
if route_hooks:
path = app.request.path
for route_pos, hooks in route_hooks:
hook = hooks[HookTypes.SIMPLE]
if hook:
hook(path[:1 + route_pos])
return route(**kwargs)
def _handle(self, environ):
response = self.response
request = self.request
path = environ['ombott.raw_path'] = environ['PATH_INFO']
try:
path = path.encode('latin1').decode('utf8')
except UnicodeError:
return HTTPError(400, 'Invalid path string. Expected UTF-8')
environ['PATH_INFO'] = path
try: # init thread
environ['ombott.app'] = self
request.__init__(environ)
response.__init__()
try: # routing
self.emit('before_request')
route, kwargs, route_hooks = (None, None, None)
end_point, error404_405 = self.to_route(request.path, request.method)
if end_point:
route, kwargs, route_hooks = end_point
environ['ombott.route'] = route
environ['route.url_args'] = kwargs
environ['route.hooks'] = route_hooks
return self.handler(self, route, kwargs, route_hooks, error404_405)
finally:
self.emit('after_request')
except HTTPResponse as resp:
return resp
except (KeyboardInterrupt, SystemExit, MemoryError):
raise
except Exception as err500:
# raise
stacktrace = format_exc()
environ['wsgi.errors'].write(stacktrace)
return HTTPError(500, "Internal Server Error", err500, stacktrace)
def _cast(self, out):
""" Try to convert the parameter into something WSGI compatible and set
correct HTTP headers when possible.
        Supports: falsy values, str, bytes, HTTPResponse, HTTPError,
        file-like objects, and iterables of str or bytes.
"""
response = self.response
resp_headers = response.headers
request = self.request
loops_cnt = 0
while True: # <-------
loops_cnt += 1
if loops_cnt > 1000:
out = HTTPError(500, 'too many iterations')
out.apply(response)
out = self.default_error_handler(out)
# Empty output is done here
if not out:
resp_headers.setdefault('Content-Length', 0)
return []
if isinstance(out, str):
out = out.encode(response.charset)
# Byte Strings are just returned
if isinstance(out, bytes):
resp_headers.setdefault('Content-Length', len(out))
return [out]
if isinstance(out, HTTPError):
out.apply(response)
out = self.error_handlers.get(
out.status_code,
self.default_error_handler
)(out); continue # -----------------^
if isinstance(out, HTTPResponse):
out.apply(response)
out = out.body; continue # -----------------^
# File-like objects.
if hasattr(out, 'read'):
if 'wsgi.file_wrapper' in request.environ:
return request.environ['wsgi.file_wrapper'](out)
elif hasattr(out, 'close') or not hasattr(out, '__iter__'):
return WSGIFileWrapper(out)
# Handle Iterables. We peek into them to detect their inner type.
try:
iout = iter(out)
first = next(iout)
while not first:
first = next(iout)
except StopIteration:
out = ''; continue # -----------------^
except HTTPResponse as rs:
first = rs
except (KeyboardInterrupt, SystemExit, MemoryError):
raise
except Exception as err500:
# if not self.catchall: raise
first = HTTPError(500, 'Unhandled exception', err500, format_exc())
# These are the inner types allowed in iterator or generator objects.
if isinstance(first, HTTPResponse):
out = first; continue # -----------------^
elif isinstance(first, bytes):
new_iter = itertools.chain([first], iout)
elif isinstance(first, str):
new_iter = (it.encode(response.charset) for it in itertools.chain([first], iout))
else:
out = HTTPError(500, f'Unsupported response type: {type(first)}')
continue # -----------------^
close = getattr(out, 'close', None)
if close:
new_iter = _closeiter(new_iter, close)
return new_iter
def wsgi(self, environ, start_response):
config: DefaultConfig = self.config
response = self.response
domain_map = config.domain_map
if domain_map:
app_name = domain_map(environ.get('HTTP_X_FORWARDED_HOST') or environ.get('HTTP_HOST'))
if app_name:
environ[config.app_name_header] = '/' + app_name
environ["PATH_INFO"] = '/' + app_name + environ["PATH_INFO"]
try:
out = self._cast(self._handle(environ))
# rfc2616 section 4.3
if (
response._status_code in {100, 101, 204, 304}
or environ['REQUEST_METHOD'] == 'HEAD'
):
close = getattr(out, 'close', None)
if close:
close()
out = []
start_response(response._status_line, response.headerlist)
return out
except (KeyboardInterrupt, SystemExit, MemoryError):
raise
except Exception as _e:
if not self.config.catchall:
raise
err = '<h1>Critical error while processing request: %s</h1>' \
% html_escape(environ.get('PATH_INFO', '/'))
if self.config.debug:
err += (
'<h2>Error:</h2>\n<pre>\n%s\n</pre>\n'
'<h2>Traceback:</h2>\n<pre>\n%s\n</pre>\n'
% (html_escape(repr(_e)), html_escape(format_exc()))
)
environ['wsgi.errors'].write(err)
headers = [('Content-Type', 'text/html; charset=UTF-8')]
start_response('500 INTERNAL SERVER ERROR', headers, sys.exc_info())
return [tob(err)]
def __call__(self, environ, start_response):
return self.wsgi(environ, start_response)
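# Serving sketch (illustrative, not part of the original file): the app is a
# standard WSGI callable, so any WSGI server can host it; app.route is assumed
# to be a Bottle-style decorator (its definition lies outside this excerpt):
#
#   from wsgiref.simple_server import make_server
#
#   app = Ombott()
#
#   @app.route('/hello')
#   def hello():
#       return 'Hello, world!'
#
#   make_server('127.0.0.1', 8080, app).serve_forever()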
###############################################################################
# Application Helper ###########################################################
###############################################################################
def abort(code=500, text='Unknown Error'):
""" Aborts execution and causes a HTTP error. """
raise HTTPError(code, text)
def redirect(location, code=None):
""" Aborts execution and causes a 303 or 302 redirect, depending on
the HTTP protocol version.
"""
response = Globals.response
request = Globals.request
url = location
if not code:
code = 303 if request.get('SERVER_PROTOCOL') == "HTTP/1.1" else 302
res = response.copy(cls=HTTPResponse)
res.status = code
res.body = ""
res.headers['Location'] = urljoin(request.url, url)
raise res
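# Usage sketch (illustrative): redirect() raises the response, so it never
# returns -- call it from inside a handler:
#
#   @Globals.route('/old')
#   def old():
#       redirect('/new')    # 303 under HTTP/1.1, otherwise 302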
class Globals:
app = Ombott()
route = app.route
on_route = app.on_route
request = app.request
response = app.response
error = app.error
def default_app():
return Globals.app
| 36.409692
| 119
| 0.557532
|
c462d45285faa5d03dcdb3156b2cc174e5abe722
| 8,477
|
py
|
Python
|
model-optimizer/extensions/middle/ONNXResize11ToInterpolate.py
|
monroid/openvino
|
8272b3857ef5be0aaa8abbf7bd0d5d5615dc40b6
|
[
"Apache-2.0"
] | 2,406
|
2020-04-22T15:47:54.000Z
|
2022-03-31T10:27:37.000Z
|
model-optimizer/extensions/middle/ONNXResize11ToInterpolate.py
|
thomas-yanxin/openvino
|
031e998a15ec738c64cc2379d7f30fb73087c272
|
[
"Apache-2.0"
] | 4,948
|
2020-04-22T15:12:39.000Z
|
2022-03-31T18:45:42.000Z
|
model-optimizer/extensions/middle/ONNXResize11ToInterpolate.py
|
thomas-yanxin/openvino
|
031e998a15ec738c64cc2379d7f30fb73087c272
|
[
"Apache-2.0"
] | 991
|
2020-04-23T18:21:09.000Z
|
2022-03-31T18:40:57.000Z
|
# Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import logging as log
import numpy as np
from extensions.ops.Cast import Cast
from extensions.ops.activation_ops import Floor
from extensions.ops.elementwise import Add, Div, Mul
from extensions.ops.interpolate import Interpolate
from mo.front.common.layout import get_depth_dim, get_height_dim, get_width_dim
from mo.front.common.partial_infer.utils import int64_array, float_array
from mo.front.tf.graph_utils import create_op_with_const_inputs
from mo.graph.graph import Graph, Node, rename_nodes
from mo.middle.replacement import MiddleReplacementPattern
from mo.ops.const import Const
from mo.ops.shape import Shape
from mo.ops.strided_slice import StridedSlice
def convert_mode(onnx_mode: str) -> str:
return {'nearest': 'nearest', 'linear': 'linear_onnx', 'cubic': 'cubic'}[onnx_mode]
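# For example, convert_mode('linear') yields 'linear_onnx': ONNX's 'linear'
# resize corresponds to Interpolate-4's 'linear_onnx' mode, while 'nearest'
# and 'cubic' map to themselves.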
def replace_resize(graph: Graph, resize: Node):
log.debug("Converting of ONNX Resize-11 to Interpolate-4 "
"is triggered for node {}.".format(resize.soft_get('name', resize.id)))
input_shape = resize.in_port(0).data.get_shape()
input_rank = len(input_shape)
resize_name = resize.soft_get('name', resize.id)
if input_rank not in {4, 5}:
log.warning('The input shape is not 4D or 5D for op with name {}'.format(resize_name))
return
assert (resize.is_in_port_connected(0) and (resize.is_in_port_connected(2) or resize.is_in_port_connected(3))), \
"Scales or sizes inputs must be connected to Node {} with op {}.".format(resize.soft_get("name", resize.id),
resize.op)
assert resize.soft_get('coordinate_transformation_mode') != 'tf_crop_and_resize', \
'Mode tf_crop_and_resize is not supported for op {} with name {}'.format(resize.op,
resize.soft_get("name", resize.id))
layout = graph.graph['layout']
if input_rank == 4:
begin_dim = get_height_dim(layout, input_rank)
end_dim = get_width_dim(layout, input_rank) + 1
else:
begin_dim = get_depth_dim(layout, input_rank)
end_dim = get_width_dim(layout, input_rank) + 1
sizes_ss = create_op_with_const_inputs(graph, StridedSlice,
{1: int64_array([begin_dim]),
2: int64_array([end_dim]),
3: int64_array([1])},
{'name': resize_name + '/StridedSlice_sizes',
'begin_mask': int64_array([1]),
'end_mask': int64_array([1]),
'new_axis_mask': int64_array([0]),
'shrink_axis_mask': int64_array([0]),
'ellipsis_mask': int64_array([0])})
scales_ss = create_op_with_const_inputs(graph, StridedSlice,
{1: int64_array([begin_dim]),
2: int64_array([end_dim]),
3: int64_array([1])},
{'name': resize_name + '/StridedSlice_scales',
'begin_mask': int64_array([1]),
'end_mask': int64_array([1]),
'new_axis_mask': int64_array([0]),
'shrink_axis_mask': int64_array([0]),
'ellipsis_mask': int64_array([0])})
axes_node = Const(graph,
{'name': resize_name + '/axis',
'value': int64_array(np.arange(begin_dim, end_dim))}).create_node()
shape_calculation_mode = 'sizes' if resize.is_in_port_connected(3) else 'scales'
interpolate_node = Interpolate(graph, {'version': 'opset4',
'mode': convert_mode(resize.mode),
'coordinate_transformation_mode': resize.coordinate_transformation_mode,
'cube_coeff': resize.cube_coeff,
'nearest_mode': resize.nearest_mode,
'pads_begin': int64_array([0]),
'pads_end': int64_array([0]),
'antialias': 0,
'shape_calculation_mode': shape_calculation_mode,
'in_ports_count': 4}).create_node()
axes_node.out_port(0).connect(interpolate_node.in_port(3))
shape_of = Shape(graph, {'name': resize_name + '/ShapeOf'}).create_node()
add_node = create_op_with_const_inputs(graph, Add,
{1: float_array([1.0e-5])},
{'name': resize_name + '/Add'})
dst_dtype = np.float32 # even if data_type=FP16 use float32 for shape values
if not resize.is_in_port_connected(3):
cast_shape_to_float = Cast(graph, {'dst_type': dst_dtype}).create_node()
mul_node = Mul(graph, {'name': resize_name + '/Mul'}).create_node()
shape_of.out_port(0).connect(cast_shape_to_float.in_port(0))
cast_shape_to_float.out_port(0).connect(mul_node.in_port(0))
cast_add_result_to_int = Cast(graph, {'dst_type': np.int64}).create_node()
floor_node = Floor(graph, {'name': resize_name + '/Floor'}).create_node()
mul_node.out_port(0).connect(add_node.in_port(0))
add_node.out_port(0).connect(floor_node.in_port(0))
floor_node.out_port(0).connect(cast_add_result_to_int.in_port(0))
cast_add_result_to_int.out_port(0).connect(sizes_ss.in_port(0))
sizes_ss.out_port(0).connect(interpolate_node.in_port(1))
scales_ss.out_port(0).connect(interpolate_node.in_port(2))
connection_of_resize_input = resize.in_port(0).get_connection()
connection_of_resize_input.set_destination(interpolate_node.in_port(0))
connection_of_scales = resize.in_port(2).get_connection()
connection_of_scales.set_destination(scales_ss.in_port(0))
connection_of_resize_input.get_source().connect(shape_of.in_port(0))
connection_of_scales.get_source().connect(mul_node.in_port(1))
else:
cast_shape_to_float = Cast(graph, {'dst_type': dst_dtype}).create_node()
cast_sizes_to_float = Cast(graph, {'dst_type': dst_dtype}).create_node()
div_node = Div(graph, {'name': resize_name + '/Div'}).create_node()
cast_sizes_to_float.out_port(0).connect(div_node.in_port(0))
cast_shape_to_float.out_port(0).connect(div_node.in_port(1))
shape_of.out_port(0).connect(cast_shape_to_float.in_port(0))
div_node.out_port(0).connect(add_node.in_port(0))
add_node.out_port(0).connect(scales_ss.in_port(0))
scales_ss.out_port(0).connect(interpolate_node.in_port(2))
sizes_ss.out_port(0).connect(interpolate_node.in_port(1))
connection_of_resize_input = resize.in_port(0).get_connection()
connection_of_resize_input.set_destination(interpolate_node.in_port(0))
connection_of_sizes = resize.in_port(3).get_connection()
connection_of_sizes.set_destination(sizes_ss.in_port(0))
connection_of_resize_input.get_source().connect(shape_of.in_port(0))
connection_of_sizes.get_source().connect(cast_sizes_to_float.in_port(0))
rename_nodes([(resize, resize_name + '/delete'), (interpolate_node, resize_name)])
resize.out_port(0).get_connection().set_source(interpolate_node.out_port(0))
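# Numeric sketch of the shape-calculation wiring above (illustrative, not part
# of the transformation): with only 'scales' connected, sizes are derived as
# floor(shape * scales + 1e-5); with 'sizes' connected, scales become
# sizes / shape + 1e-5. For instance:
#
#   import numpy as np
#   shape = np.array([1, 3, 270, 480], dtype=np.float32)
#   scales = np.array([1.0, 1.0, 2.0, 2.0], dtype=np.float32)
#   np.int64(np.floor(shape * scales + 1.0e-5))   # -> [1, 3, 540, 960]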
class ONNXResize11ToInterpolate(MiddleReplacementPattern):
"""
The transformation replaces ONNX Resize 11 with Interpolate-4.
"""
enabled = True
def run_before(self):
from extensions.middle.InterpolateSequenceToInterpolate import InterpolateSequenceToInterpolate
return [InterpolateSequenceToInterpolate]
def find_and_replace_pattern(self, graph: Graph):
resize11_ops = graph.get_op_nodes(op='ONNXResize11')
for resize in resize11_ops:
replace_resize(graph, resize)
| 52.652174
| 117
| 0.6021
|
57a2524188bb74800e748338b7d4dcbe330ccf87
| 1,513
|
py
|
Python
|
kinko/constant.py
|
vmagamedov/kinko
|
b65f8ae97bbf5d056781e90e22d2a369f440ed4c
|
[
"BSD-3-Clause"
] | 5
|
2015-11-18T20:31:14.000Z
|
2016-05-05T07:43:21.000Z
|
kinko/constant.py
|
vmagamedov/kinko
|
b65f8ae97bbf5d056781e90e22d2a369f440ed4c
|
[
"BSD-3-Clause"
] | 5
|
2016-03-17T14:42:30.000Z
|
2016-06-27T13:31:26.000Z
|
kinko/constant.py
|
vmagamedov/kinko
|
b65f8ae97bbf5d056781e90e22d2a369f440ed4c
|
[
"BSD-3-Clause"
] | null | null | null |
HTML_ELEMENTS = frozenset((
# root
'html',
# metadata
'head', 'title', 'base', 'link', 'meta', 'style',
# sections
'body', 'article', 'section', 'nav', 'aside', 'h1', 'h2', 'h3', 'h4', 'h5',
    'h6', 'hgroup', 'header', 'footer', 'address',
# grouping content
'p', 'hr', 'pre', 'blockquote', 'ol', 'ul', 'li', 'dl', 'dt', 'dd',
'figure', 'figcaption', 'main', 'div',
# text-level semantics
'a', 'em', 'strong', 'small', 's', 'cite', 'q', 'dfn', 'abbr', 'ruby', 'rt',
'rp', 'data', 'time', 'code', 'var', 'samp', 'kbd', 'sub', 'sup', 'i', 'b',
'u', 'mark', 'bdi', 'bdo', 'span', 'br', 'wbr',
# links
'a', 'area',
# edits
'ins', 'del',
# embedded content
'picture', 'source', 'img', 'iframe', 'embed', 'object', 'param', 'video',
'audio', 'track', 'map',
# tabular data
'table', 'caption', 'colgroup', 'col', 'tbody', 'thead', 'tfoot', 'tr',
'td', 'th',
# forms
'form', 'label', 'input', 'button', 'select', 'datalist', 'optgroup',
'option', 'textarea', 'keygen', 'output', 'progress', 'meter', 'fieldset',
'legend',
# interactive elements
'details', 'summary', 'menu', 'menuitem', 'dialog',
# scripting
'script', 'noscript', 'template', 'canvas',
))
SELF_CLOSING_ELEMENTS = frozenset((
'base',
'link',
'meta',
'hr',
'wbr',
'img',
'embed',
'param',
'source',
'track',
'area',
'col',
'input',
'keygen',
'menuitem',
))
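# Hypothetical usage sketch (not in the original module): the two sets are
# typically consulted when emitting markup, e.g.:
#
#   def render(tag):
#       assert tag in HTML_ELEMENTS, tag
#       if tag in SELF_CLOSING_ELEMENTS:
#           return '<{}/>'.format(tag)
#       return '<{0}></{0}>'.format(tag)
#
#   render('br')   # -> '<br/>'
#   render('div')  # -> '<div></div>'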
| 23.640625
| 80
| 0.487112
|
bcde01ef484e836d9c77121db1f31099b0bd203e
| 870
|
py
|
Python
|
mestre/src/exceptions/exception_handlers/movie_exception_handlers.py
|
rodrigoddc/simple_mvc
|
faff65547ccaf0d2b36d7e0fc7c5297ff0dd35c0
|
[
"MIT"
] | null | null | null |
mestre/src/exceptions/exception_handlers/movie_exception_handlers.py
|
rodrigoddc/simple_mvc
|
faff65547ccaf0d2b36d7e0fc7c5297ff0dd35c0
|
[
"MIT"
] | 1
|
2021-12-10T00:41:55.000Z
|
2021-12-10T00:41:55.000Z
|
mestre/src/exceptions/exception_handlers/movie_exception_handlers.py
|
rodrigoddc/simple_mvc
|
faff65547ccaf0d2b36d7e0fc7c5297ff0dd35c0
|
[
"MIT"
] | null | null | null |
from fastapi import Request
from fastapi.responses import JSONResponse
from mestre.src.exceptions.movie_exceptions import MovieRatingException, MovieNotFoundException, \
MovieMissingDataFromTMDBAPIException
def movie_exception_handler(request: Request, exc: MovieRatingException):
return JSONResponse(
status_code=400,
content={"error": f"Oops, algo de errado não está certo! {exc} "}
)
def movie_not_found_exception_handler(request: Request, exc: MovieNotFoundException):
return JSONResponse(
status_code=404,
content={"error": f"Oops, algo de errado não está certo! {exc} "}
)
def movie_missing_data_exception_handler(request: Request, exc: MovieMissingDataFromTMDBAPIException):
return JSONResponse(
status_code=409,
content={"error": f"Oops, algo de errado não está certo! {exc} "}
)
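# Wiring sketch (illustrative, not in the original module): these handlers are
# meant to be registered on the FastAPI application, e.g.:
#
#   from fastapi import FastAPI
#
#   app = FastAPI()
#   app.add_exception_handler(MovieRatingException, movie_exception_handler)
#   app.add_exception_handler(MovieNotFoundException,
#                             movie_not_found_exception_handler)
#   app.add_exception_handler(MovieMissingDataFromTMDBAPIException,
#                             movie_missing_data_exception_handler)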
| 33.461538
| 102
| 0.73908
|
47a6a0ae8c51888baaf8a47b77b788044bb96d04
| 10,636
|
py
|
Python
|
fabfile.py
|
smaragden/OpenRenderManagement
|
cf3ab356f96969d7952b60417b48e941955e435c
|
[
"BSD-3-Clause"
] | 35
|
2015-02-23T23:13:13.000Z
|
2021-01-03T05:56:39.000Z
|
fabfile.py
|
smaragden/OpenRenderManagement
|
cf3ab356f96969d7952b60417b48e941955e435c
|
[
"BSD-3-Clause"
] | 15
|
2015-01-12T12:58:29.000Z
|
2016-03-30T13:10:19.000Z
|
fabfile.py
|
mikrosimage/OpenRenderManagement
|
6f9237a86cb8e4b206313f9c22424c8002fd5e4d
|
[
"BSD-3-Clause"
] | 20
|
2015-03-18T06:57:13.000Z
|
2020-07-01T15:09:36.000Z
|
#!/usr/bin/python2.6
# -*- coding: utf8 -*-
from __future__ import with_statement
"""
"""
__author__ = "Jerome Samson"
__copyright__ = "Copyright 2013, Mikros Image"
from fabric.api import *
from fabric.colors import green, blue
env.timeout = 5
env.disable_known_hosts = True
env.source_path = ''
env.target_path = ''
env.shared_path = ''
env.common_path = ''
env.logdir = ''
env.confdir = ''
env.local = False
env.force = False
@task()
def deploy_server(source_path=env.source_path, target_path=env.target_path, force=env.force):
"""
    Install the dispatcher subsystem on the server host.
    Used when installing the dispatcher subsystem locally on the server host.
    WARNING: it does not install config files (use deploy_server_conf in addition)
Installation layout is the following:
    target_path/
octopus/
dispatcherd.py
jobcleaner.py
puliclient/
__init__.py
jobs.py
runner.py
server.py
pulitools/
"""
print ""
print(green("Deploy puli server", bold=True))
print(green(" - source path = %s" % source_path, bold=True))
print(green(" - target path = %s" % target_path, bold=True))
print(green(" - target host = %s" % env.hosts, bold=True))
print(green(" - steps:", bold=True))
print(green(" 1. install core apps", bold=True))
print(green(" 2. install API files", bold=True))
print(green(" 3. install scripts", bold=True))
if not force:
result = prompt(green("\nContinue ?", bold=True), default='y')
if result != 'y':
abort("Interrupted by user.")
run("sudo mkdir -p %s" % target_path)
run("sudo rsync -r %s/src/ %s" % (source_path, target_path))
run("sudo rsync -r %s/scripts/util/jobcleaner.py %s/scripts" % (source_path, target_path))
# print(blue("Install API", bold=True))
# # run("sudo mkdir -p %s/puliclient" % target_path)
# run("sudo rsync -r %s/src/puliclient/__init__.py %s/puliclient" % (source_path, target_path))
# run("sudo rsync -r %s/src/puliclient/jobs.py %s/puliclient" % (source_path, target_path))
# print(blue("Install startup scripts", bold=True))
# run("sudo mkdir -p %s/scripts" % target_path)
# run("sudo rsync -r %s/scripts/dispatcherd.py %s/scripts" % (source_path, target_path))
# run("sudo rsync -r %s/scripts/util/jobcleaner.py %s/scripts" % (source_path, target_path))
@task()
def deploy_server_conf(source_path=env.source_path, target_path=env.target_path):
"""
Install config files on server host
Layout:
    target_path/
conf/
config.ini
licences.lst
"""
print ""
print(green("Deploy config files on host(s): %s" % env.hosts, bold=True))
print(green(" - source path = %s" % source_path, bold=True))
print(green(" - target path = %s" % target_path, bold=True))
print(green(" - target host = %s" % env.hosts, bold=True))
print(green(" - Copy following file:", bold=True))
print(green(" - config.ini", bold=True))
print(green(" - licences.lst", bold=True))
result = prompt(green("\nContinue ?", bold=True), default='y')
if result != 'y':
abort("Interrupted by user.")
print(blue("Install config", bold=True))
run("sudo mkdir -p %s/conf" % target_path)
run("sudo rsync -r %s/etc/puli/config.ini %s/conf" % (source_path, target_path))
run("sudo rsync -r %s/etc/puli/licences.lst %s/conf" % (source_path, target_path))
@task()
def deploy_on_shared_storage(source_path=env.source_path, shared_path=env.shared_path, force=env.force):
"""
Install full distribution on a shared storage (i.e. dispatcher, worker, API and tools)
"""
print ""
print(green("Deploy sources, API and tools on network path", bold=True))
print(green(" - source path = %s" % source_path, bold=True))
print(green(" - shared path = %s" % shared_path, bold=True))
print(green(" - steps:", bold=True))
print(green(" 1. install core apps", bold=True))
print(green(" 2. install API files", bold=True))
print(green(" 3. install scripts", bold=True))
if not force:
result = prompt(green("\nContinue ?", bold=True), default='y')
if result != 'y':
abort("Interrupted by user.")
local("mkdir -p %s" % shared_path)
local("rsync -r %s/src/ %s" % (source_path, shared_path))
# print(blue("Install API", bold=True))
# local("mkdir -p %s/puliclient" % shared_path)
# local("rsync -r %s/src/puliclient/__init__.py %s/puliclient" % (source_path, shared_path))
# local("rsync -r %s/src/puliclient/jobs.py %s/puliclient" % (source_path, shared_path))
# print(blue("Install scripts", bold=True))
# local("mkdir -p %s/scripts" % shared_path)
# local("rsync -r %s/scripts/dispatcherd.py %s/scripts" % (source_path, shared_path))
# local("rsync -r %s/scripts/workerd.py %s/scripts" % (source_path, shared_path))
# local("rsync -r %s/scripts/util/jobcleaner.py %s/scripts" % (source_path, shared_path))
# @task()
# def deploy_tools_on_shared_storage(source_path=env.source_path, shared_path=env.shared_path):
# """
# Install tools sources on a shared storage
# """
# print ""
# print(green("Deploy puli tools on network path", bold=True))
# print(green(" - source path = %s" % source_path, bold=True))
# print(green(" - shared path = %s" % shared_path, bold=True))
# result = prompt(green("\nContinue ?", bold=True), default='y')
# if result != 'y':
# abort("Interrupted by user.")
# local("mkdir -p %s" % shared_path)
# print(blue("Install core apps", bold=True))
# local("rsync -r %s/src/pulitools %s/src" % (source_path, shared_path))
@task()
def create_launcher(shared_path=env.shared_path, common_path=env.common_path):
"""
Create launcher scripts for core tools: pul_query, pul_rn...
"""
print ""
print(green("Create launchers on a shared folder (must be listed in user's PATH)", bold=True))
print(green(" - shared path = %s" % shared_path, bold=True))
print(green(" - common path = %s" % common_path, bold=True))
result = prompt(green("\nContinue ?", bold=True), default='y')
if result != 'y':
abort("Interrupted by user.")
local("mkdir -p %s" % common_path)
template = '''#!/bin/bash
export PYTHONPATH=__PULI_INSTALL_PATH__:${PYTHONPATH}
export PATH=__PULI_INSTALL_PATH__/__TOOL__:${PATH}
__EXEC__ "$@"
'''
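    # For instance, with shared_path='/s/puli' the pul_query launcher written
    # below expands to (illustrative values):
    #
    #   #!/bin/bash
    #   export PYTHONPATH=/s/puli:${PYTHONPATH}
    #   export PATH=/s/puli/pulitools/puliquery:${PATH}
    #   pul_query "$@"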
with hide('running', 'stdout'):
# Replace install path, the folder in which the tools sources are installed.
template = template.replace('__PULI_INSTALL_PATH__', shared_path)
print(blue("Create pul_query", bold=True))
currContent = template
currContent = currContent.replace('__TOOL__', "pulitools/puliquery")
currContent = currContent.replace('__EXEC__', "pul_query")
local("echo '%s' > %s/pul_query" % (currContent, common_path))
local("chmod +x %s/pul_query" % common_path)
print(blue("Create pul_rn", bold=True))
currContent = template
currContent = currContent.replace('__TOOL__', "pulitools/puliquery")
currContent = currContent.replace('__EXEC__', "pul_rn")
local("echo '%s' > %s/pul_rn" % (currContent, common_path))
local("chmod +x %s/pul_rn" % common_path)
print(blue("Create pul_pause", bold=True))
currContent = template
currContent = currContent.replace('__TOOL__', "pulitools/puliquery")
currContent = currContent.replace('__EXEC__', "pul_set_pause --set 1")
local("echo '%s' > %s/pul_pause" % (currContent, common_path))
local("chmod +x %s/pul_pause" % common_path)
print(blue("Create pul_cancel", bold=True))
currContent = template
currContent = currContent.replace('__TOOL__', "pulitools/puliquery")
currContent = currContent.replace('__EXEC__', "pul_set_status --set 5")
local("echo '%s' > %s/pul_cancel" % (currContent, common_path))
local("chmod +x %s/pul_cancel" % common_path)
print(blue("Create pul_restart", bold=True))
currContent = template
currContent = currContent.replace('__TOOL__', "pulitools/puliquery")
currContent = currContent.replace('__EXEC__', "pul_set_status --set 1")
local("echo '%s' > %s/pul_restart" % (currContent, common_path))
local("chmod +x %s/pul_restart" % common_path)
print(blue("Create pul_resume", bold=True))
currContent = template
currContent = currContent.replace('__TOOL__', "pulitools/puliquery")
currContent = currContent.replace('__EXEC__', "pul_set_pause --set 0")
local("echo '%s' > %s/pul_resume" % (currContent, common_path))
local("chmod +x %s/pul_resume" % common_path)
print(blue("Create pul_stats", bold=True))
currContent = template
currContent = currContent.replace('__TOOL__', "pulitools/statsviewer")
currContent = currContent.replace('__EXEC__', "stats.py")
local("echo '%s' > %s/pul_stats" % (currContent, common_path))
local("chmod +x %s/pul_stats" % common_path)
print(blue("Create puliexec", bold=True))
currContent = template
currContent = currContent.replace('__TOOL__', "pulitools/puliexec")
currContent = currContent.replace('__EXEC__', "puliexec.py")
local("echo '%s' > %s/puliexec" % (currContent, common_path))
local("chmod +x %s/puliexec" % common_path)
@task()
def update_settings(target_path=env.target_path, log_dir=env.logdir, conf_dir=env.confdir, force=env.force, exec_local=env.local):
"""
    Insert paths to LOGDIR and CONFDIR in the dispatcher settings file.
"""
print ""
print(green("Update settings on server:", bold=True))
print(green(" - target_path = %s" % target_path, bold=True))
print(green(" - log_dir = %s" % log_dir, bold=True))
print(green(" - conf_dir = %s" % conf_dir, bold=True))
if not force:
result = prompt(green("\nContinue ?", bold=True), default='y')
if result != 'y':
abort("Interrupted by user.")
settings_file = "%s/octopus/dispatcher/settings.py" % target_path
if exec_local:
local("sed -i \"s:__LOGDIR_PLACEHOLDER__:%s:g\" %s" % (log_dir, settings_file))
local("sed -i 's:__CONFDIR_PLACEHOLDER__:%s:g' %s" % (conf_dir, settings_file))
else:
run("sudo sed -i 's:__LOGDIR_PLACEHOLDER__:%s:g' %s" % (log_dir, settings_file))
run("sudo sed -i 's:__CONFDIR_PLACEHOLDER__:%s:g' %s" % (conf_dir, settings_file))
| 39.835206
| 130
| 0.641783
|
39d2f1c50b4ef80a1198a8acc796ef8b3f6eb892
| 18,314
|
py
|
Python
|
tensorflow_federated/python/core/impl/caching_executor_test.py
|
federated-learning-experiments/federated
|
ba258f53ff28574375cc5e5a8a80da1a2cd57290
|
[
"Apache-2.0"
] | null | null | null |
tensorflow_federated/python/core/impl/caching_executor_test.py
|
federated-learning-experiments/federated
|
ba258f53ff28574375cc5e5a8a80da1a2cd57290
|
[
"Apache-2.0"
] | null | null | null |
tensorflow_federated/python/core/impl/caching_executor_test.py
|
federated-learning-experiments/federated
|
ba258f53ff28574375cc5e5a8a80da1a2cd57290
|
[
"Apache-2.0"
] | null | null | null |
# Lint as: python3
# Copyright 2019, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import collections
from unittest import mock
from absl.testing import absltest
import numpy as np
import tensorflow as tf
from tensorflow_federated.python.common_libs import anonymous_tuple
from tensorflow_federated.python.core.api import computation_types
from tensorflow_federated.python.core.api import computations
from tensorflow_federated.python.core.impl import caching_executor
from tensorflow_federated.python.core.impl import computation_impl
from tensorflow_federated.python.core.impl import eager_executor
from tensorflow_federated.python.core.impl import executor_base
from tensorflow_federated.python.core.impl import executor_test_utils
from tensorflow_federated.python.core.impl import lambda_executor
def _make_executor_and_tracer_for_test(support_lambdas=False):
tracer = executor_test_utils.TracingExecutor(eager_executor.EagerExecutor())
ex = caching_executor.CachingExecutor(tracer)
if support_lambdas:
ex = lambda_executor.LambdaExecutor(caching_executor.CachingExecutor(ex))
return ex, tracer
def _tensor_to_id(iterable):
# Tensor is not hashable in TF 2.0 so we hash it using id().
return [
item if not isinstance(item, tf.Tensor) else id(item) for item in iterable
]
class TestError(Exception):
"""An error for unittests."""
async def raise_error(*args, **kwargs):
"""A function for mock executors that always raises an error."""
del args # unused
del kwargs # unused
await asyncio.sleep(1)
raise TestError()
# An arbitrary value for testing.
TEST_VALUE = True
async def create_test_value(*args, **kwargs):
"""A function for mock executors that returns an arbitrary value."""
del args # unused
del kwargs # unused
await asyncio.sleep(1)
return TEST_VALUE
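# Illustrative note (not in the original file): assigning these coroutine
# functions as side_effects makes the autospec'd mock awaitable, e.g.:
#
#   mock_executor = mock.create_autospec(executor_base.Executor)
#   mock_executor.create_value.side_effect = create_test_value
#   # `await mock_executor.create_value(...)` now resolves to TEST_VALUE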
@computations.tf_computation
def foo():
return tf.constant(10)
class CachingExecutorTest(absltest.TestCase):
def test_create_value_does_not_cache_error(self):
loop = asyncio.get_event_loop()
mock_executor = mock.create_autospec(executor_base.Executor)
mock_executor.create_value.side_effect = raise_error
cached_executor = caching_executor.CachingExecutor(mock_executor)
with self.assertRaises(TestError):
_ = loop.run_until_complete(cached_executor.create_value(1.0, tf.float32))
with self.assertRaises(TestError):
_ = loop.run_until_complete(cached_executor.create_value(1.0, tf.float32))
    # Ensure create_value was called twice on the mock, i.e. the error was
    # not cached after the first call.
mock_executor.create_value.assert_has_calls([
mock.call(1.0, computation_types.TensorType(tf.float32)),
mock.call(1.0, computation_types.TensorType(tf.float32))
])
def test_create_value_does_not_cache_error_avoids_double_cache_delete(self):
loop = asyncio.get_event_loop()
mock_executor = mock.create_autospec(executor_base.Executor)
mock_executor.create_value.side_effect = raise_error
cached_executor = caching_executor.CachingExecutor(mock_executor)
future1 = cached_executor.create_value(1.0, tf.float32)
future2 = cached_executor.create_value(1.0, tf.float32)
results = loop.run_until_complete(
asyncio.gather(future1, future2, return_exceptions=True))
    # Ensure create_value is only called once, since the first call inserts
    # the inner executor future into the cache. However, we still expect two
    # errors to be returned.
mock_executor.create_value.assert_called_once_with(
1.0, computation_types.TensorType(tf.float32))
self.assertLen(results, 2)
self.assertIsInstance(results[0], TestError)
self.assertIsInstance(results[1], TestError)
def test_create_call_does_not_cache_error(self):
loop = asyncio.get_event_loop()
mock_executor = mock.create_autospec(executor_base.Executor)
mock_executor.create_value.side_effect = create_test_value
mock_executor.create_call.side_effect = raise_error
cached_executor = caching_executor.CachingExecutor(mock_executor)
v = loop.run_until_complete(cached_executor.create_value(foo))
with self.assertRaises(TestError):
_ = loop.run_until_complete(cached_executor.create_call(v))
with self.assertRaises(TestError):
_ = loop.run_until_complete(cached_executor.create_call(v))
    # Ensure create_call was called twice on the mock, i.e. the error was
    # not cached after the first call.
mock_executor.create_call.assert_has_calls(
[mock.call(TEST_VALUE), mock.call(TEST_VALUE)])
def test_create_call_does_not_cache_error_avoids_double_cache_delete(self):
loop = asyncio.get_event_loop()
mock_executor = mock.create_autospec(executor_base.Executor)
mock_executor.create_value.side_effect = create_test_value
mock_executor.create_call.side_effect = raise_error
cached_executor = caching_executor.CachingExecutor(mock_executor)
v = loop.run_until_complete(cached_executor.create_value(foo))
future_call1 = cached_executor.create_call(v)
future_call2 = cached_executor.create_call(v)
results = loop.run_until_complete(
asyncio.gather(future_call1, future_call2, return_exceptions=True))
    # Ensure create_call is only called once, since the first call inserts
    # the inner executor future into the cache. However, we still expect two
    # errors to be returned.
mock_executor.create_call.assert_called_once_with(TEST_VALUE)
self.assertLen(results, 2)
self.assertIsInstance(results[0], TestError)
self.assertIsInstance(results[1], TestError)
def test_create_tuple_does_not_cache_error(self):
loop = asyncio.get_event_loop()
mock_executor = mock.create_autospec(executor_base.Executor)
mock_executor.create_value.side_effect = create_test_value
mock_executor.create_tuple.side_effect = raise_error
cached_executor = caching_executor.CachingExecutor(mock_executor)
value = loop.run_until_complete(cached_executor.create_value(foo))
value_tuple = (value, value)
with self.assertRaises(TestError):
_ = loop.run_until_complete(cached_executor.create_tuple(value_tuple))
with self.assertRaises(TestError):
_ = loop.run_until_complete(cached_executor.create_tuple(value_tuple))
    # Ensure create_tuple was called twice on the mock, i.e. the error was
    # not cached after the first call.
anon_tuple_value = anonymous_tuple.AnonymousTuple([(None, TEST_VALUE),
(None, TEST_VALUE)])
mock_executor.create_tuple.assert_has_calls(
[mock.call(anon_tuple_value),
mock.call(anon_tuple_value)])
def test_create_tuple_does_not_cache_error_avoids_double_delete(self):
loop = asyncio.get_event_loop()
mock_executor = mock.create_autospec(executor_base.Executor)
mock_executor.create_value.side_effect = create_test_value
mock_executor.create_tuple.side_effect = raise_error
cached_executor = caching_executor.CachingExecutor(mock_executor)
value = loop.run_until_complete(cached_executor.create_value(foo))
value_tuple = (value, value)
future1 = cached_executor.create_tuple(value_tuple)
future2 = cached_executor.create_tuple(value_tuple)
results = loop.run_until_complete(
asyncio.gather(future1, future2, return_exceptions=True))
    # Ensure create_tuple is only called once, since the first call inserts
    # the inner executor future into the cache. However, we still expect two
    # errors to be returned.
mock_executor.create_tuple.assert_called_once_with(
anonymous_tuple.AnonymousTuple([(None, TEST_VALUE),
(None, TEST_VALUE)]))
self.assertLen(results, 2)
self.assertIsInstance(results[0], TestError)
self.assertIsInstance(results[1], TestError)
def test_create_selection_does_not_cache_error(self):
loop = asyncio.get_event_loop()
mock_executor = mock.create_autospec(executor_base.Executor)
mock_executor.create_value.side_effect = create_test_value
mock_executor.create_selection.side_effect = raise_error
cached_executor = caching_executor.CachingExecutor(mock_executor)
value = loop.run_until_complete(
cached_executor.create_value((1, 2),
computation_types.NamedTupleType(
(tf.int32, tf.int32))))
with self.assertRaises(TestError):
_ = loop.run_until_complete(cached_executor.create_selection(value, 1))
with self.assertRaises(TestError):
_ = loop.run_until_complete(cached_executor.create_selection(value, 1))
    # The error from create_selection must not be cached; both awaits above
    # should raise it anew rather than replay a cached failure.
mock_executor.create_selection.assert_has_calls([])
def test_create_selection_does_not_cache_error_avoids_double_cache_delete(
self):
loop = asyncio.get_event_loop()
mock_executor = mock.create_autospec(executor_base.Executor)
mock_executor.create_value.side_effect = create_test_value
mock_executor.create_selection.side_effect = raise_error
cached_executor = caching_executor.CachingExecutor(mock_executor)
value = loop.run_until_complete(
cached_executor.create_value((1, 2),
computation_types.NamedTupleType(
(tf.int32, tf.int32))))
future1 = cached_executor.create_selection(value, 1)
future2 = cached_executor.create_selection(value, 1)
results = loop.run_until_complete(
asyncio.gather(future1, future2, return_exceptions=True))
    # The error from create_selection must not be cached; both futures should
    # surface the error rather than replay a cached failure.
mock_executor.create_selection.assert_has_calls([])
self.assertLen(results, 2)
self.assertIsInstance(results[0], TestError)
self.assertIsInstance(results[1], TestError)
def test_close_clears_cache(self):
ex, _ = _make_executor_and_tracer_for_test()
loop = asyncio.get_event_loop()
v1 = loop.run_until_complete(ex.create_value(10, tf.int32))
v2 = loop.run_until_complete(ex.create_value(10, tf.int32))
self.assertIs(v2, v1)
ex.close()
v3 = loop.run_until_complete(ex.create_value(10, tf.int32))
self.assertIsNot(v3, v1)
def test_with_integer_constant(self):
ex, tracer = _make_executor_and_tracer_for_test()
loop = asyncio.get_event_loop()
v1 = loop.run_until_complete(ex.create_value(10, tf.int32))
self.assertIsInstance(v1, caching_executor.CachedValue)
self.assertEqual(str(v1.identifier), '1')
c1 = loop.run_until_complete(v1.compute())
self.assertEqual(c1.numpy(), 10)
v2 = loop.run_until_complete(ex.create_value(10, tf.int32))
self.assertIsInstance(v2, caching_executor.CachedValue)
self.assertEqual(str(v2.identifier), '1')
self.assertIs(v2, v1)
expected_trace = [('create_value', 10,
computation_types.TensorType(tf.int32), 1),
('compute', 1, c1)]
self.assertLen(tracer.trace, len(expected_trace))
for x, y in zip(tracer.trace, expected_trace):
self.assertCountEqual(_tensor_to_id(x), _tensor_to_id(y))
def test_with_no_arg_tf_computation(self):
ex, tracer = _make_executor_and_tracer_for_test()
loop = asyncio.get_event_loop()
v1 = loop.run_until_complete(ex.create_value(foo))
self.assertIsInstance(v1, caching_executor.CachedValue)
self.assertEqual(str(v1.identifier), '1')
v2 = loop.run_until_complete(ex.create_call(v1))
self.assertIsInstance(v2, caching_executor.CachedValue)
self.assertEqual(str(v2.identifier), '1()')
c2 = loop.run_until_complete(v2.compute())
self.assertEqual(c2.numpy(), 10)
v3 = loop.run_until_complete(ex.create_value(foo))
self.assertIsInstance(v3, caching_executor.CachedValue)
self.assertEqual(str(v3.identifier), '1')
self.assertIs(v3, v1)
v4 = loop.run_until_complete(ex.create_call(v3))
self.assertIsInstance(v4, caching_executor.CachedValue)
self.assertEqual(str(v4.identifier), '1()')
self.assertIs(v4, v2)
c4 = loop.run_until_complete(v4.compute())
self.assertEqual(c4.numpy(), 10)
expected_trace = [('create_value',
computation_impl.ComputationImpl.get_proto(foo),
foo.type_signature, 1), ('create_call', 1, 2),
('compute', 2, c4)]
self.assertLen(tracer.trace, len(expected_trace))
for x, y in zip(tracer.trace, expected_trace):
self.assertCountEqual(_tensor_to_id(x), _tensor_to_id(y))
def test_with_one_arg_tf_computation(self):
ex, tracer = _make_executor_and_tracer_for_test()
loop = asyncio.get_event_loop()
@computations.tf_computation(tf.int32)
def add_one(x):
return tf.add(x, 1)
v1 = loop.run_until_complete(ex.create_value(add_one))
self.assertEqual(str(v1.identifier), '1')
v2 = loop.run_until_complete(ex.create_value(10, tf.int32))
self.assertEqual(str(v2.identifier), '2')
v3 = loop.run_until_complete(ex.create_call(v1, v2))
self.assertEqual(str(v3.identifier), '1(2)')
v4 = loop.run_until_complete(ex.create_value(add_one))
self.assertIs(v4, v1)
v5 = loop.run_until_complete(ex.create_value(10, tf.int32))
self.assertIs(v5, v2)
v6 = loop.run_until_complete(ex.create_call(v4, v5))
self.assertIs(v6, v3)
c6 = loop.run_until_complete(v6.compute())
self.assertEqual(c6.numpy(), 11)
expected_trace = [
('create_value', computation_impl.ComputationImpl.get_proto(add_one),
add_one.type_signature, 1),
('create_value', 10, computation_types.TensorType(tf.int32), 2),
('create_call', 1, 2, 3), ('compute', 3, c6)
]
self.assertLen(tracer.trace, len(expected_trace))
for x, y in zip(tracer.trace, expected_trace):
self.assertCountEqual(_tensor_to_id(x), _tensor_to_id(y))
def test_with_tuple_of_unnamed_elements(self):
ex, _ = _make_executor_and_tracer_for_test()
loop = asyncio.get_event_loop()
v1 = loop.run_until_complete(ex.create_value(10, tf.int32))
self.assertEqual(str(v1.identifier), '1')
v2 = loop.run_until_complete(ex.create_value(11, tf.int32))
self.assertEqual(str(v2.identifier), '2')
v3 = loop.run_until_complete(ex.create_tuple([v1, v2]))
self.assertEqual(str(v3.identifier), '<1,2>')
v4 = loop.run_until_complete(ex.create_tuple((v1, v2)))
self.assertIs(v4, v3)
c4 = loop.run_until_complete(v4.compute())
self.assertEqual(
str(anonymous_tuple.map_structure(lambda x: x.numpy(), c4)), '<10,11>')
def test_with_tuple_of_named_elements(self):
ex, _ = _make_executor_and_tracer_for_test()
loop = asyncio.get_event_loop()
v1 = loop.run_until_complete(ex.create_value(10, tf.int32))
self.assertEqual(str(v1.identifier), '1')
v2 = loop.run_until_complete(ex.create_value(11, tf.int32))
self.assertEqual(str(v2.identifier), '2')
v3 = loop.run_until_complete(
ex.create_tuple(collections.OrderedDict([('P', v1), ('Q', v2)])))
self.assertEqual(str(v3.identifier), '<P=1,Q=2>')
v4 = loop.run_until_complete(
ex.create_tuple(collections.OrderedDict([('P', v1), ('Q', v2)])))
self.assertIs(v4, v3)
c4 = loop.run_until_complete(v4.compute())
self.assertEqual(
str(anonymous_tuple.map_structure(lambda x: x.numpy(), c4)),
'<P=10,Q=11>')
def test_with_selection_by_index(self):
ex, _ = _make_executor_and_tracer_for_test()
loop = asyncio.get_event_loop()
v1 = loop.run_until_complete(
ex.create_value([10, 20],
computation_types.NamedTupleType([tf.int32, tf.int32])))
self.assertEqual(str(v1.identifier), '1')
v2 = loop.run_until_complete(ex.create_selection(v1, index=0))
self.assertEqual(str(v2.identifier), '1[0]')
v3 = loop.run_until_complete(ex.create_selection(v1, index=1))
self.assertEqual(str(v3.identifier), '1[1]')
v4 = loop.run_until_complete(ex.create_selection(v1, index=0))
self.assertIs(v4, v2)
v5 = loop.run_until_complete(ex.create_selection(v1, index=1))
self.assertIs(v5, v3)
c5 = loop.run_until_complete(v5.compute())
self.assertEqual(c5.numpy(), 20)
def test_with_numpy_array(self):
ex, _ = _make_executor_and_tracer_for_test()
loop = asyncio.get_event_loop()
v1 = loop.run_until_complete(
ex.create_value(np.array([10]), (tf.int32, [1])))
self.assertEqual(str(v1.identifier), '1')
c1 = loop.run_until_complete(v1.compute())
self.assertEqual(c1.numpy(), 10)
v2 = loop.run_until_complete(
ex.create_value(np.array([10]), (tf.int32, [1])))
self.assertIs(v2, v1)
def test_with_eager_dataset(self):
ex, _ = _make_executor_and_tracer_for_test()
loop = asyncio.get_event_loop()
@computations.tf_computation(computation_types.SequenceType(tf.int32))
def ds_reduce(ds):
return ds.reduce(np.int32(0), lambda x, y: x + y)
v1 = loop.run_until_complete(ex.create_value(ds_reduce))
self.assertEqual(str(v1.identifier), '1')
ds = tf.data.Dataset.from_tensor_slices([10, 20, 30])
v2 = loop.run_until_complete(
ex.create_value(ds, computation_types.SequenceType(tf.int32)))
self.assertEqual(str(v2.identifier), '2')
v3 = loop.run_until_complete(ex.create_call(v1, v2))
self.assertEqual(str(v3.identifier), '1(2)')
c3 = loop.run_until_complete(v3.compute())
self.assertEqual(c3.numpy(), 60)
v4 = loop.run_until_complete(
ex.create_value(ds, computation_types.SequenceType(tf.int32)))
self.assertIs(v4, v2)
def test_runs_tf(self):
ex, _ = _make_executor_and_tracer_for_test(support_lambdas=True)
executor_test_utils.test_runs_tf(self, ex)
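# Informal summary of the cache-identifier scheme the assertions above rely on
# (annotation, not part of the test module):
#
#   create_value            -> fresh integer ids: '1', '2', ...
#   create_call(f, x)       -> '1(2)' (or '1()' with no argument)
#   create_tuple            -> '<1,2>' or '<P=1,Q=2>'
#   create_selection(v, 0)  -> '1[0]'
#
# Structurally identical requests hash to the same key, so the executor hands
# back the identical CachedValue instance.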
if __name__ == '__main__':
tf.compat.v1.enable_v2_behavior()
absltest.main()
| 43.708831
| 80
| 0.729551
|
60aeb4e989a3e4a9933f2e2a35cf0b91025c2085
| 12,893
|
py
|
Python
|
packages/syft/src/syft/core/node/common/action/run_class_method_action.py
|
maltetoelle/PySyft
|
0521f407cfdd046d00c332b733894a865848bd19
|
[
"Apache-1.1"
] | null | null | null |
packages/syft/src/syft/core/node/common/action/run_class_method_action.py
|
maltetoelle/PySyft
|
0521f407cfdd046d00c332b733894a865848bd19
|
[
"Apache-1.1"
] | 3
|
2021-11-17T15:34:03.000Z
|
2021-12-08T14:39:10.000Z
|
packages/syft/src/syft/core/node/common/action/run_class_method_action.py
|
maltetoelle/PySyft
|
0521f407cfdd046d00c332b733894a865848bd19
|
[
"Apache-1.1"
] | null | null | null |
# stdlib
import functools
from typing import Any
from typing import Dict
from typing import List
from typing import Optional
# third party
from google.protobuf.reflection import GeneratedProtocolMessageType
from nacl.signing import VerifyKey
# syft absolute
import syft as sy
# relative
from ..... import lib
from .....logger import critical
from .....logger import traceback_and_raise
from .....logger import warning
from .....proto.core.node.common.action.run_class_method_pb2 import (
RunClassMethodAction as RunClassMethodAction_PB,
)
from .....util import inherit_tags
from ....common.serde.serializable import serializable
from ....common.uid import UID
from ....io.address import Address
from ....store.storeable_object import StorableObject
from ...abstract.node import AbstractNode
from .common import ImmediateActionWithoutReply
@serializable()
class RunClassMethodAction(ImmediateActionWithoutReply):
"""
When executing a RunClassMethodAction, a :class:`Node` will run a method defined
by the action's path attribute on the object pointed at by _self and keep the returned
value in its store.
Attributes:
path: the dotted path to the method to call
_self: a pointer to the object which the method should be applied to.
args: args to pass to the function. They should be pointers to objects
located on the :class:`Node` that will execute the action.
kwargs: kwargs to pass to the function. They should be pointers to objects
located on the :class:`Node` that will execute the action.
"""
def __init__(
self,
path: str,
_self: Any,
args: List[Any],
kwargs: Dict[Any, Any],
id_at_location: UID,
address: Address,
msg_id: Optional[UID] = None,
is_static: Optional[bool] = False,
):
self.path = path
self._self = _self
self.args = args
self.kwargs = kwargs
self.id_at_location = id_at_location
self.is_static = is_static
        # logging needs .path to exist before this call, which is why the
        # super().__init__ is invoked down here
super().__init__(address=address, msg_id=msg_id)
@staticmethod
def intersect_keys(
left: Dict[VerifyKey, UID], right: Dict[VerifyKey, UID]
) -> Dict[VerifyKey, UID]:
# get the intersection of the dict keys, the value is the request_id
# if the request_id is different for some reason we still want to keep it,
# so only intersect the keys and then copy those over from the main dict
# into a new one
intersection = set(left.keys()).intersection(right.keys())
        # take the values from left for the keys present in both dicts
return {k: left[k] for k in intersection}
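    # Worked example (illustrative): with left = {kA: req1, kB: req2} and
    # right = {kB: req9, kC: req3}, intersect_keys returns {kB: req2} --
    # keys are intersected and the request ids are taken from `left`.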
@property
def pprint(self) -> str:
return f"RunClassMethodAction({self.path})"
def __repr__(self) -> str:
method_name = self.path.split(".")[-1]
self_name = str(self._self.__class__.__name__)
arg_names = ",".join([a.__class__.__name__ for a in self.args])
kwargs_names = ",".join(
[f"{k}={v.__class__.__name__}" for k, v in self.kwargs.items()]
)
return f"RunClassMethodAction {self_name}.{method_name}({arg_names}, {kwargs_names})"
def execute_action(self, node: AbstractNode, verify_key: VerifyKey) -> None:
method = node.lib_ast(self.path)
mutating_internal = False
if (
self.path.startswith("torch.Tensor")
and self.path.endswith("_")
and not self.path.endswith("__call__")
):
mutating_internal = True
elif not self.path.startswith("torch.Tensor") and self.path.endswith(
"__call__"
):
mutating_internal = True
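        # e.g. 'torch.Tensor.add_' (trailing underscore) mutates self in
        # place, and a non-tensor '__call__' may mutate module state; both
        # cases must copy the result read permissions back onto the mutated
        # object further below.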
resolved_self = None
if not self.is_static:
resolved_self = node.store.get_object(key=self._self.id_at_location)
if resolved_self is None:
critical(
f"execute_action on {self.path} failed due to missing object"
+ f" at: {self._self.id_at_location}"
)
return
result_read_permissions = resolved_self.read_permissions
else:
result_read_permissions = {}
resolved_args = list()
tag_args = []
for arg in self.args:
r_arg = node.store[arg.id_at_location]
result_read_permissions = self.intersect_keys(
result_read_permissions, r_arg.read_permissions
)
resolved_args.append(r_arg.data)
tag_args.append(r_arg)
resolved_kwargs = {}
tag_kwargs = {}
for arg_name, arg in self.kwargs.items():
r_arg = node.store[arg.id_at_location]
result_read_permissions = self.intersect_keys(
result_read_permissions, r_arg.read_permissions
)
resolved_kwargs[arg_name] = r_arg.data
tag_kwargs[arg_name] = r_arg
(
upcasted_args,
upcasted_kwargs,
) = lib.python.util.upcast_args_and_kwargs(resolved_args, resolved_kwargs)
if self.is_static:
result = method(*upcasted_args, **upcasted_kwargs)
else:
if resolved_self is None:
traceback_and_raise(
ValueError(f"Method {method} called, but self is None.")
)
method_name = self.path.split(".")[-1]
# relative
from ....plan.plan import Plan
if (
isinstance(resolved_self.data, Plan)
and method_name == "__call__"
or (
hasattr(resolved_self.data, "forward")
and (
resolved_self.data.forward.__class__.__name__ == "Plan"
or getattr(resolved_self.data.forward, "__name__", None)
== "_compile_and_forward"
)
and method_name in ["__call__", "forward"]
)
):
if len(self.args) > 0:
traceback_and_raise(
ValueError(
"You passed args to Plan.__call__, while it only accepts kwargs"
)
)
if method.__name__ == "_forward_unimplemented":
method = resolved_self.data.forward
result = method(node, verify_key, **self.kwargs)
else:
result = method(resolved_self.data, node, verify_key, **self.kwargs)
else:
target_method = getattr(resolved_self.data, method_name, None)
if id(target_method) != id(method):
warning(
f"Method {method_name} overwritten on object {resolved_self.data}"
)
method = target_method
else:
method = functools.partial(method, resolved_self.data)
result = method(*upcasted_args, **upcasted_kwargs)
# TODO: add numpy support https://github.com/OpenMined/PySyft/issues/5164
if "numpy." in str(type(result)):
if "float" in type(result).__name__:
result = float(result)
if "int" in type(result).__name__:
result = int(result)
if "bool" in type(result).__name__:
result = bool(result)
if lib.python.primitive_factory.isprimitive(value=result):
# Wrap in a SyPrimitive
result = lib.python.primitive_factory.PrimitiveFactory.generate_primitive(
value=result, id=self.id_at_location
)
else:
# TODO: overload all methods to incorporate this automatically
if hasattr(result, "id"):
try:
if hasattr(result, "_id"):
# set the underlying id
result._id = self.id_at_location
else:
result.id = self.id_at_location
if result.id != self.id_at_location:
raise AttributeError("IDs don't match")
except AttributeError as e:
err = f"Unable to set id on result {type(result)}. {e}"
traceback_and_raise(Exception(err))
if mutating_internal:
if isinstance(resolved_self, StorableObject):
resolved_self.read_permissions = result_read_permissions
if not isinstance(result, StorableObject):
result = StorableObject(
id=self.id_at_location,
data=result,
read_permissions=result_read_permissions,
)
inherit_tags(
attr_path_and_name=self.path,
result=result,
self_obj=resolved_self,
args=tag_args,
kwargs=tag_kwargs,
)
node.store[self.id_at_location] = result
def _object2proto(self) -> RunClassMethodAction_PB:
"""Returns a protobuf serialization of self.
As a requirement of all objects which inherit from Serializable,
this method transforms the current object into the corresponding
Protobuf object so that it can be further serialized.
:return: returns a protobuf object
:rtype: RunClassMethodAction_PB
.. note::
This method is purely an internal method. Please use sy.serialize(object) or one of
the other public serialization methods if you wish to serialize an
object.
"""
return RunClassMethodAction_PB(
path=self.path,
_self=sy.serialize(self._self, to_bytes=True),
args=list(map(lambda x: sy.serialize(x, to_bytes=True), self.args)),
kwargs={k: sy.serialize(v, to_bytes=True) for k, v in self.kwargs.items()},
id_at_location=sy.serialize(self.id_at_location),
address=sy.serialize(self.address),
msg_id=sy.serialize(self.id),
)
@staticmethod
def _proto2object(proto: RunClassMethodAction_PB) -> "RunClassMethodAction":
"""Creates a ObjectWithID from a protobuf
As a requirement of all objects which inherit from Serializable,
this method transforms a protobuf object into an instance of this class.
:return: returns an instance of RunClassMethodAction
:rtype: RunClassMethodAction
.. note::
This method is purely an internal method. Please use syft.deserialize()
if you wish to deserialize an object.
"""
return RunClassMethodAction(
path=proto.path,
_self=sy.deserialize(blob=proto._self, from_bytes=True),
args=list(
map(lambda x: sy.deserialize(blob=x, from_bytes=True), proto.args)
),
kwargs={
k: sy.deserialize(blob=v, from_bytes=True)
for k, v in proto.kwargs.items()
},
id_at_location=sy.deserialize(blob=proto.id_at_location),
address=sy.deserialize(blob=proto.address),
msg_id=sy.deserialize(blob=proto.msg_id),
)
@staticmethod
def get_protobuf_schema() -> GeneratedProtocolMessageType:
"""Return the type of protobuf object which stores a class of this type
As a part of serialization and deserialization, we need the ability to
lookup the protobuf object type directly from the object type. This
static method allows us to do this.
Importantly, this method is also used to create the reverse lookup ability within
the metaclass of Serializable. In the metaclass, it calls this method and then
it takes whatever type is returned from this method and adds an attribute to it
with the type of this class attached to it. See the MetaSerializable class for details.
:return: the type of protobuf object which corresponds to this class.
:rtype: GeneratedProtocolMessageType
"""
return RunClassMethodAction_PB
def remap_input(self, current_input: Any, new_input: Any) -> None:
"""Redefines some of the arguments, and possibly the _self of the function"""
if self._self.id_at_location == current_input.id_at_location:
self._self = new_input
for i, arg in enumerate(self.args):
if arg.id_at_location == current_input.id_at_location:
self.args[i] = new_input
for k, v in self.kwargs.items():
if v.id_at_location == current_input.id_at_location:
self.kwargs[k] = new_input
| 38.25816
| 95
| 0.59955
|
60a74af84063c3c2b2f9b43db4c1810897173c83
| 159
|
py
|
Python
|
scipy/solutions/simple_linalg_solution.py
|
ramosmaria/school2021
|
3e162a05767bbc8fa6ce3f92c858d1bd639fee16
|
[
"MIT"
] | 252
|
2021-05-18T11:58:17.000Z
|
2022-03-12T06:48:52.000Z
|
scipy/solutions/simple_linalg_solution.py
|
ramosmaria/school2021
|
3e162a05767bbc8fa6ce3f92c858d1bd639fee16
|
[
"MIT"
] | 44
|
2021-05-21T14:28:34.000Z
|
2021-07-12T22:36:06.000Z
|
scipy/solutions/simple_linalg_solution.py
|
ramosmaria/school2021
|
3e162a05767bbc8fa6ce3f92c858d1bd639fee16
|
[
"MIT"
] | 128
|
2021-05-24T18:32:54.000Z
|
2022-03-26T11:24:16.000Z
|
# flake8: noqa
import numpy as np
import matplotlib.pyplot as plt
from scipy import linalg
# 1
A = np.random.uniform(0, 1, size=(10, 10))
# 2
A_sym = A + A.T
plt.matshow(A_sym)
plt.grid(False)
# 3
values, vectors = linalg.eigh(A_sym)
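# Quick verification sketch (illustrative addition): A_sym is real and
# symmetric, so eigh applies -- the eigenvalues are real and the eigenvector
# matrix is orthogonal:
#   assert np.allclose(A_sym, A_sym.T)
#   assert np.allclose(vectors @ np.diag(values) @ vectors.T, A_sym)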
| 14.454545
| 42
| 0.641509
|
200acc2bfe3a21df253cb38ad668da717c168220
| 5,790
|
py
|
Python
|
docs/conf.py
|
Clariteia/api_gateway_common
|
e68095f31091699fc6cc4537bd6acf97a8dc6c3e
|
[
"MIT"
] | 3
|
2021-05-14T08:13:09.000Z
|
2021-05-26T11:25:35.000Z
|
docs/conf.py
|
Clariteia/api_gateway_common
|
e68095f31091699fc6cc4537bd6acf97a8dc6c3e
|
[
"MIT"
] | 27
|
2021-05-13T08:43:19.000Z
|
2021-08-24T17:19:36.000Z
|
docs/conf.py
|
Clariteia/api_gateway_common
|
e68095f31091699fc6cc4537bd6acf97a8dc6c3e
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
#
# minos documentation build configuration file, created by
# sphinx-quickstart on Fri Jun 9 13:47:02 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
from minos.api_gateway import common
# If extensions (or modules to document with autodoc) are in another
# directory, add these directories to sys.path here. If the directory is
# relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#
sys.path.insert(0, os.path.abspath(".."))
# -- General configuration ---------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
import sphinx_rtd_theme
extensions = [
"sphinxcontrib.apidoc",
'sphinx.ext.autodoc',
"sphinx_autodoc_typehints",
"sphinx.ext.viewcode",
"sphinx_rtd_theme",
"m2r2",
]
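# (Annotation, not in the original file: sphinxcontrib.apidoc regenerates the
# API docs at build time, sphinx_autodoc_typehints folds PEP 484 hints into
# the parameter descriptions, and m2r2 lets Markdown sources sit alongside
# reStructuredText.)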
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
source_suffix = ['.rst', '.md']
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = "API Gateway Common"
copyright = "2021, Clariteia Devs"
author = "Clariteia Devs"
# The version info for the project you're documenting, acts as replacement
# for |version| and |release|, also used in various other places throughout
# the built documents.
#
# The short X.Y version.
version = common.__version__
# The full version, including alpha/beta/rc tags.
release = common.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output -------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a
# theme further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {
# "codecov_button": True,
# "description": "Reactive microservices for an asynchronous world",
# "github_button": True,
# "github_user": "Clariteia",
# "github_repo": "api_gateway_common",
# "github_type": "star",
# }
html_sidebars = {"**": ["about.html", "navigation.html", "searchbox.html"]}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []
# -- Options for HTMLHelp output ---------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'minosdoc'
# -- Options for LaTeX output ------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto, manual, or own class]).
latex_documents = [
(
master_doc,
"minos.tex",
"API Gateway Common Documentation",
"Clariteia Devs",
"manual",
),
]
# -- Options for manual page output ------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(master_doc, "minos", "API Gateway Common Documentation",
[author], 1)]
# -- Options for Texinfo output ----------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
master_doc,
"minos",
"API Gateway Common Documentation",
author,
"minos",
"One line description of project.",
"Miscellaneous",
),
]
# "apidoc" extension
apidoc_module_dir = "../minos"
apidoc_output_dir = "api"
apidoc_separate_modules = True
autodoc_default_options = {
"inherited-members": True,
"special-members": "__init__",
"undoc-members": True,
}
apidoc_toc_file = False
apidoc_module_first = True
apidoc_extra_args = [
"--force",
"--implicit-namespaces",
]
# "autodoc typehints" extension
set_type_checking_flag = True
typehints_fully_qualified = True
| 28.106796
| 77
| 0.675993
|
5ee4942594d31d798e6bdab038db216bb0c3edf3
| 2,167
|
py
|
Python
|
04_Properties_and_Class_methods/iso3436.py
|
MANOJPATRA1991/Python-Beyond-the-Basics
|
aed7bfd35e33c2b1759b48e1c89314aa149c56d0
|
[
"MIT"
] | null | null | null |
04_Properties_and_Class_methods/iso3436.py
|
MANOJPATRA1991/Python-Beyond-the-Basics
|
aed7bfd35e33c2b1759b48e1c89314aa149c56d0
|
[
"MIT"
] | null | null | null |
04_Properties_and_Class_methods/iso3436.py
|
MANOJPATRA1991/Python-Beyond-the-Basics
|
aed7bfd35e33c2b1759b48e1c89314aa149c56d0
|
[
"MIT"
] | null | null | null |
"""
ISO 6346 shipping container codes.
"""
def create(owner_code, serial, category='U'):
"""Create an ISO 6346 shipping container code.
Args:
owner_code (str): Three character alphabetic container code.
serial (str): Six digit numeric serial number.
category (str): Equipment category identifier.
Returns:
An ISO 6346 container code including a check digit.
Raises:
ValueError: If incorrect values are provided.
"""
if not (len(owner_code) == 3 and owner_code.isalpha()):
raise ValueError("Invalid ISO 6346 owner code '{}'".format(owner_code))
if category not in ('U', 'J', 'Z', 'R'):
raise ValueError("Invalid ISO 6346 category identifier '{}'".format(category))
if not (len(serial) == 6 and serial.isdigit()):
raise ValueError("Invalid ISO 6346 serial number")
raw_code = owner_code + category + serial
full_code = raw_code + str(check_digit(raw_code))
return full_code
def check_digit(raw_code):
"""Compute the check digit for an ISO 6346 code without that digit
Args:
raw_code (str): An ISO 6346 code lacking a check digit.
Returns:
An integer check digit between 0 and 9 inclusive.
"""
s = sum(code(char) * 2**index for index, char in enumerate(raw_code))
return s % 11 % 10
def code(char):
"""Determine the ISO 6346 numeric equivalent of a character.
Args:
char (str): A single character string.
    Returns:
An integer code equivalent to the supplied character.
"""
return int(char) if char.isdigit() else letter_code(char)
def letter_code(letter):
"""Determine the ISO 6346 numeric code for a letter.
Args:
letter (str): A single letter.
Returns:
An integer character code equivalent to the supplied letter.
"""
# Numeric encode the letters starting 'a' at 11
# ord('c') = 99
# ord('a') = 97
# ord('c') - ord('a') = 2
# 10 + 2 = 12 but we need 'c' to be at position 13
# 12 + 12 // 11 = 12 + 1 = 13 is the correction position for 'c'
value = ord(letter.lower()) - ord('a') + 10
return value + value // 11
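# A minimal usage sketch, assuming the hypothetical owner code 'MSC' and
# serial '301180' (neither appears in the original module). Per the
# algorithm above, the weighted sum for 'MSCU301180' is 2680, and
# 2680 % 11 % 10 == 7:
if __name__ == '__main__':
    print(create('MSC', '301180'))  # -> 'MSCU3011807'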
| 32.343284
| 86
| 0.636364
|
9af81a3f5c40e7696c65089e18848cab933d71d0
| 36
|
py
|
Python
|
external/models/TransE_GUSE/__init__.py
|
swapUniba/Elliot_refactor-tesi-Ventrella
|
3ddffc041696c90a6f6d3e8906c212fc4f55f842
|
[
"Apache-2.0"
] | null | null | null |
external/models/TransE_GUSE/__init__.py
|
swapUniba/Elliot_refactor-tesi-Ventrella
|
3ddffc041696c90a6f6d3e8906c212fc4f55f842
|
[
"Apache-2.0"
] | null | null | null |
external/models/TransE_GUSE/__init__.py
|
swapUniba/Elliot_refactor-tesi-Ventrella
|
3ddffc041696c90a6f6d3e8906c212fc4f55f842
|
[
"Apache-2.0"
] | null | null | null |
from .TransE_GUSE import TransE_GUSE
| 36
| 36
| 0.888889
|
c9ab8f13984603f905227d9c9a11fa22364db3b6
| 1,576
|
py
|
Python
|
tools/filters/lav_to_bed.py
|
vimalkumarvelayudhan/galaxy
|
ea89dd8f149778b6c2f0f3f4a34c8b21f7033af7
|
[
"CC-BY-3.0"
] | null | null | null |
tools/filters/lav_to_bed.py
|
vimalkumarvelayudhan/galaxy
|
ea89dd8f149778b6c2f0f3f4a34c8b21f7033af7
|
[
"CC-BY-3.0"
] | null | null | null |
tools/filters/lav_to_bed.py
|
vimalkumarvelayudhan/galaxy
|
ea89dd8f149778b6c2f0f3f4a34c8b21f7033af7
|
[
"CC-BY-3.0"
] | null | null | null |
#!/usr/bin/env python
# Reads a LAV file and writes two BED files.
import sys
from galaxy import eggs
import pkg_resources
pkg_resources.require( "bx-python" )
import bx.align.lav
assert sys.version_info[:2] >= ( 2, 4 )
def stop_err( msg ):
sys.stderr.write( msg )
sys.exit()
def main():
try:
lav_file = open(sys.argv[1], 'r')
bed_file1 = open(sys.argv[2], 'w')
bed_file2 = open(sys.argv[3], 'w')
except Exception, e:
stop_err( str( e ) )
lavsRead = 0
bedsWritten = 0
species = {}
# TODO: this is really bad since everything is read into memory. Can we eliminate this tool?
for lavBlock in bx.align.lav.Reader( lav_file ):
lavsRead += 1
for c in lavBlock.components:
spec, chrom = bx.align.lav.src_split( c.src )
if bedsWritten < 1:
if len( species ) == 0:
species[spec] = bed_file1
elif len( species ) == 1:
species[spec] = bed_file2
else:
continue # this is a pairwise alignment...
if spec in species:
species[spec].write( "%s\t%i\t%i\t%s_%s\t%i\t%s\n" % ( chrom, c.start, c.end, spec, str( bedsWritten ), 0, c.strand ) )
bedsWritten += 1
for spec, file in species.items():
print "#FILE\t%s\t%s" % (file.name, spec)
lav_file.close()
bed_file1.close()
bed_file2.close()
print "%d lav blocks read, %d regions written\n" % (lavsRead, bedsWritten)
if __name__ == "__main__":
main()
| 28.142857
| 135
| 0.564721
|
9a9374f40b20213be4181c18826e1c6d4638f58f
| 2,950
|
py
|
Python
|
backend/users/models/user.py
|
hnthh/foodgram-project-react
|
3383c6a116fded11b4a764b95e6ca4ead03444f3
|
[
"MIT"
] | 1
|
2022-02-09T10:42:45.000Z
|
2022-02-09T10:42:45.000Z
|
backend/users/models/user.py
|
hnthh/foodgram
|
3383c6a116fded11b4a764b95e6ca4ead03444f3
|
[
"MIT"
] | null | null | null |
backend/users/models/user.py
|
hnthh/foodgram
|
3383c6a116fded11b4a764b95e6ca4ead03444f3
|
[
"MIT"
] | null | null | null |
from config.models import DefaultUserQuerySet, models
from django.contrib.auth.models import AbstractUser
from django.contrib.auth.validators import (
UnicodeUsernameValidator as _UnicodeUsernameValidator,
)
from django.db.models import Count, Exists, OuterRef, Q, Value
class UnicodeUsernameValidator(_UnicodeUsernameValidator):
message = (
'Введите правильный логин. '
'Он может содержать только буквы, цифры и знаки @/./+/-/_.'
)
class UserQuerySet(DefaultUserQuerySet):
class Q: # noqa: PIE798
@staticmethod
def user_following(user):
return Q(following__user=user)
def for_detail(self, pk, user):
return self.for_viewset(user).get(id=pk)
def for_anon(self):
return self.annotate(
is_subscribed=Value(False),
recipes_count=Value(0),
)
def for_viewset(self, user):
from users.models import Subscribe
if not user.is_authenticated:
return self.for_anon()
return self.annotate(
is_subscribed=Exists(
Subscribe.objects.filter(user=user, author=OuterRef('pk')),
),
recipes_count=Count('recipes'),
)
def for_subscriptions(self, user):
qs = self.for_viewset(user)
if not user.is_authenticated:
return self.for_anon()
return qs.filter(self.Q.user_following(user))
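# A minimal usage sketch (the `request` object here is an assumption, not
# part of this module): the queryset above is consumed through the manager
# attached to User below, e.g.
#
#     users = User.objects.for_viewset(request.user)
#     # each user arrives annotated with `is_subscribed` and `recipes_count`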
class User(AbstractUser):
objects = UserQuerySet.as_manager()
username_validator = UnicodeUsernameValidator()
username = models.CharField(
'логин',
unique=True,
max_length=150,
validators=[username_validator],
error_messages={
'unique': (
'Пользователь с таким логином уже '
'зарегистрирован на платформе.'
),
},
)
email = models.EmailField(
'почта',
unique=True,
max_length=150,
error_messages={
'unique': (
'Пользователь с такой почтой уже '
'зарегистрирован на платформе.'
),
},
)
first_name = models.CharField('имя', max_length=150)
last_name = models.CharField('фамилия', max_length=150)
REQUIRED_FIELDS = ['email', 'first_name', 'last_name']
class Meta:
verbose_name = 'пользователь'
verbose_name_plural = 'пользователи'
swappable = 'AUTH_USER_MODEL'
constraints = (
models.UniqueConstraint(
fields=('email', 'username'),
name='unique_email_username',
),
)
def __str__(self):
return f'{self.first_name} {self.last_name}'
def subscribe(self, to):
from users.services import Subscriber
return Subscriber(self, to)()
def unsubscribe(self, fromm):
from users.services import Unsubscriber
return Unsubscriber(self, fromm)()
| 27.314815
| 75
| 0.605424
|
117aa3044489ec7ef687e190a2e22a6665d924b6
| 647
|
py
|
Python
|
misprogs/contador.py
|
dacocube/CursoGalileo
|
1dac903031d9ff61174cb0c5e00e3f3795ea60de
|
[
"Apache-2.0"
] | null | null | null |
misprogs/contador.py
|
dacocube/CursoGalileo
|
1dac903031d9ff61174cb0c5e00e3f3795ea60de
|
[
"Apache-2.0"
] | null | null | null |
misprogs/contador.py
|
dacocube/CursoGalileo
|
1dac903031d9ff61174cb0c5e00e3f3795ea60de
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
import signal
import sys
import time
import pyupm_grove as grove
import pyupm_ttp223 as ttp223
import pyupm_i2clcd as lcd
def interruptHandler(signal, frame):
sys.exit(0)
if __name__ == '__main__':
signal.signal(signal.SIGINT, interruptHandler)
touch = ttp223.TTP223(6)
myLcd = lcd.Jhd1313m1(0, 0x3E, 0x62)
button = grove.GroveButton(8)
count = 0
myLcd.setColor(0,0,255)
# Read the input and print, waiting 1/2 second between readings
while 1:
if button.value():
count=count+1
if touch.isPressed():
count=count-1
myLcd.setCursor(0,0)
myLcd.write('%6d'% count)
time.sleep(0.5)
del button
del touch
| 21.566667
| 64
| 0.727975
|
c9e56f5f70dd474993a40687a674f32c37bed1cb
| 7,470
|
py
|
Python
|
molecule.py
|
Ved-P/molecule
|
9727a9e7f8c0412feee27bbe034a1540cff7534e
|
[
"MIT"
] | null | null | null |
molecule.py
|
Ved-P/molecule
|
9727a9e7f8c0412feee27bbe034a1540cff7534e
|
[
"MIT"
] | 1
|
2022-01-03T20:07:31.000Z
|
2022-01-04T18:45:21.000Z
|
molecule.py
|
Ved-P/molecule
|
9727a9e7f8c0412feee27bbe034a1540cff7534e
|
[
"MIT"
] | null | null | null |
# Molecule
#
# This program takes in a molecular formula and creates a Lewis diagram and a 3D
# model of the molecule as the output.
#
# Author: Ved Pradhan
# Since: December 31, 2021
import json
import matplotlib.pyplot as plt
import sys
import math
# Opens the JSON file for use.
with open("elements.json", "r", encoding="utf8") as file:
data = json.load(file)
# Gets the formula and charge from the user.
formula = input("\n\n\nWelcome to Molecule! Please enter a molecular formula "
+ "(case sensitive): ")
temp = input("What is the charge of the molecule? Enter an integer (0 for no "
+ "charge): ")
try:
charge = int(temp)
except ValueError:
print("Error: '" + temp + "' is not a valid charge.\n\n\n")
sys.exit()
# A list to store each individual atom in the molecule.
atoms = []
# A dictionary to store each type of element and its frequency.
element_frequency = {}
# A list to store the bonds between Atom objects.
bonds = []
# Class to represent each individual atom in the molecule.
class Atom:
def __init__(self, symbol):
self.symbol = symbol
self.element = get_element(symbol)
        if self.element:  # get_element() returns False for unknown symbols
self.enegativity = self.element["electronegativity_pauling"]
self.expected_ve = self.get_valence_electrons()
self.loose_ve = 0
self.sigma_bonds = 0
self.pi_bonds = 0
self.formal_charge = 0
self.total_ve = 0
self.lewis_x = 0
self.lewis_y = 0
# Returns the number of valence electrons the atom is expected to have.
def get_valence_electrons(self):
if self.symbol == "He":
return 2
elif 9 <= self.element["ypos"] <= 10:
return 2
elif 2 <= self.element["xpos"] <= 12:
return 2
else:
return self.element["xpos"] % 10
# Updates the formal charge of the atom.
def update_formal_charge(self):
self.formal_charge = self.expected_ve - self.loose_ve - self.sigma_bonds - self.pi_bonds
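    # Worked example (assumed input, not from this module): for the central
    # oxygen of a neutral H2O built by this program, expected_ve=6,
    # loose_ve=4 and sigma_bonds=2, so the formal charge is 6-4-2-0 = 0.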
# Updates the total number of valence electrons, including shared ones.
def update_total_ve(self):
self.total_ve = self.loose_ve + 2 * (self.sigma_bonds + self.pi_bonds)
# Returns essential information about the atom as a string.
def __str__(self):
return (self.element["name"] + ": " + str(self.loose_ve) + " loose, "
+ str(self.sigma_bonds) + " sigma, " + str(self.pi_bonds) + " pi")
# Retrieves the element corresponding to the given symbol.
def get_element(symbol):
for element in data["elements"]:
if element["symbol"] == symbol:
return element
print("Error: Element '" + symbol + "' not found.\n\n\n")
return False
# Parses the input formula, splitting it into elements and frequencies.
def parse(form):
i = 1
while i < len(form) and not(ord('A') <= ord(form[i]) <= ord('Z')):
i += 1
j = i - 1
while j >= 0 and ord('0') <= ord(form[j]) <= ord('9'):
j -= 1
if j < 0:
print("Error: The formula cannot start with a number.\n\n\n")
sys.exit()
symbol_part = form[:j+1]
number_part = form[j+1:i]
rest = form[i:]
ele = get_element(symbol_part)
if number_part == "":
number = 1
else:
number = int(number_part)
element_frequency[symbol_part] = number
for i in range(number):
atoms.append(Atom(symbol_part))
if len(rest) > 0:
parse(rest)
# Prints a "not supported" message and quits the program.
def noSupport():
print("Sorry, this molecule is not supported yet.\n\n\n")
sys.exit()
# Checks if the molecule is supported.
def check():
if len(element_frequency) != 2:
noSupport()
symb1 = list(element_frequency)[0]
symb2 = list(element_frequency)[1]
global center
global outer
if symb1 == "H":
center = symb2
outer = symb1
elif symb2 == "H":
center = symb1
outer = symb2
elif get_element(symb1)["electronegativity_pauling"] < get_element(symb2)["electronegativity_pauling"]:
center = symb1
outer = symb2
elif get_element(symb1)["electronegativity_pauling"] > get_element(symb2)["electronegativity_pauling"]:
center = symb2
outer = symb1
else:
noSupport()
if element_frequency[center] != 1:
noSupport()
# Bonds two atoms together; updates in the object and the data structure.
def bond(atom1, atom2, type):
bonds.append((atom1, atom2, type))
if (type == "sigma"):
atom1.sigma_bonds += 1
atom2.sigma_bonds += 1
if (type == "pi"):
atom1.pi_bonds += 1
atom2.pi_bonds += 1
# Distributes the valence electrons as loose ones or through bonds.
def distribute():
total_ve = 0
for a in atoms:
total_ve += a.expected_ve
total_ve -= charge
left_ve = total_ve
global centerAtom
centerAtom = -1
global outerAtoms
outerAtoms = []
for a in atoms:
if a.symbol == center:
centerAtom = a
elif a.symbol == outer:
outerAtoms.append(a)
for o in outerAtoms:
bond(centerAtom, o, "sigma")
left_ve -= 2
want_ve = -1
if outer == "H" or outer == "He":
want_ve = 0
else:
want_ve = 6
if left_ve // len(outerAtoms) >= want_ve:
for o in outerAtoms:
o.loose_ve += want_ve
left_ve -= want_ve
if left_ve >= 0:
centerAtom.loose_ve += left_ve
else:
noSupport()
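# Worked example (assumed input "H2O" with charge 0): total_ve = 6+1+1 = 8;
# the two O-H sigma bonds consume 4 electrons, hydrogens take no lone
# electrons (want_ve == 0), and the remaining 4 settle on the central
# oxygen as two lone pairs.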
# Draws the lewis diagram using matplotlib.
def draw_lewis():
centerAtom.lewis_x = 0
centerAtom.lewis_y = 0
plt.style.use('_mpl-gallery')
fig, ax = plt.subplots()
fig.suptitle(formula, fontsize=14, fontweight='bold')
ax.text(0, 0, centerAtom.symbol, verticalalignment='center', horizontalalignment='center')
for i in range(len(outerAtoms)):
o = outerAtoms[i]
o.lewis_x = math.cos(2 * i * math.pi / len(outerAtoms))
o.lewis_y = math.sin(2 * i * math.pi / len(outerAtoms))
ax.text(o.lewis_x, o.lewis_y, o.symbol, verticalalignment='center', horizontalalignment='center')
for b in bonds:
x1 = (2 * b[0].lewis_x + b[1].lewis_x) / 3
x2 = (b[0].lewis_x + 2 * b[1].lewis_x) / 3
y1 = (2 * b[0].lewis_y + b[1].lewis_y) / 3
y2 = (b[0].lewis_y + 2 * b[1].lewis_y) / 3
plt.plot([x1, x2], [y1, y2], color='gray')
for a in atoms:
x_shift = 0
y_shift = 0
for i in range(a.loose_ve):
if 0 <= i <= 1:
x_shift = -0.2
elif 2 <= i <= 3:
y_shift = -0.2
elif 4 <= i <= 5:
x_shift = 0.2
elif 6 <= i <= 7:
y_shift = 0.2
if i == 0 or i == 5:
y_shift = 0.05
elif i == 1 or i == 4:
y_shift = -0.05
elif i == 2 or i == 7:
x_shift = -0.05
elif i == 3 or i == 6:
x_shift = 0.05
ax.scatter(x = a.lewis_x + x_shift, y = a.lewis_y + y_shift + 0.03,
s = 4, color='black')
axes = plt.gca()
axes.set_aspect(1)
plt.xlim([-1.75, 1.75])
plt.ylim([-1.7, 1.8])
axes.axes.xaxis.set_visible(False)
axes.axes.yaxis.set_visible(False)
plt.show()
parse(formula)
check()
distribute()
print(element_frequency)
for a in atoms:
print(a)
draw_lewis()
print("\n\n\n")
| 30.995851
| 107
| 0.58822
|
3aab968d3452d7013fa9764a8812c92fab73bd01
| 1,899
|
py
|
Python
|
spec.py
|
swimclan/fergus_names
|
28c10d99a40408839b321f0dbb5fc26764668d3e
|
[
"MIT"
] | null | null | null |
spec.py
|
swimclan/fergus_names
|
28c10d99a40408839b321f0dbb5fc26764668d3e
|
[
"MIT"
] | null | null | null |
spec.py
|
swimclan/fergus_names
|
28c10d99a40408839b321f0dbb5fc26764668d3e
|
[
"MIT"
] | null | null | null |
from app import Directory
# =============================
# TESTS
# =============================
# TEST Instantiate a Directory with a max size of 2 and get an empty initial
# list with None type in each index
product_team = Directory(2)
print "\nShould be an a list of size 2 with None objects in each index"
print product_team.directory
# TEST interactively collect user data for a single user in 'empty' list
new_member = product_team.promptNewUser()
product_team.addUser(new_member)
print "\nShould be a list of size 2 with a single user added. All other indices should be None type"
print product_team.directory
# TEST return count of users in the directory
print "\nShould return 1"
print product_team.user_count
# TEST interactively collect user data for an additional single user
# in directory
second_member = product_team.promptNewUser()
product_team.addUser(second_member)
print "\nShould be a list of size 2 with two users added. No more None values should appear"
print product_team.directory
# TEST return count of users in the directory with additional user accounted for
print "\nShould return 2"
print product_team.user_count
# TEST return of user object with an ID value of 0 (the first user in the directory)
print "\nShould return the first user entered"
print product_team.getUserById(0)
# TEST throw error if invalid id is given for get user by id
print "\nShould throw an error for invalid id"
print product_team.getUserById(4)
# TEST throw an error when trying to add another user beyond the max user count
# specified during instantiation
third_member = product_team.promptNewUser()
print "\nShould throw an error when trying to add a third user"
product_team.addUser(third_member)
# TEST returning the full name of the second user entered into the directory
print "\nShould print the full name of the second user entered"
print product_team.getUserFullNameById(1)
| 37.235294
| 101
| 0.770932
|
193ab794aec56788d7751fe8de623df9bd8d22be
| 1,888
|
py
|
Python
|
setup.py
|
KCRW-org/kcrw.apple_news
|
81c0a402f210e87ae73fe02a3d39b7929c328ceb
|
[
"MIT"
] | null | null | null |
setup.py
|
KCRW-org/kcrw.apple_news
|
81c0a402f210e87ae73fe02a3d39b7929c328ceb
|
[
"MIT"
] | 85
|
2020-03-09T22:40:20.000Z
|
2022-03-31T01:10:18.000Z
|
setup.py
|
KCRW-org/kcrw.apple_news
|
81c0a402f210e87ae73fe02a3d39b7929c328ceb
|
[
"MIT"
] | 2
|
2021-05-12T17:37:09.000Z
|
2021-08-14T11:30:38.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""The setup script."""
from setuptools import setup, find_packages
###################################################################
KEYWORDS = ['Apple News', 'REST API', 'KCRW']
CLASSIFIERS = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
]
###################################################################
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read()
requirements = ['requests[security]', 'six', 'Click', 'click-log', ]
setup_requirements = ['pytest-runner', ]
test_requirements = ['pytest>=3', ]
setup(
name="kcrw.apple_news",
namespace_packages=['kcrw'],
author="Alec Mitchell",
author_email="alecpm@gmail.com",
version="0.2.6",
description="Library for using the Apple News API",
long_description=readme + '\n\n' + history,
long_description_content_type="text/x-rst",
license="MIT license",
url="https://github.com/KCRW-org/kcrw.apple_news",
keywords=KEYWORDS,
classifiers=CLASSIFIERS,
install_requires=requirements,
include_package_data=True,
package_dir={'': 'src'},
packages=find_packages('src'),
setup_requires=setup_requirements,
test_suite='tests',
tests_require=test_requirements,
zip_safe=False,
entry_points='''
[console_scripts]
apple_news_api=kcrw.apple_news.command:cli
'''
)
| 28.606061
| 68
| 0.610169
|
afb51f98e575842a537ab31be89d2e7a1af05c86
| 40,767
|
py
|
Python
|
zerver/lib/message.py
|
yakkl/yakkl
|
89ecf4ee8998554a0634667067e16f428e4c480c
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
zerver/lib/message.py
|
yakkl/yakkl
|
89ecf4ee8998554a0634667067e16f428e4c480c
|
[
"ECL-2.0",
"Apache-2.0"
] | 4
|
2020-06-06T00:51:42.000Z
|
2022-02-10T21:38:40.000Z
|
zerver/lib/message.py
|
yakkl/yakkl
|
89ecf4ee8998554a0634667067e16f428e4c480c
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
import datetime
import ujson
import zlib
import ahocorasick
from django.utils.translation import ugettext as _
from django.utils.timezone import now as timezone_now
from django.db import connection
from django.db.models import Sum
from analytics.lib.counts import COUNT_STATS, RealmCount
from zerver.lib.avatar import get_avatar_field
import zerver.lib.bugdown as bugdown
from zerver.lib.cache import (
cache_with_key,
generic_bulk_cached_fetch,
to_dict_cache_key,
to_dict_cache_key_id,
)
from zerver.lib.request import JsonableError
from zerver.lib.stream_subscription import (
get_stream_subscriptions_for_user,
)
from zerver.lib.timestamp import datetime_to_timestamp
from zerver.lib.topic import (
DB_TOPIC_NAME,
MESSAGE__TOPIC,
TOPIC_LINKS,
TOPIC_NAME,
)
from zerver.lib.topic_mutes import (
build_topic_mute_checker,
topic_is_muted,
)
from zerver.models import (
get_display_recipient_by_id,
get_user_profile_by_id,
query_for_ids,
Message,
Realm,
Recipient,
Stream,
SubMessage,
Subscription,
UserProfile,
UserMessage,
Reaction,
get_usermessage_by_message_id,
)
from typing import Any, Dict, List, Optional, Set, Tuple, Union, Sequence
from mypy_extensions import TypedDict
RealmAlertWords = Dict[int, List[str]]
RawUnreadMessagesResult = TypedDict('RawUnreadMessagesResult', {
'pm_dict': Dict[int, Any],
'stream_dict': Dict[int, Any],
'huddle_dict': Dict[int, Any],
'mentions': Set[int],
'muted_stream_ids': List[int],
'unmuted_stream_msgs': Set[int],
})
UnreadMessagesResult = TypedDict('UnreadMessagesResult', {
'pms': List[Dict[str, Any]],
'streams': List[Dict[str, Any]],
'huddles': List[Dict[str, Any]],
'mentions': List[int],
'count': int,
})
# We won't try to fetch more unread message IDs from the database than
# this limit. The limit is super high, in large part because it means
# client-side code mostly doesn't need to think about the case that a
# user has older unread messages that were cut off.
MAX_UNREAD_MESSAGES = 50000
def messages_for_ids(message_ids: List[int],
user_message_flags: Dict[int, List[str]],
search_fields: Dict[int, Dict[str, str]],
apply_markdown: bool,
client_gravatar: bool,
allow_edit_history: bool) -> List[Dict[str, Any]]:
cache_transformer = MessageDict.build_dict_from_raw_db_row
id_fetcher = lambda row: row['id']
message_dicts = generic_bulk_cached_fetch(to_dict_cache_key_id,
MessageDict.get_raw_db_rows,
message_ids,
id_fetcher=id_fetcher,
cache_transformer=cache_transformer,
extractor=extract_message_dict,
setter=stringify_message_dict)
message_list = [] # type: List[Dict[str, Any]]
for message_id in message_ids:
msg_dict = message_dicts[message_id]
msg_dict.update({"flags": user_message_flags[message_id]})
if message_id in search_fields:
msg_dict.update(search_fields[message_id])
# Make sure that we never send message edit history to clients
# in realms with allow_edit_history disabled.
if "edit_history" in msg_dict and not allow_edit_history:
del msg_dict["edit_history"]
message_list.append(msg_dict)
MessageDict.post_process_dicts(message_list, apply_markdown, client_gravatar)
return message_list
def sew_messages_and_reactions(messages: List[Dict[str, Any]],
reactions: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
"""Given a iterable of messages and reactions stitch reactions
into messages.
"""
# Add all messages with empty reaction item
for message in messages:
message['reactions'] = []
# Convert list of messages into dictionary to make reaction stitching easy
converted_messages = {message['id']: message for message in messages}
for reaction in reactions:
converted_messages[reaction['message_id']]['reactions'].append(
reaction)
return list(converted_messages.values())
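# A minimal sketch of the stitching above, using synthetic data that is not
# from the original source:
#
#   messages  = [{'id': 1}, {'id': 2}]
#   reactions = [{'message_id': 2, 'emoji_name': 'smile'}]
#   sew_messages_and_reactions(messages, reactions)
#   # -> [{'id': 1, 'reactions': []},
#   #     {'id': 2, 'reactions': [{'message_id': 2, 'emoji_name': 'smile'}]}]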
def sew_messages_and_submessages(messages: List[Dict[str, Any]],
submessages: List[Dict[str, Any]]) -> None:
# This is super similar to sew_messages_and_reactions.
for message in messages:
message['submessages'] = []
message_dict = {message['id']: message for message in messages}
for submessage in submessages:
message_id = submessage['message_id']
if message_id in message_dict:
message = message_dict[message_id]
message['submessages'].append(submessage)
def extract_message_dict(message_bytes: bytes) -> Dict[str, Any]:
return ujson.loads(zlib.decompress(message_bytes).decode("utf-8"))
def stringify_message_dict(message_dict: Dict[str, Any]) -> bytes:
return zlib.compress(ujson.dumps(message_dict).encode())
@cache_with_key(to_dict_cache_key, timeout=3600*24)
def message_to_dict_json(message: Message) -> bytes:
return MessageDict.to_dict_uncached(message)
def save_message_rendered_content(message: Message, content: str) -> str:
rendered_content = render_markdown(message, content, realm=message.get_realm())
message.rendered_content = rendered_content
message.rendered_content_version = bugdown.version
message.save_rendered_content()
return rendered_content
class MessageDict:
@staticmethod
def wide_dict(message: Message) -> Dict[str, Any]:
'''
        The next two lines get the cacheable field related
to our message object, with the side effect of
populating the cache.
'''
json = message_to_dict_json(message)
obj = extract_message_dict(json)
'''
The steps below are similar to what we do in
post_process_dicts(), except we don't call finalize_payload(),
since that step happens later in the queue
processor.
'''
MessageDict.bulk_hydrate_sender_info([obj])
MessageDict.hydrate_recipient_info(obj)
return obj
@staticmethod
def post_process_dicts(objs: List[Dict[str, Any]], apply_markdown: bool, client_gravatar: bool) -> None:
MessageDict.bulk_hydrate_sender_info(objs)
for obj in objs:
MessageDict.hydrate_recipient_info(obj)
MessageDict.finalize_payload(obj, apply_markdown, client_gravatar)
@staticmethod
def finalize_payload(obj: Dict[str, Any],
apply_markdown: bool,
client_gravatar: bool) -> None:
MessageDict.set_sender_avatar(obj, client_gravatar)
if apply_markdown:
obj['content_type'] = 'text/html'
obj['content'] = obj['rendered_content']
else:
obj['content_type'] = 'text/x-markdown'
del obj['rendered_content']
del obj['sender_realm_id']
del obj['sender_avatar_source']
del obj['sender_avatar_version']
del obj['raw_display_recipient']
del obj['recipient_type']
del obj['recipient_type_id']
del obj['sender_is_mirror_dummy']
@staticmethod
def to_dict_uncached(message: Message) -> bytes:
dct = MessageDict.to_dict_uncached_helper(message)
return stringify_message_dict(dct)
@staticmethod
def to_dict_uncached_helper(message: Message) -> Dict[str, Any]:
return MessageDict.build_message_dict(
message = message,
message_id = message.id,
last_edit_time = message.last_edit_time,
edit_history = message.edit_history,
content = message.content,
topic_name = message.topic_name(),
pub_date = message.pub_date,
rendered_content = message.rendered_content,
rendered_content_version = message.rendered_content_version,
sender_id = message.sender.id,
sender_realm_id = message.sender.realm_id,
sending_client_name = message.sending_client.name,
recipient_id = message.recipient.id,
recipient_type = message.recipient.type,
recipient_type_id = message.recipient.type_id,
reactions = Reaction.get_raw_db_rows([message.id]),
submessages = SubMessage.get_raw_db_rows([message.id]),
)
@staticmethod
def get_raw_db_rows(needed_ids: List[int]) -> List[Dict[str, Any]]:
# This is a special purpose function optimized for
# callers like get_messages_backend().
fields = [
'id',
DB_TOPIC_NAME,
'pub_date',
'last_edit_time',
'edit_history',
'content',
'rendered_content',
'rendered_content_version',
'recipient_id',
'recipient__type',
'recipient__type_id',
'sender_id',
'sending_client__name',
'sender__realm_id',
]
messages = Message.objects.filter(id__in=needed_ids).values(*fields)
submessages = SubMessage.get_raw_db_rows(needed_ids)
sew_messages_and_submessages(messages, submessages)
reactions = Reaction.get_raw_db_rows(needed_ids)
return sew_messages_and_reactions(messages, reactions)
@staticmethod
def build_dict_from_raw_db_row(row: Dict[str, Any]) -> Dict[str, Any]:
'''
row is a row from a .values() call, and it needs to have
all the relevant fields populated
'''
return MessageDict.build_message_dict(
message = None,
message_id = row['id'],
last_edit_time = row['last_edit_time'],
edit_history = row['edit_history'],
content = row['content'],
topic_name = row[DB_TOPIC_NAME],
pub_date = row['pub_date'],
rendered_content = row['rendered_content'],
rendered_content_version = row['rendered_content_version'],
sender_id = row['sender_id'],
sender_realm_id = row['sender__realm_id'],
sending_client_name = row['sending_client__name'],
recipient_id = row['recipient_id'],
recipient_type = row['recipient__type'],
recipient_type_id = row['recipient__type_id'],
reactions=row['reactions'],
submessages=row['submessages'],
)
@staticmethod
def build_message_dict(
message: Optional[Message],
message_id: int,
last_edit_time: Optional[datetime.datetime],
edit_history: Optional[str],
content: str,
topic_name: str,
pub_date: datetime.datetime,
rendered_content: Optional[str],
rendered_content_version: Optional[int],
sender_id: int,
sender_realm_id: int,
sending_client_name: str,
recipient_id: int,
recipient_type: int,
recipient_type_id: int,
reactions: List[Dict[str, Any]],
submessages: List[Dict[str, Any]]
) -> Dict[str, Any]:
obj = dict(
id = message_id,
sender_id = sender_id,
content = content,
recipient_type_id = recipient_type_id,
recipient_type = recipient_type,
recipient_id = recipient_id,
timestamp = datetime_to_timestamp(pub_date),
client = sending_client_name)
obj[TOPIC_NAME] = topic_name
obj['sender_realm_id'] = sender_realm_id
obj['raw_display_recipient'] = get_display_recipient_by_id(
recipient_id,
recipient_type,
recipient_type_id
)
obj[TOPIC_LINKS] = bugdown.topic_links(sender_realm_id, topic_name)
if last_edit_time is not None:
obj['last_edit_timestamp'] = datetime_to_timestamp(last_edit_time)
assert edit_history is not None
obj['edit_history'] = ujson.loads(edit_history)
if Message.need_to_render_content(rendered_content, rendered_content_version, bugdown.version):
if message is None:
# We really shouldn't be rendering objects in this method, but there is
# a scenario where we upgrade the version of bugdown and fail to run
# management commands to re-render historical messages, and then we
# need to have side effects. This method is optimized to not need full
# blown ORM objects, but the bugdown renderer is unfortunately highly
# coupled to Message, and we also need to persist the new rendered content.
# If we don't have a message object passed in, we get one here. The cost
# of going to the DB here should be overshadowed by the cost of rendering
# and updating the row.
# TODO: see #1379 to eliminate bugdown dependencies
message = Message.objects.select_related().get(id=message_id)
assert message is not None # Hint for mypy.
# It's unfortunate that we need to have side effects on the message
# in some cases.
rendered_content = save_message_rendered_content(message, content)
if rendered_content is not None:
obj['rendered_content'] = rendered_content
else:
obj['rendered_content'] = ('<p>[Yakkl note: Sorry, we could not ' +
'understand the formatting of your message]</p>')
if rendered_content is not None:
obj['is_me_message'] = Message.is_status_message(content, rendered_content)
else:
obj['is_me_message'] = False
obj['reactions'] = [ReactionDict.build_dict_from_raw_db_row(reaction)
for reaction in reactions]
obj['submessages'] = submessages
return obj
@staticmethod
def bulk_hydrate_sender_info(objs: List[Dict[str, Any]]) -> None:
sender_ids = list({
obj['sender_id']
for obj in objs
})
if not sender_ids:
return
query = UserProfile.objects.values(
'id',
'full_name',
'short_name',
'email',
'realm__string_id',
'avatar_source',
'avatar_version',
'is_mirror_dummy',
)
rows = query_for_ids(query, sender_ids, 'zerver_userprofile.id')
sender_dict = {
row['id']: row
for row in rows
}
for obj in objs:
sender_id = obj['sender_id']
user_row = sender_dict[sender_id]
obj['sender_full_name'] = user_row['full_name']
obj['sender_short_name'] = user_row['short_name']
obj['sender_email'] = user_row['email']
obj['sender_realm_str'] = user_row['realm__string_id']
obj['sender_avatar_source'] = user_row['avatar_source']
obj['sender_avatar_version'] = user_row['avatar_version']
obj['sender_is_mirror_dummy'] = user_row['is_mirror_dummy']
@staticmethod
def hydrate_recipient_info(obj: Dict[str, Any]) -> None:
'''
        This method hydrates recipient info with things
        like full names and emails of senders. Eventually
        our clients should be able to hydrate these fields
themselves with info they already have on users.
'''
display_recipient = obj['raw_display_recipient']
recipient_type = obj['recipient_type']
recipient_type_id = obj['recipient_type_id']
sender_is_mirror_dummy = obj['sender_is_mirror_dummy']
sender_email = obj['sender_email']
sender_full_name = obj['sender_full_name']
sender_short_name = obj['sender_short_name']
sender_id = obj['sender_id']
if recipient_type == Recipient.STREAM:
display_type = "stream"
elif recipient_type in (Recipient.HUDDLE, Recipient.PERSONAL):
assert not isinstance(display_recipient, str)
display_type = "private"
if len(display_recipient) == 1:
# add the sender in if this isn't a message between
# someone and themself, preserving ordering
recip = {'email': sender_email,
'full_name': sender_full_name,
'short_name': sender_short_name,
'id': sender_id,
'is_mirror_dummy': sender_is_mirror_dummy}
if recip['email'] < display_recipient[0]['email']:
display_recipient = [recip, display_recipient[0]]
elif recip['email'] > display_recipient[0]['email']:
display_recipient = [display_recipient[0], recip]
else:
raise AssertionError("Invalid recipient type %s" % (recipient_type,))
obj['display_recipient'] = display_recipient
obj['type'] = display_type
if obj['type'] == 'stream':
obj['stream_id'] = recipient_type_id
@staticmethod
def set_sender_avatar(obj: Dict[str, Any], client_gravatar: bool) -> None:
sender_id = obj['sender_id']
sender_realm_id = obj['sender_realm_id']
sender_email = obj['sender_email']
sender_avatar_source = obj['sender_avatar_source']
sender_avatar_version = obj['sender_avatar_version']
obj['avatar_url'] = get_avatar_field(
user_id=sender_id,
realm_id=sender_realm_id,
email=sender_email,
avatar_source=sender_avatar_source,
avatar_version=sender_avatar_version,
medium=False,
client_gravatar=client_gravatar,
)
class ReactionDict:
@staticmethod
def build_dict_from_raw_db_row(row: Dict[str, Any]) -> Dict[str, Any]:
return {'emoji_name': row['emoji_name'],
'emoji_code': row['emoji_code'],
'reaction_type': row['reaction_type'],
'user': {'email': row['user_profile__email'],
'id': row['user_profile__id'],
'full_name': row['user_profile__full_name']}}
def access_message(user_profile: UserProfile, message_id: int) -> Tuple[Message, Optional[UserMessage]]:
"""You can access a message by ID in our APIs that either:
(1) You received or have previously accessed via starring
(aka have a UserMessage row for).
(2) Was sent to a public stream in your realm.
We produce consistent, boring error messages to avoid leaking any
information from a security perspective.
"""
try:
message = Message.objects.select_related().get(id=message_id)
except Message.DoesNotExist:
raise JsonableError(_("Invalid message(s)"))
user_message = get_usermessage_by_message_id(user_profile, message_id)
if has_message_access(user_profile, message, user_message):
return (message, user_message)
raise JsonableError(_("Invalid message(s)"))
def has_message_access(user_profile: UserProfile, message: Message,
user_message: Optional[UserMessage]) -> bool:
if user_message is None:
if message.recipient.type != Recipient.STREAM:
# You can't access private messages you didn't receive
return False
stream = Stream.objects.get(id=message.recipient.type_id)
if stream.realm != user_profile.realm:
# You can't access public stream messages in other realms
return False
if not stream.is_history_public_to_subscribers():
# You can't access messages you didn't directly receive
# unless history is public to subscribers.
return False
if not stream.is_public():
# This stream is an invite-only stream where message
# history is available to subscribers. So we check if
# you're subscribed.
if not Subscription.objects.filter(user_profile=user_profile, active=True,
recipient=message.recipient).exists():
return False
# You are subscribed, so let this fall through to the public stream case.
elif user_profile.is_guest:
# Guest users don't get automatic access to public stream messages
if not Subscription.objects.filter(user_profile=user_profile, active=True,
recipient=message.recipient).exists():
return False
else:
# Otherwise, the message was sent to a public stream in
# your realm, so return the message, user_message pair
pass
return True
def bulk_access_messages(user_profile: UserProfile, messages: Sequence[Message]) -> List[Message]:
filtered_messages = []
for message in messages:
user_message = get_usermessage_by_message_id(user_profile, message.id)
if has_message_access(user_profile, message, user_message):
filtered_messages.append(message)
return filtered_messages
def bulk_access_messages_expect_usermessage(
user_profile_id: int, message_ids: Sequence[int]) -> List[int]:
'''
Like bulk_access_messages, but faster and potentially stricter.
Returns a subset of `message_ids` containing only messages the
user can access. Makes O(1) database queries.
Use this function only when the user is expected to have a
UserMessage row for every message in `message_ids`. If a
UserMessage row is missing, the message will be omitted even if
the user has access (e.g. because it went to a public stream.)
See also: `access_message`, `bulk_access_messages`.
'''
return UserMessage.objects.filter(
user_profile_id=user_profile_id,
message_id__in=message_ids,
).values_list('message_id', flat=True)
def render_markdown(message: Message,
content: str,
realm: Optional[Realm]=None,
realm_alert_words_automaton: Optional[ahocorasick.Automaton]=None,
user_ids: Optional[Set[int]]=None,
mention_data: Optional[bugdown.MentionData]=None,
email_gateway: Optional[bool]=False) -> str:
'''
This is basically just a wrapper for do_render_markdown.
'''
if user_ids is None:
message_user_ids = set() # type: Set[int]
else:
message_user_ids = user_ids
if realm is None:
realm = message.get_realm()
sender = get_user_profile_by_id(message.sender_id)
sent_by_bot = sender.is_bot
translate_emoticons = sender.translate_emoticons
rendered_content = do_render_markdown(
message=message,
content=content,
realm=realm,
realm_alert_words_automaton=realm_alert_words_automaton,
message_user_ids=message_user_ids,
sent_by_bot=sent_by_bot,
translate_emoticons=translate_emoticons,
mention_data=mention_data,
email_gateway=email_gateway,
)
return rendered_content
def do_render_markdown(message: Message,
content: str,
realm: Realm,
message_user_ids: Set[int],
sent_by_bot: bool,
translate_emoticons: bool,
realm_alert_words_automaton: Optional[ahocorasick.Automaton]=None,
mention_data: Optional[bugdown.MentionData]=None,
email_gateway: Optional[bool]=False) -> str:
"""Return HTML for given markdown. Bugdown may add properties to the
message object such as `mentions_user_ids`, `mentions_user_group_ids`, and
`mentions_wildcard`. These are only on this Django object and are not
saved in the database.
"""
message.mentions_wildcard = False
message.mentions_user_ids = set()
message.mentions_user_group_ids = set()
message.alert_words = set()
message.links_for_preview = set()
message.user_ids_with_alert_words = set()
# DO MAIN WORK HERE -- call bugdown to convert
rendered_content = bugdown.convert(
content,
realm_alert_words_automaton=realm_alert_words_automaton,
message=message,
message_realm=realm,
sent_by_bot=sent_by_bot,
translate_emoticons=translate_emoticons,
mention_data=mention_data,
email_gateway=email_gateway
)
return rendered_content
def huddle_users(recipient_id: int) -> str:
display_recipient = get_display_recipient_by_id(recipient_id,
Recipient.HUDDLE,
None) # type: Union[str, List[Dict[str, Any]]]
# str is for streams.
assert not isinstance(display_recipient, str)
user_ids = [obj['id'] for obj in display_recipient] # type: List[int]
user_ids = sorted(user_ids)
return ','.join(str(uid) for uid in user_ids)
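# A minimal sketch, assuming a huddle whose display_recipient carries the
# ids [3, 1, 2]: huddle_users() returns the sorted, comma-joined '1,2,3'.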
def aggregate_message_dict(input_dict: Dict[int, Dict[str, Any]],
lookup_fields: List[str],
collect_senders: bool) -> List[Dict[str, Any]]:
lookup_dict = dict() # type: Dict[Tuple[Any, ...], Dict[str, Any]]
'''
A concrete example might help explain the inputs here:
input_dict = {
1002: dict(stream_id=5, topic='foo', sender_id=40),
1003: dict(stream_id=5, topic='foo', sender_id=41),
1004: dict(stream_id=6, topic='baz', sender_id=99),
}
lookup_fields = ['stream_id', 'topic']
The first time through the loop:
attribute_dict = dict(stream_id=5, topic='foo', sender_id=40)
        lookup_key = (5, 'foo')
lookup_dict = {
(5, 'foo'): dict(stream_id=5, topic='foo',
unread_message_ids=[1002, 1003],
sender_ids=[40, 41],
),
...
}
result = [
dict(stream_id=5, topic='foo',
unread_message_ids=[1002, 1003],
sender_ids=[40, 41],
),
...
]
'''
for message_id, attribute_dict in input_dict.items():
lookup_key = tuple([attribute_dict[f] for f in lookup_fields])
if lookup_key not in lookup_dict:
obj = {}
for f in lookup_fields:
obj[f] = attribute_dict[f]
obj['unread_message_ids'] = []
if collect_senders:
obj['sender_ids'] = set()
lookup_dict[lookup_key] = obj
bucket = lookup_dict[lookup_key]
bucket['unread_message_ids'].append(message_id)
if collect_senders:
bucket['sender_ids'].add(attribute_dict['sender_id'])
for dct in lookup_dict.values():
dct['unread_message_ids'].sort()
if collect_senders:
dct['sender_ids'] = sorted(list(dct['sender_ids']))
sorted_keys = sorted(lookup_dict.keys())
return [lookup_dict[k] for k in sorted_keys]
def get_inactive_recipient_ids(user_profile: UserProfile) -> List[int]:
rows = get_stream_subscriptions_for_user(user_profile).filter(
active=False,
).values(
'recipient_id'
)
inactive_recipient_ids = [
row['recipient_id']
for row in rows]
return inactive_recipient_ids
def get_muted_stream_ids(user_profile: UserProfile) -> List[int]:
rows = get_stream_subscriptions_for_user(user_profile).filter(
active=True,
is_muted=True,
).values(
'recipient__type_id'
)
muted_stream_ids = [
row['recipient__type_id']
for row in rows]
return muted_stream_ids
def get_starred_message_ids(user_profile: UserProfile) -> List[int]:
return list(UserMessage.objects.filter(
user_profile=user_profile,
).extra(
where=[UserMessage.where_starred()]
).order_by(
'message_id'
).values_list('message_id', flat=True)[0:10000])
def get_raw_unread_data(user_profile: UserProfile) -> RawUnreadMessagesResult:
excluded_recipient_ids = get_inactive_recipient_ids(user_profile)
user_msgs = UserMessage.objects.filter(
user_profile=user_profile
).exclude(
message__recipient_id__in=excluded_recipient_ids
).extra(
where=[UserMessage.where_unread()]
).values(
'message_id',
'message__sender_id',
MESSAGE__TOPIC,
'message__recipient_id',
'message__recipient__type',
'message__recipient__type_id',
'flags',
).order_by("-message_id")
# Limit unread messages for performance reasons.
user_msgs = list(user_msgs[:MAX_UNREAD_MESSAGES])
rows = list(reversed(user_msgs))
muted_stream_ids = get_muted_stream_ids(user_profile)
topic_mute_checker = build_topic_mute_checker(user_profile)
def is_row_muted(stream_id: int, recipient_id: int, topic: str) -> bool:
if stream_id in muted_stream_ids:
return True
if topic_mute_checker(recipient_id, topic):
return True
return False
huddle_cache = {} # type: Dict[int, str]
def get_huddle_users(recipient_id: int) -> str:
if recipient_id in huddle_cache:
return huddle_cache[recipient_id]
user_ids_string = huddle_users(recipient_id)
huddle_cache[recipient_id] = user_ids_string
return user_ids_string
pm_dict = {}
stream_dict = {}
unmuted_stream_msgs = set()
huddle_dict = {}
mentions = set()
for row in rows:
message_id = row['message_id']
msg_type = row['message__recipient__type']
recipient_id = row['message__recipient_id']
sender_id = row['message__sender_id']
if msg_type == Recipient.STREAM:
stream_id = row['message__recipient__type_id']
topic = row[MESSAGE__TOPIC]
stream_dict[message_id] = dict(
stream_id=stream_id,
topic=topic,
sender_id=sender_id,
)
if not is_row_muted(stream_id, recipient_id, topic):
unmuted_stream_msgs.add(message_id)
elif msg_type == Recipient.PERSONAL:
pm_dict[message_id] = dict(
sender_id=sender_id,
)
elif msg_type == Recipient.HUDDLE:
user_ids_string = get_huddle_users(recipient_id)
huddle_dict[message_id] = dict(
user_ids_string=user_ids_string,
)
is_mentioned = (row['flags'] & UserMessage.flags.mentioned) != 0
if is_mentioned:
mentions.add(message_id)
return dict(
pm_dict=pm_dict,
stream_dict=stream_dict,
muted_stream_ids=muted_stream_ids,
unmuted_stream_msgs=unmuted_stream_msgs,
huddle_dict=huddle_dict,
mentions=mentions,
)
def aggregate_unread_data(raw_data: RawUnreadMessagesResult) -> UnreadMessagesResult:
pm_dict = raw_data['pm_dict']
stream_dict = raw_data['stream_dict']
unmuted_stream_msgs = raw_data['unmuted_stream_msgs']
huddle_dict = raw_data['huddle_dict']
mentions = list(raw_data['mentions'])
count = len(pm_dict) + len(unmuted_stream_msgs) + len(huddle_dict)
pm_objects = aggregate_message_dict(
input_dict=pm_dict,
lookup_fields=[
'sender_id',
],
collect_senders=False,
)
stream_objects = aggregate_message_dict(
input_dict=stream_dict,
lookup_fields=[
'stream_id',
'topic',
],
collect_senders=True,
)
huddle_objects = aggregate_message_dict(
input_dict=huddle_dict,
lookup_fields=[
'user_ids_string',
],
collect_senders=False,
)
result = dict(
pms=pm_objects,
streams=stream_objects,
huddles=huddle_objects,
mentions=mentions,
count=count) # type: UnreadMessagesResult
return result
def apply_unread_message_event(user_profile: UserProfile,
state: RawUnreadMessagesResult,
message: Dict[str, Any],
flags: List[str]) -> None:
message_id = message['id']
if message['type'] == 'stream':
message_type = 'stream'
elif message['type'] == 'private':
others = [
recip for recip in message['display_recipient']
if recip['id'] != message['sender_id']
]
if len(others) <= 1:
message_type = 'private'
else:
message_type = 'huddle'
else:
raise AssertionError("Invalid message type %s" % (message['type'],))
sender_id = message['sender_id']
if message_type == 'stream':
stream_id = message['stream_id']
topic = message[TOPIC_NAME]
new_row = dict(
stream_id=stream_id,
topic=topic,
sender_id=sender_id,
)
state['stream_dict'][message_id] = new_row
if stream_id not in state['muted_stream_ids']:
# This next check hits the database.
if not topic_is_muted(user_profile, stream_id, topic):
state['unmuted_stream_msgs'].add(message_id)
elif message_type == 'private':
sender_id = message['sender_id']
new_row = dict(
sender_id=sender_id,
)
state['pm_dict'][message_id] = new_row
else:
display_recipient = message['display_recipient']
user_ids = [obj['id'] for obj in display_recipient]
user_ids = sorted(user_ids)
user_ids_string = ','.join(str(uid) for uid in user_ids)
new_row = dict(
user_ids_string=user_ids_string,
)
state['huddle_dict'][message_id] = new_row
if 'mentioned' in flags:
state['mentions'].add(message_id)
def remove_message_id_from_unread_mgs(state: RawUnreadMessagesResult,
message_id: int) -> None:
# The opposite of apply_unread_message_event; removes a read or
# deleted message from a raw_unread_msgs data structure.
state['pm_dict'].pop(message_id, None)
state['stream_dict'].pop(message_id, None)
state['huddle_dict'].pop(message_id, None)
state['unmuted_stream_msgs'].discard(message_id)
state['mentions'].discard(message_id)
def estimate_recent_messages(realm: Realm, hours: int) -> int:
stat = COUNT_STATS['messages_sent:is_bot:hour']
d = timezone_now() - datetime.timedelta(hours=hours)
return RealmCount.objects.filter(property=stat.property, end_time__gt=d,
realm=realm).aggregate(Sum('value'))['value__sum'] or 0
def get_first_visible_message_id(realm: Realm) -> int:
return realm.first_visible_message_id
def maybe_update_first_visible_message_id(realm: Realm, lookback_hours: int) -> None:
recent_messages_count = estimate_recent_messages(realm, lookback_hours)
if realm.message_visibility_limit is not None and recent_messages_count > 0:
update_first_visible_message_id(realm)
def update_first_visible_message_id(realm: Realm) -> None:
if realm.message_visibility_limit is None:
realm.first_visible_message_id = 0
else:
try:
first_visible_message_id = Message.objects.filter(sender__realm=realm).values('id').\
order_by('-id')[realm.message_visibility_limit - 1]["id"]
except IndexError:
first_visible_message_id = 0
realm.first_visible_message_id = first_visible_message_id
realm.save(update_fields=["first_visible_message_id"])
def get_recent_conversations_recipient_id(user_profile: UserProfile,
recipient_id: int,
sender_id: int) -> int:
"""Helper for doing lookups of the recipient_id that
get_recent_private_conversations would have used to record that
message in its data structure.
"""
my_recipient_id = Recipient.objects.get(type=Recipient.PERSONAL,
type_id=user_profile.id).id
if recipient_id == my_recipient_id:
return Recipient.objects.get(type=Recipient.PERSONAL,
type_id=sender_id).id
return recipient_id
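# A minimal sketch (names assumed, not from the original source): if Bob
# sent the calling user a 1:1 message, it is stored against the caller's
# own personal recipient_id, and the helper above swaps in Bob's personal
# Recipient id so both directions of the conversation aggregate under the
# same key.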
def get_recent_private_conversations(user_profile: UserProfile) -> Dict[int, Dict[str, Any]]:
"""This function uses some carefully optimized SQL queries, designed
to use the UserMessage index on private_messages. It is
significantly complicated by the fact that for 1:1 private
messages, we store the message against a recipient_id of whichever
user was the recipient, and thus for 1:1 private messages sent
directly to us, we need to look up the other user from the
sender_id on those messages. You'll see that pattern repeated
both here and also in zerver/lib/events.py.
Ideally, we would write these queries using Django, but even
without the UNION ALL, that seems to not be possible, because the
equivalent Django syntax (for the first part of this query):
message_data = UserMessage.objects.select_related("message__recipient_id").filter(
user_profile=user_profile,
).extra(
where=[UserMessage.where_private()]
).order_by("-message_id")[:1000].values(
"message__recipient_id").annotate(last_message_id=Max("message_id"))
does not properly nest the GROUP BY (from .annotate) with the slicing.
We return a dictionary structure for convenient modification
below; this structure is converted into its final form by
post_process.
"""
RECENT_CONVERSATIONS_LIMIT = 1000
recipient_map = {}
my_recipient_id = Recipient.objects.get(type=Recipient.PERSONAL,
type_id=user_profile.id).id
query = '''
SELECT
subquery.recipient_id, MAX(subquery.message_id)
FROM (
(SELECT
um.message_id AS message_id,
m.recipient_id AS recipient_id
FROM
zerver_usermessage um
JOIN
zerver_message m
ON
um.message_id = m.id
WHERE
um.user_profile_id=%(user_profile_id)d AND
um.flags & 2048 <> 0 AND
m.recipient_id <> %(my_recipient_id)d
ORDER BY message_id DESC
LIMIT %(conversation_limit)d)
UNION ALL
(SELECT
um.message_id AS message_id,
r.id AS recipient_id
FROM
zerver_usermessage um
JOIN
zerver_message m
ON
um.message_id = m.id
JOIN
zerver_recipient r
ON
r.type = 1 AND
r.type_id = m.sender_id
WHERE
um.user_profile_id=%(user_profile_id)d AND
um.flags & 2048 <> 0 AND
m.recipient_id=%(my_recipient_id)d
ORDER BY message_id DESC
LIMIT %(conversation_limit)d)
) AS subquery
GROUP BY subquery.recipient_id
''' % dict(
user_profile_id=user_profile.id,
conversation_limit=RECENT_CONVERSATIONS_LIMIT,
my_recipient_id=my_recipient_id,
)
cursor = connection.cursor()
cursor.execute(query)
rows = cursor.fetchall()
cursor.close()
# The resulting rows will be (recipient_id, max_message_id)
# objects for all parties we've had recent (group?) private
# message conversations with, including PMs with yourself (those
# will generate an empty list of user_ids).
for recipient_id, max_message_id in rows:
recipient_map[recipient_id] = dict(
max_message_id=max_message_id,
user_ids=list(),
)
# Now we need to map all the recipient_id objects to lists of user IDs
for (recipient_id, user_profile_id) in Subscription.objects.filter(
recipient_id__in=recipient_map.keys()).exclude(
user_profile_id=user_profile.id).values_list(
"recipient_id", "user_profile_id"):
recipient_map[recipient_id]['user_ids'].append(user_profile_id)
return recipient_map
| 36.793321
| 108
| 0.629431
|
e580665cfc569f132558234d1ff61ee8811ca446
| 22,220
|
py
|
Python
|
homeassistant/components/fritz/common.py
|
andersonshatch/home-assistant
|
10e2caf9e698759c48f4e859d3ed7d5c335a18b8
|
[
"Apache-2.0"
] | null | null | null |
homeassistant/components/fritz/common.py
|
andersonshatch/home-assistant
|
10e2caf9e698759c48f4e859d3ed7d5c335a18b8
|
[
"Apache-2.0"
] | 19
|
2021-11-24T06:23:55.000Z
|
2022-03-31T06:22:45.000Z
|
homeassistant/components/fritz/common.py
|
andersonshatch/home-assistant
|
10e2caf9e698759c48f4e859d3ed7d5c335a18b8
|
[
"Apache-2.0"
] | null | null | null |
"""Support for AVM FRITZ!Box classes."""
from __future__ import annotations
from collections.abc import Callable, ValuesView
from dataclasses import dataclass, field
from datetime import datetime, timedelta
import logging
from types import MappingProxyType
from typing import Any, TypedDict, cast
from fritzconnection import FritzConnection
from fritzconnection.core.exceptions import (
FritzActionError,
FritzConnectionException,
FritzSecurityError,
FritzServiceError,
)
from fritzconnection.lib.fritzhosts import FritzHosts
from fritzconnection.lib.fritzstatus import FritzStatus
from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER_DOMAIN
from homeassistant.components.device_tracker.const import (
CONF_CONSIDER_HOME,
DEFAULT_CONSIDER_HOME,
)
from homeassistant.components.switch import DOMAIN as DEVICE_SWITCH_DOMAIN
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import update_coordinator
from homeassistant.helpers.device_registry import (
CONNECTION_NETWORK_MAC,
async_entries_for_config_entry,
async_get,
format_mac,
)
from homeassistant.helpers.dispatcher import dispatcher_send
from homeassistant.helpers.entity import DeviceInfo
from homeassistant.helpers.entity_registry import (
EntityRegistry,
RegistryEntry,
async_entries_for_device,
)
from homeassistant.util import dt as dt_util
from .const import (
DEFAULT_DEVICE_NAME,
DEFAULT_HOST,
DEFAULT_PORT,
DEFAULT_USERNAME,
DOMAIN,
SERVICE_CLEANUP,
SERVICE_REBOOT,
SERVICE_RECONNECT,
MeshRoles,
)
_LOGGER = logging.getLogger(__name__)
def _is_tracked(mac: str, current_devices: ValuesView) -> bool:
"""Check if device is already tracked."""
for tracked in current_devices:
if mac in tracked:
return True
return False
def device_filter_out_from_trackers(
mac: str,
device: FritzDevice,
current_devices: ValuesView,
) -> bool:
"""Check if device should be filtered out from trackers."""
reason: str | None = None
if device.ip_address == "":
reason = "Missing IP"
elif _is_tracked(mac, current_devices):
reason = "Already tracked"
if reason:
_LOGGER.debug(
"Skip adding device %s [%s], reason: %s", device.hostname, mac, reason
)
return bool(reason)
def _cleanup_entity_filter(device: RegistryEntry) -> bool:
"""Filter only relevant entities."""
return device.domain == DEVICE_TRACKER_DOMAIN or (
device.domain == DEVICE_SWITCH_DOMAIN and "_internet_access" in device.entity_id
)
class ClassSetupMissing(Exception):
"""Raised when a Class func is called before setup."""
def __init__(self) -> None:
"""Init custom exception."""
super().__init__("Function called before Class setup")
@dataclass
class Device:
"""FRITZ!Box device class."""
connected: bool
connected_to: str
connection_type: str
ip_address: str
name: str
ssid: str | None
wan_access: bool = True
class Interface(TypedDict):
"""Interface details."""
device: str
mac: str
op_mode: str
ssid: str | None
type: str
class HostInfo(TypedDict):
"""FRITZ!Box host info class."""
mac: str
name: str
ip: str
status: bool
class FritzBoxTools(update_coordinator.DataUpdateCoordinator):
"""FrtizBoxTools class."""
def __init__(
self,
hass: HomeAssistant,
password: str,
username: str = DEFAULT_USERNAME,
host: str = DEFAULT_HOST,
port: int = DEFAULT_PORT,
) -> None:
"""Initialize FritzboxTools class."""
super().__init__(
hass=hass,
logger=_LOGGER,
name=f"{DOMAIN}-{host}-coordinator",
update_interval=timedelta(seconds=30),
)
self._devices: dict[str, FritzDevice] = {}
self._options: MappingProxyType[str, Any] | None = None
self._unique_id: str | None = None
self.connection: FritzConnection = None
self.fritz_hosts: FritzHosts = None
self.fritz_status: FritzStatus = None
self.hass = hass
self.host = host
self.mesh_role = MeshRoles.NONE
self.device_is_router: bool = True
self.password = password
self.port = port
self.username = username
self._model: str | None = None
self._current_firmware: str | None = None
self._latest_firmware: str | None = None
self._update_available: bool = False
async def async_setup(
self, options: MappingProxyType[str, Any] | None = None
) -> None:
"""Wrap up FritzboxTools class setup."""
self._options = options
await self.hass.async_add_executor_job(self.setup)
def setup(self) -> None:
"""Set up FritzboxTools class."""
self.connection = FritzConnection(
address=self.host,
port=self.port,
user=self.username,
password=self.password,
timeout=60.0,
pool_maxsize=30,
)
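        # Defensive check: FritzConnection generally raises FritzConnectionException
        # on failure rather than returning None.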
if not self.connection:
_LOGGER.error("Unable to establish a connection with %s", self.host)
return
self.fritz_status = FritzStatus(fc=self.connection)
info = self.connection.call_action("DeviceInfo:1", "GetInfo")
if not self._unique_id:
self._unique_id = info["NewSerialNumber"]
self._model = info.get("NewModelName")
self._current_firmware = info.get("NewSoftwareVersion")
self._update_available, self._latest_firmware = self._update_device_info()
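        # Treat the presence of the WANIPConn1 service as the signal that this box
        # acts as a router (mesh repeaters typically do not expose it).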
self.device_is_router = "WANIPConn1" in self.connection.services
@callback
async def _async_update_data(self) -> None:
"""Update FritzboxTools data."""
try:
self.fritz_hosts = FritzHosts(fc=self.connection)
await self.async_scan_devices()
except (FritzSecurityError, FritzConnectionException) as ex:
raise update_coordinator.UpdateFailed from ex
@property
def unique_id(self) -> str:
"""Return unique id."""
if not self._unique_id:
raise ClassSetupMissing()
return self._unique_id
@property
def model(self) -> str:
"""Return device model."""
if not self._model:
raise ClassSetupMissing()
return self._model
@property
def current_firmware(self) -> str:
"""Return current SW version."""
if not self._current_firmware:
raise ClassSetupMissing()
return self._current_firmware
@property
def latest_firmware(self) -> str | None:
"""Return latest SW version."""
return self._latest_firmware
@property
def update_available(self) -> bool:
"""Return if new SW version is available."""
return self._update_available
@property
def mac(self) -> str:
"""Return device Mac address."""
if not self._unique_id:
raise ClassSetupMissing()
return self._unique_id
@property
def devices(self) -> dict[str, FritzDevice]:
"""Return devices."""
return self._devices
@property
def signal_device_new(self) -> str:
"""Event specific per FRITZ!Box entry to signal new device."""
return f"{DOMAIN}-device-new-{self._unique_id}"
@property
def signal_device_update(self) -> str:
"""Event specific per FRITZ!Box entry to signal updates in devices."""
return f"{DOMAIN}-device-update-{self._unique_id}"
def _update_hosts_info(self) -> list[HostInfo]:
"""Retrieve latest hosts information from the FRITZ!Box."""
try:
return self.fritz_hosts.get_hosts_info() # type: ignore [no-any-return]
except Exception as ex: # pylint: disable=[broad-except]
if not self.hass.is_stopping:
raise HomeAssistantError("Error refreshing hosts info") from ex
return []
def _update_device_info(self) -> tuple[bool, str | None]:
"""Retrieve latest device information from the FRITZ!Box."""
version = self.connection.call_action("UserInterface1", "GetInfo").get(
"NewX_AVM-DE_Version"
)
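        # A non-empty NewX_AVM-DE_Version means the box reports newer firmware,
        # i.e. an update is available.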
return bool(version), version
async def async_scan_devices(self, now: datetime | None = None) -> None:
"""Wrap up FritzboxTools class scan."""
await self.hass.async_add_executor_job(self.scan_devices, now)
def scan_devices(self, now: datetime | None = None) -> None:
"""Scan for new devices and return a list of found device ids."""
_LOGGER.debug("Checking host info for FRITZ!Box router %s", self.host)
self._update_available, self._latest_firmware = self._update_device_info()
try:
topology = self.fritz_hosts.get_mesh_topology()
except FritzActionError:
self.mesh_role = MeshRoles.SLAVE
return
_LOGGER.debug("Checking devices for FRITZ!Box router %s", self.host)
_default_consider_home = DEFAULT_CONSIDER_HOME.total_seconds()
if self._options:
consider_home = self._options.get(
CONF_CONSIDER_HOME, _default_consider_home
)
else:
consider_home = _default_consider_home
new_device = False
hosts = {}
for host in self._update_hosts_info():
if not host.get("mac"):
continue
hosts[host["mac"]] = Device(
name=host["name"],
connected=host["status"],
connected_to="",
connection_type="",
ip_address=host["ip"],
ssid=None,
wan_access=False,
)
mesh_intf = {}
# first get all meshed devices
for node in topology["nodes"]:
if not node["is_meshed"]:
continue
for interf in node["node_interfaces"]:
int_mac = interf["mac_address"]
mesh_intf[interf["uid"]] = Interface(
device=node["device_name"],
mac=int_mac,
op_mode=interf.get("op_mode", ""),
ssid=interf.get("ssid", ""),
type=interf["type"],
)
if format_mac(int_mac) == format_mac(self.mac):
self.mesh_role = MeshRoles(node["mesh_role"])
# second get all client devices
for node in topology["nodes"]:
if node["is_meshed"]:
continue
for interf in node["node_interfaces"]:
dev_mac = interf["mac_address"]
for link in interf["node_links"]:
intf = mesh_intf.get(link["node_interface_1_uid"])
if (
intf is not None
and link["state"] == "CONNECTED"
and dev_mac in hosts
):
dev_info: Device = hosts[dev_mac]
if intf["op_mode"] != "AP_GUEST":
dev_info.wan_access = not self.connection.call_action(
"X_AVM-DE_HostFilter:1",
"GetWANAccessByIP",
NewIPv4Address=dev_info.ip_address,
).get("NewDisallow")
dev_info.connected_to = intf["device"]
dev_info.connection_type = intf["type"]
dev_info.ssid = intf.get("ssid")
if dev_mac in self._devices:
self._devices[dev_mac].update(dev_info, consider_home)
else:
device = FritzDevice(dev_mac, dev_info.name)
device.update(dev_info, consider_home)
self._devices[dev_mac] = device
new_device = True
dispatcher_send(self.hass, self.signal_device_update)
if new_device:
dispatcher_send(self.hass, self.signal_device_new)
async def async_trigger_firmware_update(self) -> bool:
"""Trigger firmware update."""
results = await self.hass.async_add_executor_job(
self.connection.call_action, "UserInterface:1", "X_AVM-DE_DoUpdate"
)
return cast(bool, results["NewX_AVM-DE_UpdateState"])
async def async_trigger_reboot(self) -> None:
"""Trigger device reboot."""
await self.hass.async_add_executor_job(
self.connection.call_action, "DeviceConfig1", "Reboot"
)
async def async_trigger_reconnect(self) -> None:
"""Trigger device reconnect."""
await self.hass.async_add_executor_job(
self.connection.call_action, "WANIPConn1", "ForceTermination"
)
async def service_fritzbox(
self, service_call: ServiceCall, config_entry: ConfigEntry
) -> None:
"""Define FRITZ!Box services."""
_LOGGER.debug("FRITZ!Box router: %s", service_call.service)
if not self.connection:
raise HomeAssistantError("Unable to establish a connection")
try:
if service_call.service == SERVICE_REBOOT:
_LOGGER.warning(
'Service "fritz.reboot" is deprecated, please use the corresponding button entity instead'
)
await self.hass.async_add_executor_job(
self.connection.call_action, "DeviceConfig1", "Reboot"
)
return
if service_call.service == SERVICE_RECONNECT:
_LOGGER.warning(
'Service "fritz.reconnect" is deprecated, please use the corresponding button entity instead'
)
await self.hass.async_add_executor_job(
self.connection.call_action,
"WANIPConn1",
"ForceTermination",
)
return
device_hosts_list: list[dict] = []
if service_call.service == SERVICE_CLEANUP:
device_hosts_list = await self.hass.async_add_executor_job(
self.fritz_hosts.get_hosts_info
)
except (FritzServiceError, FritzActionError) as ex:
raise HomeAssistantError("Service or parameter unknown") from ex
except FritzConnectionException as ex:
raise HomeAssistantError("Service not supported") from ex
entity_reg: EntityRegistry = (
await self.hass.helpers.entity_registry.async_get_registry()
)
ha_entity_reg_list: list[
RegistryEntry
] = self.hass.helpers.entity_registry.async_entries_for_config_entry(
entity_reg, config_entry.entry_id
)
entities_removed: bool = False
device_hosts_macs = set()
device_hosts_names = set()
for device in device_hosts_list:
device_hosts_macs.add(device["mac"])
device_hosts_names.add(device["name"])
for entry in ha_entity_reg_list:
if entry.original_name is None:
continue
entry_name = entry.name or entry.original_name
entry_host = entry_name.split(" ")[0]
entry_mac = entry.unique_id.split("_")[0]
if not _cleanup_entity_filter(entry) or (
entry_mac in device_hosts_macs and entry_host in device_hosts_names
):
_LOGGER.debug(
"Skipping entity %s [mac=%s, host=%s]",
entry_name,
entry_mac,
entry_host,
)
continue
_LOGGER.info("Removing entity: %s", entry_name)
entity_reg.async_remove(entry.entity_id)
entities_removed = True
if entities_removed:
self._async_remove_empty_devices(entity_reg, config_entry)
@callback
def _async_remove_empty_devices(
self, entity_reg: EntityRegistry, config_entry: ConfigEntry
) -> None:
"""Remove devices with no entities."""
device_reg = async_get(self.hass)
device_list = async_entries_for_config_entry(device_reg, config_entry.entry_id)
for device_entry in device_list:
if not async_entries_for_device(
entity_reg,
device_entry.id,
include_disabled_entities=True,
):
_LOGGER.info("Removing device: %s", device_entry.name)
device_reg.async_remove_device(device_entry.id)
@dataclass
class FritzData:
"""Storage class for platform global data."""
tracked: dict = field(default_factory=dict)
profile_switches: dict = field(default_factory=dict)
class FritzDeviceBase(update_coordinator.CoordinatorEntity):
"""Entity base class for a device connected to a FRITZ!Box router."""
def __init__(self, router: FritzBoxTools, device: FritzDevice) -> None:
"""Initialize a FRITZ!Box device."""
super().__init__(router)
self._router = router
self._mac: str = device.mac_address
self._name: str = device.hostname or DEFAULT_DEVICE_NAME
@property
def name(self) -> str:
"""Return device name."""
return self._name
@property
def ip_address(self) -> str | None:
"""Return the primary ip address of the device."""
if self._mac:
return self._router.devices[self._mac].ip_address
return None
@property
def mac_address(self) -> str:
"""Return the mac address of the device."""
return self._mac
@property
def hostname(self) -> str | None:
"""Return hostname of the device."""
if self._mac:
return self._router.devices[self._mac].hostname
return None
@property
def should_poll(self) -> bool:
"""No polling needed."""
return False
async def async_process_update(self) -> None:
"""Update device."""
raise NotImplementedError()
async def async_on_demand_update(self) -> None:
"""Update state."""
await self.async_process_update()
self.async_write_ha_state()
class FritzDevice:
"""Representation of a device connected to the FRITZ!Box."""
def __init__(self, mac: str, name: str) -> None:
"""Initialize device info."""
self._connected = False
self._connected_to: str | None = None
self._connection_type: str | None = None
self._ip_address: str | None = None
self._last_activity: datetime | None = None
self._mac = mac
self._name = name
self._ssid: str | None = None
self._wan_access = False
def update(self, dev_info: Device, consider_home: float) -> None:
"""Update device info."""
utc_point_in_time = dt_util.utcnow()
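        # Keep reporting the device as home while its last activity still falls
        # inside the consider_home grace window.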
if self._last_activity:
consider_home_evaluated = (
utc_point_in_time - self._last_activity
).total_seconds() < consider_home
else:
consider_home_evaluated = dev_info.connected
if not self._name:
self._name = dev_info.name or self._mac.replace(":", "_")
self._connected = dev_info.connected or consider_home_evaluated
if dev_info.connected:
self._last_activity = utc_point_in_time
self._connected_to = dev_info.connected_to
self._connection_type = dev_info.connection_type
self._ip_address = dev_info.ip_address
self._ssid = dev_info.ssid
self._wan_access = dev_info.wan_access
@property
def connected_to(self) -> str | None:
"""Return connected status."""
return self._connected_to
@property
def connection_type(self) -> str | None:
"""Return connected status."""
return self._connection_type
@property
def is_connected(self) -> bool:
"""Return connected status."""
return self._connected
@property
def mac_address(self) -> str:
"""Get MAC address."""
return self._mac
@property
def hostname(self) -> str:
"""Get Name."""
return self._name
@property
def ip_address(self) -> str | None:
"""Get IP address."""
return self._ip_address
@property
def last_activity(self) -> datetime | None:
"""Return device last activity."""
return self._last_activity
@property
def ssid(self) -> str | None:
"""Return device connected SSID."""
return self._ssid
@property
def wan_access(self) -> bool:
"""Return device wan access."""
return self._wan_access
class SwitchInfo(TypedDict):
"""FRITZ!Box switch info class."""
description: str
friendly_name: str
icon: str
type: str
callback_update: Callable
callback_switch: Callable
class FritzBoxBaseEntity:
"""Fritz host entity base class."""
def __init__(self, fritzbox_tools: FritzBoxTools, device_name: str) -> None:
"""Init device info class."""
self._fritzbox_tools = fritzbox_tools
self._device_name = device_name
@property
def mac_address(self) -> str:
"""Return the mac address of the main device."""
return self._fritzbox_tools.mac
@property
def device_info(self) -> DeviceInfo:
"""Return the device information."""
return DeviceInfo(
configuration_url=f"http://{self._fritzbox_tools.host}",
connections={(CONNECTION_NETWORK_MAC, self.mac_address)},
identifiers={(DOMAIN, self._fritzbox_tools.unique_id)},
manufacturer="AVM",
model=self._fritzbox_tools.model,
name=self._device_name,
sw_version=self._fritzbox_tools.current_firmware,
)
avg_line_length: 32.772861 | max_line_length: 113 | alphanum_fraction: 0.610666
hexsha: cf77e268055a7930c748fb3ec0f3f07b8ed7fd52 | size: 4662 | ext: py | lang: Python
max_stars: path core/dr_utils/dib_renderer_x/renderer/texrender_batch.py | repo weiqi-luo/Self6D-Diff-Renderer | head 1e1caad49f0f8de90a332995814de29261598982 | licenses ["Apache-2.0"] | count null | events null to null
max_issues: path core/dr_utils/dib_renderer_x/renderer/texrender_batch.py | repo weiqi-luo/Self6D-Diff-Renderer | head 1e1caad49f0f8de90a332995814de29261598982 | licenses ["Apache-2.0"] | count null | events null to null
max_forks: path core/dr_utils/dib_renderer_x/renderer/texrender_batch.py | repo weiqi-luo/Self6D-Diff-Renderer | head 1e1caad49f0f8de90a332995814de29261598982 | licenses ["Apache-2.0"] | count null | events null to null
from __future__ import print_function
from __future__ import division
from ..rasterizer import linear_rasterizer
from ..utils import datanormalize
from .fragment_shaders.frag_tex import fragmentshader
from .vertex_shaders.perpsective import perspective_projection
import torch
import torch.nn as nn
import numpy as np
##################################################################
class TexRenderBatch(nn.Module):
def __init__(self, height, width, filtering='nearest'):
super(TexRenderBatch, self).__init__()
self.height = height
self.width = width
self.filtering = filtering
def forward(self,
points,
cameras,
uv_bxpx2,
texture_bx3xthxtw,
ft_fx3=None):
"""
points: b x [points_1xpx3, faces_fx3]
cameras: [camera_rot_bx3x3, camera_pos_bx3, camera_proj_3x1]
uv_bxpx2: b x [1xpx2]
texture_bx3xthxtw: b x [1x3xthxtw]
ft_fx3: b x [fx3]
"""
b = len(points)
assert b > 0, b
points3d_1xfx9_list = []
points2d_1xfx6_list = []
normalz_1xfx1_list = []
normal1_1xfx3_list = []
uv_1xfx9_list = []
single_intrinsic = True
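        # cameras[2] is the projection matrix; an extra leading batch dimension
        # (b x 3 x 1) means a separate set of intrinsics per batch item.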
if cameras[2].ndim == 3:
assert cameras[2].shape[0] == b
single_intrinsic = False
for i in range(b):
##############################################################
# first, MVP projection in vertexshader
points_1xpx3, faces_fx3 = points[i]
if single_intrinsic:
cam_params = [cameras[0][i:i + 1], cameras[1][i:i + 1], cameras[2]]
else:
cam_params = [cameras[0][i:i + 1], cameras[1][i:i + 1], cameras[2][i]]
# use faces_fx3 as ft_fx3 if not given
if ft_fx3 is None:
ft_fx3_single = faces_fx3
else:
ft_fx3_single = ft_fx3[i]
points3d_1xfx9, points2d_1xfx6, normal_1xfx3 = \
perspective_projection(points_1xpx3, faces_fx3, cam_params)
################################################################
# normal
# decide which faces are front and which faces are back
normalz_1xfx1 = normal_1xfx3[:, :, 2:3]
# normalz_bxfx1 = torch.abs(normalz_bxfx1)
# normalize normal
normal1_1xfx3 = datanormalize(normal_1xfx3, axis=2)
############################################################
# second, rasterization
uv_1xpx2 = uv_bxpx2[i]
c0 = uv_1xpx2[:, ft_fx3_single[:, 0], :]
c1 = uv_1xpx2[:, ft_fx3_single[:, 1], :]
c2 = uv_1xpx2[:, ft_fx3_single[:, 2], :]
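            # Pack per-face-vertex UVs plus a constant mask channel into the
            # 1xFx9 attribute layout the rasterizer consumes (u, v, mask per vertex).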
mask = torch.ones_like(c0[:, :, :1])
uv_1xfx9 = torch.cat((c0, mask, c1, mask, c2, mask), dim=2)
# append data
points3d_1xfx9_list.append(points3d_1xfx9)
points2d_1xfx6_list.append(points2d_1xfx6)
normalz_1xfx1_list.append(normalz_1xfx1)
normal1_1xfx3_list.append(normal1_1xfx3)
uv_1xfx9_list.append(uv_1xfx9)
# put the object with larger depth earlier
# imrender = torch.empty((1, self.height, self.width, 3), device=device, dtype=torch.float32)
# improb_1xhxwx1 = torch.empty((1, self.height, self.width, 1), device=device, dtype=torch.float32)
# fg_mask = torch.empty((1, self.height, self.width, 1), device=device, dtype=torch.float32)
ren_ims = []
ren_masks = []
ren_probs = []
for i in range(b):
imfeat, improb_1xhxwx1_i = linear_rasterizer(self.width, self.height, points3d_1xfx9_list[i],
points2d_1xfx6_list[i], normalz_1xfx1_list[i],
uv_1xfx9_list[i])
imtexcoords = imfeat[:, :, :, :2] # (1,H,W,2)
hardmask = imfeat[:, :, :, 2:3] # (1,H,W,1) mask
            # fragment shader
texture_1x3xthxtw = texture_bx3xthxtw[i]
imrender_i = fragmentshader(imtexcoords, texture_1x3xthxtw, hardmask)
ren_ims.append(imrender_i) # 1HW3
ren_probs.append(improb_1xhxwx1_i)
ren_masks.append(hardmask)
imrender = torch.cat(ren_ims, dim=0) # bHW3
improb_bxhxwx1 = torch.cat(ren_probs, dim=0)
mask_bxhxwx1 = torch.cat(ren_masks, dim=0)
# return imrender, improb_1xhxwx1, normal1_1xfx3_list
return imrender, improb_bxhxwx1, normal1_1xfx3_list, mask_bxhxwx1
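# Minimal usage sketch (hypothetical names and shapes, assuming GPU float tensors
# as required by the underlying rasterizer kernels):
#   renderer = TexRenderBatch(height=256, width=256)
#   points = [(verts_1xpx3, faces_fx3)]                   # one (vertices, faces) pair per batch item
#   cameras = [cam_rot_bx3x3, cam_pos_bx3, cam_proj_3x1]  # shared 3x1 projection, or bx3x1 per item
#   ims, probs, normals, masks = renderer(points, cameras, uv_bxpx2, texture_bx3xthxtw)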
avg_line_length: 39.176471 | max_line_length: 107 | alphanum_fraction: 0.54526
hexsha: 3f158cd585a9bcbbb893fbf75847a24a8075f0b4 | size: 103555 | ext: py | lang: Python
max_stars: path src/grassfire/test/test_parallel_failing.py | repo bmmeijers/grassfire | head 20995d97ca41763d40b23f0fadb6e0581dff859b | licenses ["MIT"] | count 1 | events 2021-07-09T14:53:34.000Z to 2021-07-09T14:53:34.000Z
max_issues: path src/grassfire/test/test_parallel_failing.py | repo bmmeijers/grassfire | head 20995d97ca41763d40b23f0fadb6e0581dff859b | licenses ["MIT"] | count null | events null to null
max_forks: path src/grassfire/test/test_parallel_failing.py | repo bmmeijers/grassfire | head 20995d97ca41763d40b23f0fadb6e0581dff859b | licenses ["MIT"] | count null | events null to null
import unittest
from tri.delaunay.helpers import ToPointsAndSegments
from grassfire import calc_skel
from grassfire.events import at_same_location
PAUSE = False
OUTPUT = False
LOGGING = False
class TestMoreAdvancedParallelEvents(unittest.TestCase):
def setUp(self):
pass
def test_capital_T(self):
"""Capital T, has more than one triangle in parallel fan
Exhibits infinite event loop because of flipping; when all flip
events are handled first inside the event loop
"""
# T
ring = [(15.5055, 28.7004), (20.8063, 28.7004), (20.8063, 44.1211), (26.7445, 44.1211), (26.7445, 47.8328), (9.5668, 47.8328), (9.5668, 44.1211), (15.5055, 44.1211), (15.5055, 28.7004)]
conv = ToPointsAndSegments()
conv.add_polygon([ring])
skel = calc_skel(conv, pause=PAUSE, output=OUTPUT)
# check the amount of segments in the skeleton
assert len(skel.segments()) == 21, len(skel.segments())
# check the amount of skeleton nodes
assert len(skel.sk_nodes) == 14, len(skel.sk_nodes)
# check the amount of kinetic vertices that are (not) stopped
        # materialize the filters so len() also works on Python 3
        not_stopped = [v for v in skel.vertices if v.stops_at is None]
        stopped = [v for v in skel.vertices if v.stops_at is not None]
        assert len(not_stopped) == 8, len(not_stopped)
        assert len(stopped) == 13, len(stopped)
# check cross relationship between kinetic vertices and skeleton nodes
for v in skel.vertices:
assert at_same_location((v.start_node, v), v.starts_at)
if v.stops_at is not None and not v.inf_fast:
assert at_same_location((v.stop_node, v), v.stops_at), \
"{} {} {}".format(id(v),
v.stop_node.pos,
v.position_at(v.stops_at) )
def test_small_t(self):
ring = [(100.908, 42.0027), (100.908, 40.2512), (103.188, 40.2512), (103.188, 31.7734), (103.250359375, 30.4847203125), (103.393189453, 29.8978896484), (103.668125, 29.3748875), (104.118419922, 28.9348306641), (104.787328125, 28.5968359375), (105.718103516, 28.3800201172), (106.954, 28.3035), (107.811, 28.3438375), (108.677, 28.4609), (108.677, 30.3953), (107.35, 30.2371), (106.713328125, 30.322746875), (106.191125, 30.58245), (105.837859375, 31.020353125), (105.708, 31.6406), (105.708, 40.2512), (108.782, 40.2512), (108.782, 42.0027), (105.708, 42.0027), (105.708, 45.634), (103.188, 44.8391), (103.188, 42.0012), (100.908, 42.0027)],
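        # Note: the trailing comma above makes `ring` a one-element tuple wrapping
        # the ring list, which add_polygon still accepts as a sequence of rings
        # (compare conv.add_polygon([ring]) in test_capital_T).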
conv = ToPointsAndSegments()
conv.add_polygon(ring)
skel = calc_skel(conv, pause=PAUSE, output=OUTPUT)
# check the amount of segments in the skeleton
assert len(skel.segments()) == 89, len(skel.segments())
# check the amount of skeleton nodes
assert len(skel.sk_nodes) == 64, len(skel.sk_nodes)
# check the amount of kinetic vertices that are (not) stopped
        not_stopped = [v for v in skel.vertices if v.stops_at is None]
        stopped = [v for v in skel.vertices if v.stops_at is not None]
        assert len(not_stopped) == 10, len(not_stopped)
self.assertEqual(len(stopped), 79)
# check cross relationship between kinetic vertices and skeleton nodes
for v in skel.vertices:
assert at_same_location((v.start_node, v), v.starts_at)
if v.stops_at is not None and not v.inf_fast:
assert at_same_location((v.stop_node, v), v.stops_at), \
"{} {} {}".format(id(v),
v.stop_node.pos,
v.position_at(v.stops_at) )
#
#
# def test_another_parallel(self):
# j = """{
# "type": "FeatureCollection",
# "crs": { "type": "name", "properties": { "name": "urn:ogc:def:crs:EPSG::28992" } },
#
# "features": [
# { "type": "Feature", "properties": { "id": 21 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.673575314055, 0.166666666667 ], [ 0.866025403784, 0.166666666667 ] ] } },
# { "type": "Feature", "properties": { "id": 25 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.673575314055, -0.166666666667 ], [ 0.57735026919, -0.0 ] ] } },
# { "type": "Feature", "properties": { "id": 27 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.866025403784, -0.166666666667 ], [ 0.673575314055, -0.166666666667 ] ] } },
# { "type": "Feature", "properties": { "id": 32 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.57735026919, -0.0 ], [ 0.673575314055, 0.166666666667 ] ] } }
# ]
# }"""
# import json
# x = json.loads(j)
# parse segments from geo-json
# segments = []
# for y in x['features']:
# segments.append(tuple(map(tuple, y['geometry']['coordinates'])))
# convert to triangulation input
# conv = ToPointsAndSegments()
# for line in segments:
# conv.add_point(line[0])
# conv.add_point(line[1])
# conv.add_segment(*line)
# skeletonize / offset
# skel = calc_skel(conv, pause=PAUSE, output=OUTPUT)
#
#
# # #
# # #
# # #
#
# def test_goes_backwards(self):
# """ Backwards """
# import json
# s = """
# {
# "type": "FeatureCollection",
# "crs": { "type": "name", "properties": { "name": "urn:ogc:def:crs:EPSG::28992" } },
#
# "features": [
# { "type": "Feature", "properties": { "id": 139837494387920.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.683514927812, -0.965898400098 ], [ -0.71748146456, -0.739454821784 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494388112.000000, "side": 0 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.748142137672, -0.553743666202 ], [ -0.759958434594, -0.429672548517 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494388112.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.759958434594, -0.429672548517 ], [ -0.785948285345, -0.265070160424 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494388240.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.785948285345, -0.265070160424 ], [ -0.763489766712, -0.262007635155 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494388240.000000, "side": 2 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.763489766712, -0.262007635155 ], [ -0.766507673675, -0.238870348437 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494388752.000000, "side": 2 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.586154320712, -0.215676326008 ], [ -0.5828660885, -0.215227930706 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494388880.000000, "side": 0 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.498236806516, -0.203241344712 ], [ -0.49846422843, -0.201933668707 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494388880.000000, "side": 2 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.582925827116, -0.214789847523 ], [ -0.498236806516, -0.203241344712 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494389136.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.360290252631, 0.00997489159772 ], [ -0.334809898799, 0.0137497588324 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494389136.000000, "side": 2 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.334809898799, 0.0137497588324 ], [ -0.337617422344, 0.0390174707462 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494390352.000000, "side": 0 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.337617422344, 0.0390174707462 ], [ -0.174312345657, 0.0612863440448 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494390352.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.174312345657, 0.0612863440448 ], [ -0.179478424108, 0.180106148433 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494390416.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.179478424108, 0.180106148433 ], [ -0.187294454259, 0.269556271262 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494390480.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.187294454259, 0.269556271262 ], [ -0.187878286313, 0.275978423863 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494390544.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.187878286313, 0.275978423863 ], [ -0.2025829896, 0.3719914865 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494390672.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.2025829896, 0.3719914865 ], [ -0.218077993356, 0.474987687936 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494394960.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.218077993356, 0.474987687936 ], [ -0.21920884215, 0.48516532708 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494395024.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.21920884215, 0.48516532708 ], [ -0.242380040932, 0.698340355884 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494395792.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.242380040932, 0.698340355884 ], [ -0.28298744088, 1.01626658474 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494395920.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.766507673675, -0.238870348437 ], [ -0.718823272293, -0.233572081612 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494395920.000000, "side": 2 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.718823272293, -0.233572081612 ], [ -0.716010192022, -0.225132840802 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494396048.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.716010192022, -0.225132840802 ], [ -0.591156693313, -0.211990367249 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494396112.000000, "side": 2 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.591156693313, -0.211990367249 ], [ -0.586154320712, -0.215676326008 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494396432.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.134572834673, 1.04195400369 ], [ 0.0896659427279, 1.07127753612 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494396496.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.0896659427279, 1.07127753612 ], [ 0.363524008481, 1.10590326858 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494396880.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.742836674715, -0.518684331281 ], [ 0.779930828904, -0.847585831751 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494397136.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.0522328078103, -1.09193542446 ], [ -0.173404655063, -1.11449917074 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494397264.000000, "side": 0 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.173404655063, -1.11449917074 ], [ -0.17629841398, -1.09641317751 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494397264.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.17629841398, -1.09641317751 ], [ -0.203682508102, -1.10032519096 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494397392.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.489107842308, -0.96346158327 ], [ -0.575119174813, -0.977042319984 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494397520.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.575119174813, -0.977042319984 ], [ -0.583019619349, -0.973092097716 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494397520.000000, "side": 2 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.583019619349, -0.973092097716 ], [ -0.661513833405, -0.984305556864 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494397648.000000, "side": 0 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.661513833405, -0.984305556864 ], [ -0.664364520136, -0.963400520837 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494397648.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.664364520136, -0.963400520837 ], [ -0.683514927812, -0.965898400098 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494398096.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.71594215456, -0.739262408033 ], [ -0.7346200948, -0.602290846274 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494398736.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.7346200948, -0.602290846274 ], [ -0.742017017808, -0.552978026219 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494398736.000000, "side": 2 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.742017017808, -0.552978026219 ], [ -0.748142137672, -0.553743666202 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494405136.000000, "side": 0 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.337024201923, -0.176153514066 ], [ -0.360290252631, 0.00997489159772 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494405136.000000, "side": 2 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.338876402512, -0.176400474145 ], [ -0.337024201923, -0.176153514066 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494405264.000000, "side": 0 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.404451022738, -0.189113686111 ], [ -0.404541862841, -0.188417245322 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494405264.000000, "side": 2 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.49846422843, -0.201933668707 ], [ -0.404451022738, -0.189113686111 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494430416.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.52373504408, 0.746891951123 ], [ 0.565107394626, 0.534388514229 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494430544.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.565107394626, 0.534388514229 ], [ 0.579336454202, 0.459685951454 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494430544.000000, "side": 2 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.579336454202, 0.459685951454 ], [ 0.593836136934, 0.387187537797 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494430864.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.593836136934, 0.387187537797 ], [ 0.608366977169, 0.300910673896 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494430928.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.608366977169, 0.300910673896 ], [ 0.62264631675, 0.21701955386 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494431056.000000, "side": 0 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.652201074178, 0.0468991978764 ], [ 0.668001986794, -0.0593180480459 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494431056.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.668001986794, -0.0593180480459 ], [ 0.694887044697, -0.184781651591 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494431440.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.694887044697, -0.184781651591 ], [ 0.719436290171, -0.362081757792 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494431440.000000, "side": 2 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.719436290171, -0.362081757792 ], [ 0.742836674715, -0.518684331281 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494477328.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.779930828904, -0.847585831751 ], [ 0.566338424494, -0.905598583566 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494477328.000000, "side": 2 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.566338424494, -0.905598583566 ], [ 0.483144591664, -0.964590574117 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494477776.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.483144591664, -0.964590574117 ], [ 0.439943571247, -0.994831288409 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494477904.000000, "side": 2 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.439943571247, -0.994831288409 ], [ 0.378451489401, -1.00986268619 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494478032.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.311542696901, -1.05160968452 ], [ 0.21017331046, -1.05160968452 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494478032.000000, "side": 2 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.21017331046, -1.05160968452 ], [ 0.212025328206, -1.07012986198 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494478480.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.212025328206, -1.07012986198 ], [ 0.0512062864389, -1.08474977486 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494478480.000000, "side": 2 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.0512062864389, -1.08474977486 ], [ 0.0522328078103, -1.09193542446 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494478928.000000, "side": 0 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.2308185896, -0.920610265136 ], [ -0.31901751229, -0.933210111236 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494478928.000000, "side": 2 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.220126648577, -1.00079982281 ], [ -0.2308185896, -0.920610265136 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494479056.000000, "side": 0 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.217307179971, -1.00041093059 ], [ -0.220126648577, -1.00079982281 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494479056.000000, "side": 2 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.203682508102, -1.10032519096 ], [ -0.217307179971, -1.00041093059 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494479184.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.31901751229, -0.933210111236 ], [ -0.340747110537, -0.936314339557 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494479248.000000, "side": 0 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.340747110537, -0.936314339557 ], [ -0.340661171519, -0.936973205365 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494479312.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.340661171519, -0.936973205365 ], [ -0.48452650262, -0.958553005033 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494479376.000000, "side": 2 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.48452650262, -0.958553005033 ], [ -0.489107842308, -0.96346158327 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494503056.000000, "side": 0 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.338542168251, -0.178517291136 ], [ -0.338876402512, -0.176400474145 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494503056.000000, "side": 2 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.404541862841, -0.188417245322 ], [ -0.338542168251, -0.178517291136 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494503888.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.378451489401, -1.00986268619 ], [ 0.308891739908, -1.02377463609 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494503888.000000, "side": 2 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.308891739908, -1.02377463609 ], [ 0.311542696901, -1.05160968452 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494525072.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.62264631675, 0.21701955386 ], [ 0.636851638193, 0.132675457792 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494525072.000000, "side": 2 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.636851638193, 0.132675457792 ], [ 0.652201074178, 0.0468991978764 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494548944.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.462313019605, 1.11799540513 ], [ 0.512320830173, 0.759606096061 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494549008.000000, "side": 0 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.49867568417, 0.739986388874 ], [ 0.52373504408, 0.746891951123 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494549072.000000, "side": 0 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.512320830173, 0.759606096061 ], [ 0.496302681955, 0.756936404692 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494549072.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.496302681955, 0.756936404692 ], [ 0.49867568417, 0.739986388874 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494550096.000000, "side": 0 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.363524008481, 1.10590326858 ], [ 0.363524008481, 1.10487498959 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494550096.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.363524008481, 1.10487498959 ], [ 0.462313019605, 1.11799540513 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494550416.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.28298744088, 1.01626658474 ], [ -0.166820821139, 1.03286181614 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494550480.000000, "side": 1 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.166820821139, 1.03286181614 ], [ -0.133726051289, 1.03687330339 ] ] } },
# { "type": "Feature", "properties": { "id": 139837494550480.000000, "side": 2 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.133726051289, 1.03687330339 ], [ -0.134572834673, 1.04195400369 ] ] } }
# ]
# }
# """
# # s = """{
# # "type": "FeatureCollection",
# # "crs": { "type": "name", "properties": { "name": "urn:ogc:def:crs:EPSG::28992" } },
# #
# # "features": [
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51076.45, 391503.5 ], [ 51075.45, 391503.4 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51075.45, 391503.4 ], [ 51075.25, 391504.65 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51075.25, 391504.65 ], [ 51073.85, 391504.45 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51085.65, 391504.8 ], [ 51084.55, 391504.7 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51076.25, 391504.9 ], [ 51076.45, 391503.5 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51073.85, 391504.45 ], [ 51073.7, 391505.55 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51090.35, 391507.3 ], [ 51086.8, 391506.85 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51091.2, 391506.4 ], [ 51090.4, 391506.4 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51086.8, 391506.5 ], [ 51085.5, 391506.3 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51091.1, 391507.45 ], [ 51091.2, 391506.4 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51086.8, 391506.85 ], [ 51086.8, 391506.5 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51090.4, 391506.4 ], [ 51090.35, 391507.3 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51084.55, 391504.7 ], [ 51084.45, 391506.05 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51084.45, 391506.05 ], [ 51076.25, 391504.9 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51073.7, 391505.55 ], [ 51075.0, 391505.7 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51085.5, 391506.3 ], [ 51085.65, 391504.8 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51092.6, 391507.7 ], [ 51091.1, 391507.45 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51046.9, 391511.9 ], [ 51046.75, 391512.9 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51046.75, 391512.9 ], [ 51047.95, 391513.05 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51048.55, 391511.0 ], [ 51048.2, 391510.95 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51048.2, 391510.95 ], [ 51048.05, 391512.05 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51054.55, 391511.9 ], [ 51053.55, 391511.75 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51100.15, 391508.6 ], [ 51099.65, 391508.5 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51049.9, 391510.95 ], [ 51048.95, 391510.8 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51072.95, 391510.0 ], [ 51072.75, 391511.5 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51074.4, 391510.2 ], [ 51072.95, 391510.0 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51048.95, 391510.8 ], [ 51048.55, 391511.0 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51050.6, 391511.7 ], [ 51049.9, 391510.95 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51072.75, 391511.5 ], [ 51074.2, 391511.7 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51075.0, 391505.7 ], [ 51074.4, 391510.2 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51050.45, 391512.5 ], [ 51050.6, 391511.7 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51053.55, 391511.75 ], [ 51053.4, 391512.9 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51048.05, 391512.05 ], [ 51046.9, 391511.9 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51054.4, 391513.05 ], [ 51054.55, 391511.9 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51053.4, 391512.9 ], [ 51050.45, 391512.5 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51064.5, 391513.4 ], [ 51063.45, 391513.25 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51059.6, 391512.7 ], [ 51058.55, 391512.55 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51058.55, 391512.55 ], [ 51058.4, 391513.6 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51059.45, 391513.8 ], [ 51059.6, 391512.7 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51058.4, 391513.6 ], [ 51054.4, 391513.05 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51046.4, 391515.7 ], [ 51046.3, 391516.65 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51046.9, 391520.8 ], [ 51045.7, 391520.65 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51110.65, 391520.65 ], [ 51109.95, 391520.25 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51109.35, 391521.0 ], [ 51109.95, 391520.25 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51108.15, 391520.45 ], [ 51109.35, 391521.0 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51045.55, 391521.75 ], [ 51046.7, 391521.9 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51045.7, 391520.65 ], [ 51045.55, 391521.75 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51110.6, 391524.7 ], [ 51109.85, 391521.7 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51110.65, 391520.65 ], [ 51109.85, 391521.7 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51110.6, 391524.7 ], [ 51111.85, 391524.9 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51111.75, 391526.2 ], [ 51111.85, 391524.9 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51111.75, 391526.2 ], [ 51110.6, 391526.1 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51110.6, 391526.1 ], [ 51110.45, 391527.65 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51111.5, 391527.8 ], [ 51110.45, 391527.65 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51111.4, 391528.95 ], [ 51110.55, 391528.85 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51111.5, 391527.8 ], [ 51111.4, 391528.95 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51047.95, 391513.05 ], [ 51047.55, 391515.85 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51073.75, 391514.95 ], [ 51072.2, 391514.75 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51072.2, 391514.75 ], [ 51072.05, 391515.65 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51099.65, 391508.5 ], [ 51098.0, 391514.6 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51091.75, 391514.2 ], [ 51092.6, 391507.7 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51074.2, 391511.7 ], [ 51073.75, 391514.95 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51098.5, 391519.3 ], [ 51100.15, 391508.6 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51063.45, 391513.25 ], [ 51063.3, 391514.3 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51064.35, 391514.55 ], [ 51064.5, 391513.4 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51063.3, 391514.3 ], [ 51059.45, 391513.8 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51069.45, 391514.15 ], [ 51068.45, 391514.05 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51098.0, 391514.6 ], [ 51091.75, 391514.2 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51069.3, 391515.3 ], [ 51069.45, 391514.15 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51068.45, 391514.05 ], [ 51068.3, 391515.05 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51068.3, 391515.05 ], [ 51064.35, 391514.55 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51072.05, 391515.65 ], [ 51069.3, 391515.3 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51047.55, 391515.85 ], [ 51046.4, 391515.7 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51047.45, 391516.8 ], [ 51046.9, 391520.8 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51046.3, 391516.65 ], [ 51047.45, 391516.8 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51108.1, 391519.3 ], [ 51106.75, 391519.2 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51106.75, 391519.2 ], [ 51106.6, 391520.2 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51106.6, 391520.2 ], [ 51098.5, 391519.3 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51108.1, 391519.3 ], [ 51108.15, 391520.45 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51043.6, 391535.0 ], [ 51043.5, 391536.05 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51043.5, 391536.05 ], [ 51044.6, 391536.25 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51043.1, 391538.6 ], [ 51042.95, 391539.55 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51044.35, 391540.9 ], [ 51044.5, 391541.35 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51044.5, 391541.35 ], [ 51045.45, 391541.45 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51049.25, 391541.65 ], [ 51050.35, 391541.8 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51044.05, 391539.7 ], [ 51043.9, 391540.85 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51042.95, 391539.55 ], [ 51044.05, 391539.7 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51046.5, 391540.15 ], [ 51049.45, 391540.55 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51043.9, 391540.85 ], [ 51044.35, 391540.9 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51046.4, 391540.75 ], [ 51046.5, 391540.15 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51045.45, 391541.45 ], [ 51046.4, 391540.75 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51049.45, 391540.55 ], [ 51049.25, 391541.65 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51050.35, 391541.8 ], [ 51050.5, 391540.7 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51059.15, 391543.1 ], [ 51060.25, 391543.25 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51064.3, 391542.75 ], [ 51064.15, 391543.9 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51065.3, 391542.95 ], [ 51068.15, 391543.55 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51065.15, 391544.05 ], [ 51065.3, 391542.95 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51064.15, 391543.9 ], [ 51065.15, 391544.05 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51068.0, 391544.5 ], [ 51069.5, 391544.7 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51108.5, 391543.9 ], [ 51109.45, 391543.65 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51068.15, 391543.55 ], [ 51068.0, 391544.5 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51108.5, 391543.9 ], [ 51108.4, 391546.1 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51108.4, 391546.1 ], [ 51109.15, 391546.15 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51069.5, 391544.7 ], [ 51069.1, 391547.8 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51069.1, 391547.8 ], [ 51067.65, 391547.6 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51109.0, 391547.5 ], [ 51109.15, 391546.15 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51108.25, 391541.55 ], [ 51109.6, 391542.3 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51055.45, 391541.45 ], [ 51059.35, 391541.95 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51054.35, 391541.25 ], [ 51054.2, 391542.35 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51055.3, 391542.5 ], [ 51055.45, 391541.45 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51108.25, 391541.55 ], [ 51108.35, 391539.35 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51050.5, 391540.7 ], [ 51054.35, 391541.25 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51054.2, 391542.35 ], [ 51055.3, 391542.5 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51059.35, 391541.95 ], [ 51059.15, 391543.1 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51060.25, 391543.25 ], [ 51060.35, 391542.2 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51109.6, 391542.3 ], [ 51109.45, 391543.65 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51060.35, 391542.2 ], [ 51064.3, 391542.75 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51109.35, 391537.05 ], [ 51110.0, 391537.15 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51044.6, 391536.25 ], [ 51044.25, 391538.75 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51109.9, 391538.3 ], [ 51110.0, 391537.15 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51044.25, 391538.75 ], [ 51043.1, 391538.6 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51109.9, 391538.3 ], [ 51109.85, 391538.6 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51109.85, 391538.6 ], [ 51108.35, 391539.35 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51109.8, 391533.55 ], [ 51110.55, 391533.8 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51044.25, 391530.85 ], [ 51045.35, 391531.0 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51045.35, 391531.0 ], [ 51044.8, 391535.15 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51109.2, 391532.6 ], [ 51109.8, 391533.55 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51044.4, 391529.85 ], [ 51044.25, 391530.85 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51045.6, 391530.05 ], [ 51044.4, 391529.85 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51109.2, 391532.6 ], [ 51109.55, 391530.0 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51046.7, 391521.9 ], [ 51045.6, 391530.05 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51110.55, 391528.85 ], [ 51109.55, 391530.0 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51110.55, 391533.8 ], [ 51110.35, 391534.85 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51044.8, 391535.15 ], [ 51043.6, 391535.0 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51109.65, 391534.95 ], [ 51110.35, 391534.85 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51109.35, 391537.05 ], [ 51109.65, 391534.95 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51066.75, 391553.8 ], [ 51066.6, 391555.0 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51088.2, 391551.9 ], [ 51094.7, 391552.0 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51076.9, 391557.8 ], [ 51078.05, 391557.95 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51067.95, 391555.2 ], [ 51067.8, 391556.55 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51069.1, 391555.35 ], [ 51077.1, 391556.6 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51068.8, 391549.25 ], [ 51068.2, 391554.0 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51111.85, 391555.05 ], [ 51103.45, 391553.75 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51068.2, 391554.0 ], [ 51066.75, 391553.8 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51087.65, 391557.55 ], [ 51088.2, 391551.9 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51066.6, 391555.0 ], [ 51067.95, 391555.2 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51068.9, 391556.7 ], [ 51069.1, 391555.35 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51067.8, 391556.55 ], [ 51068.9, 391556.7 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51078.25, 391556.65 ], [ 51087.65, 391557.55 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51077.1, 391556.6 ], [ 51076.9, 391557.8 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51078.05, 391557.95 ], [ 51078.25, 391556.65 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51104.05, 391549.0 ], [ 51104.15, 391549.0 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51067.65, 391547.6 ], [ 51067.45, 391549.05 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51095.35, 391547.85 ], [ 51104.05, 391549.0 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51109.0, 391547.5 ], [ 51113.35, 391548.05 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51067.45, 391549.05 ], [ 51068.8, 391549.25 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51094.7, 391552.0 ], [ 51095.35, 391547.85 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51111.85, 391555.05 ], [ 51113.35, 391548.05 ] ] } },
# # { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ 51104.15, 391549.0 ], [ 51103.45, 391553.75 ] ] } }
# # ]
# # }"""
# x = json.loads(s)
# # parse segments from geo-json
# segments = []
# for y in x['features']:
# segments.append(tuple(map(tuple, y['geometry']['coordinates'])))
# # convert to triangulation input
# conv = ToPointsAndSegments()
# for line in segments:
# conv.add_point(line[0])
# conv.add_point(line[1])
# conv.add_segment(*line)
# # skeletonize / offset
# skel = calc_skel(conv, pause=False, output=False)
def test_goes(self):
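        """Compute the straight skeleton of a real-world segment set (the same
        data as the commented-out GeoJSON above) and verify the segment/node
        counts plus the vertex/node cross-relationship.
        """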
segments = [ ((51076.45,391503.5),(51075.45,391503.4)), ((51075.45,391503.4),(51075.25,391504.65)), ((51075.25,391504.65),(51073.85,391504.45)), ((51085.65,391504.8),(51084.55,391504.7)), ((51076.25,391504.9),(51076.45,391503.5)), ((51073.85,391504.45),(51073.7,391505.55)), ((51090.35,391507.3),(51086.8,391506.85)), ((51091.2,391506.4),(51090.4,391506.4)), ((51086.8,391506.5),(51085.5,391506.3)), ((51091.1,391507.45),(51091.2,391506.4)), ((51086.8,391506.85),(51086.8,391506.5)), ((51090.4,391506.4),(51090.35,391507.3)), ((51084.55,391504.7),(51084.45,391506.05)), ((51084.45,391506.05),(51076.25,391504.9)), ((51073.7,391505.55),(51075.0,391505.7)), ((51085.5,391506.3),(51085.65,391504.8)), ((51092.6,391507.7),(51091.1,391507.45)), ((51046.9,391511.9),(51046.75,391512.9)), ((51046.75,391512.9),(51047.95,391513.05)), ((51102.4,391509.15),(51100.15,391508.6)), ((51103.9,391510.2),(51102.4,391509.15)), ((51048.55,391511.0),(51048.2,391510.95)), ((51048.2,391510.95),(51048.05,391512.05)), ((51054.55,391511.9),(51053.55,391511.75)), ((51100.15,391508.6),(51099.65,391508.5)), ((51049.9,391510.95),(51048.95,391510.8)), ((51072.95,391510.0),(51072.75,391511.5)), ((51074.4,391510.2),(51072.95,391510.0)), ((51048.95,391510.8),(51048.55,391511.0)), ((51050.6,391511.7),(51049.9,391510.95)), ((51072.75,391511.5),(51074.2,391511.7)), ((51075.0,391505.7),(51074.4,391510.2)), ((51050.45,391512.5),(51050.6,391511.7)), ((51053.55,391511.75),(51053.4,391512.9)), ((51048.05,391512.05),(51046.9,391511.9)), ((51054.4,391513.05),(51054.55,391511.9)), ((51053.4,391512.9),(51050.45,391512.5)), ((51064.5,391513.4),(51063.45,391513.25)), ((51059.6,391512.7),(51058.55,391512.55)), ((51058.55,391512.55),(51058.4,391513.6)), ((51059.45,391513.8),(51059.6,391512.7)), ((51058.4,391513.6),(51054.4,391513.05)), ((51046.4,391515.7),(51046.3,391516.65)), ((51046.9,391520.8),(51045.7,391520.65)), ((51110.65,391520.65),(51109.95,391520.25)), ((51109.35,391521.0),(51109.95,391520.25)), ((51108.15,391520.45),(51109.35,391521.0)), ((51045.55,391521.75),(51046.7,391521.9)), ((51045.7,391520.65),(51045.55,391521.75)), ((51110.6,391524.7),(51109.85,391521.7)), ((51110.65,391520.65),(51109.85,391521.7)), ((51110.6,391524.7),(51111.85,391524.9)), ((51111.75,391526.2),(51111.85,391524.9)), ((51111.75,391526.2),(51110.6,391526.1)), ((51110.6,391526.1),(51110.45,391527.65)), ((51111.5,391527.8),(51110.45,391527.65)), ((51111.4,391528.95),(51110.55,391528.85)), ((51111.5,391527.8),(51111.4,391528.95)), ((51047.95,391513.05),(51047.55,391515.85)), ((51073.75,391514.95),(51072.2,391514.75)), ((51072.2,391514.75),(51072.05,391515.65)), ((51099.65,391508.5),(51098.0,391514.6)), ((51091.75,391514.2),(51092.6,391507.7)), ((51109.4,391514.1),(51103.9,391510.2)), ((51074.2,391511.7),(51073.75,391514.95)), ((51103.9,391510.2),(51102.45,391519.7)), ((51098.5,391519.3),(51100.15,391508.6)), ((51063.45,391513.25),(51063.3,391514.3)), ((51064.35,391514.55),(51064.5,391513.4)), ((51063.3,391514.3),(51059.45,391513.8)), ((51069.45,391514.15),(51068.45,391514.05)), ((51098.0,391514.6),(51091.75,391514.2)), ((51069.3,391515.3),(51069.45,391514.15)), ((51068.45,391514.05),(51068.3,391515.05)), ((51068.3,391515.05),(51064.35,391514.55)), ((51072.05,391515.65),(51069.3,391515.3)), ((51047.55,391515.85),(51046.4,391515.7)), ((51117.5,391516.3),(51109.4,391514.1)), ((51047.45,391516.8),(51046.9,391520.8)), ((51046.3,391516.65),(51047.45,391516.8)), ((51108.1,391519.3),(51106.75,391519.2)), ((51102.45,391519.7),(51098.5,391519.3)), 
((51106.75,391519.2),(51106.6,391520.2)), ((51106.6,391520.2),(51098.5,391519.3)), ((51108.1,391519.3),(51108.15,391520.45)), ((51043.6,391535.0),(51043.5,391536.05)), ((51043.5,391536.05),(51044.6,391536.25)), ((51043.1,391538.6),(51042.95,391539.55)), ((51044.35,391540.9),(51044.5,391541.35)), ((51044.5,391541.35),(51045.45,391541.45)), ((51049.25,391541.65),(51050.35,391541.8)), ((51044.05,391539.7),(51043.9,391540.85)), ((51042.95,391539.55),(51044.05,391539.7)), ((51046.5,391540.15),(51049.45,391540.55)), ((51043.9,391540.85),(51044.35,391540.9)), ((51046.4,391540.75),(51046.5,391540.15)), ((51045.45,391541.45),(51046.4,391540.75)), ((51049.45,391540.55),(51049.25,391541.65)), ((51050.35,391541.8),(51050.5,391540.7)), ((51059.15,391543.1),(51060.25,391543.25)), ((51064.3,391542.75),(51064.15,391543.9)), ((51065.3,391542.95),(51068.15,391543.55)), ((51065.15,391544.05),(51065.3,391542.95)), ((51064.15,391543.9),(51065.15,391544.05)), ((51068.0,391544.5),(51069.5,391544.7)), ((51108.5,391543.9),(51109.45,391543.65)), ((51068.15,391543.55),(51068.0,391544.5)), ((51108.5,391543.9),(51108.4,391546.1)), ((51108.4,391546.1),(51109.15,391546.15)), ((51069.5,391544.7),(51069.1,391547.8)), ((51069.1,391547.8),(51067.65,391547.6)), ((51109.0,391547.5),(51109.15,391546.15)), ((51108.25,391541.55),(51109.6,391542.3)), ((51055.45,391541.45),(51059.35,391541.95)), ((51054.35,391541.25),(51054.2,391542.35)), ((51055.3,391542.5),(51055.45,391541.45)), ((51108.25,391541.55),(51108.35,391539.35)), ((51050.5,391540.7),(51054.35,391541.25)), ((51054.2,391542.35),(51055.3,391542.5)), ((51059.35,391541.95),(51059.15,391543.1)), ((51060.25,391543.25),(51060.35,391542.2)), ((51109.6,391542.3),(51109.45,391543.65)), ((51060.35,391542.2),(51064.3,391542.75)), ((51109.35,391537.05),(51110.0,391537.15)), ((51044.6,391536.25),(51044.25,391538.75)), ((51109.9,391538.3),(51110.0,391537.15)), ((51044.25,391538.75),(51043.1,391538.6)), ((51114.7,391538.3),(51109.9,391538.3)), ((51109.9,391538.3),(51109.85,391538.6)), ((51109.85,391538.6),(51108.35,391539.35)), ((51109.8,391533.55),(51110.55,391533.8)), ((51044.25,391530.85),(51045.35,391531.0)), ((51045.35,391531.0),(51044.8,391535.15)), ((51109.2,391532.6),(51109.8,391533.55)), ((51044.4,391529.85),(51044.25,391530.85)), ((51045.6,391530.05),(51044.4,391529.85)), ((51109.2,391532.6),(51109.55,391530.0)), ((51116.0,391529.6),(51117.5,391516.3)), ((51046.7,391521.9),(51045.6,391530.05)), ((51116.0,391529.6),(51111.4,391528.95)), ((51110.55,391528.85),(51109.55,391530.0)), ((51110.55,391533.8),(51110.35,391534.85)), ((51044.8,391535.15),(51043.6,391535.0)), ((51109.65,391534.95),(51110.35,391534.85)), ((51109.35,391537.05),(51109.65,391534.95)), ((51114.7,391538.3),(51116.0,391529.6)), ((51066.75,391553.8),(51066.6,391555.0)), ((51088.2,391551.9),(51094.7,391552.0)), ((51076.9,391557.8),(51078.05,391557.95)), ((51067.95,391555.2),(51067.8,391556.55)), ((51069.1,391555.35),(51077.1,391556.6)), ((51068.8,391549.25),(51068.2,391554.0)), ((51103.45,391553.75),(51098.5,391553.45)), ((51098.5,391553.45),(51089.3,391553.25)), ((51111.85,391555.05),(51103.45,391553.75)), ((51068.2,391554.0),(51066.75,391553.8)), ((51087.65,391557.55),(51088.2,391551.9)), ((51066.6,391555.0),(51067.95,391555.2)), ((51068.9,391556.7),(51069.1,391555.35)), ((51067.8,391556.55),(51068.9,391556.7)), ((51078.25,391556.65),(51087.65,391557.55)), ((51077.1,391556.6),(51076.9,391557.8)), ((51078.05,391557.95),(51078.25,391556.65)), ((51087.2,391561.25),(51087.05,391561.25)), 
((51081.65,391563.15),(51076.55,391562.45)), ((51087.05,391561.25),(51086.5,391565.2)), ((51081.4,391564.6),(51081.65,391563.15)), ((51091.3,391562.6),(51110.1,391565.85)), ((51090.45,391566.55),(51091.3,391562.6)), ((51086.5,391565.2),(51081.4,391564.6)), ((51086.95,391565.15),(51086.5,391565.2)), ((51082.6,391559.55),(51076.75,391559.15)), ((51097.45,391558.95),(51110.95,391561.1)), ((51097.3,391561.2),(51097.45,391558.95)), ((51110.95,391561.1),(51111.85,391555.05)), ((51089.3,391553.25),(51088.35,391559.85)), ((51082.6,391560.75),(51082.6,391559.55)), ((51097.3,391561.2),(51087.45,391559.75)), ((51087.45,391559.75),(51087.2,391561.25)), ((51088.35,391559.85),(51097.3,391561.2)), ((51097.3,391561.2),(51091.7,391560.75)), ((51087.05,391561.25),(51082.6,391560.75)), ((51110.1,391565.85),(51110.95,391561.1)), ((51076.75,391559.15),(51076.55,391562.45)), ((51091.3,391562.6),(51091.7,391560.75)), ((51090.35,391567.35),(51090.45,391566.55)), ((51076.55,391562.45),(51076.15,391568.75)), ((51086.95,391565.15),(51086.45,391570.2)), ((51076.15,391568.75),(51076.1,391568.75)), ((51090.35,391567.35),(51089.95,391570.5)), ((51076.1,391568.75),(51076.05,391569.9)), ((51086.45,391570.2),(51076.15,391568.75)), ((51087.95,391576.1),(51085.1,391575.7)), ((51089.85,391575.1),(51092.4,391575.6)), ((51092.4,391575.6),(51093.3,391575.75)), ((51089.85,391575.1),(51089.55,391576.15)), ((51085.1,391575.7),(51085.05,391576.15)), ((51093.3,391575.75),(51093.2,391576.45)), ((51087.95,391576.1),(51089.55,391576.15)), ((51087.0,391586.5),(51085.3,391586.1)), ((51085.45,391582.4),(51085.3,391586.1)), ((51093.2,391576.45),(51107.7,391580.05)), ((51089.8,391579.75),(51088.55,391579.45)), ((51075.55,391575.6),(51074.7,391581.15)), ((51107.7,391580.05),(51108.5,391575.3)), ((51085.05,391576.15),(51084.85,391580.35)), ((51106.9,391584.05),(51107.7,391580.05)), ((51092.55,391580.45),(51089.8,391579.75)), ((51085.55,391580.45),(51084.85,391580.35)), ((51088.0,391583.7),(51088.55,391579.45)), ((51085.55,391580.45),(51085.45,391582.4)), ((51106.9,391584.05),(51092.55,391580.45)), ((51074.7,391581.15),(51085.45,391582.4)), ((51088.0,391583.7),(51088.45,391579.8)), ((51074.7,391581.15),(51073.85,391586.8)), ((51088.45,391579.8),(51087.0,391586.5)), ((51087.4,391589.05),(51105.0,391593.9)), ((51102.3,391606.85),(51101.4,391606.7)), ((51070.95,391611.35),(51071.65,391611.45)), ((51070.95,391611.35),(51076.6,391612.05)), ((51071.65,391611.45),(51076.6,391612.05)), ((51101.1,391615.45),(51102.3,391606.85)), ((51078.65,391612.35),(51079.55,391612.45)), ((51076.6,391612.05),(51078.65,391612.35)), ((51076.6,391612.05),(51079.55,391612.45)), ((51079.55,391612.45),(51079.5,391612.75)), ((51079.5,391612.75),(51086.0,391613.6)), ((51086.0,391613.6),(51094.7,391614.7)), ((51094.7,391614.7),(51094.7,391614.6)), ((51094.7,391614.6),(51101.1,391615.45)), ((51102.45,391599.2),(51102.45,391599.15)), ((51102.45,391599.15),(51088.2,391597.3)), ((51073.05,391594.15),(51086.35,391595.85)), ((51088.2,391597.3),(51073.0,391595.3)), ((51086.35,391595.85),(51087.4,391589.05)), ((51101.4,391606.7),(51102.45,391599.15)), ((51101.4,391606.7),(51102.45,391599.2)), ((51073.0,391595.3),(51070.95,391611.35)), ((51088.2,391597.3),(51086.0,391613.6)), ((51073.85,391586.8),(51073.8,391587.25)), ((51079.75,391588.0),(51073.8,391587.25)), ((51079.75,391588.0),(51079.7,391587.55)), ((51088.0,391583.7),(51087.5,391588.2)), ((51106.1,391588.25),(51088.0,391583.7)), ((51106.1,391588.25),(51106.9,391584.05)), ((51105.0,391593.9),(51106.1,391588.25)), 
((51073.8,391587.25),(51073.05,391594.15)), ((51079.7,391587.55),(51087.5,391588.2)), ((51087.4,391589.05),(51087.5,391588.2)), ((51092.9,391572.65),(51093.15,391571.25)), ((51109.3,391570.6),(51110.1,391565.85)), ((51076.05,391569.9),(51075.6,391575.05)), ((51109.3,391570.6),(51090.35,391567.35)), ((51093.15,391571.25),(51089.95,391570.5)), ((51108.5,391575.3),(51109.3,391570.6)), ((51075.6,391575.05),(51075.55,391575.6)), ((51092.9,391572.65),(51108.5,391575.3)), ((51092.4,391575.6),(51092.9,391572.65)), ((51085.05,391576.15),(51075.6,391575.05)), ((51104.05,391549.0),(51104.15,391549.0)), ((51067.65,391547.6),(51067.45,391549.05)), ((51095.35,391547.85),(51104.05,391549.0)), ((51109.0,391547.5),(51113.35,391548.05)), ((51113.35,391548.05),(51114.7,391538.3)), ((51067.45,391549.05),(51068.8,391549.25)), ((51094.7,391552.0),(51095.35,391547.85)), ((51111.85,391555.05),(51113.35,391548.05)), ((51104.15,391549.0),(51103.45,391553.75)) ]
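        # convert to triangulation input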
conv = ToPointsAndSegments()
for line in segments:
conv.add_point(line[0])
conv.add_point(line[1])
conv.add_segment(*line)
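        # skeletonize / offset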
skel = calc_skel(conv, pause=PAUSE, output=OUTPUT)
        # check the number of segments in the skeleton
        assert len(skel.segments()) == 985, len(skel.segments())
        # check the number of skeleton nodes
        assert len(skel.sk_nodes) == 718, len(skel.sk_nodes)
        # check the number of kinetic vertices that are (not) stopped
        # (wrap filter() in list() so len() also works under Python 3)
        self.assertEqual(len(list(filter(lambda v: v.stops_at is None, skel.vertices))), 17)
        self.assertEqual(len(list(filter(lambda v: v.stops_at is not None, skel.vertices))), 968)
        # check the cross-relationship between kinetic vertices and skeleton nodes
for v in skel.vertices:
assert at_same_location((v.start_node, v), v.starts_at)
if v.stops_at is not None and not v.inf_fast:
assert at_same_location((v.stop_node, v), v.stops_at), \
"{} {} {}".format(id(v),
v.stop_node.pos,
v.position_at(v.stops_at) )
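    # A minimal sketch (not part of the original suite) of the invariant that
    # the per-vertex loops in these tests assert: a kinetic vertex must sit on
    # its start node when it starts and, unless it is infinitely fast, on its
    # stop node when it stops. The helper name below is hypothetical.
    def _assert_vertices_match_nodes(self, skel):
        for v in skel.vertices:
            assert at_same_location((v.start_node, v), v.starts_at)
            if v.stops_at is not None and not v.inf_fast:
                assert at_same_location((v.stop_node, v), v.stops_at)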
# def test_goes_gobackwards_20160303(self):
# s = """
# {
# "type": "FeatureCollection",
# "crs": { "type": "name", "properties": { "name": "urn:ogc:def:crs:EPSG::28992" } },
#
# "features": [
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493817155664.000000, "to": 140493816672936.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.133100733426, 0.282656616556 ], [ -0.148050438054, 0.380269393835 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493817155904.000000, "to": 140493817155664.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.132355020073, 0.274453769672 ], [ -0.133100733426, 0.282656616556 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493817156264.000000, "to": 140493817155904.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.124425641702, 0.183706439431 ], [ -0.132355020073, 0.274453769672 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493817156504.000000, "to": 140493817156264.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.121501168335, 0.116443551985 ], [ -0.124425641702, 0.183706439431 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493817158904.000000, "to": 140493817159384.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.553983773526, 0.291702685122 ], [ 0.568263113107, 0.207811565086 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493817159384.000000, "to": 140493816748824.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.568263113107, 0.207811565086 ], [ 0.582507258649, 0.123236950932 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493817176264.000000, "to": 140493816749784.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.640529242934, -0.194355587932 ], [ 0.664840953673, -0.369940165498 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493817176624.000000, "to": 140493817158904.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.539584850771, 0.377196288981 ], [ 0.553983773526, 0.291702685122 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816672936.000000, "to": 140493816674136.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.148050438054, 0.380269393835 ], [ -0.163375913628, 0.482138731472 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816673536.000000, "to": 140493816250928.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.183077060318, 0.6631881194 ], [ -0.182297614308, 0.663287747838 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816673776.000000, "to": 140493816673536.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.164381721724, 0.491191004336 ], [ -0.183077060318, 0.6631881194 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816674136.000000, "to": 140493816673776.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.163375913628, 0.482138731472 ], [ -0.164381721724, 0.491191004336 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816693536.000000, "to": 140493816693896.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.480975141943, 0.677895532379 ], [ 0.510945310226, 0.523957849833 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816693896.000000, "to": 140493816694256.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.510945310226, 0.523957849833 ], [ 0.525200742742, 0.449116829127 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816694256.000000, "to": 140493817176624.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.525200742742, 0.449116829127 ], [ 0.539584850771, 0.377196288981 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816694496.000000, "to": 140493815920952.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.446697902722, 0.713433930809 ], [ 0.446697902722, 0.668449815889 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816694856.000000, "to": 140493816694976.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.450269787031, 0.8051822885 ], [ 0.434227453755, 0.802508566288 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816694976.000000, "to": 140493816694496.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.434227453755, 0.802508566288 ], [ 0.446697902722, 0.713433930809 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816695456.000000, "to": 140493816694856.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.415257415039, 1.05610428778 ], [ 0.450269787031, 0.8051822885 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816695576.000000, "to": 140493816695456.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.308366800535, 1.04190787804 ], [ 0.415257415039, 1.05610428778 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816695936.000000, "to": 140493816695576.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.308366800535, 1.0433330301 ], [ 0.308366800535, 1.04190787804 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816696056.000000, "to": 140493816695936.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.0967013694352, 1.01657073422 ], [ 0.308366800535, 1.0433330301 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816709200.000000, "to": 140493816696056.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.0707741493383, 0.994670089456 ], [ 0.0967013694352, 1.01657073422 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816709800.000000, "to": 140493816226232.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.0713325311092, 0.988875228579 ], [ -0.0698446294759, 0.989092970282 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816710160.000000, "to": 140493816709800.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.159601229642, 0.978175992394 ], [ -0.0713325311092, 0.988875228579 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816710520.000000, "to": 140493816710160.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.221389561168, 0.969349087887 ], [ -0.159601229642, 0.978175992394 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816712200.000000, "to": 140493815919512.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.698618859107, -0.491966802989 ], [ -0.705210931729, -0.422750040464 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816712920.000000, "to": 140493816879056.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.704542595805, -0.2874819799 ], [ -0.67765666086, -0.284494653792 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816729680.000000, "to": 140493815893480.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.606655435305, -0.269083759995 ], [ -0.600865455417, -0.273350060965 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816730760.000000, "to": 140493816297424.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.156497578363, -0.015554576225 ], [ -0.111934761382, -0.00859142206911 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816731120.000000, "to": 140493816357784.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.275582274593, -0.223606591433 ], [ -0.273240156394, -0.223294309007 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816731600.000000, "to": 140493816297064.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.224709673355, -0.86402036886 ], [ -0.326817919445, -0.878607261159 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816731840.000000, "to": 140493816357304.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.172776052199, -0.938589288061 ], [ -0.183588343757, -0.857497101371 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816732080.000000, "to": 140493816822192.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.0294927618655, -1.0096290022 ], [ -0.0143637873812, -1.01577961581 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816732440.000000, "to": 140493816732080.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.0310901581828, -1.03119385248 ], [ 0.0294927618655, -1.0096290022 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816732560.000000, "to": 140493816732920.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.0105070042516, -1.04277709772 ], [ -0.127155059212, -1.05444190321 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816732920.000000, "to": 140493816896880.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.127155059212, -1.05444190321 ], [ -0.13040343446, -1.03413955791 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816733520.000000, "to": 140493816822072.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.248303391063, -0.969368261353 ], [ 0.250882840132, -0.996452476573 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816746184.000000, "to": 140493816410672.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.366488893059, -0.956005669538 ], [ 0.310666246195, -0.967170198908 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816746544.000000, "to": 140493816785928.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.262216376644, -0.853714914008 ], [ 0.276720919522, -0.96463200661 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816748824.000000, "to": 140493816896520.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.582507258649, 0.123236950932 ], [ 0.597763519865, 0.0379813735458 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816749784.000000, "to": 140493816857736.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.664840953673, -0.369940165498 ], [ 0.688138291933, -0.525853121539 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816767744.000000, "to": 140493816786408.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.542426143839, -0.854937813189 ], [ 0.451376749473, -0.919500111012 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816768224.000000, "to": 140493815919392.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.402353483721, -0.890452777355 ], [ -0.511727389338, -0.9068588632 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816769664.000000, "to": 140493816712920.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.701703304536, -0.309249879629 ], [ -0.704542595805, -0.2874819799 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816770384.000000, "to": 140493816295744.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.520821934369, -0.261988806009 ], [ -0.434091573458, -0.25016193861 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816783288.000000, "to": 140493817156504.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.119661462503, 0.116443551985 ], [ -0.121501168335, 0.116443551985 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816785928.000000, "to": 140493816733520.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.276720919522, -0.96463200661 ], [ 0.248303391063, -0.969368261353 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816786288.000000, "to": 140493815891320.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.156531168729, -1.03787209138 ], [ -0.170119695637, -0.938222894053 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816786408.000000, "to": 140493816822552.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.451376749473, -0.919500111012 ], [ 0.416792178104, -0.94370931097 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816786768.000000, "to": 140493816746544.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.279675282177, -0.852597544054 ], [ 0.262216376644, -0.853714914008 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816805688.000000, "to": 140493816244536.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.654549201639, -0.787174742311 ], [ -0.65333324616, -0.787022747877 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816819792.000000, "to": 140493816894360.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.298871285716, -0.0366852165948 ], [ -0.274116852895, -0.0330178932137 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816820032.000000, "to": 140493816730760.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.159925009388, 0.00758058319987 ], [ -0.156497578363, -0.015554576225 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816822072.000000, "to": 140493815893720.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.250882840132, -0.996452476573 ], [ 0.14922528172, -0.996452476573 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816822192.000000, "to": 140493816732560.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.0143637873812, -1.01577961581 ], [ -0.0105070042516, -1.04277709772 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816822552.000000, "to": 140493816746184.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.416792178104, -0.94370931097 ], [ 0.366488893059, -0.956005669538 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816840872.000000, "to": 140493816858216.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.566261978626, -0.91980328487 ], [ -0.573763380512, -0.916052583926 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816841352.000000, "to": 140493816769664.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.722680350863, -0.312110385947 ], [ -0.701703304536, -0.309249879629 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816841712.000000, "to": 140493816729680.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.674914760296, -0.276268952102 ], [ -0.606655435305, -0.269083759995 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816842432.000000, "to": 140493816820032.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.276856111355, -0.00836456707083 ], [ -0.159925009388, 0.00758058319987 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816856656.000000, "to": 140493816732440.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.151603840382, -1.02023806319 ], [ 0.0310901581828, -1.03119385248 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816856776.000000, "to": 140493816767744.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.719819179214, -0.806756988766 ], [ 0.542426143839, -0.854937813189 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816857736.000000, "to": 140493816856776.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.688138291933, -0.525853121539 ], [ 0.719819179214, -0.806756988766 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816858216.000000, "to": 140493816878456.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.573763380512, -0.916052583926 ], [ -0.614362494031, -0.921852457285 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816858456.000000, "to": 140493816893640.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.660680016052, -0.582117912949 ], [ -0.681354888155, -0.585563724966 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816878456.000000, "to": 140493816916160.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.614362494031, -0.921852457285 ], [ -0.617122275661, -0.901614058663 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816878816.000000, "to": 140493816897120.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.659103657501, -0.747848464811 ], [ -0.675256761564, -0.629392368346 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816879056.000000, "to": 140493816841712.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.67765666086, -0.284494653792 ], [ -0.674914760296, -0.276268952102 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816879416.000000, "to": 140493816296224.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.434318995372, -0.248854262606 ], [ -0.342664560563, -0.236355930584 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816879656.000000, "to": 140493816245136.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.342849550639, -0.234937673335 ], [ -0.275390862177, -0.224818870068 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816880016.000000, "to": 140493817176264.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.613702562266, -0.0691644114832 ], [ 0.640529242934, -0.194355587932 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816893640.000000, "to": 140493816299224.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.681354888155, -0.585563724966 ], [ -0.695453771838, -0.49157116708 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816894240.000000, "to": 140493816819792.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.283850446476, -0.156851930513 ], [ -0.298871285716, -0.0366852165948 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816894360.000000, "to": 140493816842432.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.274116852895, -0.0330178932137 ], [ -0.276856111355, -0.00836456707083 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816896520.000000, "to": 140493816880016.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.597763519865, 0.0379813735458 ], [ 0.613702562266, -0.0691644114832 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816896880.000000, "to": 140493816786288.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.13040343446, -1.03413955791 ], [ -0.156531168729, -1.03787209138 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816897120.000000, "to": 140493816897360.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.675256761564, -0.629392368346 ], [ -0.654661979557, -0.626706092432 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816897360.000000, "to": 140493816858456.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.654661979557, -0.626706092432 ], [ -0.660680016052, -0.582117912949 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816914600.000000, "to": 140493816783288.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.115499793047, 0.0508972580504 ], [ -0.119661462503, 0.116443551985 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816915680.000000, "to": 140493816840872.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.516471271698, -0.911941594301 ], [ -0.566261978626, -0.91980328487 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816915920.000000, "to": 140493816805688.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.636994499801, -0.904206087898 ], [ -0.654549201639, -0.787174742311 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816916160.000000, "to": 140493816915920.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.617122275661, -0.901614058663 ], [ -0.636994499801, -0.904206087898 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816410672.000000, "to": 140493816786768.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.310666246195, -0.967170198908 ], [ 0.279675282177, -0.852597544054 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816250928.000000, "to": 140493816710520.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.182297614308, 0.663287747838 ], [ -0.221389561168, 0.969349087887 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816226232.000000, "to": 140493816709200.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.0698446294759, 0.989092970282 ], [ -0.0707741493383, 0.994670089456 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816244536.000000, "to": 140493816298504.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.65333324616, -0.787022747877 ], [ -0.65747824195, -0.747645287868 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816245136.000000, "to": 140493816731120.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.275390862177, -0.224818870068 ], [ -0.275582274593, -0.223606591433 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816357304.000000, "to": 140493816298144.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.183588343757, -0.857497101371 ], [ -0.225020119861, -0.861640278982 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816357784.000000, "to": 140493815918912.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.273240156394, -0.223294309007 ], [ -0.282431615608, -0.156656229704 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816295744.000000, "to": 140493816879416.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.434091573458, -0.25016193861 ], [ -0.434318995372, -0.248854262606 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816296224.000000, "to": 140493816879656.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.342664560563, -0.236355930584 ], [ -0.342849550639, -0.234937673335 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816296584.000000, "to": 140493816770384.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.520762195752, -0.262426889195 ], [ -0.520821934369, -0.261988806009 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816297064.000000, "to": 140493816298744.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.326817919445, -0.878607261159 ], [ -0.402488539497, -0.889417349739 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816297424.000000, "to": 140493816914600.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.111934761382, -0.00859142206911 ], [ -0.115499793047, 0.0508972580504 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816298144.000000, "to": 140493816731600.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.225020119861, -0.861640278982 ], [ -0.224709673355, -0.86402036886 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816298504.000000, "to": 140493816878816.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.65747824195, -0.747645287868 ], [ -0.659103657501, -0.747848464811 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816298744.000000, "to": 140493816768224.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.402488539497, -0.889417349739 ], [ -0.402353483721, -0.890452777355 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493816299224.000000, "to": 140493816712200.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.695453771838, -0.49157116708 ], [ -0.698618859107, -0.491966802989 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493815891320.000000, "to": 140493816731840.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.170119695637, -0.938222894053 ], [ -0.172776052199, -0.938589288061 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493815893480.000000, "to": 140493816296584.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.600865455417, -0.273350060965 ], [ -0.520762195752, -0.262426889195 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493815893720.000000, "to": 140493816856656.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.14922528172, -0.996452476573 ], [ 0.151603840382, -1.02023806319 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493815918912.000000, "to": 140493816894240.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.282431615608, -0.156656229704 ], [ -0.283850446476, -0.156851930513 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493815919392.000000, "to": 140493816915680.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.511727389338, -0.9068588632 ], [ -0.516471271698, -0.911941594301 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493815919512.000000, "to": 140493816841352.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ -0.705210931729, -0.422750040464 ], [ -0.722680350863, -0.312110385947 ] ] } },
# { "type": "Feature", "properties": { "time": 0.050000, "from": 140493815920952.000000, "to": 140493816693536.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 0.446697902722, 0.668449815889 ], [ 0.480975141943, 0.677895532379 ] ] } }
# ]
# }
# """
# import json
# x = json.loads(s)
# # parse segments from geo-json
# segments = []
# for y in x['features']:
# segments.append(tuple(map(tuple, y['geometry']['coordinates'])))
# # convert to triangulation input
# conv = ToPointsAndSegments()
# for line in segments:
# conv.add_point(line[0])
# conv.add_point(line[1])
# conv.add_segment(*line)
# # skeletonize / offset
# skel = calc_skel(conv, pause=True, output=True)
def test_capital_U(self):
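        """Compute the straight skeleton of the outline of a capital 'U'
        (a single closed ring, the same ring as the 'U' glyph in the
        TU Delft logo test below).
        """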
polys = [
[(38.3852, 32.0156), (39.2659501953, 32.0912681641), (40.0374453125, 32.3105390625), (40.6971646484, 32.6618123047), (41.2425875, 33.1334875), (41.6711931641, 33.7139642578), (41.9804609375, 34.3916421875), (42.1678701172, 35.1549208984), (42.2309, 35.9922), (42.2309, 47.834), (47.5316, 47.834), (47.5316, 35.7273), (47.4732092773, 34.7657740479), (47.3213726562, 33.8784173828), (47.081449707, 33.063555542), (46.7588, 32.3195140625), (46.3587831055, 31.6446184814), (45.8867585938, 31.0371943359), (45.3480860352, 30.4955671631), (44.748125, 30.0180625), (44.0922350586, 29.6030058838), (43.3857757812, 29.2487228516), (41.8425875, 28.7157796875), (40.1614367187, 28.4058373047), (38.3852, 28.3055), (36.6090451172, 28.4058373047), (34.9279234375, 28.7157796875), (33.3847244141, 29.2487228516), (32.6782488525, 29.6030058838), (32.0223375, 30.0180625), (31.4223515381, 30.4955671631), (30.8836521484, 31.0371943359), (30.4116005127, 31.6446184814), (30.0115578125, 32.3195140625), (29.6888852295, 33.063555542), (29.4489439453, 33.8784173828), (29.2970951416, 34.7657740479), (29.2387, 35.7273), (29.2387, 47.834), (34.5395, 47.834), (34.5395, 35.9922), (34.6025257812, 35.1549208984), (34.789925, 34.3916421875), (35.0991804687, 33.7139642578), (35.527775, 33.1334875), (36.0731914062, 32.6618123047), (36.7329125, 32.3105390625), (37.5044210937, 32.0912681641), (38.3852, 32.0156)],
]
conv = ToPointsAndSegments()
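        # add_polygon is given a list of rings; here each polygon is a single
        # exterior ring with no holes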
for ring in polys:
conv.add_polygon([ring])
        skel = calc_skel(conv, pause=False, output=OUTPUT, shrink=True)
        # check the number of segments in the skeleton
        assert len(skel.segments()) == 158, len(skel.segments())
        # check the number of skeleton nodes
        self.assertEqual(len(skel.sk_nodes), 111)
        # check the number of kinetic vertices that are (not) stopped
        # (materialized as lists so len() also works under Python 3)
        not_stopped = list(filter(lambda v: v.stops_at is None, skel.vertices))
        stopped = list(filter(lambda v: v.stops_at is not None, skel.vertices))
        assert len(not_stopped) == 30, len(not_stopped)
        assert len(stopped) == 128, len(stopped)
        # check the cross-relationship between kinetic vertices and skeleton nodes
for v in skel.vertices:
assert at_same_location((v.start_node, v), v.starts_at)
# if v.stops_at is not None and not v.inf_fast:
# assert at_same_location((v.stop_node, v), v.stops_at), \
# "{} {} LINESTRING({} {})".format(id(v),
# v.stop_node.pos,
# v.position_at(v.starts_at),
# v.position_at(v.stops_at) )
def test_tudelft_logo(self):
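        """Compute the straight skeleton of the TU Delft logo outlines:
        flame, T, U, D (exterior and interior ring) and e (outer and
        inner shell).
        """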
polys = [
# flame
[(28.2387, 57.1504), (27.7545962891, 57.0337472656), (27.2828078125, 56.993484375), (26.8394935547, 57.0375167969), (26.4408125, 57.17375), (26.1029236328, 57.4100894531), (25.8419859375, 57.754440625), (25.6741583984, 58.2147089844), (25.6156, 58.7988), (25.6856849121, 59.2881812744), (25.8839386719, 59.7683330078), (26.1934848145, 60.2400170654), (26.597446875, 60.7039953125), (27.6211128906, 61.6118818359), (28.819925, 62.4980875), (30.0588714844, 63.3687072266), (31.202940625, 64.2298359375), (32.1171207031, 65.0875685547), (32.4458111816, 65.5170659912), (32.6664, 65.948), (32.8248125, 66.6851625), (32.7710109375, 66.9061765625), (32.6176, 66.9805), (32.5208703125, 66.9222546875), (32.4679125, 66.7729125), (32.3706484375, 66.5442390625), (32.141, 66.248), (31.1034759766, 65.3984353516), (29.9355515625, 64.7423015625), (28.6692482422, 64.2321388672), (27.3365875, 63.8204875), (24.6002796875, 63.1028796875), (23.2606755859, 62.7020037109), (21.9828, 62.2098), (20.9997419922, 61.7483013672), (19.7656484375, 61.0788734375), (18.4207775391, 60.1820806641), (17.1053875, 59.0384875), (16.5025784912, 58.3680671631), (15.9597365234, 57.6286583984), (15.4943938721, 56.8178317627), (15.1240828125, 55.9331578125), (14.8663356201, 54.9722071045), (14.7386845703, 53.9325501953), (14.7586619385, 52.8117576416), (14.9438, 51.6074), (15.122925, 50.8023), (15.252640625, 50.40393125), (15.3949, 50.2336), (15.5243578125, 50.3437421875), (15.5897375, 50.6433625), (15.6117, 51.6262), (15.6561465332, 52.3362411621), (15.8000691406, 52.9857136719), (16.031892334, 53.5809723145), (16.340040625, 54.128371875), (17.1390105469, 55.1050128906), (18.104375, 55.966475), (20.163871875, 57.547215625), (21.0727964844, 58.3681707031), (21.7777, 59.2773), (22.104725, 59.739675), (22.2554875, 59.862834375), (22.3512, 59.8191), (22.3023, 59.3027), (22.0503148438, 58.5393394531), (21.6885625, 57.836665625), (20.851325, 56.570375), (20.4836242188, 55.9852566406), (20.221725, 55.417821875), (20.1195195312, 54.8573199219), (20.2309, 54.293), (20.6030839844, 53.7075248047), (21.082534375, 53.4021359375), (21.6320488281, 53.3341009766), (22.214425, 53.4606875), (22.7924605469, 53.7391630859), (23.328953125, 54.1267953125), (23.7867003906, 54.5808517578), (24.1285, 55.0586), (24.368925, 55.470225), (24.465971875, 55.57165625), (24.5609, 55.5859), (24.6368625, 55.3106625), (24.5941, 54.791), (24.2621640625, 53.2469984375), (23.7833125, 51.9836375), (23.4592181641, 51.4272880859), (23.0629046875, 50.9052078125), (22.0063, 49.916), (21.566953125, 49.6562546875), (21.130475, 49.4675625), (20.815009375, 49.2970390625), (20.7395761719, 49.2020642578), (20.7387, 49.0918), (20.9814125, 49.0273125), (21.4195, 49.0469), (22.2465202881, 49.156970874), (23.0534919922, 49.3736341797), (23.8374688721, 49.6869346924), (24.5955046875, 50.0869171875), (26.0219681641, 51.1071072266), (27.3093125, 52.3545625), (28.4339677734, 53.7496412109), (29.3723640625, 55.2127015625), (30.1009314453, 56.6641017578), (30.5961, 58.0242), (30.6886375, 58.3597625), (30.6215, 58.5781), (30.509940625, 58.5979578125), (30.381, 58.5274875), (30.0922, 58.2668), (29.2161125, 57.616425), (28.2387, 57.1504)],
            # T
[(15.5055, 28.7004), (20.8063, 28.7004), (20.8063, 44.1211), (26.7445, 44.1211), (26.7445, 47.8328), (9.5668, 47.8328), (9.5668, 44.1211), (15.5055, 44.1211), (15.5055, 28.7004)],
# U
[(38.3852, 32.0156), (39.2659501953, 32.0912681641), (40.0374453125, 32.3105390625), (40.6971646484, 32.6618123047), (41.2425875, 33.1334875), (41.6711931641, 33.7139642578), (41.9804609375, 34.3916421875), (42.1678701172, 35.1549208984), (42.2309, 35.9922), (42.2309, 47.834), (47.5316, 47.834), (47.5316, 35.7273), (47.4732092773, 34.7657740479), (47.3213726562, 33.8784173828), (47.081449707, 33.063555542), (46.7588, 32.3195140625), (46.3587831055, 31.6446184814), (45.8867585938, 31.0371943359), (45.3480860352, 30.4955671631), (44.748125, 30.0180625), (44.0922350586, 29.6030058838), (43.3857757812, 29.2487228516), (41.8425875, 28.7157796875), (40.1614367187, 28.4058373047), (38.3852, 28.3055), (36.6090451172, 28.4058373047), (34.9279234375, 28.7157796875), (33.3847244141, 29.2487228516), (32.6782488525, 29.6030058838), (32.0223375, 30.0180625), (31.4223515381, 30.4955671631), (30.8836521484, 31.0371943359), (30.4116005127, 31.6446184814), (30.0115578125, 32.3195140625), (29.6888852295, 33.063555542), (29.4489439453, 33.8784173828), (29.2970951416, 34.7657740479), (29.2387, 35.7273), (29.2387, 47.834), (34.5395, 47.834), (34.5395, 35.9922), (34.6025257812, 35.1549208984), (34.789925, 34.3916421875), (35.0991804687, 33.7139642578), (35.527775, 33.1334875), (36.0731914062, 32.6618123047), (36.7329125, 32.3105390625), (37.5044210937, 32.0912681641), (38.3852, 32.0156)],
# D -- exterior
[(55.4875, 45.5563), (59.4066, 45.5563), (60.2057835693, 45.5178564697),
(60.9454076172, 45.4051830078), (61.6265759033, 45.2222653076),
(62.2503921875, 44.9730890625), (62.8179602295, 44.6616399658),
(63.3303837891, 44.2919037109), (64.1942125, 43.3935125),
(64.8507083984, 42.3098009766), (65.3087015625, 41.0726546875),
(65.5770220703, 39.7139591797), (65.6645, 38.2656),
(65.5770220703, 36.8175103516), (65.3087015625, 35.4592765625),
(64.8507083984, 34.2227138672), (64.1942125, 33.1396375),
(63.3303837891, 32.2418626953), (62.8179602295, 31.8724056396),
(62.2503921875, 31.5612046875), (61.6265759033, 31.3122367432),
(60.9454076172, 31.1294787109), (60.2057835693, 31.0169074951),
(59.4066, 30.9785), (55.4875, 30.9785),
(55.4875, 45.5563)],
# D -- interior
[
(52.8324, 28.7004), (59.4059, 28.7004),
(60.8560672363, 28.7788331543),
(62.1440332031, 29.0031808594),
(63.2792692871, 29.3570154785), (64.271246875, 29.823909375), (65.1294373535, 30.3874349121), (65.8633121094, 31.0311644531), (66.4823425293, 31.7386703613), (66.996, 32.493525), (67.4137559082, 33.2793007324), (67.7450816406, 34.0795699219), (68.186328125, 35.657878125), (68.3955105469, 37.0970285156), (68.4484, 38.2656), (68.3955105469, 39.4344525391), (68.186328125, 40.8740328125), (67.7450816406, 42.4528623047), (67.4137559082, 43.2534084717), (66.996, 44.0394625), (66.4823425293, 44.7945895752), (65.8633121094, 45.5023548828), (65.1294373535, 46.1463236084), (64.271246875, 46.7100609375), (63.2792692871, 47.1771320557), (62.1440332031, 47.5311021484), (60.8560672363, 47.7555364014), (59.4059, 47.834), (52.8324, 47.834), (52.8324, 28.7004)],
# e -- outer shell
[(82.9195, 34.8762), (82.9195, 36.123), (82.8224828125, 37.4505816406), (82.53454375, 38.658784375), (82.0603515625, 39.7298449219), (81.404575, 40.646), (80.5718828125, 41.3894863281), (79.56694375, 41.942540625), (78.3944265625, 42.2873996094), (77.059, 42.4063),(76.2952375244, 42.3687171631), (75.5838064453, 42.2585341797), (74.9242850342, 42.0795993408), (74.3162515625, 41.8357609375), (73.7592843018, 41.5308672607), (73.2529615234, 41.1687666016), (72.3905625, 40.2883375), (71.7256806641, 39.2252599609), (71.2549421875, 38.0103203125), (70.9749732422, 36.6743048828), (70.8824, 35.248), (70.9637001953, 33.823009375), (71.2144078125, 32.50744375), (71.6447333984, 31.3261375), (72.2648875, 30.303925), (73.0850806641, 29.465640625), (73.5733826904, 29.1232322266), (74.1155234375, 28.83611875), (74.7127792236, 28.6074044922), (75.3664263672, 28.44019375), (76.848, 28.3027), (77.9991910156, 28.3734771484), (79.058021875, 28.5858296875), (80.0117917969, 28.9397892578), (80.8478, 29.4353875), (81.5533457031, 30.0726560547), (82.115728125, 30.8516265625), (82.5222464844, 31.7723306641), (82.7602, 32.8348), (80.1098, 32.8348), (79.9671755859, 32.1632625), (79.7567359375, 31.59635), (79.4750064453, 31.1294125), (79.1185125, 30.7578), (78.6837794922, 30.4768625), (78.1673328125, 30.28195), (77.5656978516, 30.1684125), (76.8754, 30.1316), (75.9894021484, 30.2347720703), (75.2544671875, 30.5276953125), (74.6604455078, 30.9854802734), (74.1971875, 31.5832375), (73.8545435547, 32.2960775391), (73.6223640625, 33.0991109375), (73.4904994141, 33.9674482422), (73.4488, 34.8762), (82.9195, 34.8762), (82.9195, 34.8762)],
# e -- inner shell
[(73.5055, 36.6262), (73.5694832031, 37.3917933594), (73.744890625, 38.118946875), (74.0270464844, 38.7880457031), (74.411275, 39.379475), (74.8929003906, 39.8736199219), (75.467246875, 40.250865625), (76.1296386719, 40.4915972656), (76.8754, 40.5762), (77.7209189453, 40.4999767578), (78.4335015625, 40.2795953125), (79.0193740234, 39.9274880859), (79.4847625, 39.4560875), (79.8358931641, 38.8778259766), (80.0789921875, 38.2051359375), (80.2202857422, 37.4504498047), (80.266, 36.6262), (73.5055, 36.6262)],
# l
[(85.973, 28.6992), (88.49331, 28.6992), (88.49331, 47.834), (85.973, 47.834), (85.973, 28.6992), (85.973, 28.6992)],
# f
[(96.3883, 28.7004), (96.3883, 40.2512), (99.4605, 40.2512), (99.4605, 42.0027), (96.3883, 42.0027), (96.3883, 44.1512), (96.4229054688, 44.6702857422), (96.52635625, 45.0817171875), (96.6981039062, 45.3973431641), (96.9376, 45.6290125), (97.2442960938, 45.7885740234), (97.61764375, 45.8878765625), (98.5621, 45.9531), (99.8336, 45.875), (99.8336, 47.9656), (98.9403125, 48.1487), (98.0309, 48.2313), (97.1673613281, 48.1749609375), (96.374484375, 48.004725), (95.6659777344, 47.7187640625), (95.05555, 47.31525), (94.5569097656, 46.7923546875), (94.183765625, 46.14825), (93.9498261719, 45.3811078125), (93.8688, 44.4891), (93.8688, 42.0027), (91.273, 42.0027), (91.273, 40.2512), (93.8688, 40.2512), (93.8688, 28.7004), (96.3883, 28.7004)],
# t
[(100.908, 42.0027), (100.908, 40.2512), (103.188, 40.2512), (103.188, 31.7734), (103.250359375, 30.4847203125), (103.393189453, 29.8978896484), (103.668125, 29.3748875), (104.118419922, 28.9348306641), (104.787328125, 28.5968359375), (105.718103516, 28.3800201172), (106.954, 28.3035), (107.811, 28.3438375), (108.677, 28.4609), (108.677, 30.3953), (107.35, 30.2371), (106.713328125, 30.322746875), (106.191125, 30.58245), (105.837859375, 31.020353125), (105.708, 31.6406), (105.708, 40.2512), (108.782, 40.2512), (108.782, 42.0027), (105.708, 42.0027), (105.708, 45.634), (103.188, 44.8391), (103.188, 42.0012), (100.908, 42.0027)],
]
conv = ToPointsAndSegments()
for ring in polys:
conv.add_polygon([ring])
    skel = calc_skel(conv, pause=False, output=OUTPUT)
# check the amount of segments in the skeleton
assert len(skel.segments()) == 1398, len(skel.segments())
# check the amount of skeleton nodes
assert len(skel.sk_nodes) == 1041, len(skel.sk_nodes)
# check the amount of kinetic vertices that are (not) stopped
    not_stopped = list(filter(lambda v: v.stops_at is None, skel.vertices))
    stopped = list(filter(lambda v: v.stops_at is not None, skel.vertices))
assert len(not_stopped) == 14, len(not_stopped)
assert len(stopped) == 1384, len(stopped)
# check cross relationship between kinetic vertices and skeleton nodes
for v in skel.vertices:
assert at_same_location((v.start_node, v), v.starts_at)
# if v.stops_at is not None and not v.inf_fast:
# assert at_same_location((v.stop_node, v), v.stops_at), \
# "{} {} LINESTRING({} {})".format(id(v),
# v.stop_node.pos,
# v.position_at(v.starts_at),
# v.position_at(v.stops_at) )
if __name__ == "__main__":
if LOGGING:
import logging
import sys
root = logging.getLogger()
root.setLevel(logging.WARNING)
ch = logging.StreamHandler(sys.stdout)
ch.setLevel(logging.DEBUG)
# formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
formatter = logging.Formatter('%(asctime)s - %(message)s')
ch.setFormatter(formatter)
root.addHandler(ch)
unittest.main(verbosity=2)
| 158.099237
| 11,542
| 0.612679
|
e5271ef9bfcae191d78572c871ee0e9103c76d21
| 2,034
|
py
|
Python
|
examples/ex2_ackley.py
|
evdcush/neorl
|
a1af069072e752ab79e7279a88ad95d195a81821
|
[
"MIT"
] | 20
|
2021-04-20T19:15:33.000Z
|
2022-03-19T17:00:12.000Z
|
examples/ex2_ackley.py
|
evdcush/neorl
|
a1af069072e752ab79e7279a88ad95d195a81821
|
[
"MIT"
] | 17
|
2021-04-07T21:52:41.000Z
|
2022-03-06T16:05:31.000Z
|
examples/ex2_ackley.py
|
evdcush/neorl
|
a1af069072e752ab79e7279a88ad95d195a81821
|
[
"MIT"
] | 8
|
2021-05-07T03:36:30.000Z
|
2021-12-15T03:41:41.000Z
|
#---------------------------------
# Import packages
#---------------------------------
import numpy as np
import matplotlib.pyplot as plt
from neorl import PSO, DE, XNES
from math import exp, sqrt, cos, pi
np.random.seed(50)
#---------------------------------
# Fitness function
#---------------------------------
def ACKLEY(individual):
    # Ackley objective function.
d = len(individual)
f=20 - 20 * exp(-0.2*sqrt(1.0/d * sum(x**2 for x in individual))) \
+ exp(1) - exp(1.0/d * sum(cos(2*pi*x) for x in individual))
return f
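#---------------------------------
# Sanity check (illustrative addition, not part of the original example):
# the Ackley function attains its global minimum f(0,...,0) = 0.
#---------------------------------
assert abs(ACKLEY([0.0]*8)) < 1e-12, "Ackley minimum should be 0 at the origin"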
#---------------------------------
# Parameter Space
#---------------------------------
# Set up the parameter space (d=8)
d=8
lb=-32
ub=32
BOUNDS={}
for i in range(1,d+1):
BOUNDS['x'+str(i)]=['float', lb, ub]
#---------------------------------
# PSO
#---------------------------------
pso=PSO(mode='min', bounds=BOUNDS, fit=ACKLEY, npar=60,
c1=2.05, c2=2.1, speed_mech='constric', seed=1)
x_best, y_best, pso_hist=pso.evolute(ngen=120, verbose=1)
#---------------------------------
# DE
#---------------------------------
de=DE(mode='min', bounds=BOUNDS, fit=ACKLEY, npop=60,
F=0.5, CR=0.7, ncores=1, seed=1)
x_best, y_best, de_hist=de.evolute(ngen=120, verbose=1)
#---------------------------------
# NES
#---------------------------------
amat = np.eye(d)
xnes = XNES(mode='min', fit=ACKLEY, bounds=BOUNDS, A=amat, npop=60,
eta_Bmat=0.04, eta_sigma=0.1, adapt_sampling=True, ncores=1, seed=1)
x_best, y_best, nes_hist=xnes.evolute(120, verbose=1)
#---------------------------------
# Plot
#---------------------------------
# Plot fitness for all three methods
plt.figure()
plt.plot(pso_hist['global_fitness'], label='PSO')
plt.plot(de_hist['global_fitness'], label='DE')
plt.plot(nes_hist['fitness'], label='NES')
plt.xlabel('Generation')
plt.ylabel('Fitness')
plt.legend()
plt.savefig('ex2_fitness.png',format='png', dpi=300, bbox_inches="tight")
plt.close()
| 31.292308
| 81
| 0.483284
|
eee9760308d11b14da1050887cd000f517c32c0a
| 1,844
|
py
|
Python
|
tests/test_verify.py
|
arjenzijlstra/python-rest-api
|
81cfc0c3621864ab88128fb7fefb3e4bdec58429
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_verify.py
|
arjenzijlstra/python-rest-api
|
81cfc0c3621864ab88128fb7fefb3e4bdec58429
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_verify.py
|
arjenzijlstra/python-rest-api
|
81cfc0c3621864ab88128fb7fefb3e4bdec58429
|
[
"BSD-2-Clause"
] | null | null | null |
import unittest
from messagebird import Client
try:
from unittest.mock import Mock
except ImportError:
# mock was added to unittest in Python 3.3, but was an external library
# before.
from mock import Mock
class TestVerify(unittest.TestCase):
def test_verify(self):
http_client = Mock()
http_client.request.return_value = '{"id": "verify-id","href": "https://rest.messagebird.com/verify/verify-id","recipient": 31612345678,"reference": "MyReference","messages": {"href": "https://rest.messagebird.com/messages/message-id"},"status": "verified","createdDatetime": "2017-05-30T12:39:50+00:00","validUntilDatetime": "2017-05-30T12:40:20+00:00"}'
verify = Client('', http_client).verify('verify-id')
http_client.request.assert_called_once_with('verify/verify-id', 'GET', None)
self.assertEqual('verify-id', verify.id)
def test_verify_create(self):
http_client = Mock()
http_client.request.return_value = '{}'
Client('', http_client).verify_create('31612345678', {})
http_client.request.assert_called_once_with('verify', 'POST', {'recipient': '31612345678'})
def test_verify_verify(self):
http_client = Mock()
http_client.request.return_value = '{"id": "verify-id","href": "https://rest.messagebird.com/verify/verify-id","recipient": 31612345678,"reference": "MyReference","messages": {"href": "https://rest.messagebird.com/messages/63b168423592d681641eb07b76226648"},"status": "verified","createdDatetime": "2017-05-30T12:39:50+00:00","validUntilDatetime": "2017-05-30T12:40:20+00:00"}'
verify = Client('', http_client).verify_verify('verify-id', 'secret')
http_client.request.assert_called_once_with('verify/verify-id', 'GET', {'token': 'secret'})
self.assertEqual('verified', verify.status)
| 44.97561
| 385
| 0.691432
|
4bcd0f0ae9636a0c5fd8d174dcc6d030cfa3d67f
| 17,580
|
py
|
Python
|
pyADVISE/report.py
|
lightnerdevtech/pyADVISE
|
7496729b57a777c5ac73d1ac08589b9794a77955
|
[
"Apache-2.0"
] | null | null | null |
pyADVISE/report.py
|
lightnerdevtech/pyADVISE
|
7496729b57a777c5ac73d1ac08589b9794a77955
|
[
"Apache-2.0"
] | null | null | null |
pyADVISE/report.py
|
lightnerdevtech/pyADVISE
|
7496729b57a777c5ac73d1ac08589b9794a77955
|
[
"Apache-2.0"
] | null | null | null |
###########################################
###########################################
#### Function to generate reports in Reportlab
###########################################
###########################################
# libraries
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
## report lab
from reportlab.pdfgen import canvas
from reportlab.pdfgen.canvas import Canvas
from reportlab.lib.utils import ImageReader
from reportlab.pdfbase import pdfmetrics
from reportlab.pdfbase.ttfonts import TTFont
# text options
from textwrap import wrap
# import palettes
from pyADVISE import palettes
### Bring in the needed font
pdfmetrics.registerFont(TTFont('Gill Sans MT', 'gil_____.ttf'))
pdfmetrics.registerFont(TTFont('Gill Sans MT Bold', 'gilb____.ttf'))
pdfmetrics.registerFont(TTFont('Gill Sans MT It', 'gili____.ttf'))
palette= palettes.USAID_general()
def inch(a):
a = a*72
return a
# split the notes at the bottom
def split(obs, n):
obs = "\n".join(wrap(obs, n)).split('\n')
return obs
def apply_scripting(textobject, text, rise, font='Gill Sans MT'):
textobject.setFont(font, 6)
textobject.setRise(rise)
textobject.textOut(text)
textobject.setFont(font, 12)
textobject.setRise(0)
########################################
###### simple table
#######################################
def table(c, df, column_w, split_list, height, margin, width, gap, text_font='something', table_ystart=9.19, enter=.24, vert_lines = [3, 3.6, 4.1], font_color = ['Rich Black', 'Rich Black','Rich Black', 'Medium Gray', 'Medium Gray'], font_type = ['', '', '', 'It', 'It'], top_label=False, column_citation = ['footnote_1', None, None, None, None], pic_size=(.25,.25),
second_line=False, table_title='Table', font='Gill Sans MT', line_width=.5, image_folder='C:/Users/alightner/Documents/Shared/pyADVISE/images/',
picture=False, picture_name='Picture', column_labels =['country', 'SSA', 'Year']):
'''This function generates a table in the style of
the country reports. '''
palette= palettes.USAID_general()
###################################
### mapping options
####################################
# we only want to plot the non-footnote columns, generate a different dataset, drop witin main
footnotes_df = df[[c for c in df.columns if c.lower()[:-2] == 'footnotes']]
df = df[[c for c in df.columns if c.lower()[:-2] != 'footnotes']]
#### determine the number of enters needed per column
enters_by_row = pd.DataFrame()
for col in range(0, len(list(df.columns))):
# access the values of the column
values = df.iloc[:,col].values
# split the values on the associated split_num
values_split = [split(values[i], split_list[col]) for i in range(0, len(values))]
# generate the lenght (number of enters for each row in each column)
length_of_values = [len(values_split[i]) for i in range(0, len(values_split))]
# add lengths to dataframe
enters_by_row[col] = length_of_values
# generate the max of each row, place into an array
table_gap = enters_by_row.max(axis=1).to_frame().reset_index()[0].values
# for some unknown reason, this only works after changing to a list
table_gap = list(table_gap)
# this var is called table gap in th rest of the code
# adjust for the difference between gaps for line space and between lines of text.
for i in range(0, len(table_gap)):
# because there is a header, I subtract one from the count
a = table_gap[i]-1
        # make gaps between additional lines of text 45 percent of the enter value
if a>0:
gap = 1+(a*.45)
table_gap[i] = gap
# starting point to be iterated on
y = table_ystart
########################################################
# generate horizontal lines for table
#######################################################
c.setLineWidth(line_width)
c.setFillColor(palette['Rich Black'])
c.setStrokeColor(palette['Rich Black'])
for i in range(0, len(table_gap)):
c.line(inch(margin), inch(y), inch(margin+column_w), inch(y))
#print(table_gap[i])
y= y-(enter*table_gap[i])
# final line outside of the loop
c.line(inch(margin), inch(y), inch(margin+column_w), inch(y))
# mark the end of the table => return w
end_of_table = inch(y)
##############################################
# generate values for the tables
##############################################
#################### formatting choices
# set font
c.setFont(font, size=10, leading = None)
# set indent for
indent_value=0.09
    # indent levels: the order matters; each entry gives where a column's text begins relative to the margin.
indent = [indent_value]+ [vert_lines[i]+indent_value for i in range(0,len(vert_lines))]
# generate column_footnotes list for plotting
footnotes_dict = {str(i): footnotes_df['footnotes_'+str(i)].values for i in range(0, len(footnotes_df.columns))}
keys = list(footnotes_dict.keys())
# iterate over each colomn
for col in range(0, len(list(df))):
# select the values in the column of interest
values = df.iloc[:,col].values
# generate a list of the given text which consists of strings the size of the split_list[s] - will not cut words apart.
values = [split(values[i], split_list[col]) for i in range(0, len(values))]
#################################
#### Font Settings
#################################
### Select the font type
if font_type[col]=='It':
c.setFont(font+' It', size=10, leading = None)
elif font_type[col]=='Bold':
c.setFont(font+' Bold', size=10, leading = None)
else:
c.setFont(font, size=10, leading = None)
# Select color of the text
c.setFillColor(palette[font_color[col]])
c.setStrokeColor(palette[font_color[col]])
############################
###### place text
############################
# choose where the text starts relative to the first line in the table
y_s = table_ystart-0.16
# loop over each row in a particular column (for longer rows values)
for i in range(0, len(values)):
# number of rows
lines = len(values)
# place text in the respective row (think about generalizing this in the future -- now it just works)
if lines==1:
n = 0
# for the one value we have.
for g in values[i]:
# generate text with superscripting
textobject = c.beginText()
textobject.setTextOrigin(inch(margin+indent[col]),inch(y_s-(n*0.65*enter)))
                    textobject.textOut(g)
                    try:
                        apply_scripting(textobject, footnotes_dict[keys[col]][i], 4)
                    except (KeyError, IndexError):
                        pass  # no footnote available for this column/row
c.drawText(textobject)
n +=1
y_s = y_s-(enter*table_gap[i])
if lines>1:
n = 0
for f in range(0, len(values[i])):
# generate text with superscripting
textobject = c.beginText()
textobject.setTextOrigin(inch(margin+indent[col]),inch(y_s-(n*0.53*enter)))
textobject.textOut(values[i][f])
                    try:
                        if f == len(values[i])-1:
                            apply_scripting(textobject, footnotes_dict[keys[col]][i], 4)
                    except (KeyError, IndexError):
                        pass  # no footnote available for this column/row
c.drawText(textobject)
n +=1
y_s = y_s-(enter*table_gap[i])
########### draw column lines
column1 = vert_lines
c.setFillColor(palette['Rich Black'])
c.setStrokeColor(palette['Rich Black'])
for i in range(0, len(column1)):
c.line(inch(margin+column1[i]), inch(table_ystart),inch(margin+column1[i]), end_of_table)
    if second_line:
        # column1 is a list of x-offsets; draw the extra line 1.35" to the
        # right of the first vertical line (assumed intent of the original code)
        x = margin + column1[0] + 1.35
        c.line(inch(x), inch(table_ystart), inch(x), inch(table_ystart-(enter*4)))
########### draw title
c.setFont(font+' Bold', size=12, leading = None)
c.setFillColor(palette['Rich Black'])
c.setStrokeColor(palette['Rich Black'])
c.drawString(inch(margin+pic_size[1]+0.08+0.2), inch(table_ystart+.075), table_title)
########### draw table labels
c.setFont(font+' Bold', size=8.5, leading = None)
if top_label ==True:
for i in range(0, len(column_labels)):
c.drawString(inch(margin+vert_lines[i]+0.05), inch(table_ystart+.075), column_labels[i])
############ place the visuals
############ default is the image folder in pyADVISE
if picture ==True:
image = image_folder+picture_name
c.drawImage(image, inch(margin+.08), inch(table_ystart+.03), width=np.round(inch(pic_size[1])), height=np.round(inch(pic_size[0])))
# return end of the table for reference for next table or plot
return c, end_of_table
#####################################
###### USAID Header
#####################################
def USAID_header(c, height, margin, column_w, gap, width, country='Malawi', date='July 2018',):
palette= palettes.USAID_general()
# set current color, every fill color will be this color after this point until changed
c.setFillColor(palette['Medium Blue'])
c.setStrokeColor(palette['Medium Blue'])
# set the top of the box circle
top_box = height-0.7
# blue top
c.rect(0, inch(top_box), inch(9), inch(top_box), fill=1)
#blue line
c.setLineWidth(2)
c.line(0, inch(top_box-.55), inch(9), inch(top_box-.55))
c.setLineWidth(1)
# grey box
c.setFillColor(palette['Light Gray'])
c.setStrokeColor(palette['Light Gray'])
c.rect(inch(margin), inch(9.7), inch(margin+column_w-0.5), inch(.6), fill=1)
# title and country
c.setFont('OpenSans-Light', size=30, leading = None)
c.setFillColor(palette['White'])
c.setStrokeColor(palette['White'])
c.drawString(inch(margin+0.12), inch(top_box+.15), 'COUNTRY PROFILE')
c.setFillColor(palette['White'])
c.setStrokeColor(palette['White'])
c.setFont('OpenSans-Light', size=12, leading = None)
#c.drawString(inch(margin+0.12), inch(top_box+.15), 'COUNTRY PROFILE')
c.drawRightString(inch(width-margin), inch(top_box+.15), 'USAID Data Services (EADS)')
c.setFillColor(palette['Medium Blue'])
c.setStrokeColor(palette['Medium Blue'])
c.setFont('OpenSans-Bold', size=24, leading = None)
c.drawString(inch(margin+0.12), inch(top_box-.4), country.upper())
c.setFillColor(palette['Medium Blue'])
c.setStrokeColor(palette['Medium Blue'])
c.setFont('OpenSans-Bold', size=15, leading = None)
c.drawRightString(inch(width-margin), inch(top_box-.4), date.upper())
return c
def USAID_footer_text_page1(c, location=(150, 60), font='OpenSans-Light', size=8):
# begin the text object
textobject = c.beginText()
# place the text object
textobject.setTextOrigin(location[0], location[1])
# set font for the text options
textobject.setFont(font, size=size, leading = None)
textobject.textLines('''
Prepared by USAID Data Services with data from the International Data and Economic Analysis
website (https://idea.usaid.gov/). DISCLAIMER: The views expressed in this publication do not necessarily reflect
the views of the United States Agency for International Development (USAID) or the United States Government.
''')
c.drawText(textobject)
return c
def SDG_footer_text_page2(c, location=(150, 60), font='OpenSans-Light', size=8):
# set palette
c.setFillColor(palette['Rich Black'])
c.setStrokeColor(palette['Rich Black'])
# begin the text object
textobject = c.beginText()
# place the text object
textobject.setTextOrigin(location[0], location[1])
# set font for the text options
textobject.setFont(font, size=size, leading = None)
textobject.textLines('''
Sources: 1. Regions based on USAID classifications.; 2. World Bank, World Development Indicators (WDI); 3. Calculated by Data Services, based on World Bank, World Development Indicators; 4. International
Monetary Fund (IMF), World Economic Outlook Database (WEO); 5. World Economic Forum (WEF), Enabling Trade Index; 6. U.S. International Trade Commission (USITC), Trade DataWeb; 7. Food and Agri-
cultural Organization (FAO), FAOSTAT Land and Fertilizer Data; 8. World Economic Forum (WEF), Global Competitiveness Index; 9. Notre Dame Climate Adaptation Initiative (ND-GAIN) Country Index; 10. UN
Office for Disaster Risk Reduction (UNISDR), Global Assessment Report on Disaster Risk Reduction; 11. CIESIN and Yale, Environmental Performance Index (EPI); 12. Demographic and Health Surveys (DHS),
STATcompiler; 13. Food and Agricultural Organization (FAO), AQUASTAT; 14. WHO/UNICEF, Joint Monitoring Programme (JMP) for Water Supply, Sanitation, and Hygiene; 15. World Economic Forum (WEF),
Networked Readiness Index; 16. World Bank, Millennium Development Goals; 17. World Bank, Enterprise Surveys; 18. World Bank, Enabling the Business of Agriculture; 19. International Telecommun-
ication Union (ITU), World Telecommunication/ICT Indicators Database'''
)
c.drawText(textobject)
return c
#####################################
###### SDG Header
#####################################
def SDG_header(c, gray_list, height, margin, column_w, gap, width, country='Malawi', date='July 2018',
title_text='SUSTAINABLE DEVELOPMENT PROFILE', subtitle_text='PREPARED BY USAID DATA SERVICES'):
# set current color, every fill color will be this color after this point until changed
c.setFillColor(palette['Medium Blue'])
c.setStrokeColor(palette['Medium Blue'])
# set the top of the box circle
top_box = height-1
# blue top
c.rect(0, inch(top_box), inch(9), inch(top_box), fill=1)
#blue line
c.setLineWidth(1)
c.line(0, inch(top_box-.4), inch(9), inch(top_box-.4))
c.setLineWidth(1)
#####################
# grey box
#####################
gray_start = top_box-1.1
gray_height = top_box -gray_start- 0.45
c.setFillColor(palette['Light Gray'])
c.setStrokeColor(palette['Light Gray'])
c.rect(inch(margin), inch(gray_start), inch(margin+column_w-0.25), inch(gray_height), fill=1)
# gray texts = Region, Subregion, Income group
c.setFont('Gill Sans MT Bold', size=10, leading = None)
text_start = (gray_start + gray_height)
c.setFillColor(palette['Rich Black'])
c.setStrokeColor(palette['Rich Black'])
labels = ['blank(index at 0)', 'Region¹', 'Subregion', 'Income Group']
for i in [1,2,3]:
c.setFont('Gill Sans MT Bold', size=10, leading = None)
c.drawString(inch(margin+.14),inch(text_start+.09-(0.22*i)), labels[i])
c.setFont('Gill Sans MT', size=10, leading = None)
c.drawString(inch(margin+1.8),inch(text_start+.09-(0.22*i)), gray_list[i-1])
##############################
# title and country
################################
c.setFont('Gill Sans MT', size=33.5, leading = None)
c.setFillColor(palette['White'])
c.setStrokeColor(palette['White'])
c.drawString(inch(margin+0.02), inch(top_box+.48), title_text)
c.setFillColor(palette['White'])
c.setStrokeColor(palette['White'])
c.setFont('OpenSans-Light', size=12, leading = None)
c.drawString(inch(margin+0.02), inch(top_box+.17), subtitle_text)
c.setFillColor(palette['Medium Blue'])
c.setStrokeColor(palette['Medium Blue'])
c.setFont('Gill Sans MT Bold', size=24, leading = None)
c.drawString(inch(margin+0.02), inch(top_box-.33), country.upper())
c.setFont('Gill Sans MT Bold', size=15, leading = None)
c.drawRightString(inch(width-margin), inch(top_box-.26), date.upper())
return c
def SDG_header_page2(c, country, date, height, margin, width):
# usaid palette
palette= palettes.USAID_general()
# set start_point for the text
start = height-.3
# write country
c.setFillColor(palette['Medium Blue'])
c.setStrokeColor(palette['Medium Blue'])
c.setFont('Gill Sans MT Bold', size=14, leading = None)
c.drawString(inch(margin+0.02), inch(start), country.upper())
# write date
c.setFont('Gill Sans MT Bold', size=15, leading = None)
c.drawRightString(inch(width-margin), inch(start), date.upper())
return c
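if __name__ == "__main__":
    # Minimal smoke test (illustrative addition): exercise the pure helpers
    # defined above. The rendering functions need a ReportLab canvas and the
    # registered Gill Sans/OpenSans fonts, so they are not called here; this
    # assumes only that the module imports succeeded on this machine.
    print(inch(1))  # 72 points per inch
    print(split("a long note that needs to be wrapped for the report footer", 20))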
| 35.301205
| 366
| 0.584528
|
f5b6d8cfba02a75aa43bc910456dfe4c2b04d16f
| 6,168
|
py
|
Python
|
homeassistant/components/sensor/haveibeenpwned.py
|
shire210/home-assistant
|
63cd8bbee6f1b74ae9c6c249ac820119a8a573d8
|
[
"Apache-2.0"
] | 2
|
2017-02-25T00:27:06.000Z
|
2017-02-25T03:09:30.000Z
|
homeassistant/components/sensor/haveibeenpwned.py
|
shire210/home-assistant
|
63cd8bbee6f1b74ae9c6c249ac820119a8a573d8
|
[
"Apache-2.0"
] | null | null | null |
homeassistant/components/sensor/haveibeenpwned.py
|
shire210/home-assistant
|
63cd8bbee6f1b74ae9c6c249ac820119a8a573d8
|
[
"Apache-2.0"
] | null | null | null |
"""
Support for haveibeenpwned (email breaches) sensor.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.haveibeenpwned/
"""
from datetime import timedelta
import logging
import voluptuous as vol
import requests
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (STATE_UNKNOWN, CONF_EMAIL)
from homeassistant.helpers.entity import Entity
import homeassistant.helpers.config_validation as cv
from homeassistant.util import Throttle
import homeassistant.util.dt as dt_util
from homeassistant.helpers.event import track_point_in_time
_LOGGER = logging.getLogger(__name__)
DATE_STR_FORMAT = "%Y-%m-%d %H:%M:%S"
USER_AGENT = "Home Assistant HaveIBeenPwned Sensor Component"
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=15)
MIN_TIME_BETWEEN_FORCED_UPDATES = timedelta(seconds=5)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_EMAIL): vol.All(cv.ensure_list, [cv.string]),
})
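# Example configuration.yaml entry (illustrative, matching the schema above):
#
# sensor:
#   - platform: haveibeenpwned
#     email:
#       - your_email1@example.com
#       - your_email2@example.com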
# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Set up the HaveIBeenPwnedSensor sensor."""
emails = config.get(CONF_EMAIL)
data = HaveIBeenPwnedData(emails)
devices = []
for email in emails:
devices.append(HaveIBeenPwnedSensor(data, hass, email))
add_devices(devices)
    # Make sure we get initial data for the sensors: ignore the normal
    # 15-minute throttle and use the 5-second forced-update throttle instead
for sensor in devices:
sensor.update_nothrottle()
class HaveIBeenPwnedSensor(Entity):
"""Implementation of a HaveIBeenPwnedSensor."""
def __init__(self, data, hass, email):
"""Initialize the HaveIBeenPwnedSensor sensor."""
self._state = STATE_UNKNOWN
self._data = data
self._hass = hass
self._email = email
self._unit_of_measurement = "Breaches"
@property
def name(self):
"""Return the name of the sensor."""
return "Breaches {}".format(self._email)
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit_of_measurement
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def device_state_attributes(self):
"""Return the atrributes of the sensor."""
val = {}
if self._email not in self._data.data:
return val
for idx, value in enumerate(self._data.data[self._email]):
tmpname = "breach {}".format(idx+1)
tmpvalue = "{} {}".format(
value["Title"],
dt_util.as_local(dt_util.parse_datetime(
value["AddedDate"])).strftime(DATE_STR_FORMAT))
val[tmpname] = tmpvalue
return val
def update_nothrottle(self, dummy=None):
"""Update sensor without throttle."""
self._data.update_no_throttle()
# Schedule a forced update 5 seconds in the future if the update above
        # returned no data for this sensor's email. This avoids the HTTP
        # "too many requests" error while still providing initial data shortly
        # after hass startup; once we have the data it will update as normal
        # via update().
if self._email not in self._data.data:
track_point_in_time(self._hass,
self.update_nothrottle,
dt_util.now() +
MIN_TIME_BETWEEN_FORCED_UPDATES)
return
if self._email in self._data.data:
self._state = len(self._data.data[self._email])
self.schedule_update_ha_state()
def update(self):
"""Update data and see if it contains data for our email."""
self._data.update()
if self._email in self._data.data:
self._state = len(self._data.data[self._email])
class HaveIBeenPwnedData(object):
"""Class for handling the data retrieval."""
def __init__(self, emails):
"""Initialize the data object."""
self._email_count = len(emails)
self._current_index = 0
self.data = {}
self._email = emails[0]
self._emails = emails
def set_next_email(self):
"""Set the next email to be looked up."""
self._current_index = (self._current_index + 1) % self._email_count
self._email = self._emails[self._current_index]
def update_no_throttle(self):
"""Get the data for a specific email."""
self.update(no_throttle=True)
@Throttle(MIN_TIME_BETWEEN_UPDATES, MIN_TIME_BETWEEN_FORCED_UPDATES)
def update(self, **kwargs):
"""Get the latest data for current email from REST service."""
try:
url = "https://haveibeenpwned.com/api/v2/breachedaccount/{}". \
format(self._email)
_LOGGER.info("Checking for breaches for email %s", self._email)
req = requests.get(url, headers={"User-agent": USER_AGENT},
allow_redirects=True, timeout=5)
except requests.exceptions.RequestException:
_LOGGER.error("failed fetching HaveIBeenPwned Data for '%s'",
self._email)
return
if req.status_code == 200:
self.data[self._email] = sorted(req.json(),
key=lambda k: k["AddedDate"],
reverse=True)
# only goto next email if we had data so that
# the forced updates try this current email again
self.set_next_email()
elif req.status_code == 404:
self.data[self._email] = []
            # a 404 means no breaches were found for this email; record the
            # empty result and advance to the next email
self.set_next_email()
else:
_LOGGER.error("failed fetching HaveIBeenPwned Data for '%s'"
"(HTTP Status_code = %d)", self._email,
req.status_code)
| 34.266667
| 78
| 0.630026
|
e00fd19768e4ad1ea8bec3214f0514e2ed7b3e28
| 1,571
|
py
|
Python
|
final-project/repositories/Real-time_3D_Rendering/HLS3D-master/render/transformable.py
|
bol-edu/2020-fall-ntu
|
5e009875dec5a3bbcebd1b3fae327990371d1b6a
|
[
"MIT"
] | 7
|
2021-02-10T17:59:48.000Z
|
2021-09-27T15:02:56.000Z
|
final-project/repositories/Real-time_3D_Rendering/HLS3D-master/render/transformable.py
|
bol-edu/2020-fall-ntu
|
5e009875dec5a3bbcebd1b3fae327990371d1b6a
|
[
"MIT"
] | null | null | null |
final-project/repositories/Real-time_3D_Rendering/HLS3D-master/render/transformable.py
|
bol-edu/2020-fall-ntu
|
5e009875dec5a3bbcebd1b3fae327990371d1b6a
|
[
"MIT"
] | 1
|
2022-03-22T01:46:01.000Z
|
2022-03-22T01:46:01.000Z
|
import numpy as np
class Transformable:
def __init__(self, transform):
self.transform = transform
self.scale = 1
def set_scale(self, scale):
self.scale = scale
    def translate_local_x(self, offset):
        pass
    def translate_local_y(self, offset):
        pass
    def translate_local_z(self, offset):
        pass
def lookat(self, pos):
pass
def translate_x(self, offset):
self.transform[0, 3] += offset
def translate_y(self, offset):
self.transform[1, 3] += offset
def translate_z(self, offset):
self.transform[2, 3] += offset
def rotate_x(self, degree):
c = np.cos(degree * np.pi / 180)
s = np.sin(degree * np.pi / 180)
self.transform = self.transform @ np.array([
[1.0, 0.0, 0.0, 0.0],
[0.0, c, -s, 0.0],
[0.0, s, c, 0.0],
[0.0, 0.0, 0.0, 1.0],
])
def rotate_y(self, degree):
c = np.cos(degree * np.pi / 180)
s = np.sin(degree * np.pi / 180)
self.transform = self.transform @ np.array([
[ c, 0.0, s, 0.0],
[0.0, 1.0, 0.0, 0.0],
[ -s, 0.0, c, 0.0],
[0.0, 0.0, 0.0, 1.0],
])
def rotate_z(self, degree):
c = np.cos(degree * np.pi / 180)
s = np.sin(degree * np.pi / 180)
self.transform = self.transform @ np.array([
[ c, -s, 0.0, 0.0],
[ s, c, 0.0, 0.0],
[0.0, 0.0, 1.0, 0.0],
[0.0, 0.0, 0.0, 1.0],
])
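if __name__ == "__main__":
    # Minimal sketch (illustrative addition): compose a transform from the
    # identity matrix, then rotate and translate it.
    t = Transformable(np.eye(4))
    t.rotate_y(90)
    t.translate_x(2.0)
    print(np.round(t.transform, 3))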
| 25.33871
| 52
| 0.471674
|
cc6743275539a06849c3845b90a8f850d3b8e367
| 4,212
|
py
|
Python
|
utils.py
|
carrier-io/security
|
a1dd3f0f0d9a923dc97b8e1b5ee45f5828f2454e
|
[
"Apache-2.0"
] | null | null | null |
utils.py
|
carrier-io/security
|
a1dd3f0f0d9a923dc97b8e1b5ee45f5828f2454e
|
[
"Apache-2.0"
] | 1
|
2022-01-20T10:26:03.000Z
|
2022-01-20T10:26:03.000Z
|
utils.py
|
carrier-io/security
|
a1dd3f0f0d9a923dc97b8e1b5ee45f5828f2454e
|
[
"Apache-2.0"
] | 3
|
2021-06-16T11:36:34.000Z
|
2021-07-21T09:09:47.000Z
|
import json
from queue import Empty
from typing import Tuple, Union
from pydantic import ValidationError
from pylon.core.tools import log
from .models.api_tests import SecurityTestsDAST
from .models.security_results import SecurityResultsDAST
from ..tasks.api.utils import run_task
from ..projects.models.statistics import Statistic
def run_test(test: SecurityTestsDAST, config_only=False):
security_results = SecurityResultsDAST(
project_id=test.project_id,
test_id=test.id,
test_uid=test.test_uid,
test_name=test.name
)
security_results.insert()
event = []
test.results_test_id = security_results.id
test.commit()
event.append(test.configure_execution_json("cc"))
if config_only:
return event[0]
response = run_task(test.project_id, event)
response['redirect'] = f'/task/{response["task_id"]}/results'
statistic = Statistic.query.filter_by(project_id=test.project_id).first()
statistic.dast_scans += 1
statistic.commit()
response['result_id'] = security_results.id
return response
class ValidationErrorPD(Exception):
def __init__(self, loc: Union[str, list], msg: str):
self.loc = [loc] if isinstance(loc, str) else loc
self.msg = msg
super().__init__({'loc': self.loc, 'msg': msg})
def json(self):
return json.dumps(self.dict())
def dict(self):
return {'loc': self.loc, 'msg': self.msg}
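# Illustrative usage (hypothetical values):
#   err = ValidationErrorPD('name', 'field is required')
#   err.dict()  -> {'loc': ['name'], 'msg': 'field is required'}
#   err.json()  -> '{"loc": ["name"], "msg": "field is required"}'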
def parse_test_data(project_id: int, request_data: dict, *,
rpc=None, common_kwargs: dict = None,
test_create_rpc_kwargs: dict = None,
raise_immediately: bool = False,
skip_validation_if_undefined: bool = True,
) -> Tuple[dict, list]:
if not rpc:
from ..shared.utils.rpc import RpcMixin
rpc = RpcMixin().rpc
common_kwargs = common_kwargs or dict()
test_create_rpc_kwargs = test_create_rpc_kwargs or dict()
errors = list()
test_name = request_data.pop('name', None)
test_description = request_data.pop('description', None)
try:
test_data = rpc.call.security_test_create_common_parameters(
project_id=project_id,
name=test_name,
description=test_description,
**common_kwargs
)
except ValidationError as e:
        log.warning('Failed to parse common test parameters: %s', e)
test_data = dict()
errors.extend(e.errors())
if raise_immediately:
return test_data, errors
for k, v in request_data.items():
try:
            log.info('security test create :: parsing :: [%s]', k)
test_data.update(rpc.call_function_with_timeout(
func=f'security_test_create_{k}',
timeout=1,
data=v,
**test_create_rpc_kwargs
))
except Empty:
log.warning(f'Cannot find parser for {k}')
if skip_validation_if_undefined:
test_data.update(v)
# errors.append(ValidationErrorPD('alert_bar', f'Cannot find parser for {i}'))
# return make_response(ValidationErrorPD('alert_bar', f'Cannot find parser for {i}').json(), 404)
        except ValidationError as e:
            err_list = e.errors()
            for i in err_list:
                i['loc'] = [k, *i['loc']]
            errors.extend(err_list)
if raise_immediately:
return test_data, errors
except Exception as e:
log.warning('Exception as e')
log.warning(type(e))
e.loc = [k, *getattr(e, 'loc', [])]
errors.append(ValidationErrorPD(e.loc, str(e)))
if raise_immediately:
return test_data, errors
return test_data, errors
| 31.909091
| 109
| 0.591643
|
4a649381327ffd8fd9291801ee2e78daf1e93563
| 2,762
|
py
|
Python
|
calh/app.py
|
ricky-lim/calh
|
27185862b93cec92c9a5af4705aadea47b7a15d1
|
[
"Apache-2.0"
] | 2
|
2020-05-05T20:12:26.000Z
|
2021-08-19T20:25:46.000Z
|
calh/app.py
|
ricky-lim/calh
|
27185862b93cec92c9a5af4705aadea47b7a15d1
|
[
"Apache-2.0"
] | 6
|
2020-04-11T04:59:28.000Z
|
2020-05-03T06:17:25.000Z
|
calh/app.py
|
ricky-lim/calh
|
27185862b93cec92c9a5af4705aadea47b7a15d1
|
[
"Apache-2.0"
] | null | null | null |
from pathlib import Path
import ipyvuetify as v
from ipywidgets import FileUpload
from calh.visualization import Heatmap
class App:
def __init__(self, app_dir="."):
self.app_dir = app_dir
self.main_loading = v.ProgressLinear(indeterminate=False)
self.main_toolbar = self.create_main_toolbar()
self.heatmap_plot = v.Container()
def create_file_upload(self) -> v.Btn:
file_uploader = FileUpload(description=".ics file", multiple=False)
def on_upload(change):
self.main_loading.indeterminate = True
value = change["new"]
filename = list(value.keys())[0]
uploaded_file = Path(self.app_dir) / filename
try:
with open(uploaded_file, "wb") as outfile:
outfile.write(value[filename]["content"])
hm = Heatmap(input_data=uploaded_file)
hm.draw(title=filename)
self.heatmap_plot.children = [hm.result.canvas]
            finally:
                if uploaded_file.exists():
                    uploaded_file.unlink()
self.main_loading.indeterminate = False
file_uploader.observe(on_upload, "value")
btn_uploader = v.Btn(class_="mx-2", children=[file_uploader])
return btn_uploader
def create_show_example(self) -> v.Btn:
btn = v.Btn(class_="mx-2", children=["Show example"])
def on_click(*_):
self.heatmap_plot.children = [
v.Img(src="examples/data/processed/png/liverpool.png"),
]
btn.on_event("click", on_click)
return btn
def create_link_button(self, target, text, icon):
return v.Btn(
class_="mx-2",
href=target,
target="_blank",
children=[v.Icon(children=[icon]), text],
)
def create_main_toolbar(self) -> v.Toolbar:
return v.Toolbar(
flat=True,
block=True,
children=[
v.Spacer(),
self.create_file_upload(),
self.create_show_example(),
self.create_link_button(
target="examples/data/raw/ics/liverpool.ics",
text="Example Data",
icon="mdi-file",
),
self.create_link_button(
target="https://github.com/ricky-lim/calh",
text="Source",
icon="mdi-github-face",
),
v.Spacer(),
],
)
def create(self):
return v.Card(
flat=True,
class_="mx-auto",
children=[self.main_loading, self.main_toolbar, self.heatmap_plot],
)
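if __name__ == "__main__":
    # Minimal sketch (illustrative addition): build the widget tree. In a
    # notebook you would display the returned Card; here we only construct it.
    app = App(app_dir=".")
    card = app.create()
    print(type(card))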
| 32.494118
| 79
| 0.542723
|
bf6eaba0e0e77d681c8edf7e58e7b10aed74c98a
| 418
|
py
|
Python
|
connect/models/activation_response.py
|
ht-albert/connect-python-sdk
|
15de3b9d641e25c058a4ebe4a3644c584b3e6dfa
|
[
"Apache-2.0"
] | null | null | null |
connect/models/activation_response.py
|
ht-albert/connect-python-sdk
|
15de3b9d641e25c058a4ebe4a3644c584b3e6dfa
|
[
"Apache-2.0"
] | null | null | null |
connect/models/activation_response.py
|
ht-albert/connect-python-sdk
|
15de3b9d641e25c058a4ebe4a3644c584b3e6dfa
|
[
"Apache-2.0"
] | null | null | null |
import json
class ActivationTileResponse(object):
tile = 'Activation succeeded'
def __init__(self, markdown=None, *args, **kwargs):
if markdown:
try:
self.tile = json.loads(markdown)
except ValueError:
self.tile = markdown
class ActivationTemplateResponse(object):
def __init__(self, template_id):
self.template_id = template_id
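if __name__ == '__main__':
    # Minimal sketch (illustrative addition): JSON payloads are decoded,
    # plain markdown falls back to the raw string, and the default tile is
    # used when no markdown is given.
    print(ActivationTileResponse().tile)                  # 'Activation succeeded'
    print(ActivationTileResponse('plain markdown').tile)  # ValueError -> raw string kept
    print(ActivationTemplateResponse('TL-123').template_id)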
| 23.222222
| 55
| 0.626794
|
a760fe61f383c2dea2d926decfa4b43cc2aa9a83
| 331
|
py
|
Python
|
ai_part/data_collection/rename_img_files.py
|
BuildForSDGCohort2/Team-253-Group-A-Backend
|
6640d3779ca6b2768eb0ea88e12cd78538e71e15
|
[
"MIT"
] | 5
|
2020-09-28T16:50:24.000Z
|
2022-02-16T15:01:22.000Z
|
ai_part/data_collection/rename_img_files.py
|
BuildForSDGCohort2/Team-253-Group-A-Backend
|
6640d3779ca6b2768eb0ea88e12cd78538e71e15
|
[
"MIT"
] | 16
|
2020-08-25T07:50:12.000Z
|
2020-09-30T09:36:53.000Z
|
ai_part/data_collection/rename_img_files.py
|
BuildForSDGCohort2/Team-253-Group-A-Backend
|
6640d3779ca6b2768eb0ea88e12cd78538e71e15
|
[
"MIT"
] | 3
|
2020-08-29T16:32:25.000Z
|
2021-12-20T13:29:45.000Z
|
# This piece of code is used to rename image files and move them to a specific folder.
import os
root_dir = './covidTrash'
directory = './covidTrash/as_trash_clean_v1.clean/'
clean_data_dir = './covidTrash/cleaned_data/'
for file in os.listdir(directory):
os.rename(directory + file, clean_data_dir + 'number_of_dir' + file)
| 33.1
| 86
| 0.752266
|
38cbe548bb23b33f29cf41582e60c6753664673e
| 3,338
|
py
|
Python
|
Pix2Vox-F/models/encoder.py
|
chicleee/Pix2Vox
|
deaf8a161268bdc0cfddc3d2196457886fe12f95
|
[
"MIT"
] | 2
|
2021-04-26T01:49:56.000Z
|
2021-05-18T16:05:01.000Z
|
Pix2Vox-F/models/encoder.py
|
chicleee/Pix2Vox
|
deaf8a161268bdc0cfddc3d2196457886fe12f95
|
[
"MIT"
] | null | null | null |
Pix2Vox-F/models/encoder.py
|
chicleee/Pix2Vox
|
deaf8a161268bdc0cfddc3d2196457886fe12f95
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# import torch
# import torchvision.models
import paddle
from paddle.vision.models import vgg16
from paddle.vision.models import resnet50
class Encoder(paddle.nn.Layer):
def __init__(self, cfg):
super(Encoder, self).__init__()
self.cfg = cfg
# Layer Definition
vgg16_bn = vgg16(pretrained=False, batch_norm=True)
self.backbone = paddle.nn.Sequential(*list(vgg16_bn.features.children())[:27])
# resnet50_ = resnet50(pretrained=False)
# self.backbone = paddle.nn.Sequential(*list(resnet50_.features.children())[:40])
self.layer1 = paddle.nn.Sequential(
paddle.nn.Conv2D(512, 512, kernel_size=1, weight_attr=paddle.nn.initializer.KaimingNormal(), bias_attr=paddle.nn.initializer.Constant(value=0.0)),
paddle.nn.BatchNorm2D(512),
paddle.nn.ELU(),
)
self.layer2 = paddle.nn.Sequential(
paddle.nn.Conv2D(512, 256, kernel_size=3, weight_attr=paddle.nn.initializer.KaimingNormal(), bias_attr=paddle.nn.initializer.Constant(value=0.0)),
paddle.nn.BatchNorm2D(256),
paddle.nn.ELU(),
paddle.nn.MaxPool2D(kernel_size=4)
)
self.layer3 = paddle.nn.Sequential(
paddle.nn.Conv2D(256, 128, kernel_size=3, weight_attr=paddle.nn.initializer.KaimingNormal(), bias_attr=paddle.nn.initializer.Constant(value=0.0)),
paddle.nn.BatchNorm2D(128),
paddle.nn.ELU()
)
# # Don't update params in VGG16
# for param in vgg16_bn.parameters():
# param.requires_grad = False
def forward(self, rendering_images):
# print("rendering_images.shape", rendering_images.shape) # torch.Size([batch_size, n_views, img_c, img_h, img_w])
rendering_images = paddle.transpose(rendering_images, perm=[1, 0, 2, 3, 4]) # pytorch:rendering_images.permute(1, 0, 2, 3, 4).contiguous()
# print("after transpose shape", rendering_images.shape) # [2, 4, 3, 224, 224]
        rendering_images = paddle.split(rendering_images, num_or_sections=rendering_images.shape[0], axis=0)  # returns a list of length num_or_sections (unlike torch.split, where the argument is the chunk size)
# print("after split len", len(rendering_images))
image_features = []
for img in rendering_images:
features = self.backbone(paddle.squeeze(img, axis=0))
# print(features.shape) # torch.Size([batch_size, 512, 28, 28])
features = self.layer1(features)
# print(features.shape) # torch.Size([batch_size, 512, 28, 28])
features = self.layer2(features)
# print(features.shape) # torch.Size([batch_size, 256, 6, 6])
features = self.layer3(features)
# print(features.shape) # torch.Size([batch_size, 128, 4, 4])
image_features.append(features)
image_features = paddle.stack(image_features)
# print(image_features.shape)
image_features = paddle.transpose(image_features, perm=[1, 0, 2, 3, 4])
# print(image_features.shape) # torch.Size([batch_size, n_views, 128, 4, 4])
return image_features
if __name__ == "__main__":
from easydict import EasyDict as edict
__C = edict()
cfg = __C
model = paddle.Model(Encoder(cfg))
model.summary((4, 2, 3, 224, 224))
| 46.361111
| 161
| 0.6426
|
488c189e744b3f92142c939ae29974675e7fc7ae
| 11,026
|
py
|
Python
|
models/US CERT National Cyber Awareness System Alerts.py
|
outsideken/brewlytics
|
9326ad1b06e81ac5b64a9e4e83d4d185d20c8d6a
|
[
"CC0-1.0"
] | null | null | null |
models/US CERT National Cyber Awareness System Alerts.py
|
outsideken/brewlytics
|
9326ad1b06e81ac5b64a9e4e83d4d185d20c8d6a
|
[
"CC0-1.0"
] | null | null | null |
models/US CERT National Cyber Awareness System Alerts.py
|
outsideken/brewlytics
|
9326ad1b06e81ac5b64a9e4e83d4d185d20c8d6a
|
[
"CC0-1.0"
] | null | null | null |
################################################################################
################################################################################
## US CERT National Cyber Awareness System Alerts
## RSS Feed
## Author: outsideKen
## Created: 07 July 2020
## Updated: 07 May 2022
##
################################################################################
## CHANGE LOG
## 2020-07-07 - Initial code implementation in Brew
## 2020-07-08 - Modified output_table to include summary details only due to
## encoding issues with the CSV to Table functional
## 2020-07-09 - Added HTML-formatted table to model data for output to Mission
## Presenter
## 2020-07-10 - Implemented Subscriptions for notifications
## 2020-09-26 - Ported to Demo without Subscriptions; added ExcelWriter
## formatting for Excel output
## 2022-05-07 - Updated script to use the new brewlytics Define Python Script
## functional
##
################################################################################
import json
import pandas as pd
import re
import requests
from brewlytics import *
from datetime import datetime
################################################################################
## FUNCTIONS
## Remove brewlytics CV Type substrings from column names
def remove_cv_type_substrings(df):
return [column_names.split('{')[0] for column_names in df.columns]
##------------------------------------------------------------------------------
## Replace HTML Escaped text with weeble-readable text
def weeble(text):
global replacements
for key,val in replacements.items():
text = re.sub(key,val,text)
return text
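## Example (illustrative): weeble('alerts &amp; advisories &lt;revised&gt;')
## returns 'alerts & advisories <revised>'.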
##------------------------------------------------------------------------------
## Extract Published and Revised dates from Description
def find_dates(desc,pattern):
date = re.findall(pattern,desc,flags=re.IGNORECASE)
    matches = [d for d in date if d != '']
    if matches:
        publish = matches[0]
    else:
        publish = '----'
return publish
##------------------------------------------------------------------------------
## Add formatting to the output_resource Excel spreadsheet
def create_web_resource(df,filename,sheetname):
global widths
## Create a Pandas Excel writer using XlsxWriter as the engine.
writer = pd.ExcelWriter(filename, engine='xlsxwriter')
## Convert the dataframe to an XlsxWriter Excel object.
df.to_excel(writer, sheet_name = sheetname, index = False)
## Get the xlsxwriter workbook and worksheet objects.
workbook = writer.book
worksheet = writer.sheets[sheetname]
## Set the column widths of the output_table
for column,width in widths.items():
worksheet.set_column(column, width)
## Set the autofilter.
worksheet.autofilter('A1:L%d' % len(df))
## Close the Pandas Excel writer and output the Excel file.
writer.save()
return
##------------------------------------------------------------------------------
## Unpacks a packed dictionary - creates a new dictionary with swapped
## key and value pairs
def unpack_dictionary(packed):
unpacked = dict()
for key,val in packed.items():
for v in val:
unpacked[v] = key
return unpacked
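## Example (illustrative):
## unpack_dictionary({12: ['A:A'], 20: ['B:B', 'C:C']})
## -> {'A:A': 12, 'B:B': 20, 'C:C': 20}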
################################################################################
## MODEL DATA
iso8601 = '%Y-%m-%dT%H:%M:%S.%fZ'
now = datetime.utcnow()
now_str = now.strftime(iso8601)
##------------------------------------------------------------------------------
## Model Data with model configuration
md = json.loads(inputs.string)
##------------------------------------------------------------------------------
## Weeble replacements
replacements = {'&lt;': '<',
                '&gt;': '>',
                '&amp;': '&',
                '&nbsp;': ' '}
##------------------------------------------------------------------------------
## Correct column order of output_table
column_order = md['Output Table Column Order']
# column_order = ['Alert Id','Publish Date','Revision Date',
# 'Title','Description','Summary','Link',
# 'Date/Time Retrieved','USCERT URL']
##------------------------------------------------------------------------------
## Regex patterns
id_pat = r'<title>([A-Z0-9\-]*):'
publish_pat = r'date: ([a-z0-9 ,]*)[ |<]{1}'
revision_pat = r'revised: ([a-z0-9 ,]*)<br'
##------------------------------------------------------------------------------
## US CERT Logo URL
cert_logo = 'https://upload.wikimedia.org/wikipedia/commons/7/74/US-CERT_logo.png'
##------------------------------------------------------------------------------
## Configurations/Subscriptions
to_addressees = md['Addressees']
##------------------------------------------------------------------------------
## Set column widths for output_table
packed_widths = {12: ['A:A'],
20: ['B:B','C:C'],
25: ['D:D'],
30: ['E:E','F:F','H:H'],
40: ['G:G','I:I'],
75: ['D:D']}
widths = unpack_dictionary(packed_widths)
##------------------------------------------------------------------------------
## Create an empty EMAIL DF dataframe
email_df = pd.DataFrame([], columns = ['To','CC','Subject','Body'])
################################################################################
## BODY
## Remove brewlytics CV Type substrings from column names
df = inputs.table.copy()
df.columns = remove_cv_type_substrings(df)
inputs.table.columns = df.columns
alert_ids = set(df['Alert Id'].tolist())
##------------------------------------------------------------------------------
## Retrieve US CERT Alerts from RSS feed
url = md['US CERT System Alert URL']
r = requests.get(url)
## If data returned, parse to extract new alerts
add_to_repository = list()
if (r.status_code >= 200) and (r.status_code < 300):
## Segment
rss_alerts = r.text.split('<item>')
print('Successful retrieval of data!')
print('There are %d items in this RSS update.' % (len(rss_alerts) - 1))
##--------------------------------------------------------------------------
## Extract US CERT Alerts from RSS results
new_alerts = list()
for alert in rss_alerts[1:]:
##----------------------------------------------------------------------
## Extract alert id from title text
id_pat = r'<title>([A-Z0-9\-]*):'
alert_id = re.findall(id_pat,alert)
if alert_id:
alert_data = {'Alert Id': alert_id[0].strip()}
else:
alert_data = {'Alert Id': 'Unidentified'}
##----------------------------------------------------------------------
## Extract Publish and Revision Dates
publish_pat = r'date: ([a-z0-9 ,]*)[ |<]{1}'
revision_pat = r'revised: ([a-z0-9 ,]*)<br'
alert_data['Publish Date'] = find_dates(weeble(alert),publish_pat)
alert_data['Revision Date'] = find_dates(weeble(alert),revision_pat)
##----------------------------------------------------------------------
## Extract Title, Link, and Description
for tag in ['title','link','description']:
            pattern = r'<%s>([\s\S]*?)</%s>' % (tag,tag)
scrapped = re.findall(pattern,alert,flags = re.IGNORECASE)
if scrapped:
alert_data[tag.title()] = scrapped[0].strip()
else:
alert_data[tag.title()] = None
##----------------------------------------------------------------------
## Extract Summary
summary = '<h3>Summary</h3>' + alert.split('<h3>Summary</h3>')[-1]
alert_data['Summary'] = weeble(summary)
##----------------------------------------------------------------------
## Add provenance
alert_data['Date/Time Retrieved'] = now_str
alert_data['USCERT URL'] = url
########################################################################
##----------------------------------------------------------------------
## Check if new alerts are already in the repository; if not, add and
## send email
if alert_data['Alert Id'] not in alert_ids:
add_to_repository.append(alert_data)
print('New Alert published!! Sending Notification Email')
h1 = '<h1><a href="%s" target="blank">%s</a></h1>'
stub = '<img src="%s">' % cert_logo
stub += h1 % (alert_data['Link'],alert_data['Title'])
stub += alert_data['Summary']
email = {'To': ','.join(to_addressees),
'CC': '',
'Subject': alert_data['Title'],
'Body': stub}
md['Send Email'] = email
            email_df = pd.DataFrame([email])
## If new alerts returned, add to repository
if add_to_repository:
print('Adding new alerts to history ...')
new_alerts = pd.DataFrame(add_to_repository)[column_order]
    tdf = pd.concat([inputs.table, new_alerts])[column_order]
tdf.sort_values(by = 'Alert Id', ascending = False, inplace = True)
tdf.reset_index(inplace = True)
tdf.drop(['index'], axis = 1, inplace = True)
else:
tdf = inputs.table.copy()
## Save as a instance local file for output_resource
filename = 'USCERT_Alerts.xlsx'
sheetname = 'US CERT Alerts'
create_web_resource(tdf,filename,sheetname)
##------------------------------------------------------------------------------
## Create HTML-formatted table of all US-CERT Alerts for Mission Presenter
## IFrame
headers = ['Alert Id','Publish Date','Revision Date','Title']
href = '<a href="%s" target="blank">%s</a>'
stub = '<img src="%s">' % cert_logo
stub += '''
<h1><font color="#003366">US-CERT National Cyber Awareness System Alerts</font></h1>
<hr>
<table style="width:100%">
<colgroup>
<col span="1" style="width: 10%;">
<col span="1" style="width: 15%;">
<col span="1" style="width: 15%;">
<col span="1" style="width: 60%;">
</colgroup>'''
stub += ''' <tr>%s</tr>''' % ''.join(['<th>%s</th>' % column_name
for column_name in headers])
for idx in tdf.index:
row = tdf.loc[idx]
values = ''.join(['<td>%s</td>' % row[column_name]
for column_name in headers[:-1]])
values += '<td>%s</td>' % (href % (row['Link'],row['Title']))
stub += '''<tr>%s</tr>''' % values
# stub += '''<tr><td>%s</td></tr>''' % row['Description']
stub += '</table><hr>'
md['Summary Table'] = stub
################################################################################
## OUTPUTS
outputs.resource = filename
outputs.table = email_df
outputs.string = json.dumps(md['Summary Table'])
################################################################################
## SUMMARY
| 34.892405
| 84
| 0.462724
|
9fe82ad76e83bc584053d415d4a4274fe4152ac7
| 463
|
py
|
Python
|
mynn/initializers.py
|
HashimHL/EvolveDNNRL
|
a7d5bfad037af503a994f73f556e172bda825926
|
[
"MIT"
] | 1
|
2019-11-10T15:01:22.000Z
|
2019-11-10T15:01:22.000Z
|
mynn/initializers.py
|
HashimHL/EvolveDNNRL
|
a7d5bfad037af503a994f73f556e172bda825926
|
[
"MIT"
] | null | null | null |
mynn/initializers.py
|
HashimHL/EvolveDNNRL
|
a7d5bfad037af503a994f73f556e172bda825926
|
[
"MIT"
] | null | null | null |
import numpy as np
class Zeros:
def __init__(self):
pass
def create_weights(self,size):
return np.zeros(size)
class Ones:
def __init__(self):
pass
def create_weights(self,size):
return np.ones(size)
class Uniform:
def __init__(self,low=-1,high=1):
self.low=low
self.high=high
def create_weights(self,size):
return np.random.uniform(low=self.low,high=self.high,size=size)
init_dict={
'zeros':Zeros,
'ones':Ones,
'uniform':Uniform
}
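# Minimal usage sketch (assumes only numpy, imported above): look up an
# initializer by name and draw a weight matrix from it.
if __name__ == '__main__':
    init = init_dict['uniform'](low=-0.1, high=0.1)
    weights = init.create_weights((4, 3))  # 4x3 matrix sampled from U(-0.1, 0.1)
    print(weights.shape)  # -> (4, 3)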
| 14.46875
| 65
| 0.712743
|
30fa327ab87717aa0e8a2b1d666de94f486c7ccb
| 17,251
|
py
|
Python
|
.modules/.metagoofil/hachoir_core/tools.py
|
termux-one/EasY_HaCk
|
0a8d09ca4b126b027b6842e02fa0c29d8250e090
|
[
"Apache-2.0"
] | 1,103
|
2018-04-20T14:08:11.000Z
|
2022-03-29T06:22:43.000Z
|
.modules/.metagoofil/hachoir_core/tools.py
|
sshourya948/EasY_HaCk
|
0a8d09ca4b126b027b6842e02fa0c29d8250e090
|
[
"Apache-2.0"
] | 29
|
2019-04-03T14:52:38.000Z
|
2022-03-24T12:33:05.000Z
|
.modules/.metagoofil/hachoir_core/tools.py
|
sshourya948/EasY_HaCk
|
0a8d09ca4b126b027b6842e02fa0c29d8250e090
|
[
"Apache-2.0"
] | 262
|
2017-09-16T22:15:50.000Z
|
2022-03-31T00:38:42.000Z
|
# -*- coding: utf-8 -*-
"""
Various utilities.
"""
from hachoir_core.i18n import _, ngettext
import re
import stat
from datetime import datetime, timedelta, MAXYEAR
from warnings import warn
def deprecated(comment=None):
"""
This is a decorator which can be used to mark functions
as deprecated. It will result in a warning being emitted
when the function is used.
Examples: ::
@deprecated
def oldfunc(): ...
@deprecated("use newfunc()!")
def oldfunc2(): ...
Code from: http://code.activestate.com/recipes/391367/
"""
def _deprecated(func):
def newFunc(*args, **kwargs):
message = "Call to deprecated function %s" % func.__name__
if comment:
message += ": " + comment
warn(message, category=DeprecationWarning, stacklevel=2)
return func(*args, **kwargs)
newFunc.__name__ = func.__name__
newFunc.__doc__ = func.__doc__
newFunc.__dict__.update(func.__dict__)
return newFunc
return _deprecated
def paddingSize(value, align):
"""
Compute size of a padding field.
>>> paddingSize(31, 4)
1
>>> paddingSize(32, 4)
0
>>> paddingSize(33, 4)
3
Note: (value + paddingSize(value, align)) == alignValue(value, align)
"""
if value % align != 0:
return align - (value % align)
else:
return 0
def alignValue(value, align):
"""
Align a value to next 'align' multiple.
>>> alignValue(31, 4)
32
>>> alignValue(32, 4)
32
>>> alignValue(33, 4)
36
Note: alignValue(value, align) == (value + paddingSize(value, align))
"""
if value % align != 0:
return value + align - (value % align)
else:
return value
def timedelta2seconds(delta):
"""
Convert a datetime.timedelta() object to a number of seconds
(floating point number).
>>> timedelta2seconds(timedelta(seconds=2, microseconds=40000))
2.04
>>> timedelta2seconds(timedelta(minutes=1, milliseconds=250))
60.25
"""
return delta.microseconds / 1000000.0 \
+ delta.seconds + delta.days * 60*60*24
def humanDurationNanosec(nsec):
"""
Convert a duration in nanoseconds to a human-readable representation.
Returns a Unicode string.
>>> humanDurationNanosec(60417893)
u'60.42 ms'
"""
# Nano second
if nsec < 1000:
return u"%u nsec" % nsec
# Micro seconds
usec, nsec = divmod(nsec, 1000)
if usec < 1000:
return u"%.2f usec" % (usec+float(nsec)/1000)
# Milli seconds
msec, usec = divmod(usec, 1000)
if msec < 1000:
return u"%.2f ms" % (msec + float(usec)/1000)
return humanDuration(msec)
def humanDuration(delta):
"""
Convert a duration in milliseconds to a human-readable representation.
Returns a Unicode string.
>>> humanDuration(0)
u'0 ms'
>>> humanDuration(213)
u'213 ms'
>>> humanDuration(4213)
u'4 sec 213 ms'
>>> humanDuration(6402309)
u'1 hour 46 min 42 sec'
"""
if not isinstance(delta, timedelta):
delta = timedelta(microseconds=delta*1000)
# Milliseconds
text = []
if 1000 <= delta.microseconds:
text.append(u"%u ms" % (delta.microseconds//1000))
# Seconds
minutes, seconds = divmod(delta.seconds, 60)
hours, minutes = divmod(minutes, 60)
if seconds:
text.append(u"%u sec" % seconds)
if minutes:
text.append(u"%u min" % minutes)
if hours:
text.append(ngettext("%u hour", "%u hours", hours) % hours)
# Days
years, days = divmod(delta.days, 365)
if days:
text.append(ngettext("%u day", "%u days", days) % days)
if years:
text.append(ngettext("%u year", "%u years", years) % years)
if 3 < len(text):
text = text[-3:]
elif not text:
return u"0 ms"
return u" ".join(reversed(text))
def humanFilesize(size):
"""
Convert a file size in bytes to a human-readable representation.
It uses the values: 1 KB is 1024 bytes, 1 MB is 1024 KB, etc.
The result is a Unicode string.
>>> humanFilesize(1)
u'1 byte'
>>> humanFilesize(790)
u'790 bytes'
>>> humanFilesize(256960)
u'250.9 KB'
"""
if size < 10000:
return ngettext("%u byte", "%u bytes", size) % size
units = [_("KB"), _("MB"), _("GB"), _("TB")]
size = float(size)
divisor = 1024
for unit in units:
size = size / divisor
if size < divisor:
return "%.1f %s" % (size, unit)
return "%u %s" % (size, unit)
def humanBitSize(size):
"""
Convert a size in bits to a human-readable representation.
It uses the values: 1 Kbit is 1000 bits, 1 Mbit is 1000 Kbit, etc.
The result is a Unicode string.
>>> humanBitSize(1)
u'1 bit'
>>> humanBitSize(790)
u'790 bits'
>>> humanBitSize(256960)
u'257.0 Kbit'
"""
divisor = 1000
if size < divisor:
return ngettext("%u bit", "%u bits", size) % size
units = [u"Kbit", u"Mbit", u"Gbit", u"Tbit"]
size = float(size)
for unit in units:
size = size / divisor
if size < divisor:
return "%.1f %s" % (size, unit)
return u"%u %s" % (size, unit)
def humanBitRate(size):
"""
Convert a bit rate to a human-readable representation. It uses humanBitSize()
to convert the size, then appends "/sec". The result is a Unicode string.
>>> humanBitRate(790)
u'790 bits/sec'
>>> humanBitRate(256960)
u'257.0 Kbit/sec'
"""
return "".join((humanBitSize(size), "/sec"))
def humanFrequency(hertz):
"""
Convert a frequency in hertz to a human-readable representation.
It uses the values: 1 kHz is 1000 Hz, 1 MHz is 1000 kHz, etc.
The result is a Unicode string.
>>> humanFrequency(790)
u'790 Hz'
>>> humanFrequency(629469)
u'629.5 kHz'
"""
divisor = 1000
if hertz < divisor:
return u"%u Hz" % hertz
units = [u"kHz", u"MHz", u"GHz", u"THz"]
hertz = float(hertz)
for unit in units:
hertz = hertz / divisor
if hertz < divisor:
return u"%.1f %s" % (hertz, unit)
return u"%s %s" % (hertz, unit)
regex_control_code = re.compile(r"([\x00-\x1f\x7f])")
controlchars = tuple({
# Don't use "\0", because "\0"+"0"+"1" = "\001" = "\1" (1 character)
# Same reason not to use octal syntax ("\1")
ord("\n"): r"\n",
ord("\r"): r"\r",
ord("\t"): r"\t",
ord("\a"): r"\a",
ord("\b"): r"\b",
}.get(code, '\\x%02x' % code)
for code in xrange(128)
)
def makePrintable(data, charset, quote=None, to_unicode=False, smart=True):
r"""
Prepare a string to make it printable in the specified charset.
It escapes control characters. Characters with a code greater than 127
are escaped if the data type is 'str' or if the charset is "ASCII".
Examples with Unicode:
>>> aged = unicode("âgé", "UTF-8")
>>> repr(aged) # text type is 'unicode'
"u'\\xe2g\\xe9'"
>>> makePrintable("abc\0", "UTF-8")
'abc\\0'
>>> makePrintable(aged, "latin1")
'\xe2g\xe9'
>>> makePrintable(aged, "latin1", quote='"')
'"\xe2g\xe9"'
Examples with string encoded in latin1:
>>> aged_latin = unicode("âgé", "UTF-8").encode("latin1")
>>> repr(aged_latin) # text type is 'str'
"'\\xe2g\\xe9'"
>>> makePrintable(aged_latin, "latin1")
'\\xe2g\\xe9'
>>> makePrintable("", "latin1")
''
>>> makePrintable("a", "latin1", quote='"')
'"a"'
>>> makePrintable("", "latin1", quote='"')
'(empty)'
>>> makePrintable("abc", "latin1", quote="'")
"'abc'"
Control codes:
>>> makePrintable("\0\x03\x0a\x10 \x7f", "latin1")
'\\0\\3\\n\\x10 \\x7f'
Quote character may also be escaped (only ' and "):
>>> print makePrintable("a\"b", "latin-1", quote='"')
"a\"b"
>>> print makePrintable("a\"b", "latin-1", quote="'")
'a"b'
>>> print makePrintable("a'b", "latin-1", quote="'")
'a\'b'
"""
if data:
if not isinstance(data, unicode):
data = unicode(data, "ISO-8859-1")
charset = "ASCII"
data = regex_control_code.sub(
lambda regs: controlchars[ord(regs.group(1))], data)
if quote:
if quote in "\"'":
data = data.replace(quote, '\\' + quote)
data = ''.join((quote, data, quote))
elif quote:
data = "(empty)"
data = data.encode(charset, "backslashreplace")
if smart:
# Replace \x00\x01 by \0\1
data = re.sub(r"\\x0([0-7])(?=[^0-7]|$)", r"\\\1", data)
if to_unicode:
data = unicode(data, charset)
return data
def makeUnicode(text):
r"""
Convert text to a printable Unicode string. For byte strings (type 'str'),
charset ISO-8859-1 is used for the conversion to Unicode.
>>> makeUnicode(u'abc\0d')
u'abc\\0d'
>>> makeUnicode('a\xe9')
u'a\xe9'
"""
if isinstance(text, str):
text = unicode(text, "ISO-8859-1")
elif not isinstance(text, unicode):
text = unicode(text)
text = regex_control_code.sub(
lambda regs: controlchars[ord(regs.group(1))], text)
text = re.sub(r"\\x0([0-7])(?=[^0-7]|$)", r"\\\1", text)
return text
def binarySearch(seq, cmp_func):
"""
Search a value in a sequence using binary search. Returns the index of the
value, or None if the value doesn't exist.
'seq' has to be sorted in ascending order according to the
comparison function;
'cmp_func', prototype func(x), is the compare function:
- Return a strictly positive value if we have to search forward;
- Return a strictly negative value if we have to search backward;
- Otherwise (zero) we got the value.
>>> # Search number 5 (search forward)
... binarySearch([0, 4, 5, 10], lambda x: 5-x)
2
>>> # Backward search
... binarySearch([10, 5, 4, 0], lambda x: x-5)
1
"""
lower = 0
upper = len(seq)
while lower < upper:
index = (lower + upper) >> 1
diff = cmp_func(seq[index])
if diff < 0:
upper = index
elif diff > 0:
lower = index + 1
else:
return index
return None
def lowerBound(seq, cmp_func):
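    """
    Return the index of the first item of 'seq' for which cmp_func() is
    false, assuming cmp_func() is true on a (possibly empty) prefix of
    'seq' (a partition-point search, the counterpart of binarySearch()).
    >>> lowerBound([0, 4, 5, 10], lambda x: x < 5)
    2
    """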
f = 0
l = len(seq)
while l > 0:
h = l >> 1
m = f + h
if cmp_func(seq[m]):
f = m
f += 1
l -= h + 1
else:
l = h
return f
def humanUnixAttributes(mode):
"""
Convert Unix file attributes (the "file mode") to a Unicode string.
Original source code:
http://cvs.savannah.gnu.org/viewcvs/coreutils/lib/filemode.c?root=coreutils
>>> humanUnixAttributes(0644)
u'-rw-r--r-- (644)'
>>> humanUnixAttributes(02755)
u'-rwxr-sr-x (2755)'
"""
def ftypelet(mode):
if stat.S_ISREG (mode) or not stat.S_IFMT(mode):
return '-'
if stat.S_ISBLK (mode): return 'b'
if stat.S_ISCHR (mode): return 'c'
if stat.S_ISDIR (mode): return 'd'
if stat.S_ISFIFO(mode): return 'p'
if stat.S_ISLNK (mode): return 'l'
if stat.S_ISSOCK(mode): return 's'
return '?'
chars = [ ftypelet(mode), 'r', 'w', 'x', 'r', 'w', 'x', 'r', 'w', 'x' ]
for i in xrange(1, 10):
if not mode & 1 << 9 - i:
chars[i] = '-'
if mode & stat.S_ISUID:
if chars[3] != 'x':
chars[3] = 'S'
else:
chars[3] = 's'
if mode & stat.S_ISGID:
if chars[6] != 'x':
chars[6] = 'S'
else:
chars[6] = 's'
if mode & stat.S_ISVTX:
if chars[9] != 'x':
chars[9] = 'T'
else:
chars[9] = 't'
return u"%s (%o)" % (''.join(chars), mode)
def createDict(data, index):
"""
Create a new dictionary from a dictionary of key=>values:
just keep value number 'index' from each value tuple.
>>> data={10: ("dix", 100, "a"), 20: ("vingt", 200, "b")}
>>> createDict(data, 0)
{10: 'dix', 20: 'vingt'}
>>> createDict(data, 2)
{10: 'a', 20: 'b'}
"""
return dict( (key,values[index]) for key, values in data.iteritems() )
# Start of UNIX timestamp (Epoch): 1st January 1970 at 00:00
UNIX_TIMESTAMP_T0 = datetime(1970, 1, 1)
def timestampUNIX(value):
"""
Convert a UNIX (32-bit) timestamp to a datetime object. The timestamp value
is the number of seconds since the 1st January 1970 at 00:00. The maximum
value is 2147483647: 19 January 2038 at 03:14:07.
May raise ValueError for an invalid value: the value has to be in 0..2147483647.
>>> timestampUNIX(0)
datetime.datetime(1970, 1, 1, 0, 0)
>>> timestampUNIX(1154175644)
datetime.datetime(2006, 7, 29, 12, 20, 44)
>>> timestampUNIX(1154175644.37)
datetime.datetime(2006, 7, 29, 12, 20, 44, 370000)
>>> timestampUNIX(2147483647)
datetime.datetime(2038, 1, 19, 3, 14, 7)
"""
if not isinstance(value, (float, int, long)):
raise TypeError("timestampUNIX(): an integer or float is required")
if not(0 <= value <= 2147483647):
raise ValueError("timestampUNIX(): value have to be in 0..2147483647")
return UNIX_TIMESTAMP_T0 + timedelta(seconds=value)
# Start of Macintosh timestamp: 1st January 1904 at 00:00
MAC_TIMESTAMP_T0 = datetime(1904, 1, 1)
def timestampMac32(value):
"""
Convert a Mac (32-bit) timestamp to a datetime object. The value is the
number of seconds since the 1st January 1904 (valid up to 2040).
>>> timestampMac32(0)
datetime.datetime(1904, 1, 1, 0, 0)
>>> timestampMac32(2843043290)
datetime.datetime(1994, 2, 2, 14, 14, 50)
"""
if not isinstance(value, (float, int, long)):
raise TypeError("an integer or float is required")
if not(0 <= value <= 4294967295):
return _("invalid Mac timestamp (%s)") % value
return MAC_TIMESTAMP_T0 + timedelta(seconds=value)
def durationWin64(value):
"""
Convert a Windows 64-bit duration to a timedelta object. The duration is
a 64-bit number of 100 ns ticks. See also timestampWin64().
>>> str(durationWin64(1072580000))
'0:01:47.258000'
>>> str(durationWin64(2146280000))
'0:03:34.628000'
"""
if not isinstance(value, (float, int, long)):
raise TypeError("an integer or float is required")
if value < 0:
raise ValueError("value have to be a positive or nul integer")
return timedelta(microseconds=value/10)
# Start of 64-bit Windows timestamp: 1st January 1601 at 00:00
WIN64_TIMESTAMP_T0 = datetime(1601, 1, 1, 0, 0, 0)
def timestampWin64(value):
"""
Convert a Windows 64-bit timestamp to a datetime object. The timestamp is
a 64-bit number which represents the number of 100 ns ticks since the
1st January 1601 at 00:00.
See also durationWin64(). The maximum date is 28 May 60056.
>>> timestampWin64(0)
datetime.datetime(1601, 1, 1, 0, 0)
>>> timestampWin64(127840491566710000)
datetime.datetime(2006, 2, 10, 12, 45, 56, 671000)
"""
try:
return WIN64_TIMESTAMP_T0 + durationWin64(value)
except OverflowError:
raise ValueError(_("date newer than year %s (value=%s)") % (MAXYEAR, value))
# Start of 60-bit UUID timestamp: 15 October 1582 at 00:00
UUID60_TIMESTAMP_T0 = datetime(1582, 10, 15, 0, 0, 0)
def timestampUUID60(value):
"""
Convert a UUID 60-bit timestamp to a datetime object. The timestamp is
a 60-bit number which represents the number of 100 ns ticks since
the 15 October 1582 at 00:00.
>>> timestampUUID60(0)
datetime.datetime(1582, 10, 15, 0, 0)
>>> timestampUUID60(130435676263032368)
datetime.datetime(1996, 2, 14, 5, 13, 46, 303236)
"""
if not isinstance(value, (float, int, long)):
raise TypeError("an integer or float is required")
if value < 0:
raise ValueError("value have to be a positive or nul integer")
try:
return UUID60_TIMESTAMP_T0 + timedelta(microseconds=value/10)
except OverflowError:
raise ValueError(_("timestampUUID60() overflow (value=%s)") % value)
def humanDatetime(value, strip_microsecond=True):
"""
Convert a timestamp to a Unicode string: ISO format with a space separator.
>>> humanDatetime( datetime(2006, 7, 29, 12, 20, 44) )
u'2006-07-29 12:20:44'
>>> humanDatetime( datetime(2003, 6, 30, 16, 0, 5, 370000) )
u'2003-06-30 16:00:05'
>>> humanDatetime( datetime(2003, 6, 30, 16, 0, 5, 370000), False )
u'2003-06-30 16:00:05.370000'
"""
text = unicode(value.isoformat())
text = text.replace('T', ' ')
if strip_microsecond and "." in text:
text = text.split(".")[0]
return text
NEWLINES_REGEX = re.compile("\n+")
def normalizeNewline(text):
r"""
Replace Windows and Mac newlines with Unix newlines.
Replace multiple consecutive newlines with one newline.
>>> normalizeNewline('a\r\nb')
'a\nb'
>>> normalizeNewline('a\r\rb')
'a\nb'
>>> normalizeNewline('a\n\nb')
'a\nb'
"""
text = text.replace("\r\n", "\n")
text = text.replace("\r", "\n")
return NEWLINES_REGEX.sub("\n", text)
| 29.590051
| 84
| 0.589995
|
ccb031ea022edf40ae433810471b1f47465df396
| 2,789
|
py
|
Python
|
models/stylegan2/op/fused_act.py
|
jojoon99/Barbershop
|
f5e837166806c27ce8f94dd96dd888a78640fcda
|
[
"MIT"
] | 492
|
2021-06-03T01:14:03.000Z
|
2022-03-31T12:27:41.000Z
|
models/stylegan2/op/fused_act.py
|
jojoon99/Barbershop
|
f5e837166806c27ce8f94dd96dd888a78640fcda
|
[
"MIT"
] | 30
|
2021-06-05T10:14:35.000Z
|
2022-03-15T08:13:33.000Z
|
models/stylegan2/op/fused_act.py
|
jojoon99/Barbershop
|
f5e837166806c27ce8f94dd96dd888a78640fcda
|
[
"MIT"
] | 93
|
2021-06-04T11:08:29.000Z
|
2022-03-31T12:06:00.000Z
|
import os
import torch
from torch import nn
from torch.nn import functional as F
from torch.autograd import Function
from torch.utils.cpp_extension import load
module_path = os.path.dirname(__file__)
fused = load(
"fused",
sources=[
os.path.join(module_path, "fused_bias_act.cpp"),
os.path.join(module_path, "fused_bias_act_kernel.cu"),
],
)
class FusedLeakyReLUFunctionBackward(Function):
@staticmethod
def forward(ctx, grad_output, out, negative_slope, scale):
ctx.save_for_backward(out)
ctx.negative_slope = negative_slope
ctx.scale = scale
empty = grad_output.new_empty(0)
grad_input = fused.fused_bias_act(
grad_output, empty, out, 3, 1, negative_slope, scale
)
dim = [0]
if grad_input.ndim > 2:
dim += list(range(2, grad_input.ndim))
grad_bias = grad_input.sum(dim).detach()
return grad_input, grad_bias
@staticmethod
def backward(ctx, gradgrad_input, gradgrad_bias):
(out,) = ctx.saved_tensors
gradgrad_out = fused.fused_bias_act(
gradgrad_input, gradgrad_bias, out, 3, 1, ctx.negative_slope, ctx.scale
)
return gradgrad_out, None, None, None
class FusedLeakyReLUFunction(Function):
@staticmethod
def forward(ctx, input, bias, negative_slope, scale):
empty = input.new_empty(0)
out = fused.fused_bias_act(input, bias, empty, 3, 0, negative_slope, scale)
ctx.save_for_backward(out)
ctx.negative_slope = negative_slope
ctx.scale = scale
return out
@staticmethod
def backward(ctx, grad_output):
(out,) = ctx.saved_tensors
grad_input, grad_bias = FusedLeakyReLUFunctionBackward.apply(
grad_output, out, ctx.negative_slope, ctx.scale
)
return grad_input, grad_bias, None, None
class FusedLeakyReLU(nn.Module):
def __init__(self, channel, negative_slope=0.2, scale=2 ** 0.5):
super().__init__()
self.bias = nn.Parameter(torch.zeros(channel))
self.negative_slope = negative_slope
self.scale = scale
def forward(self, input):
return fused_leaky_relu(input, self.bias, self.negative_slope, self.scale)
def fused_leaky_relu(input, bias, negative_slope=0.2, scale=2 ** 0.5):
if input.device.type == "cpu":
rest_dim = [1] * (input.ndim - bias.ndim - 1)
return (
F.leaky_relu(
input + bias.view(1, bias.shape[0], *rest_dim), negative_slope=negative_slope
)
* scale
)
else:
return FusedLeakyReLUFunction.apply(input, bias, negative_slope, scale)
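# Minimal CPU usage sketch (assumes the classes above; note that importing this
# module still JIT-compiles the CUDA extension via load() at the top):
if __name__ == "__main__":
    x = torch.randn(2, 8, 4, 4)
    act = FusedLeakyReLU(channel=8)
    y = act(x)  # bias add + leaky ReLU (slope 0.2), scaled by sqrt(2)
    print(y.shape)  # -> torch.Size([2, 8, 4, 4])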
| 28.752577
| 84
| 0.621011
|
e7c7ef03f7f25483e73182198ec74bc03e57e60f
| 1,395
|
py
|
Python
|
flumine/clients/simulatedclient.py
|
betcode-org/flumine
|
b33d82b75175106b2de0d0c4f9851599b085e389
|
[
"MIT"
] | 2
|
2022-03-21T11:42:56.000Z
|
2022-03-26T08:36:18.000Z
|
flumine/clients/simulatedclient.py
|
betcode-org/flumine
|
b33d82b75175106b2de0d0c4f9851599b085e389
|
[
"MIT"
] | 4
|
2022-03-25T09:49:37.000Z
|
2022-03-25T10:18:13.000Z
|
flumine/clients/simulatedclient.py
|
betcode-org/flumine
|
b33d82b75175106b2de0d0c4f9851599b085e389
|
[
"MIT"
] | null | null | null |
from typing import Optional
from betfairlightweight.metadata import currency_parameters
from betfairlightweight.resources.accountresources import AccountDetails
from .baseclient import BaseClient
from .clients import ExchangeType
class SimulatedClient(BaseClient):
"""
Simulated betting client.
"""
EXCHANGE = ExchangeType.SIMULATED
DISCOUNT_RATE = 0
CURRENCY_CODE = "GBP"
def login(self) -> None:
return
def keep_alive(self) -> None:
return
def logout(self) -> None:
return
def update_account_details(self) -> None:
self.account_details = AccountDetails(
**{"discountRate": self.DISCOUNT_RATE, "currencyCode": self.CURRENCY_CODE}
)
@property
def min_bet_size(self) -> Optional[float]:
if self.account_details:
return currency_parameters[self.account_details.currency_code][
"min_bet_size"
]
@property
def min_bsp_liability(self) -> Optional[float]:
if self.account_details:
return currency_parameters[self.account_details.currency_code][
"min_bsp_liability"
]
@property
def min_bet_payout(self) -> Optional[float]:
if self.account_details:
return currency_parameters[self.account_details.currency_code][
"min_bet_payout"
]
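# Minimal usage sketch (assumes BaseClient permits a no-argument constructor,
# as with flumine's other clients):
if __name__ == "__main__":
    client = SimulatedClient()
    client.update_account_details()
    print(client.min_bet_size)  # GBP minimum from betfairlightweight metadata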
| 26.826923
| 86
| 0.656631
|
cd3668e5b3634552966dcdfa12e27edf06cd77c7
| 1,004
|
py
|
Python
|
Python3/187.repeated-dna-sequences.py
|
610yilingliu/leetcode
|
30d071b3685c2131bd3462ba77c6c05114f3f227
|
[
"MIT"
] | null | null | null |
Python3/187.repeated-dna-sequences.py
|
610yilingliu/leetcode
|
30d071b3685c2131bd3462ba77c6c05114f3f227
|
[
"MIT"
] | null | null | null |
Python3/187.repeated-dna-sequences.py
|
610yilingliu/leetcode
|
30d071b3685c2131bd3462ba77c6c05114f3f227
|
[
"MIT"
] | null | null | null |
#
# @lc app=leetcode id=187 lang=python3
#
# [187] Repeated DNA Sequences
#
# @lc code=start
class Solution:
def findRepeatedDnaSequences(self, s):
if len(s) <= 10:
return []
bit_dic = {
# 00
'A': 0,
# 01
'C': 1,
# 10
'G': 2,
# 11
'T': 3
}
repeated_dict = dict()
ans = []
limit = (1 << 20) - 1
current_bit = 0
for i in range(9):
current_bit = ((current_bit << 2) + bit_dic[s[i]]) & limit
for i in range(9, len(s)):
current_bit = ((current_bit << 2) + bit_dic[s[i]]) & limit
if current_bit not in repeated_dict:
repeated_dict[current_bit] = 1
else:
repeated_dict[current_bit] += 1
if repeated_dict[current_bit] == 2:
ans.append(s[i - 9: i + 1])
return ans
# @lc code=end
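# Quick check using the classic example from the problem statement; the 20-bit
# rolling hash encodes each 10-letter window as ten 2-bit codes.
if __name__ == '__main__':
    s = "AAAAACCCCCAAAAACCCCCCAAAAAGGGTTT"
    print(Solution().findRepeatedDnaSequences(s))
    # -> ['AAAAACCCCC', 'CCCCCAAAAA']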
| 25.1
| 70
| 0.438247
|
af3aa8bd8d2ab7a708a11f662acad6226745db09
| 27,340
|
py
|
Python
|
jasmin/protocols/smpp/factory.py
|
phil-lavin/jasmin
|
75897bb394f531cd76113b21d697acc86c4f5e81
|
[
"Apache-2.0"
] | 2
|
2020-05-14T18:27:01.000Z
|
2021-03-21T17:26:19.000Z
|
jasmin/protocols/smpp/factory.py
|
phil-lavin/jasmin
|
75897bb394f531cd76113b21d697acc86c4f5e81
|
[
"Apache-2.0"
] | null | null | null |
jasmin/protocols/smpp/factory.py
|
phil-lavin/jasmin
|
75897bb394f531cd76113b21d697acc86c4f5e81
|
[
"Apache-2.0"
] | 1
|
2020-11-24T06:48:22.000Z
|
2020-11-24T06:48:22.000Z
|
# pylint: disable=W0401,W0611,W0231
import cPickle as pickle
import logging
import re
from datetime import datetime, timedelta
from logging.handlers import TimedRotatingFileHandler
from OpenSSL import SSL
from twisted.internet import defer, reactor, ssl
from twisted.internet.protocol import ClientFactory
from jasmin.routing.Routables import RoutableSubmitSm
from jasmin.vendor.smpp.twisted.protocol import DataHandlerResponse
from jasmin.vendor.smpp.twisted.server import SMPPBindManager as _SMPPBindManager
from jasmin.vendor.smpp.twisted.server import SMPPServerFactory as _SMPPServerFactory
from .error import *
from .protocol import SMPPClientProtocol, SMPPServerProtocol
from .stats import SMPPClientStatsCollector, SMPPServerStatsCollector
from .validation import SmppsCredentialValidator
LOG_CATEGORY_CLIENT_BASE = "smpp.client"
LOG_CATEGORY_SERVER_BASE = "smpp.server"
class SmppClientIsNotConnected(Exception):
"""
An exception that is raised when trying to use the smpp object while
it is still None (before bind() has called back)
"""
class SMPPClientFactory(ClientFactory):
protocol = SMPPClientProtocol
def __init__(self, config, msgHandler=None):
self.reconnectTimer = None
self.smpp = None
self.connectionRetry = True
self.config = config
# Setup statistics collector
self.stats = SMPPClientStatsCollector().get(cid=self.config.id)
self.stats.set('created_at', datetime.now())
# Set up a dedicated logger
self.log = logging.getLogger(LOG_CATEGORY_CLIENT_BASE + ".%s" % config.id)
if len(self.log.handlers) != 1:
self.log.setLevel(config.log_level)
_when = self.config.log_rotate if hasattr(self.config, 'log_rotate') else 'midnight'
handler = TimedRotatingFileHandler(filename=self.config.log_file, when=_when)
formatter = logging.Formatter(config.log_format, config.log_date_format)
handler.setFormatter(formatter)
self.log.addHandler(handler)
self.log.propagate = False
if msgHandler is None:
self.msgHandler = self.msgHandlerStub
else:
self.msgHandler = msgHandler
def buildProtocol(self, addr):
"""Provision protocol
"""
proto = ClientFactory.buildProtocol(self, addr)
# Setup logger
proto.log = self.log
return proto
def getConfig(self):
return self.config
def msgHandlerStub(self, smpp, pdu):
self.log.warn("msgHandlerStub: Received an unhandled message %s ...", pdu)
def startedConnecting(self, connector):
self.log.info("Connecting to %s ...", connector.getDestination())
def getExitDeferred(self):
"""Get a Deferred so you can be notified on disconnect and exited
This deferred is called once disconnection occurs without a further
reconnection retrys
"""
return self.exitDeferred
def clientConnectionFailed(self, connector, reason):
"""Connection failed
"""
self.log.error("Connection failed. Reason: %s", str(reason))
if self.config.reconnectOnConnectionFailure and self.connectionRetry:
self.log.info("Reconnecting after %d seconds ...",
self.config.reconnectOnConnectionFailureDelay)
self.reconnectTimer = reactor.callLater(
self.config.reconnectOnConnectionFailureDelay, self.reConnect, connector)
else:
self.connectDeferred.errback(reason)
self.exitDeferred.callback(None)
self.log.info("Exiting.")
def clientConnectionLost(self, connector, reason):
"""Connection lost
"""
self.log.error("Connection lost. Reason: %s", str(reason))
if self.config.reconnectOnConnectionLoss and self.connectionRetry:
self.log.info("Reconnecting after %d seconds ...",
self.config.reconnectOnConnectionLossDelay)
self.reconnectTimer = reactor.callLater(
self.config.reconnectOnConnectionLossDelay, self.reConnect, connector)
else:
self.exitDeferred.callback(None)
self.log.info("Exiting.")
def reConnect(self, connector=None):
if connector is None:
self.log.error("No connector to retry !")
else:
# Reset deferred if it were called before
if self.connectDeferred.called is True:
self.connectDeferred = defer.Deferred()
self.connectDeferred.addCallback(self.bind)
# And try to connect again
connector.connect()
def _connect(self):
self.connectionRetry = True
if self.config.useSSL:
self.log.info('Establishing SSL connection to %s:%d', self.config.host, self.config.port)
reactor.connectSSL(self.config.host, self.config.port, self, CtxFactory(self.config))
else:
self.log.info('Establishing TCP connection to %s:%d', self.config.host, self.config.port)
reactor.connectTCP(self.config.host, self.config.port, self)
self.exitDeferred = defer.Deferred()
self.connectDeferred = defer.Deferred()
return self.connectDeferred
def connectAndBind(self):
self._connect()
self.connectDeferred.addCallback(self.bind)
return self.connectDeferred
def disconnect(self):
if self.smpp is not None:
self.log.info('Disconnecting SMPP client')
return self.smpp.unbindAndDisconnect()
else:
return None
def stopConnectionRetrying(self):
"""This will stop the factory from reconnecting
It is used whenever a service stop has been requested; the connectionRetry flag
is reset to True upon the next connect() call
"""
self.log.info('Stopped automatic connection retrying.')
if self.reconnectTimer and self.reconnectTimer.active():
self.reconnectTimer.cancel()
self.reconnectTimer = None
self.connectionRetry = False
def disconnectAndDontRetryToConnect(self):
self.log.info('Ordering a disconnect with no further reconnections.')
self.stopConnectionRetrying()
return self.disconnect()
def bind(self, smpp):
self.smpp = smpp
if self.config.bindOperation == 'transceiver':
return smpp.bindAsTransceiver()
elif self.config.bindOperation == 'receiver':
return smpp.bindAsReceiver()
elif self.config.bindOperation == 'transmitter':
return smpp.bindAsTransmitter()
else:
raise SMPPClientError("Invalid bind operation: %s" % self.config.bindOperation)
def getSessionState(self):
if self.smpp is None:
return None
else:
return self.smpp.sessionState
class CtxFactory(ssl.ClientContextFactory):
def __init__(self, config):
self.smppConfig = config
def getContext(self):
self.method = SSL.SSLv23_METHOD
ctx = ssl.ClientContextFactory.getContext(self)
if self.smppConfig.SSLCertificateFile:
ctx.use_certificate_file(self.smppConfig.SSLCertificateFile)
return ctx
class SMPPServerFactory(_SMPPServerFactory):
protocol = SMPPServerProtocol
def __init__(self, config, auth_portal, RouterPB=None, SMPPClientManagerPB=None,
interceptorpb_client=None):
self.config = config
# A dict of protocol instances for each of the current connections,
# indexed by system_id
self.bound_connections = {}
self._auth_portal = auth_portal
self.RouterPB = RouterPB
self.SMPPClientManagerPB = SMPPClientManagerPB
self.interceptorpb_client = interceptorpb_client
# Setup statistics collector
self.stats = SMPPServerStatsCollector().get(cid=self.config.id)
self.stats.set('created_at', datetime.now())
# Set up a dedicated logger
self.log = logging.getLogger(LOG_CATEGORY_SERVER_BASE + ".%s" % config.id)
if len(self.log.handlers) != 1:
self.log.setLevel(config.log_level)
handler = TimedRotatingFileHandler(filename=self.config.log_file, when=self.config.log_rotate)
formatter = logging.Formatter(config.log_format, config.log_date_format)
handler.setFormatter(formatter)
self.log.addHandler(handler)
self.log.propagate = False
self.msgHandler = self.submit_sm_event_interceptor
def addInterceptorPBClient(self, interceptorpb_client):
self.interceptorpb_client = interceptorpb_client
self.log.info('Added Interceptor to SMPPServerFactory')
def submit_sm_event_interceptor(self, system_id, *args):
"Intercept submit_sm befor handing it to self.submit_sm_event"
self.log.debug('Intercepting submit_sm event for system_id: %s', system_id)
# Args validation
if len(args) != 2:
self.log.error('(submit_sm_event/%s) Invalid args: %s', system_id, args)
raise SubmitSmInvalidArgsError()
if not isinstance(args[1], pdu_types.PDURequest):
self.log.error(
'(submit_sm_event/%s) Received an unknown object when waiting for a PDURequest: %s',
system_id,
args[1])
raise SubmitSmInvalidArgsError()
if args[1].id != pdu_types.CommandId.submit_sm:
self.log.error('(submit_sm_event/%s) Received a non submit_sm command id: %s',
system_id, args[1].id)
raise SubmitSmInvalidArgsError()
if not isinstance(args[0], SMPPServerProtocol):
self.log.error(
'(submit_sm_event/%s) Received an unknown object when waiting for a SMPPServerProtocol: %s',
system_id,
args[0])
raise SubmitSmInvalidArgsError()
proto = args[0]
user = proto.user
SubmitSmPDU = args[1]
# Update CnxStatus
user.getCnxStatus().smpps['submit_sm_request_count'] += 1
# Basic validation
if SubmitSmPDU.params['destination_addr'] is None or len(SubmitSmPDU.params['destination_addr']) < 1:
self.log.error('(submit_sm_event/%s) SubmitSmPDU has no defined destination_addr', system_id)
raise SubmitSmWithoutDestinationAddrError()
# Make Credential validation
v = SmppsCredentialValidator('Send', user, SubmitSmPDU)
v.validate()
# Update SubmitSmPDU by default values from user MtMessagingCredential
SubmitSmPDU = v.updatePDUWithUserDefaults(SubmitSmPDU)
if self.RouterPB is None:
self.log.error('(submit_sm_event_interceptor/%s) RouterPB not set: submit_sm will not be routed',
system_id)
return
# Prepare for interception then routing
routable = RoutableSubmitSm(SubmitSmPDU, user)
# Interception inline
# @TODO: make Interception in a thread, just like httpapi interception
interceptor = self.RouterPB.getMTInterceptionTable().getInterceptorFor(routable)
if interceptor is not None:
self.log.debug("RouterPB selected %s interceptor for this SubmitSmPDU", interceptor)
if self.interceptorpb_client is None:
self.stats.inc('interceptor_error_count')
self.log.error("InterceptorPB not set !")
raise InterceptorNotSetError('InterceptorPB not set !')
if not self.interceptorpb_client.isConnected:
self.stats.inc('interceptor_error_count')
self.log.error("InterceptorPB not connected !")
raise InterceptorNotConnectedError('InterceptorPB not connected !')
script = interceptor.getScript()
self.log.debug("Interceptor script loaded: %s", script)
# Run !
d = self.interceptorpb_client.run_script(script, routable)
d.addCallback(self.submit_sm_post_interception, system_id=system_id, proto=proto)
d.addErrback(self.submit_sm_post_interception)
return d
else:
return self.submit_sm_post_interception(routable=routable, system_id=system_id, proto=proto)
def submit_sm_post_interception(self, *args, **kw):
"""This event handler will deliver the submit_sm to the right smppc connector.
Note that Jasmin delivers submit_sm messages like this:
- from httpapi to smppc (handled in jasmin.protocols.http.server)
- from smpps to smppc (this event handler)
Note: This event handler MUST behave exactly like jasmin.protocols.http.server.Send.render
"""
try:
# Init message id & status
message_id = None
status = None
# Post interception:
if len(args) == 1:
if isinstance(args[0], bool) and not args[0]:
self.stats.inc('interceptor_error_count')
self.log.error('Failed running interception script, got a False return.')
raise InterceptorRunError('Failed running interception script, check log for details')
elif isinstance(args[0], dict) and args[0]['smpp_status'] > 0:
self.stats.inc('interceptor_error_count')
self.log.error('Interceptor script returned %s smpp_status error.', args[0]['smpp_status'])
raise SubmitSmInterceptionError(code=args[0]['smpp_status'])
elif isinstance(args[0], dict) and args[0]['smpp_status'] == 0:
self.stats.inc('interceptor_count')
self.log.info('Interceptor script returned %s success smpp_status.', args[0]['smpp_status'])
# Do we have a message_id returned from interceptor ?
if 'message_id' in args[0]['extra']:
message_id = str(args[0]['extra']['message_id'])
raise SubmitSmInterceptionSuccess()
elif isinstance(args[0], str):
self.stats.inc('interceptor_count')
routable = pickle.loads(args[0])
else:
self.stats.inc('interceptor_error_count')
self.log.error('Failed running interception script, got the following return: %s',
args[0])
raise InterceptorRunError(
'Failed running interception script, got the following return: %s' % args[0])
else:
routable = kw['routable']
system_id = kw['system_id']
proto = kw['proto']
self.log.debug('Handling submit_sm_post_interception event for system_id: %s', system_id)
# Get the route
route = self.RouterPB.getMTRoutingTable().getRouteFor(routable)
if route is None:
self.log.error("No route matched from user %s for SubmitSmPDU: %s",
routable.user, routable.pdu)
raise SubmitSmRouteNotFoundError()
# Get connector from selected route
self.log.debug("RouterPB selected %s route for this SubmitSmPDU", route)
routedConnector = route.getConnector()
# Is it a failover route? Then check for a bound connector, otherwise don't route
# The failover route requires at least one connector to be up, no message enqueuing will
# occur otherwise.
if repr(route) == 'FailoverMTRoute':
self.log.debug('Selected route is a failover, will ensure connector is bound:')
while True:
c = self.SMPPClientManagerPB.perspective_connector_details(routedConnector.cid)
if c:
self.log.debug('Connector [%s] is: %s', routedConnector.cid, c['session_state'])
else:
self.log.debug('Connector [%s] is not found', routedConnector.cid)
if c and c['session_state'][:6] == 'BOUND_':
# Choose this connector
break
else:
# Check next connector, None if no more connectors are available
routedConnector = route.getConnector()
if routedConnector is None:
break
if routedConnector is None:
self.log.error("Failover route has no bound connector to handle SubmitSmPDU: %s",
routable.pdu)
raise SubmitSmRoutingError()
# QoS throttling
if (routable.user.mt_credential.getQuota('smpps_throughput') >= 0
and routable.user.getCnxStatus().smpps['qos_last_submit_sm_at'] != 0):
qos_throughput_second = 1 / float(routable.user.mt_credential.getQuota('smpps_throughput'))
qos_throughput_ysecond_td = timedelta(microseconds=qos_throughput_second * 1000000)
qos_delay = datetime.now() - routable.user.getCnxStatus().smpps['qos_last_submit_sm_at']
if qos_delay < qos_throughput_ysecond_td:
self.log.error(
"QoS: submit_sm_event is faster (%s) than fixed throughput (%s) for user (%s), rejecting message.",
qos_delay,
qos_throughput_ysecond_td,
routable.user)
raise SubmitSmThroughputExceededError()
routable.user.getCnxStatus().smpps['qos_last_submit_sm_at'] = datetime.now()
# Pre-sending submit_sm: Billing processing
bill = route.getBillFor(routable.user)
self.log.debug("SubmitSmBill [bid:%s] [ttlamounts:%s] generated for this SubmitSmPDU",
bill.bid, bill.getTotalAmounts())
charging_requirements = []
u_balance = routable.user.mt_credential.getQuota('balance')
u_subsm_count = routable.user.mt_credential.getQuota('submit_sm_count')
if u_balance is not None and bill.getTotalAmounts() > 0:
# Ensure the user has enough balance to pay for submit_sm and submit_sm_resp
charging_requirements.append({
'condition': bill.getTotalAmounts() <= u_balance,
'error_message': 'Not enough balance (%s) for charging: %s' % (
u_balance, bill.getTotalAmounts())})
if u_subsm_count is not None:
# Ensure the user has enough submit_sm_count to cover the bill action (decrement_submit_sm_count)
charging_requirements.append({
'condition': bill.getAction('decrement_submit_sm_count') <= u_subsm_count,
'error_message': 'Not enough submit_sm_count (%s) for charging: %s' % (
u_subsm_count, bill.getAction('decrement_submit_sm_count'))})
if self.RouterPB.chargeUserForSubmitSms(routable.user, bill, requirements=charging_requirements) is None:
self.log.error('Charging user %s failed, [bid:%s] [ttlamounts:%s] (check router log)',
routable.user, bill.bid, bill.getTotalAmounts())
raise SubmitSmChargingError()
# Get priority value from SubmitSmPDU to pass to SMPPClientManagerPB.perspective_submit_sm()
priority = 0
if routable.pdu.params['priority_flag'] is not None:
priority = routable.pdu.params['priority_flag'].index
if self.SMPPClientManagerPB is None:
self.log.error(
'(submit_sm_event/%s) SMPPClientManagerPB not set: submit_sm will not be submitted',
system_id)
return
########################################################
# Send SubmitSmPDU through smpp client manager PB server
self.log.debug("Connector '%s' is set to be a route for this SubmitSmPDU", routedConnector.cid)
c = self.SMPPClientManagerPB.perspective_submit_sm(
uid=routable.user.uid,
cid=routedConnector.cid,
SubmitSmPDU=routable.pdu,
submit_sm_bill=bill,
priority=priority,
pickled=False,
source_connector=proto)
if not hasattr(c, 'result'):
self.log.error('Failed to send SubmitSmPDU to [cid:%s], got: %s', routedConnector.cid, c)
raise SubmitSmRoutingError()
# Build final response
if not c.result:
self.log.error('Failed to send SubmitSmPDU to [cid:%s]', routedConnector.cid)
raise SubmitSmRoutingError()
# Otherwise, message_id is defined on ESME_ROK
message_id = c.result
except (SubmitSmInterceptionError, SubmitSmInterceptionSuccess, InterceptorRunError,
SubmitSmRouteNotFoundError, SubmitSmThroughputExceededError, SubmitSmChargingError,
SubmitSmRoutingError) as e:
# Known exception handling
status = e.status
except Exception as e:
# Unknown exception handling
self.log.critical('Got an unknown exception: %s', e)
status = pdu_types.CommandStatus.ESME_RUNKNOWNERR
else:
self.log.debug('SubmitSmPDU sent to [cid:%s], result = %s', routedConnector.cid, message_id)
# Do not log text for privacy reasons
# Added in #691
if self.config.log_privacy:
logged_content = '** %s byte content **' % len(routable.pdu.params['short_message'])
else:
logged_content = '%r' % re.sub(r'[^\x20-\x7E]+', '.', routable.pdu.params['short_message'])
self.log.info(
'SMS-MT [uid:%s] [cid:%s] [msgid:%s] [prio:%s] [from:%s] [to:%s] [content:%s]',
routable.user.uid,
routedConnector.cid,
message_id,
priority,
routable.pdu.params['source_addr'],
routable.pdu.params['destination_addr'],
logged_content)
status = pdu_types.CommandStatus.ESME_ROK
finally:
if message_id is not None:
return DataHandlerResponse(status=status, message_id=message_id)
elif status is not None:
return DataHandlerResponse(status=status)
def buildProtocol(self, addr):
"""Provision protocol with the dedicated logger
"""
proto = _SMPPServerFactory.buildProtocol(self, addr)
# Setup logger
proto.log = self.log
return proto
def addBoundConnection(self, connection, user):
"""
Overloading _SMPPServerFactory to remove the dependency on config.systems.
Jasmin removed systems from the config, as everything about credentials is
managed through the User object
"""
system_id = connection.system_id
self.log.debug('Adding SMPP binding for %s', system_id)
if system_id not in self.bound_connections:
self.bound_connections[system_id] = SMPPBindManager(user)
self.bound_connections[system_id].addBinding(connection)
bind_type = connection.bind_type
self.log.info("Added %s bind for '%s'. Active binds: %s.",
bind_type, system_id, self.getBoundConnectionCountsStr(system_id))
def removeConnection(self, connection):
"""
Overloading _SMPPServerFactory to remove the dependency on config.systems.
Jasmin removed systems from the config, as everything about credentials is
managed through the User object
"""
if connection.system_id is None:
self.log.debug("SMPP connection attempt failed without binding.")
else:
system_id = connection.system_id
bind_type = connection.bind_type
self.bound_connections[system_id].removeBinding(connection)
self.log.info("Dropped %s bind for '%s'. Active binds: %s.",
bind_type, system_id, self.getBoundConnectionCountsStr(system_id))
# If this is the last binding for this service then remove the BindManager
if self.bound_connections[system_id].getBindingCount() == 0:
self.bound_connections.pop(system_id)
def canOpenNewConnection(self, user, bind_type):
"""
Overloading _SMPPServerFactory to remove the dependency on config.systems.
Jasmin removed systems from the config, as everything about credentials is
managed through the User object
This method will check for authorization and quotas before allowing a new
connection
"""
# Can bind ?
if not user.smpps_credential.getAuthorization('bind'):
self.log.warning(
'New bind rejected for username: "%s", reason: authorization failure.', user.username)
return False
# Still didn't reach max_bindings?
elif user.smpps_credential.getQuota('max_bindings') is not None:
bind_count = user.getCnxStatus().smpps['bound_connections_count']['bind_transmitter']
bind_count += user.getCnxStatus().smpps['bound_connections_count']['bind_receiver']
bind_count += user.getCnxStatus().smpps['bound_connections_count']['bind_transceiver']
if bind_count >= user.smpps_credential.getQuota('max_bindings'):
self.log.warning('New bind rejected for username: "%s", reason: max_bindings limit reached.',
user.username)
return False
return True
def unbindAndRemoveGateway(self, user, ban=True):
"""
Overloading _SMPPServerFactory to remove the dependency on config.systems.
Jasmin removed systems from the config, as everything about credentials is
managed through the User object.
It also adds a 'ban' parameter to optionally remove binding authorization
for the user.
"""
if ban:
user.smpps_credential.setAuthorization('bind', False)
d = self.unbindGateway(user.username)
return d
class SMPPBindManager(_SMPPBindManager):
"Overloads _SMPPBindManager to add user tracking"
def __init__(self, user):
_SMPPBindManager.__init__(self, system_id=user.username)
self.user = user
def addBinding(self, connection):
_SMPPBindManager.addBinding(self, connection)
# Update CnxStatus
self.user.getCnxStatus().smpps['bind_count'] += 1
self.user.getCnxStatus().smpps['bound_connections_count'][str(connection.bind_type)] += 1
def removeBinding(self, connection):
_SMPPBindManager.removeBinding(self, connection)
# Update CnxStatus
self.user.getCnxStatus().smpps['unbind_count'] += 1
self.user.getCnxStatus().smpps['bound_connections_count'][str(connection.bind_type)] -= 1
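# Usage sketch (commented out; assumes a populated SMPPClientConfig and a
# running Twisted reactor, as elsewhere in Jasmin):
# factory = SMPPClientFactory(config)
# d = factory.connectAndBind()  # Deferred fires once the bind succeeds
# d.addErrback(lambda failure: factory.stopConnectionRetrying())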
| 44.527687
| 123
| 0.625896
|
982f71543d386ae442db9b113292f597581ffdac
| 2,814
|
py
|
Python
|
akira_lang.py
|
Daniell2020UKR/YT_dl
|
6c819f5162e98fff104638529a08b9ee9e243e01
|
[
"MIT"
] | null | null | null |
akira_lang.py
|
Daniell2020UKR/YT_dl
|
6c819f5162e98fff104638529a08b9ee9e243e01
|
[
"MIT"
] | 3
|
2021-06-08T21:53:26.000Z
|
2022-01-13T02:57:31.000Z
|
akira_lang.py
|
Daniell2020UKR/YT_dl
|
6c819f5162e98fff104638529a08b9ee9e243e01
|
[
"MIT"
] | null | null | null |
translations = {
'en': {
'akira_start': 'Hi! Type \".help\" to get a list of commands.',
'akira_noargs': 'No arguments.',
'akira_version': 'My version - ',
'akira_nolang': 'This language is not supported.',
'akira_newlang': 'New language is set.',
'akira_pmonly': 'This command is only available in PM\'s.',
'akira_newcontact': 'I added you to my contacts.',
'akira_donate': 'Donate to the developer of the source code — @Myst33dDonate, donate to me — @Daniell2020UKRDonate',
'akira_downloading': 'Downloading...',
'akira_uploading': 'Uploading...',
'akira_audio_download_error': 'An error occurred while trying to download audio.',
'akira_audio_upload_error': 'An error occurred while trying to upload audio.',
'akira_changelog': 'Version 0.1 pre-alpha — SC modded & bot first started. Version 0.2-alpha — SC modded x2 & y2v function added (PRE-ALPHA), bug fixes. Version 0.3-alpha — bug fixes',
'akira_help':
'.start - Start using the bot.\n'
'.donate - Donation info.\n'
'.setlang - Set language (only en&ru).\n'
'.y2a - Download audio from Youtube\n'
'.y2v - Download video from YouTube (PRE-ALPHA!)\n'
'.changelog - Changelog and version info\n'
'.version - Bot version.'
},
'ru': {
'akira_start': 'Привет! Напишите \".help\" чтобы получить список команд.',
'akira_noargs': 'Нету аргументов.',
'akira_version': 'Моя версия - ',
'akira_nolang': 'Этот язык не поддерживается.',
'akira_newlang': 'Новый язык установлен.',
'akira_pmonly': 'Эта команда доступна только в ЛС.',
'akira_newcontact': 'Я добавила тебя в мои контакты.',
'akira_donate': 'Донат разработчику исхов - @Myst33dDonate, донат мне — @Daniell2020UKRDonate',
'akira_downloading': 'Скачиваю...',
'akira_uploading': 'Загружаю...',
'akira_changelog': 'Версия 0.1 pre-alpha — ИК модифицирован, бот запущен впервые. Версия 0.2-alpha — ИК модифицирован x2, добавлена y2v функция (ПРЕ-АЛЬФА), баг фикс, Версия 0.3-alpha — баг фикс, подготовка к переходу на nanogram',
'akira_audio_download_error': 'Произошла ошибка пытаясь скачать аудио.',
'akira_audio_upload_error': 'Произошла ошибка пытаясь загрузить аудио.',
'akira_help':
'.start - Начать использовать бота.\n'
'.donate - Информация о донатах.\n'
'.addme - Добавить пользователя в контакты бота.\n'
'.setlang - Установить язык.\n'
'.y2a - Скачать аудио с Youtube\n'
'.y2v - Скачать видео с YouTube (ПРЕ-АЛЬФА!)\n'
'.changelog - список версий и изменений\n'
'.version - Версия бота.'
}
}
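# Usage sketch (hypothetical helper name 'tr'; falls back to English for
# unsupported language codes):
# def tr(lang, key):
#     return translations.get(lang, translations['en'])[key]
# tr('ru', 'akira_start')  # -> 'Привет! Напишите ".help" ...'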
| 56.28
| 240
| 0.61656
|
b68531ff5e5d72c57a8d4e5170381aa50916af2a
| 2,270
|
py
|
Python
|
sources/lambda/async/index.py
|
stmayne/workshop-textract-comprehend-es
|
9e886cacf3ba00a2c696baafa745fa753e2e5202
|
[
"MIT-0"
] | 26
|
2020-02-12T20:45:17.000Z
|
2021-12-23T15:23:03.000Z
|
sources/lambda/async/index.py
|
stmayne/workshop-textract-comprehend-es
|
9e886cacf3ba00a2c696baafa745fa753e2e5202
|
[
"MIT-0"
] | 2
|
2020-02-20T13:13:23.000Z
|
2021-09-28T19:21:54.000Z
|
sources/lambda/async/index.py
|
stmayne/workshop-textract-comprehend-es
|
9e886cacf3ba00a2c696baafa745fa753e2e5202
|
[
"MIT-0"
] | 22
|
2020-02-19T20:36:55.000Z
|
2022-03-19T07:11:16.000Z
|
#
# Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: MIT-0
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this
# software and associated documentation files (the "Software"), to deal in the Software
# without restriction, including without limitation the rights to use, copy, modify,
# merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
# PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import json
from text_extractor import TextExtractor
from document_analyzer import DocumentAnalyzer
from document_indexer import DocumentIndexer
document_indexer = DocumentIndexer()
document_analyzer = DocumentAnalyzer()
text_extractor = TextExtractor()
def handler(event, context):
message = json.loads(event['Records'][0]['Sns']['Message'])
jobId = message['JobId']
print("JobId="+jobId)
status = message['Status']
print("Status="+status)
if status != "SUCCEEDED":
return {
# TODO : handle error with Dead letter queue (not in this workshop)
# https://docs.aws.amazon.com/lambda/latest/dg/dlq.html
"status": status
}
pages = text_extractor.extract_text(jobId)
print(list(pages.values()))
entities = document_analyzer.extract_entities(list(pages.values()))
print(entities)
doc = {
"bucket": message['DocumentLocation']['S3Bucket'],
"document": message['DocumentLocation']['S3ObjectName'],
"size": len(list(pages.values())),
"jobId": jobId,
"pages": list(pages.values()),
"entities": entities
}
print(doc)
docId = document_indexer.index(doc)
return {
"jobId": jobId,
"docId": docId
}
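# Shape of the SNS-wrapped message this handler expects (per the async
# Textract completion notification; the values below are placeholders):
# {
#   "JobId": "1234...",
#   "Status": "SUCCEEDED",
#   "DocumentLocation": {"S3Bucket": "my-bucket", "S3ObjectName": "doc.pdf"}
# }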
| 33.880597
| 87
| 0.7
|
c5e9fc90085756c9fdf363cb6b12a17ee7648e75
| 891
|
py
|
Python
|
sharppy/sharptab/constants.py
|
skovic/SHARPpy
|
19175269ab11fe06c917b5d10376862a4716e1db
|
[
"BSD-3-Clause"
] | 163
|
2015-01-05T06:57:16.000Z
|
2022-03-15T04:19:42.000Z
|
sharppy/sharptab/constants.py
|
skovic/SHARPpy
|
19175269ab11fe06c917b5d10376862a4716e1db
|
[
"BSD-3-Clause"
] | 187
|
2015-01-20T05:30:55.000Z
|
2022-03-28T17:50:38.000Z
|
sharppy/sharptab/constants.py
|
skovic/SHARPpy
|
19175269ab11fe06c917b5d10376862a4716e1db
|
[
"BSD-3-Clause"
] | 110
|
2015-01-06T05:55:47.000Z
|
2022-03-15T18:40:21.000Z
|
''' Frequently used meteorological constants '''
__all__ = ['MISSING', 'ROCP', 'ZEROCNK', 'G', 'TOL', 'WHITE', 'RED',
'ORANGE', 'YELLOW', 'MAGENTA', 'DBROWN', 'LBROWN', 'LBLUE',
'CYAN', 'BLACK', 'GREEN', 'DGREEN', 'HAINES_HIGH', 'HAINES_MID',
'HAINES_LOW']
# Meteorological Constants
MISSING = -9999.0 # Missing Flag
ROCP = 0.28571426 # R over Cp
ZEROCNK = 273.15 # Zero Celsius in Kelvins
G = 9.80665 # Gravity
TOL = 1e-10 # Floating Point Tolerance
# Color code constants for easy access
WHITE = '#FFFFFF'
BLACK = '#000000'
RED = '#FF0000'
ORANGE = '#FF4000'
YELLOW = '#FFFF00'
MAGENTA = '#E700DF'
DBROWN = '#775000'
LBROWN = '#996600'
LBLUE = '#06B5FF'
CYAN = '#00FFFF'
GREEN = '#00FF00'
DGREEN = '#006000'
# Haines Index elevation constants for easy access
HAINES_HIGH = 2
HAINES_MID = 1
HAINES_LOW = 0
| 25.457143
| 75
| 0.611672
|
59bb154a0a142bb44f1e617b59a6516f99d21a50
| 8,958
|
py
|
Python
|
socketio/handler.py
|
ansible/gevent-socketio
|
bd81cb6cc6fab8b67d108ecde94305d654bcd143
|
[
"BSD-3-Clause"
] | 2
|
2017-12-19T13:28:05.000Z
|
2020-04-13T07:34:55.000Z
|
socketio/handler.py
|
ansible/gevent-socketio
|
bd81cb6cc6fab8b67d108ecde94305d654bcd143
|
[
"BSD-3-Clause"
] | null | null | null |
socketio/handler.py
|
ansible/gevent-socketio
|
bd81cb6cc6fab8b67d108ecde94305d654bcd143
|
[
"BSD-3-Clause"
] | 2
|
2017-12-19T13:27:38.000Z
|
2020-04-13T07:34:57.000Z
|
import sys
import re
import gevent
import urlparse
from gevent.pywsgi import WSGIHandler
from socketio import transports
class SocketIOHandler(WSGIHandler):
RE_REQUEST_URL = re.compile(r"""
^/(?P<resource>.+?)
/1
/(?P<transport_id>[^/]+)
/(?P<sessid>[^/]+)/?$
""", re.X)
RE_HANDSHAKE_URL = re.compile(r"^/(?P<resource>.+?)/1/$", re.X)
# new socket.io versions (> 0.9.8) call an obscure url with two slashes
# instead of a transport when disconnecting
# https://github.com/LearnBoost/socket.io-client/blob/0.9.16/lib/socket.js#L361
RE_DISCONNECT_URL = re.compile(r"""
^/(?P<resource>.+?)
/(?P<protocol_version>[^/]+)
//(?P<sessid>[^/]+)/?$
""", re.X)
handler_types = {
'websocket': transports.WebsocketTransport,
'flashsocket': transports.FlashSocketTransport,
'htmlfile': transports.HTMLFileTransport,
'xhr-multipart': transports.XHRMultipartTransport,
'xhr-polling': transports.XHRPollingTransport,
'jsonp-polling': transports.JSONPolling,
}
def __init__(self, config, *args, **kwargs):
"""Create a new SocketIOHandler.
:param config: dict Configuration for timeouts and intervals
that will go down to the other components, transports, etc..
"""
self.socketio_connection = False
self.allowed_paths = None
self.config = config
super(SocketIOHandler, self).__init__(*args, **kwargs)
self.transports = self.handler_types.keys()
if self.server.transports:
self.transports = self.server.transports
if not set(self.transports).issubset(set(self.handler_types)):
raise ValueError("transports should be elements of: %s" %
(self.handler_types.keys()))
def _do_handshake(self, tokens):
if tokens["resource"] != self.server.resource:
self.log_error("socket.io URL mismatch")
else:
socket = self.server.get_socket()
data = "%s:%s:%s:%s" % (socket.sessid,
self.config['heartbeat_timeout'] or '',
self.config['close_timeout'] or '',
",".join(self.transports))
self.write_smart(data)
def write_jsonp_result(self, data, wrapper="0"):
self.start_response("200 OK", [
("Content-Type", "application/javascript"),
])
self.result = ['io.j[%s]("%s");' % (wrapper, data)]
def write_plain_result(self, data):
self.start_response("200 OK", [
("Access-Control-Allow-Origin", self.environ.get('HTTP_ORIGIN', '*')),
("Access-Control-Allow-Credentials", "true"),
("Access-Control-Allow-Methods", "POST, GET, OPTIONS"),
("Access-Control-Max-Age", "3600"),
("Content-Type", "text/plain"),
])
self.result = [data]
def write_smart(self, data):
args = urlparse.parse_qs(self.environ.get("QUERY_STRING"))
if "jsonp" in args:
self.write_jsonp_result(data, args["jsonp"][0])
else:
self.write_plain_result(data)
self.process_result()
def handle_one_response(self):
"""This function deals with *ONE INCOMING REQUEST* from the web.
It will wire and exchange messages with the queues for long-polling
methods; otherwise, it will stay alive for websockets.
"""
path = self.environ.get('PATH_INFO')
# Kick non-socket.io requests to our superclass
if not path.lstrip('/').startswith(self.server.resource + '/'):
return super(SocketIOHandler, self).handle_one_response()
self.status = None
self.headers_sent = False
self.result = None
self.response_length = 0
self.response_use_chunked = False
# This is analyzed for each and every HTTP requests involved
# in the Socket.IO protocol, whether long-running or long-polling
# (read: websocket or xhr-polling methods)
request_method = self.environ.get("REQUEST_METHOD")
request_tokens = self.RE_REQUEST_URL.match(path)
handshake_tokens = self.RE_HANDSHAKE_URL.match(path)
disconnect_tokens = self.RE_DISCONNECT_URL.match(path)
if handshake_tokens:
# Deal with first handshake here, create the Socket and push
# the config up.
return self._do_handshake(handshake_tokens.groupdict())
elif disconnect_tokens:
# it's a disconnect request via XHR
tokens = disconnect_tokens.groupdict()
elif request_tokens:
tokens = request_tokens.groupdict()
# and continue...
else:
# This is no socket.io request. Let the WSGI app handle it.
return super(SocketIOHandler, self).handle_one_response()
# Setup socket
sessid = tokens["sessid"]
socket = self.server.get_socket(sessid)
if not socket:
self.handle_bad_request()
return [] # Do not say the session is not found, just bad request
# so they don't start brute forcing to find open sessions
if self.environ['QUERY_STRING'].startswith('disconnect'):
# according to socket.io specs disconnect requests
# have a `disconnect` query string
# https://github.com/LearnBoost/socket.io-spec#forced-socket-disconnection
socket.disconnect()
self.handle_disconnect_request()
return []
# Setup transport
transport = self.handler_types.get(tokens["transport_id"])
# In case this is WebSocket request, switch to the WebSocketHandler
# FIXME: fix this ugly class change
old_class = None
if issubclass(transport, (transports.WebsocketTransport,
transports.FlashSocketTransport)):
old_class = self.__class__
self.__class__ = self.server.ws_handler_class
self.prevent_wsgi_call = True # thank you
# TODO: any errors, treat them ??
self.handle_one_response() # does the Websocket dance before we continue
# Make the socket object available for WSGI apps
self.environ['socketio'] = socket
# Create a transport and handle the request likewise
self.transport = transport(self, self.config)
# transports register their own spawn'd jobs now
self.transport.do_exchange(socket, request_method)
if not socket.connection_established:
# This is executed only on the *first* packet of the establishment
# of the virtual Socket connection.
socket.connection_established = True
socket.state = socket.STATE_CONNECTED
socket._spawn_heartbeat()
socket._spawn_watcher()
try:
# We'll run the WSGI app if it wasn't already done.
if socket.wsgi_app_greenlet is None:
# TODO: why don't we spawn a call to handle_one_response here ?
# why call directly the WSGI machinery ?
start_response = lambda status, headers, exc=None: None
socket.wsgi_app_greenlet = gevent.spawn(self.application,
self.environ,
start_response)
except:
self.handle_error(*sys.exc_info())
# we need to keep the connection open if we are an open socket
if tokens['transport_id'] in ['flashsocket', 'websocket']:
# wait here for all jobs to finish
gevent.joinall(socket.jobs)
# Switch back to the old class so references to this don't use the
# incorrect class. Useful for debugging.
if old_class:
self.__class__ = old_class
# Clean up circular references so they can be garbage collected.
if hasattr(self, 'websocket') and self.websocket:
if hasattr(self.websocket, 'environ'):
del self.websocket.environ
del self.websocket
if self.environ:
self.environ.pop('wsgi.websocket', None)
del self.environ
def handle_bad_request(self):
self.close_connection = True
self.start_response("400 Bad Request", [
('Content-Type', 'text/plain'),
('Connection', 'close'),
('Content-Length', 0)
])
def handle_disconnect_request(self):
self.close_connection = True
self.start_response("200 OK", [
('Content-Type', 'text/plain'),
('Connection', 'close'),
('Content-Length', 0)
])
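
A minimal usage sketch for the handler above, assuming gevent-socketio's
SocketIOServer entry point (defined elsewhere in the package, not in this
file); the app, host, and port are illustrative placeholders.

from socketio.server import SocketIOServer

def my_wsgi_app(environ, start_response):
    # handle_one_response() injects the Socket object into the WSGI environ
    socket = environ['socketio']
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return []

# `resource` must match the URL prefix that handle_one_response() checks.
SocketIOServer(('0.0.0.0', 8080), my_wsgi_app,
               resource='socket.io').serve_forever()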
avg_line_length: 39.462555 | max_line_length: 86 | alphanum_fraction: 0.592096
hexsha: 1169ccfa2282947ee4672b7e4b19f1a174b5fe6f
size: 3054
ext: py
lang: Python
max_stars_repo_path: sdk/monitor/azure-mgmt-monitor/azure/mgmt/monitor/v2019_10_17/aio/_configuration_async.py
max_stars_repo_name: LianwMS/azure-sdk-for-python
max_stars_repo_head_hexsha: 612d7bca9de86ee1bd1fa59291d7bf897ba9213f
max_stars_repo_licenses: ["MIT"]
max_stars_count: 2
max_stars_repo_stars_event_min_datetime: 2019-05-17T21:24:53.000Z
max_stars_repo_stars_event_max_datetime: 2020-02-12T11:13:42.000Z
max_issues_repo_path: sdk/monitor/azure-mgmt-monitor/azure/mgmt/monitor/v2019_10_17/aio/_configuration_async.py
max_issues_repo_name: LianwMS/azure-sdk-for-python
max_issues_repo_head_hexsha: 612d7bca9de86ee1bd1fa59291d7bf897ba9213f
max_issues_repo_licenses: ["MIT"]
max_issues_count: 15
max_issues_repo_issues_event_min_datetime: 2019-07-12T18:18:04.000Z
max_issues_repo_issues_event_max_datetime: 2019-07-25T20:55:51.000Z
max_forks_repo_path: sdk/monitor/azure-mgmt-monitor/azure/mgmt/monitor/v2019_10_17/aio/_configuration_async.py
max_forks_repo_name: LianwMS/azure-sdk-for-python
max_forks_repo_head_hexsha: 612d7bca9de86ee1bd1fa59291d7bf897ba9213f
max_forks_repo_licenses: ["MIT"]
max_forks_count: 2
max_forks_repo_forks_event_min_datetime: 2020-05-21T22:51:22.000Z
max_forks_repo_forks_event_max_datetime: 2020-05-26T20:53:01.000Z
content:
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
VERSION = "unknown"
class MonitorClientConfiguration(Configuration):
"""Configuration for MonitorClient.
Note that all parameters used to create this instance are saved as instance
attributes.
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param subscription_id: The Azure subscription Id.
:type subscription_id: str
"""
def __init__(
self,
credential: "AsyncTokenCredential",
subscription_id: str,
**kwargs: Any
) -> None:
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
if subscription_id is None:
raise ValueError("Parameter 'subscription_id' must not be None.")
super(MonitorClientConfiguration, self).__init__(**kwargs)
self.credential = credential
self.subscription_id = subscription_id
self.api_version = "2019-10-17-preview"
self.credential_scopes = ['https://management.azure.com/.default']
self.credential_scopes.extend(kwargs.pop('credential_scopes', []))
        kwargs.setdefault('sdk_moniker', 'mgmt-monitor/{}'.format(VERSION))  # 'mgmt-eventhub' in the generated source appears to be a codegen slip
self._configure(**kwargs)
def _configure(
self,
**kwargs: Any
) -> None:
self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
self.authentication_policy = kwargs.get('authentication_policy')
if self.credential and not self.authentication_policy:
self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
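
A minimal sketch of constructing the configuration above, assuming the
azure-identity package for the async credential; the subscription ID is a
placeholder.

import asyncio
from azure.identity.aio import DefaultAzureCredential

async def main():
    # DefaultAzureCredential resolves environment/CLI/managed-identity auth
    async with DefaultAzureCredential() as credential:
        config = MonitorClientConfiguration(
            credential=credential,
            subscription_id="00000000-0000-0000-0000-000000000000",
        )
        print(config.api_version)        # 2019-10-17-preview
        print(config.credential_scopes)  # ['https://management.azure.com/.default']

asyncio.run(main())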
avg_line_length: 46.272727 | max_line_length: 134 | alphanum_fraction: 0.688278
hexsha: 796b1bdb778efe5da26dcfcf9effb5b875abbd2c
size: 2400
ext: py
lang: Python
max_stars_repo_path: prensilia/parloma_demo_test1.py
max_stars_repo_name: parloma/Prensilia
max_stars_repo_head_hexsha: e0292834b82e7046e4333ecdb0be78502fdf6dab
max_stars_repo_licenses: ["Apache-2.0"]
max_stars_count: 1
max_stars_repo_stars_event_min_datetime: 2015-01-12T03:08:25.000Z
max_stars_repo_stars_event_max_datetime: 2015-01-12T03:08:25.000Z
max_issues_repo_path: prensilia/parloma_demo_test1.py
max_issues_repo_name: parloma/Prensilia
max_issues_repo_head_hexsha: e0292834b82e7046e4333ecdb0be78502fdf6dab
max_issues_repo_licenses: ["Apache-2.0"]
max_issues_count: null
max_issues_repo_issues_event_min_datetime: null
max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: prensilia/parloma_demo_test1.py
max_forks_repo_name: parloma/Prensilia
max_forks_repo_head_hexsha: e0292834b82e7046e4333ecdb0be78502fdf6dab
max_forks_repo_licenses: ["Apache-2.0"]
max_forks_count: null
max_forks_repo_forks_event_min_datetime: null
max_forks_repo_forks_event_max_datetime: null
content:
from cv2 import namedWindow, imshow, waitKey, cvtColor, COLOR_RGB2BGR
from hand_grabber import PyOpenNIHandGrabber
from pose_recognizer import PyPoseRecognizer
from my_fun import *
from sklearn.externals import joblib
from hand import *
import time
import socket
WIDTH = 640
HEIGHT = 480
USE_CPU = False
#clientsocket=socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#clientsocket.connect(('10.10.0.1',8089))
if __name__=="__main__":
namedWindow("rgb")
namedWindow("prediction")
grabber = PyOpenNIHandGrabber()
streamWidth = WIDTH
streamHeight = HEIGHT
trainingSetSamplesWidth = 320
radius = 150
#hand = start_communication()
#open_hand(hand)
recog = PyPoseRecognizer(streamWidth, streamHeight,
"../forest-final.xml",
USE_CPU, trainingSetSamplesWidth)
clf = joblib.load('tree_Random_class_4.pkl')
print("Wave the hand in front of the sensor")
while True:
rgb, depth = grabber.grabFrames()
pos = grabber.getHand3DPos()
if pos[0] or pos[1] or pos[2]:
break
print("CTRL+C to exit")
firstPose = True
actual_sign = 'rest'
while True:
rgb, depth = grabber.grabFrames()
pos = grabber.getHand3DPos()
mask = grabber.segment(depth, pos, radius)
prediction = recog.predict(depth, mask)
joints = recog.getJoints(depth, mask)
sign = clf.predict([joints2dist(joints)])
        print(sign.tolist()[0])
#clientsocket.send(sign.tolist()[0])
if (sign.tolist()[0] != actual_sign):
if (sign.tolist()[0] == 'rest'):
#open_hand(hand)
actual_sign = sign.tolist()[0]
elif (sign.tolist()[0] == 'v'):
#perform_V(hand)
actual_sign = sign.tolist()[0]
elif (sign.tolist()[0] == 'w'):
#perform_W(hand)
actual_sign = sign.tolist()[0]
elif (sign.tolist()[0] == 's'):
#perform_S(hand)
actual_sign = sign.tolist()[0]
imshow("rgb", cvtColor(rgb, COLOR_RGB2BGR))
imshow("prediction", prediction)
waitKey(1)
if not pos[0] and not pos[1] and not pos[2]:
print ("Hand position lost. Exiting ...")
break
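
A self-contained sketch of the joint-distance -> sign classification idea used
above, with random feature vectors standing in for joints2dist(joints) and a
freshly trained forest standing in for the pickled tree_Random_class_4.pkl.

import numpy as np
from sklearn.ensemble import RandomForestClassifier

rng = np.random.RandomState(0)
signs = ['rest', 'v', 'w', 's']
X = rng.rand(40, 10)                   # 40 fake distance vectors, 10 features
y = [signs[i % len(signs)] for i in range(40)]
clf = RandomForestClassifier(n_estimators=10, random_state=0).fit(X, y)
print(clf.predict(X[:1])[0])           # prints one of 'rest', 'v', 'w', 's'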
avg_line_length: 25.263158 | max_line_length: 69 | alphanum_fraction: 0.575
hexsha: ff1884108a577cb9c834a2191838169ebc03e0e8
size: 8467
ext: py
lang: Python
max_stars_repo_path: accelbyte_py_sdk/api/cloudsave/operations/public_player_record/delete_player_record_ha_3addde.py
max_stars_repo_name: AccelByte/accelbyte-python-sdk
max_stars_repo_head_hexsha: dcd311fad111c59da828278975340fb92e0f26f7
max_stars_repo_licenses: ["MIT"]
max_stars_count: null
max_stars_repo_stars_event_min_datetime: null
max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: accelbyte_py_sdk/api/cloudsave/operations/public_player_record/delete_player_record_ha_3addde.py
max_issues_repo_name: AccelByte/accelbyte-python-sdk
max_issues_repo_head_hexsha: dcd311fad111c59da828278975340fb92e0f26f7
max_issues_repo_licenses: ["MIT"]
max_issues_count: 1
max_issues_repo_issues_event_min_datetime: 2021-10-13T03:46:58.000Z
max_issues_repo_issues_event_max_datetime: 2021-10-13T03:46:58.000Z
max_forks_repo_path: accelbyte_py_sdk/api/cloudsave/operations/public_player_record/delete_player_record_ha_3addde.py
max_forks_repo_name: AccelByte/accelbyte-python-sdk
max_forks_repo_head_hexsha: dcd311fad111c59da828278975340fb92e0f26f7
max_forks_repo_licenses: ["MIT"]
max_forks_count: null
max_forks_repo_forks_event_min_datetime: null
max_forks_repo_forks_event_max_datetime: null
content:
# Copyright (c) 2021 AccelByte Inc. All Rights Reserved.
# This is licensed software from AccelByte Inc, for limitations
# and restrictions contact your company contract manager.
#
# Code generated. DO NOT EDIT!
# template file: justice_py_sdk_codegen/__main__.py
# pylint: disable=duplicate-code
# pylint: disable=line-too-long
# pylint: disable=missing-function-docstring
# pylint: disable=missing-module-docstring
# pylint: disable=too-many-arguments
# pylint: disable=too-many-branches
# pylint: disable=too-many-instance-attributes
# pylint: disable=too-many-lines
# pylint: disable=too-many-locals
# pylint: disable=too-many-public-methods
# pylint: disable=too-many-return-statements
# pylint: disable=too-many-statements
# pylint: disable=unused-import
# justice-cloudsave-service (3.0.1)
from __future__ import annotations
from typing import Any, Dict, List, Optional, Tuple, Union
from .....core import Operation
from .....core import HeaderStr
from .....core import HttpResponse
from ...models import ModelsResponseError
class DeletePlayerRecordHandlerV1(Operation):
"""Delete player record (deletePlayerRecordHandlerV1)
Required Permission | `NAMESPACE:{namespace}:USER:{userId}:CLOUDSAVE:RECORD [DELETE]`
--------------------|-----------------------------------------------------------------
Required Scope | `social`
Delete player record by its key.
Only user that own the player record could delete it.
Required Permission(s):
- NAMESPACE:{namespace}:USER:{userId}:CLOUDSAVE:RECORD [DELETE]
Required Scope(s):
- social
Properties:
url: /cloudsave/v1/namespaces/{namespace}/users/{userId}/records/{key}
method: DELETE
tags: ["PublicPlayerRecord"]
consumes: ["application/json"]
produces: ["application/json"]
securities: [BEARER_AUTH]
key: (key) REQUIRED str in path
namespace: (namespace) REQUIRED str in path
user_id: (userId) REQUIRED str in path
Responses:
204: No Content - (Record deleted)
400: Bad Request - ModelsResponseError (18201: invalid record operator, expect [%s] but actual [%s])
401: Unauthorized - ModelsResponseError (Unauthorized)
403: Forbidden - ModelsResponseError (18072: delete action is forbidden on other user's record)
500: Internal Server Error - ModelsResponseError (Internal Server Error)
"""
# region fields
_url: str = "/cloudsave/v1/namespaces/{namespace}/users/{userId}/records/{key}"
_method: str = "DELETE"
_consumes: List[str] = ["application/json"]
_produces: List[str] = ["application/json"]
_securities: List[List[str]] = [["BEARER_AUTH"]]
_location_query: str = None
key: str # REQUIRED in [path]
namespace: str # REQUIRED in [path]
user_id: str # REQUIRED in [path]
# endregion fields
# region properties
@property
def url(self) -> str:
return self._url
@property
def method(self) -> str:
return self._method
@property
def consumes(self) -> List[str]:
return self._consumes
@property
def produces(self) -> List[str]:
return self._produces
@property
def securities(self) -> List[List[str]]:
return self._securities
@property
def location_query(self) -> str:
return self._location_query
# endregion properties
# region get methods
# endregion get methods
# region get_x_params methods
def get_all_params(self) -> dict:
return {
"path": self.get_path_params(),
}
def get_path_params(self) -> dict:
result = {}
if hasattr(self, "key"):
result["key"] = self.key
if hasattr(self, "namespace"):
result["namespace"] = self.namespace
if hasattr(self, "user_id"):
result["userId"] = self.user_id
return result
# endregion get_x_params methods
# region is/has methods
# endregion is/has methods
# region with_x methods
def with_key(self, value: str) -> DeletePlayerRecordHandlerV1:
self.key = value
return self
def with_namespace(self, value: str) -> DeletePlayerRecordHandlerV1:
self.namespace = value
return self
def with_user_id(self, value: str) -> DeletePlayerRecordHandlerV1:
self.user_id = value
return self
# endregion with_x methods
# region to methods
def to_dict(self, include_empty: bool = False) -> dict:
result: dict = {}
if hasattr(self, "key") and self.key:
result["key"] = str(self.key)
elif include_empty:
result["key"] = ""
if hasattr(self, "namespace") and self.namespace:
result["namespace"] = str(self.namespace)
elif include_empty:
result["namespace"] = ""
if hasattr(self, "user_id") and self.user_id:
result["userId"] = str(self.user_id)
elif include_empty:
result["userId"] = ""
return result
# endregion to methods
# region response methods
# noinspection PyMethodMayBeStatic
def parse_response(self, code: int, content_type: str, content: Any) -> Tuple[None, Union[None, HttpResponse, ModelsResponseError]]:
"""Parse the given response.
204: No Content - (Record deleted)
400: Bad Request - ModelsResponseError (18201: invalid record operator, expect [%s] but actual [%s])
401: Unauthorized - ModelsResponseError (Unauthorized)
403: Forbidden - ModelsResponseError (18072: delete action is forbidden on other user's record)
500: Internal Server Error - ModelsResponseError (Internal Server Error)
---: HttpResponse (Undocumented Response)
---: HttpResponse (Unexpected Content-Type Error)
---: HttpResponse (Unhandled Error)
"""
pre_processed_response, error = self.pre_process_response(code=code, content_type=content_type, content=content)
if error is not None:
return None, None if error.is_no_content() else error
code, content_type, content = pre_processed_response
if code == 204:
return None, None
if code == 400:
return None, ModelsResponseError.create_from_dict(content)
if code == 401:
return None, ModelsResponseError.create_from_dict(content)
if code == 403:
return None, ModelsResponseError.create_from_dict(content)
if code == 500:
return None, ModelsResponseError.create_from_dict(content)
return None, self.handle_undocumented_response(code=code, content_type=content_type, content=content)
# endregion response methods
# region static methods
@classmethod
def create(
cls,
key: str,
namespace: str,
user_id: str,
) -> DeletePlayerRecordHandlerV1:
instance = cls()
instance.key = key
instance.namespace = namespace
instance.user_id = user_id
return instance
@classmethod
def create_from_dict(cls, dict_: dict, include_empty: bool = False) -> DeletePlayerRecordHandlerV1:
instance = cls()
if "key" in dict_ and dict_["key"] is not None:
instance.key = str(dict_["key"])
elif include_empty:
instance.key = ""
if "namespace" in dict_ and dict_["namespace"] is not None:
instance.namespace = str(dict_["namespace"])
elif include_empty:
instance.namespace = ""
if "userId" in dict_ and dict_["userId"] is not None:
instance.user_id = str(dict_["userId"])
elif include_empty:
instance.user_id = ""
return instance
@staticmethod
def get_field_info() -> Dict[str, str]:
return {
"key": "key",
"namespace": "namespace",
"userId": "user_id",
}
@staticmethod
def get_required_map() -> Dict[str, bool]:
return {
"key": True,
"namespace": True,
"userId": True,
}
# endregion static methods
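
A minimal sketch of building the operation above and inspecting it; the key,
namespace, and user ID are placeholders, and actually executing the request
goes through the SDK's request runner, which lives outside this file.

op = DeletePlayerRecordHandlerV1.create(
    key="player-settings",   # placeholder record key
    namespace="my-game",     # placeholder namespace
    user_id="user-123",      # placeholder user id
)
print(op.method, op.url)
# DELETE /cloudsave/v1/namespaces/{namespace}/users/{userId}/records/{key}
print(op.get_path_params())
# {'key': 'player-settings', 'namespace': 'my-game', 'userId': 'user-123'}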
avg_line_length: 29.918728 | max_line_length: 136 | alphanum_fraction: 0.612614