hexsha
stringlengths 40
40
| size
int64 4
1.02M
| ext
stringclasses 8
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
209
| max_stars_repo_name
stringlengths 5
121
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
209
| max_issues_repo_name
stringlengths 5
121
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
67k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
209
| max_forks_repo_name
stringlengths 5
121
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 4
1.02M
| avg_line_length
float64 1.07
66.1k
| max_line_length
int64 4
266k
| alphanum_fraction
float64 0.01
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
5428e994e57c10ab2c09314300ce7a8811d9aba0
| 1,013
|
py
|
Python
|
vega_datasets/tests/test_errors.py
|
datadesk/vega_datasets
|
89d990da5a8732bdcb553f170a0ec407652fa71b
|
[
"MIT"
] | null | null | null |
vega_datasets/tests/test_errors.py
|
datadesk/vega_datasets
|
89d990da5a8732bdcb553f170a0ec407652fa71b
|
[
"MIT"
] | null | null | null |
vega_datasets/tests/test_errors.py
|
datadesk/vega_datasets
|
89d990da5a8732bdcb553f170a0ec407652fa71b
|
[
"MIT"
] | null | null | null |
import pytest
from vega_datasets import data, local_data
from vega_datasets.core import Dataset
def test_undefined_dataset():
    """Both loaders raise AttributeError with the same message for unknown names."""
    for loader in (data, local_data):
        with pytest.raises(AttributeError) as err:
            loader('blahblahblah')
        assert str(err.value) == "No dataset named 'blahblahblah'"
def test_undefined_infodict():
    """Dataset._infodict raises ValueError for an unknown dataset name.

    Fix: the original bound the result to an unused local ``info``.
    """
    with pytest.raises(ValueError) as err:
        Dataset._infodict('blahblahblah')
    assert str(err.value).startswith('No such dataset blahblahblah exists')
@pytest.mark.parametrize('name', (set(Dataset.list_datasets()) -
                                  set(Dataset.list_local_datasets())))
def test_local_dataset_error(name):
    """Requesting a remote-only dataset via local_data raises ValueError.

    Fix: the original bound the result to a local ``data`` variable that
    shadowed the imported ``data`` loader and was never used.
    """
    with pytest.raises(ValueError) as err:
        local_data(name)
    assert str(err.value).startswith("'{0}' dataset is not available locally"
                                     "".format(name.replace('-', '_')))
| 34.931034
| 77
| 0.671273
|
e4e1ce16cc79bb0a0d6a875fdf22fffe7d5d1f1c
| 1,765
|
py
|
Python
|
bitey/cpu/instruction/dec.py
|
jgerrish/bitey
|
a393a83c19338d94116f3405f4b8a0f03ea84d79
|
[
"MIT"
] | null | null | null |
bitey/cpu/instruction/dec.py
|
jgerrish/bitey
|
a393a83c19338d94116f3405f4b8a0f03ea84d79
|
[
"MIT"
] | null | null | null |
bitey/cpu/instruction/dec.py
|
jgerrish/bitey
|
a393a83c19338d94116f3405f4b8a0f03ea84d79
|
[
"MIT"
] | null | null | null |
from bitey.cpu.instruction.instruction import (
Instruction,
IncompleteInstruction,
)
class DE(Instruction):
    "Generic register decrement instruction"
    def __init__(self, name, opcode, description, options, register):
        "Initialize with the register"
        super().__init__(name, opcode, description, options)
        # Name of the CPU register this instruction decrements (e.g. "X" or "Y")
        self.register = register
    def instruction_execute(self, cpu, memory, value, address=None):
        """
        Execute the instruction, decrementing the register by one.
        """
        cpu.registers[self.register].dec()
        self.set_flags(cpu.flags, cpu.registers)
    def set_flags(self, flags, registers):
        # Update the zero flag from the register's new value.
        # NOTE(review): the negative (N) flag is not updated here — confirm
        # whether test_register_result covers it or it is handled elsewhere.
        flags["Z"].test_register_result(registers[self.register])
class DEX(DE):
    "DEX: Decrement Index X by One"
    def __init__(self, name, opcode, description, options):
        # Specialize the generic decrement to the X index register
        super().__init__(name, opcode, description, options, "X")
class DEY(DE):
    "DEY: Decrement Index Y by One"
    def __init__(self, name, opcode, description, options):
        # Specialize the generic decrement to the Y index register
        super().__init__(name, opcode, description, options, "Y")
class DEC(Instruction):
    "Decrement Memory by One"
    def instruction_execute(self, cpu, memory, value, address=None):
        """
        Execute the instruction, decrementing the memory value by one.
        """
        if value is not None:
            value = value - 1
            # Wrap around on underflow: 0x00 - 1 becomes 0xFF (8-bit behavior)
            if value < 0:
                value = 0xFF
            memory.write(address, value)
            self.set_flags(cpu.flags, cpu.registers, value)
        else:
            # The addressing mode produced no operand value to decrement
            raise IncompleteInstruction
    def set_flags(self, flags, registers, value):
        # The zero flag is set when the value is zero, not necessarily on a wrap
        # NOTE(review): Z is never *cleared* for a non-zero result, and the
        # negative (N) flag is not updated — confirm against the flag API.
        if value == 0x00:
            flags["Z"].set()
| 29.416667
| 80
| 0.633994
|
bd81afd164c45d9a7f6c90286c07e2ec8e061598
| 10,156
|
py
|
Python
|
toontown/cogdominium/CogdoMazeGameGuis.py
|
journeyfan/toontown-journey
|
7a4db507e5c1c38a014fc65588086d9655aaa5b4
|
[
"MIT"
] | 1
|
2020-09-27T22:12:47.000Z
|
2020-09-27T22:12:47.000Z
|
toontown/cogdominium/CogdoMazeGameGuis.py
|
journeyfan/toontown-journey
|
7a4db507e5c1c38a014fc65588086d9655aaa5b4
|
[
"MIT"
] | null | null | null |
toontown/cogdominium/CogdoMazeGameGuis.py
|
journeyfan/toontown-journey
|
7a4db507e5c1c38a014fc65588086d9655aaa5b4
|
[
"MIT"
] | 2
|
2020-09-26T20:37:18.000Z
|
2020-11-15T20:55:33.000Z
|
from direct.gui.DirectLabel import DirectLabel
from direct.gui.DirectGui import *
from direct.task.Task import Task
from direct.interval.MetaInterval import Sequence, Parallel
from direct.interval.FunctionInterval import Wait, Func
from pandac.PandaModules import TextNode, NodePath, Point3, CardMaker
from toontown.toonbase import ToontownGlobals
from toontown.toonbase import TTLocalizer
from toontown.toonbase import ToontownIntervals
from toontown.minigame.MazeMapGui import MazeMapGui
from . import CogdoMazeGameGlobals as Globals
from . import CogdoUtil
class CogdoMazeMapGui(MazeMapGui):
    """Mini-map overlay for the Cogdo maze game.

    Extends MazeMapGui with suit markers, water-cooler markers, an entrance
    icon, and an exit icon with open/closed states. Anchored to the
    bottom-right corner of the screen.
    """
    def __init__(self, mazeCollTable):
        MazeMapGui.__init__(self, mazeCollTable, bgColor=Globals.MapGuiBgColor, fgColor=Globals.MapGuiFgColor)
        # Maps a suit object to its marker NodePath on the map
        self._suit2marker = {}
        self._initModel()
        self.setPos(*Globals.MapGuiPos)
        self.setScale(Globals.MapGuiScale)
        self.reparentTo(base.a2dBottomRight)
    def destroy(self):
        # Explicitly free every node we created before the base-class teardown
        for marker in list(self._suit2marker.values()):
            marker.removeNode()
        del self._suit2marker
        self._entrance.removeNode()
        del self._entrance
        self._exit.removeNode()
        del self._exit
        del self._exitOpen
        del self._exitClosed
        self._suitMarkerTemplate.removeNode()
        del self._suitMarkerTemplate
        self._waterCoolerTemplate.removeNode()
        del self._waterCoolerTemplate
        MazeMapGui.destroy(self)
    def _initModel(self):
        """Build the map background, frame, icons, and marker templates."""
        baseName = '**/tt_t_gui_cmg_miniMap_'
        cardModel = CogdoUtil.loadMazeModel('miniMap_cards', group='gui')
        cm = CardMaker('bg')
        cm.setFrame(-1.1, 1.1, -1.1, 1.1)
        bg = self.attachNewNode(cm.generate())
        bg.setColor(*self._bgColor)
        bg.setBin('fixed', 0)
        frame = cardModel.find(baseName + 'frame')
        frame.reparentTo(self)
        frame.setScale(2.5)
        frame.setPos(0.01, 0, -0.01)
        self._entrance = cardModel.find(baseName + 'entrance')
        self._entrance.reparentTo(self)
        self._entrance.setScale(0.35)
        self._entrance.hide()
        # The exit node parents both the open and closed exit cards so they
        # can be positioned/scaled together
        self._exit = NodePath('exit')
        self._exit.setScale(0.35)
        self._exit.reparentTo(self)
        self._exitOpen = cardModel.find(baseName + 'exitOpen')
        self._exitOpen.reparentTo(self._exit)
        self._exitClosed = cardModel.find(baseName + 'exitClosed')
        self._exitClosed.reparentTo(self._exit)
        # Templates are detached from the model and copied once per marker
        self._suitMarkerTemplate = cardModel.find(baseName + 'cogIcon')
        self._suitMarkerTemplate.detachNode()
        self._suitMarkerTemplate.setScale(0.225)
        self._waterCoolerTemplate = cardModel.find(baseName + 'waterDrop')
        self._waterCoolerTemplate.detachNode()
        self._waterCoolerTemplate.setScale(0.225)
        self._exit.hide()
        cardModel.removeNode()
    def addWaterCooler(self, tX, tY):
        """Place a water-cooler marker at maze tile (tX, tY)."""
        marker = NodePath('WaterCoolerMarker-%i-%i' % (tX, tY))
        self._waterCoolerTemplate.copyTo(marker)
        marker.reparentTo(self.maskedLayer)
        x, y = self.tile2gui(tX, tY)
        marker.setPos(*self.gui2pos(x, y))
    def addSuit(self, suit):
        """Create a map marker for *suit*; position it later via updateSuit."""
        marker = NodePath('SuitMarker-%i' % len(self._suit2marker))
        self._suitMarkerTemplate.copyTo(marker)
        marker.reparentTo(self)
        self._suit2marker[suit] = marker
    def removeSuit(self, suit):
        self._suit2marker[suit].removeNode()
        del self._suit2marker[suit]
    def updateSuit(self, suit, tX, tY):
        # Move the suit's marker to maze tile (tX, tY)
        x, y = self.tile2gui(tX, tY)
        self._suit2marker[suit].setPos(*self.gui2pos(x, y))
    def showExit(self):
        # Show the exit in its open state (the closed card is hidden)
        self._exit.show()
        self._exitClosed.hide()
    def hideExit(self):
        self._exit.hide()
    def placeExit(self, tX, tY):
        x, y = self.tile2gui(tX, tY)
        self._exit.setPos(*self.gui2pos(x, y))
        self._exit.setZ(self._exit, 0.3)
    def placeEntrance(self, tX, tY):
        x, y = self.tile2gui(tX, tY)
        self._entrance.setPos(*self.gui2pos(x, y))
        self._entrance.setZ(self._entrance, -0.35)
        self._entrance.show()
class CogdoMazeBossCodeFrame(DirectFrame):
    """One digit of the boss door code: a boss icon with an overlaid number label."""
    def __init__(self, id, code, modelToCopy):
        DirectFrame.__init__(self, relief=None, state=DGG.NORMAL, sortOrder=DGG.BACKGROUND_SORT_INDEX)
        # Index of this digit within the full code; also keys the interval name
        self._id = id
        self._model = modelToCopy.copyTo(self)
        self._model.setPos(0, 0, 0)
        self._bg = self._model.find('**/bossBackground')
        self._bossIcon = self._model.find('**/bossIcon')
        self._bossIconX = self._model.find('**/bossIconX')
        self._bossIconX.reparentTo(self._bossIcon)
        self._bossIconX.hide()
        self._bg.hide()
        # Explicit fixed-bin ordering for the icon and the number background
        self._bossIcon.setBin('fixed', 2)
        self._bg.setBin('fixed', 3)
        self._label = DirectLabel(parent=self._bg, relief=None, scale=Globals.BossCodeFrameLabelScale, text=code, pos=(0, 0, -0.03), text_align=TextNode.ACenter, text_fg=Globals.BossCodeFrameLabelNormalColor, text_shadow=(0, 0, 0, 0), text_font=ToontownGlobals.getSuitFont())
        return
    def destroy(self):
        # Stop any running reveal animation before tearing down the frame
        ToontownIntervals.cleanup('boss_code%i' % self._id)
        DirectFrame.destroy(self)
    def showNumber(self):
        """Reveal this digit: clear the hit tint, mark the icon, and animate."""
        self.setHit(False)
        self._bossIconX.show()
        ToontownIntervals.cleanup('boss_code%i' % self._id)
        ToontownIntervals.start(Sequence(Parallel(ToontownIntervals.getPresentGuiIval(self._bossIcon, '', startPos=(0, 0, -0.15))), Wait(1.0), ToontownIntervals.getPulseLargerIval(self._bg, ''), name='boss_code%i' % self._id))
    def setHit(self, hit):
        # Tint the whole frame while hit; restore full color otherwise
        if hit:
            self._model.setColorScale(Globals.BlinkColor)
        else:
            self._model.setColorScale(1.0, 1.0, 1.0, 1.0)
    def highlightNumber(self):
        self._label['text_fg'] = Globals.BossCodeFrameLabelHighlightColor
class CogdoMazeBossGui(DirectFrame):
    """Panel showing the boss door code as a row of CogdoMazeBossCodeFrame digits.

    When every digit has been revealed the closed door swaps for the open one.
    """
    def __init__(self, code):
        DirectFrame.__init__(self, relief=None, state=DGG.NORMAL, sortOrder=DGG.BACKGROUND_SORT_INDEX)
        self._code = str(code)
        self._codeLength = len(self._code)
        # Count of digits revealed so far (drives the door-open transition)
        self._markersShown = 0
        self._markers = []
        self._initModel()
        self.setPos(*Globals.BossGuiPos)
        self.setScale(Globals.BossGuiScale)
        self.hide()
        return
    def destroy(self):
        ToontownIntervals.cleanup('bosscodedoor')
        self._model.removeNode()
        del self._model
        self._titleLabel.removeNode()
        del self._titleLabel
        for marker in self._markers:
            marker.destroy()
        del self._markers
        DirectFrame.destroy(self)
    def _initModel(self):
        """Load the boss panel model and lay the code digits out in a centered row."""
        codeFrameGap = Globals.BossCodeFrameGap
        codeFrameWidth = Globals.BossCodeFrameWidth
        self._model = CogdoUtil.loadMazeModel('bossCog', group='gui')
        self._model.reparentTo(self)
        self._model.find('**/frame').setBin('fixed', 1)
        titleLabelPos = self._model.find('**/title_label_loc').getPos()
        self._titleLabel = DirectLabel(parent=self, relief=None, scale=Globals.BossGuiTitleLabelScale, text=TTLocalizer.CogdoMazeGameBossGuiTitle.upper(), pos=titleLabelPos, text_align=TextNode.ACenter, text_fg=(0, 0, 0, 1), text_shadow=(0, 0, 0, 0), text_font=ToontownGlobals.getSuitFont())
        self._titleLabel.setBin('fixed', 1)
        bossCard = self._model.find('**/bossCard')
        self._openDoor = self._model.find('**/doorOpen')
        self._closedDoor = self._model.find('**/doorClosed')
        self._openDoor.stash()
        # Center the row: total width is (n-1) gaps+frames, minus one gap
        spacingX = codeFrameWidth + codeFrameGap
        startX = -0.5 * ((self._codeLength - 1) * spacingX - codeFrameGap)
        for i in range(self._codeLength):
            marker = CogdoMazeBossCodeFrame(i, self._code[i], bossCard)
            marker.reparentTo(self)
            marker.setPos(bossCard, startX + spacingX * i, 0, 0)
            self._markers.append(marker)
        bossCard.removeNode()
        return
    def showHit(self, bossIndex):
        self._markers[bossIndex].setHit(True)
    def showNumber(self, bossIndex):
        """Reveal digit *bossIndex*; open the door once all digits are shown."""
        self._markers[bossIndex].setHit(False)
        self._markers[bossIndex].showNumber()
        self._markersShown += 1
        if self._markersShown == self._codeLength:
            self._openDoor.unstash()
            self._closedDoor.stash()
            ToontownIntervals.start(ToontownIntervals.getPulseLargerIval(self._openDoor, 'bosscodedoor'))
class CogdoMazeHud:
    """In-world HUD for the Cogdo maze: a 3D arrow that points at a quest target."""

    def __init__(self):
        # Handle of the running arrow-update task, or None when stopped
        self._update = None
        self._initQuestArrow()
        return

    def _initQuestArrow(self):
        # Borrow the arrow art from the matching-game GUI model
        matchingGameGui = loader.loadModel('phase_3.5/models/gui/matching_game_gui')
        arrow = matchingGameGui.find('**/minnieArrow')
        arrow.setScale(Globals.QuestArrowScale)
        arrow.setColor(*Globals.QuestArrowColor)
        arrow.setHpr(90, -90, 0)
        self._questArrow = NodePath('Arrow')
        arrow.reparentTo(self._questArrow)
        self._questArrow.reparentTo(render)
        self.hideQuestArrow()
        matchingGameGui.removeNode()

    def destroy(self):
        self.__stopUpdateTask()
        self._questArrow.removeNode()
        self._questArrow = None
        return

    def showQuestArrow(self, parent, nodeToPoint, offset=None):
        """Show the arrow at *parent* + *offset*, pointing at *nodeToPoint*.

        Bug fix: the original signature used a mutable default argument
        (``offset=Point3(0, 0, 0)``) that is created once and shared across
        every call; a fresh Point3 is now created per call.
        """
        if offset is None:
            offset = Point3(0, 0, 0)
        self._questArrowNodeToPoint = nodeToPoint
        self._questArrowParent = parent
        self._questArrowOffset = offset
        self._questArrow.unstash()
        self._questArrowVisible = True
        self.__startUpdateTask()

    def hideQuestArrow(self):
        self._questArrow.stash()
        self.__stopUpdateTask()
        self._questArrowVisible = False
        self._questArrowNodeToPoint = None
        return

    def __startUpdateTask(self):
        self.__stopUpdateTask()
        self._update = taskMgr.add(self._updateTask, 'CogdoMazeHud_Update', 45)

    def __stopUpdateTask(self):
        # Bug fix: clear the handle after removal so a later stop (e.g. from
        # both hideQuestArrow and destroy) doesn't re-remove a stale task.
        if self._update is not None:
            taskMgr.remove(self._update)
            self._update = None
        return

    def _updateTask(self, task):
        # Keep the arrow glued to its parent and aimed at the target each frame
        if self._questArrowVisible:
            self._questArrow.setPos(self._questArrowParent, self._questArrowOffset)
            self._questArrow.lookAt(self._questArrowNodeToPoint)
        return Task.cont
| 37.895522
| 291
| 0.662466
|
b78bf3a2589851470fee11bdb8300852a533b84a
| 1,027
|
py
|
Python
|
ultros_site/routes/news_view.py
|
tsao-chi/Site
|
e3fc4574101b8cdacb2a28e54495da5376dd5396
|
[
"MIT",
"Artistic-2.0",
"BSD-3-Clause"
] | 2
|
2017-06-25T20:57:40.000Z
|
2017-11-27T15:13:35.000Z
|
ultros_site/routes/news_view.py
|
tsao-chi/Site
|
e3fc4574101b8cdacb2a28e54495da5376dd5396
|
[
"MIT",
"Artistic-2.0",
"BSD-3-Clause"
] | 5
|
2017-06-23T12:05:57.000Z
|
2021-06-30T05:46:44.000Z
|
ultros_site/routes/news_view.py
|
tsao-chi/Site
|
e3fc4574101b8cdacb2a28e54495da5376dd5396
|
[
"MIT",
"Artistic-2.0",
"BSD-3-Clause"
] | 3
|
2018-01-08T04:57:12.000Z
|
2020-01-22T08:03:56.000Z
|
# coding=utf-8
import re
from falcon.errors import HTTPNotFound
from sqlalchemy.orm.exc import NoResultFound
from ultros_site.base_sink import BaseSink
from ultros_site.database.schema.news_post import NewsPost
from ultros_site.markdown import Markdown
__author__ = "Gareth Coles"
class NewsViewRoute(BaseSink):
    """Render a single published news post plus the three most recent posts."""

    route = re.compile(r"/news/(?P<post_id>\d+)")

    def __call__(self, req, resp, post_id):
        db_session = req.context["db_session"]

        # Sidebar content: the three newest published posts.
        recent_posts = (
            db_session.query(NewsPost)
            .filter_by(published=True)
            .order_by(NewsPost.posted.desc())[0:3]
        )

        try:
            post = (
                db_session.query(NewsPost)
                .filter_by(id=post_id, published=True)
                .one()
            )
        except NoResultFound:
            raise HTTPNotFound()

        # Derive a summary from the markdown body when none was stored.
        if post.summary is None:
            post.summary = Markdown(post.markdown).summary

        self.render_template(
            req, resp, "news_view.html",
            post=post,
            news_posts=recent_posts
        )
| 29.342857
| 111
| 0.681597
|
7f1d20ab240eeca067614c3df366d983ba7b220d
| 11,634
|
py
|
Python
|
anima/rig/utilityFuncs.py
|
MehmetErer/anima
|
f92ae599b5a4c181fc8e131a9ccdde537e635303
|
[
"MIT"
] | 101
|
2015-02-08T22:20:11.000Z
|
2022-03-21T18:56:42.000Z
|
anima/rig/utilityFuncs.py
|
Khosiyat/anima
|
f631c08400547f49ac5f1feeb730f22c255eb771
|
[
"MIT"
] | 23
|
2016-11-30T08:33:21.000Z
|
2021-01-26T12:11:12.000Z
|
anima/rig/utilityFuncs.py
|
Khosiyat/anima
|
f631c08400547f49ac5f1feeb730f22c255eb771
|
[
"MIT"
] | 27
|
2015-01-03T06:49:45.000Z
|
2021-12-28T03:30:54.000Z
|
# -*- coding: utf-8 -*-
import pymel.core as pm
from pymel.all import mel
class UtilityFuncs():
    """Small static rigging helpers around pymel.

    Also exposes ``ctrlShapes``, a mapping of controller-shape names to
    pymel command strings that build the corresponding NURBS curves; the
    strings are executed via :meth:`evaluate`.
    """
    @staticmethod
    def selHierarchy(root):
        # Select the node and its whole hierarchy; return the selection list
        pm.select(root, hi=1)
        return pm.ls(sl = 1)
    @staticmethod
    def renameHierarchy(hierarchy, name):
        # Rename every node in the hierarchy; "#" makes Maya auto-number them
        for s in hierarchy:
            pm.rename(s, (name + "#"))
        return hierarchy
    @staticmethod
    def duplicateObject(object):
        # Duplicate the object and return the first (top-level) copy
        dup = pm.duplicate(object)
        return dup[0]
    @staticmethod
    def typeCheck(instanceName, className) :
        """Raise TypeError unless *instanceName* is an instance of *className*.

        Bug fix: the original passed the format values as a second argument
        to TypeError instead of %-formatting them into the message, so the
        error text was never interpolated.
        """
        if not isinstance(instanceName, (className)):
            raise TypeError("%s should be an instance of %s"
                            % (instanceName, className))
    @staticmethod
    def evaluate(command):
        # Evaluate the given string (e.g. a ctrlShapes entry) and return its result.
        # SECURITY: eval() executes arbitrary code — only pass trusted strings
        # such as the ctrlShapes values below, never user-supplied input.
        return eval(command)
    @staticmethod
    def connect(sourceObj, sourceAttr, destObj, destAttr):
        # Connect sourceObj.sourceAttr -> destObj.destAttr
        source = sourceObj + "." + sourceAttr
        dest = destObj + "." + destAttr
        pm.connectAttr(source, dest)
    @staticmethod
    def rename_byType(nodes):
        # Build "<name><nodeType>" names for each node (does not rename in Maya)
        temp_list = []
        for nd in nodes:
            temp_name = nd + pm.nodeType(nd)
            temp_list.append(temp_name)
        return temp_list
    @staticmethod
    def rename(object, name_in):
        return (pm.rename(object, name_in))
    @staticmethod
    def position(object):
        # World-space translation of the object
        return pm.xform(object, q = 1, ws = 1, t = 1)
    # Controller-shape command strings, evaluated via UtilityFuncs.evaluate()
    ctrlShapes = {"circle" : ("pm.delete((pm.circle( nr=(0, 1, 0), c=(0, 0, 0), sw=360, r=1)), ch = 1)"),
                  "arrowCtrl" : ("""pm.curve(per=True, d = 1, p = [ ( -1, -0.00207849, 0 ), ( 1, -0.00207849, 0 ),
                  ( 1, 2.997922, 0 ),( 2, 2.997922, 0 ), ( 0, 4.997922, 0 ), ( -2, 2.997922, 0 ),
                  ( -1, 2.997922, 0 ), ( -1, -0.00207849, 0 ) ],
                  k = ([0 , 1 , 2 , 3 , 4 , 5 , 6 , 7]))"""),
                  "fourSidedArrowCtrl" : ("""pm.curve(per=True, d = 1, p = [(-0.31907, 1.758567, 0),
                  (-0.31907, 0.272474, 0), (-1.758567, 0.272474, 0) ,
                  (-1.758567, 1.172378, 0), (-2.930946, 0, 0 ), ( -1.758567, -1.172378, 0 ),
                  ( -1.758567, -0.272474, 0 ),( -0.31907, -0.272474, 0 ), ( -0.31907, -1.758567, 0 ),
                  ( -1.172378, -1.758567, 0 ), ( 0, -2.930946, 0 ), ( 1.172378, -1.758567, 0 ),
                  ( 0.31907, -1.758567, 0 ),( 0.31907, -0.272474, 0 ),( 1.758567, -0.272474, 0 ),
                  ( 1.758567, -1.172378, 0 ), ( 2.930946, 0, 0 ), ( 1.758567, 1.172378, 0 ),
                  ( 1.7585607, 0.272474, 0 ), ( 0.31907, 0.272474, 0 ), ( 0.31907, 1.758567, 0 ),
                  ( 1.172378, 1.758567, 0 ), ( 0, 2.930946, 0 ),( -1.172378, 1.758567, 0 ),
                  ( -0.31907, 1.758567, 0) ],
                  k = ([0 , 1 , 2 , 3 , 4 , 5 , 6 , 7 , 8 , 9 , 10 , 11 , 12 , 13 , 14 , 15 , 16 ,
                  17 , 18 , 19 , 20 , 21 , 22 , 23 , 24]))"""),
                  "ikCtrl" : ("""pm.curve(per=True, d = 1, p = [ ( 0.552734, 0, -0.138183), ( 0.552734, 0, -0.184245),
                  ( 0.552734, 0, -0.230306),
                  ( 0.552734, 0, -0.276367), ( 0.644856, 0, -0.184245), ( 0.736978, 0, -0.0921223),
                  ( 0.829101, 0, 0), ( 0.736978, 0, 0.0921223), ( 0.644856, 0, 0.184245),
                  ( 0.552734, 0, 0.276367), ( 0.552734, 0, 0.230306), ( 0.552734, 0, 0.184245),
                  ( 0.552734, 0, 0.138183), ( 0.517927, 0, 0.138183), ( 0.48312, 0, 0.138183),
                  ( 0.448313, 0, 0.138183), ( 0.444285, 0, 0.150144), ( 0.436622, 0, 0.170644),
                  ( 0.419439, 0, 0.209124), ( 0.402845, 0, 0.239713), ( 0.386952, 0, 0.264852),
                  ( 0.371754, 0, 0.286013), ( 0.359029, 0, 0.301972), ( 0.342183, 0, 0.321041),
                  ( 0.32585, 0, 0.337618), ( 0.305397, 0, 0.356146), ( 0.290641, 0, 0.368196),
                  ( 0.270877, 0, 0.382837), ( 0.256838, 0, 0.392304), ( 0.233632, 0, 0.406427),
                  ( 0.208595, 0, 0.419739), ( 0.181267, 0, 0.432208), ( 0.158735, 0, 0.440999),
                  ( 0.138233, 0, 0.447895), ( 0.138183, 0, 0.481828), ( 0.138183, 0, 0.517281),
                  ( 0.138183, 0, 0.552734), ( 0.184245, 0, 0.552734), ( 0.230306, 0, 0.552734),
                  ( 0.276367, 0, 0.552734), ( 0.184245, 0, 0.644856), ( 0.0921223, 0, 0.736978),
                  ( 0, 0, 0.829101), ( -0.0921223, 0, 0.736978), ( -0.184245, 0, 0.644856),
                  ( -0.276367, 0, 0.552734), ( -0.230306, 0, 0.552734), ( -0.184245, 0, 0.552734),
                  ( -0.138183, 0, 0.552734), ( -0.138183, 0, 0.517349), ( -0.138183, 0, 0.481964),
                  ( -0.138183, 0, 0.446579), ( -0.157573, 0, 0.440389), ( -0.195184, 0, 0.425554),
                  ( -0.226251, 0, 0.41026), ( -0.261537, 0, 0.389117), ( -0.287101, 0, 0.37091),
                  ( -0.313357, 0, 0.349202), ( -0.327368, 0, 0.336149), ( -0.344095, 0, 0.318984),
                  ( -0.366533, 0, 0.292752), ( -0.382675, 0, 0.271108), ( -0.404132, 0, 0.237612),
                  ( -0.417852, 0, 0.212369), ( -0.431433, 0, 0.183106), ( -0.441634, 0, 0.156968),
                  ( -0.449357, 0, 0.133453), ( -0.464563, 0, 0.135341), ( -0.489623, 0, 0.137181),
                  ( -0.509494, 0, 0.137868), ( -0.526834, 0, 0.138116), ( -0.542441, 0, 0.138179),
                  ( -0.552734, 0, 0.138183), ( -0.552734, 0, 0.184245), ( -0.552734, 0, 0.230306),
                  ( -0.552734, 0, 0.276367), ( -0.644856, 0, 0.184245), ( -0.736978, 0, 0.0921223),
                  ( -0.829101, 0, 0), ( -0.736978, 0, -0.0921223), ( -0.644856, 0, -0.184245),
                  ( -0.552734, 0, -0.276367), ( -0.552734, 0, -0.230306), ( -0.552734, 0, -0.184245),
                  ( -0.552734, 0, -0.138183), ( -0.518383, 0, -0.138183), ( -0.484033, 0, -0.138183),
                  ( -0.448148, 0, -0.137417), ( -0.438965, 0, -0.164253), ( -0.430847, 0, -0.184482),
                  ( -0.420951, 0, -0.206126), ( -0.412191, 0, -0.223225), ( -0.395996, 0, -0.251053),
                  ( -0.388009, 0, -0.263343), ( -0.36993, 0, -0.288412), ( -0.352908, 0, -0.309157),
                  ( -0.331158, 0, -0.33242), ( -0.311574, 0, -0.350787), ( -0.287785, 0, -0.370404),
                  ( -0.266573, 0, -0.385789), ( -0.242718, 0, -0.401044), ( -0.216381, 0, -0.41566),
                  ( -0.190836, 0, -0.427831), ( -0.163247, 0, -0.438946), ( -0.149238, 0, -0.443829),
                  ( -0.138183, 0, -0.447335), ( -0.138183, 0, -0.482468), ( -0.138183, 0, -0.517601),
                  ( -0.138183, 0, -0.552734), ( -0.184245, 0, -0.552734), ( -0.230306, 0, -0.552734),
                  ( -0.276367, 0, -0.552734), ( -0.184245, 0, -0.644856), ( -0.0921223, 0, -0.736978),
                  ( 0, 0, -0.829101), ( 0.0921223, 0, -0.736978), ( 0.184245, 0, -0.644856),
                  ( 0.276367, 0, -0.552734), ( 0.230306, 0, -0.552734), ( 0.184245, 0, -0.552734),
                  ( 0.138183, 0, -0.552734), ( 0.138183, 0, -0.517258), ( 0.138183, 0, -0.481783),
                  ( 0.138183, 0, -0.446308), ( 0.168167, 0, -0.436473), ( 0.190718, 0, -0.427463),
                  ( 0.207556, 0, -0.419785), ( 0.22845, 0, -0.409061), ( 0.259644, 0, -0.39037),
                  ( 0.28708, 0, -0.37093), ( 0.309495, 0, -0.352609), ( 0.341156, 0, -0.322135),
                  ( 0.358246, 0, -0.302914), ( 0.375889, 0, -0.280529), ( 0.387391, 0, -0.26426),
                  ( 0.402652, 0, -0.240132), ( 0.411495, 0, -0.224515), ( 0.423963, 0, -0.199829),
                  ( 0.430266, 0, -0.185834), ( 0.437317, 0, -0.16858), ( 0.444059, 0, -0.150009),
                  ( 0.447312, 0, -0.14009), ( 0.480289, 0, -0.138183), ( 0.516511, 0, -0.138183),
                  ( 0.552734, 0, -0.138183) ] ,
                  k = ( [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13,
                  14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26,
                  27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
                  40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52,
                  53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65,
                  66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78,
                  79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91,
                  92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103,
                  104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114,
                  115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125,
                  126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136,
                  137, 138, 139, 140, 141, 142, 143, 144] ))"""),
                  "bodyCtrl" : ("""pm.curve(per=True, d = 1, p = [( -1, 0, 1), ( -1, 0, -1), ( 1, 0, -1), ( 1, 0, 1),
                  ( -1, 0, 1) ] , k = [0, 1, 2, 3, 4 ] )"""),
                  "elbowCtrl" : ("""pm.curve(d = 3, p = [ ( 0, -0.0728115, -0.263333), ( 0, 0.0676745, -0.30954),
                  ( 0, 0.166422, -0.162811),( 0, 0.316242, 0.066353), ( 0, 0.263828, 0.160055),
                  ( 0, 0.0048945, 0.30954), ( 0, -0.117923, 0.298165), ( 0, -0.316242, 0.027507),
                  ( 0, -0.265623, -0.052244), ( 0, -0.0394945, -0.211749), ( 0, 0.190873, 0.097192),
                  ( 0, -0.139762, 0.142256), ( 0, -0.0829025, 0.013979), ( 0, -0.0666985, -0.054076),
                  ( 0, -0.0205975, 0.039797) ],
                  k = [0, 0, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 12, 12] )""")
                  }
| 66.48
| 124
| 0.372271
|
a82e1f47cd99904ce91095f8a47d678591b71b20
| 1,020
|
py
|
Python
|
sample/web/app/databases.py
|
hdknr/django-mautic
|
aa2e5304936541c9266215df00469576142ee906
|
[
"BSD-2-Clause-FreeBSD"
] | 1
|
2021-02-20T08:48:05.000Z
|
2021-02-20T08:48:05.000Z
|
sample/web/app/databases.py
|
hdknr/django-mautic
|
aa2e5304936541c9266215df00469576142ee906
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
sample/web/app/databases.py
|
hdknr/django-mautic
|
aa2e5304936541c9266215df00469576142ee906
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
''' DatabaseRouter
'''
class DatabaseRouter(object):
    """Django database router.

    Apps listed in ``_maps[<alias>]['apps']`` read/write/migrate on
    ``<alias>``; every other app uses the ``'default'`` database.
    """

    # alias -> config; extend 'apps' with every app_label routed to that alias
    _maps = {
        'mautic': {
            'apps': ['mautic'],
        },
    }

    def get_database(self, model, **hints):
        """Return the database alias configured for *model*'s app."""
        for db, conf in self._maps.items():
            if model._meta.app_label in conf['apps']:
                return db
        return 'default'

    def db_for_read(self, model, **hints):
        return self.get_database(model, **hints)

    def db_for_write(self, model, **hints):
        return self.get_database(model, **hints)

    def allow_migrate(self, db, app_label, model_name=None, **hints):
        """Allow migrations only on the database an app is mapped to.

        Bug fix: the original returned the mapped alias (an always-truthy,
        non-empty string) regardless of *db*, so every app was allowed to
        migrate on every database. Django expects a boolean here.
        """
        for alias, conf in self._maps.items():
            if app_label in conf['apps']:
                return db == alias
        # Unmapped apps migrate only on the default database
        return db == 'default'

    def allow_relation(self, obj1, obj2, **hints):
        # Permit relations only between objects stored in the same database
        db1, db2 = [self.get_database(o) for o in [obj1, obj2]]
        if db1 == db2 and db1:
            return db1
        return None

    @classmethod
    def router(cls):
        """Dotted path of this router for settings.DATABASE_ROUTERS."""
        return "{0}.{1}".format(cls.__module__, cls.__name__)
| 24.878049
| 69
| 0.557843
|
c32db957456b92631f4b713bc8ac53548a179f3d
| 367
|
py
|
Python
|
performStringShifts.py
|
Dhawal-Modi/30_Days_Of_LeetCode
|
472d36dec04576e1ecd4aa7f7609464c99e2f6ba
|
[
"CNRI-Python"
] | null | null | null |
performStringShifts.py
|
Dhawal-Modi/30_Days_Of_LeetCode
|
472d36dec04576e1ecd4aa7f7609464c99e2f6ba
|
[
"CNRI-Python"
] | null | null | null |
performStringShifts.py
|
Dhawal-Modi/30_Days_Of_LeetCode
|
472d36dec04576e1ecd4aa7f7609464c99e2f6ba
|
[
"CNRI-Python"
] | null | null | null |
class Solution:
    def stringShift(self, s: str, shift: "list[list[int]]") -> str:
        """Return *s* after applying each ``[direction, amount]`` shift in order.

        direction 0 rotates the string left by *amount*; direction 1 rotates
        it right. Opposite shifts cancel, so only the net rotation is applied.

        Fixes: the original annotated ``shift`` with ``List`` without
        importing it from ``typing`` (NameError when the module is imported
        standalone) and raised ZeroDivisionError for an empty string.
        """
        if not s:
            return s
        # Net leftward rotation: left shifts add, right shifts subtract.
        net_left = sum(amount if direction == 0 else -amount
                       for direction, amount in shift)
        net_left %= len(s)
        return s[net_left:] + s[:net_left]
| 30.583333
| 65
| 0.506812
|
fdb29efb431d47920d61ea375cf064d7ca0c5bfd
| 3,235
|
py
|
Python
|
profiles_project/settings.py
|
sindylpy/profiles-rest-api
|
72516064d9ad16fdafcf893ab6343475e6ecdc27
|
[
"MIT"
] | null | null | null |
profiles_project/settings.py
|
sindylpy/profiles-rest-api
|
72516064d9ad16fdafcf893ab6343475e6ecdc27
|
[
"MIT"
] | 5
|
2021-03-19T11:38:44.000Z
|
2022-02-10T11:42:58.000Z
|
profiles_project/settings.py
|
sindylpy/profiles-rest-api
|
72516064d9ad16fdafcf893ab6343475e6ecdc27
|
[
"MIT"
] | null | null | null |
"""
Django settings for profiles_project project.
Generated by 'django-admin startproject' using Django 2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '%7%p7t&&tljbdv5@8_v#t@&^2$0vwbw9b%n91o_6&czgb5)x5%'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'rest_framework.authtoken',
'profiles_api',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'profiles_project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'profiles_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
AUTH_USER_MODEL ='profiles_api.UserProfile'
| 25.674603
| 91
| 0.699536
|
0614995142f3a41d0af3cd8cc070c5a2563e0b12
| 1,744
|
py
|
Python
|
tools/patch.py
|
mheinzler/z-sublime-plugins
|
7ab235c025003206c3250d1de9907081eccb2dfb
|
[
"MIT"
] | null | null | null |
tools/patch.py
|
mheinzler/z-sublime-plugins
|
7ab235c025003206c3250d1de9907081eccb2dfb
|
[
"MIT"
] | null | null | null |
tools/patch.py
|
mheinzler/z-sublime-plugins
|
7ab235c025003206c3250d1de9907081eccb2dfb
|
[
"MIT"
] | null | null | null |
"""Patch functions with a replacement."""
from collections import namedtuple
# a patch object containing information about a replaced function
Patch = namedtuple('Patch', [
    'cls',          # class whose attribute is patched
    'name',         # attribute name of the patched function
    'original',     # original function, reinstalled by restore_patches()
    'replacement'   # wrapper installed by apply_patches()
])
# the list of patched functions (do not overwrite the previous patches when the
# module is reloaded)
try:
    patches # noqa
except NameError:
    # first load: maps a module name -> list of Patch records it registered
    patches = {}
def patch(cls):
    """Patch a class method with a replacement function."""
    def decorator(replacement):
        patch = None

        # wrap the replacement so it is also handed the patch object;
        # the closure picks up `patch` once it is assigned below
        def replacement_wrapper(*args, **kwargs):
            return replacement(patch, *args, **kwargs)

        # record the information about this patch
        name = replacement.__name__
        patch = Patch(
            cls=cls,
            name=name,
            original=getattr(cls, name),
            replacement=replacement_wrapper
        )

        # register the patch under the replacement's defining module
        patches.setdefault(replacement.__module__, []).append(patch)

        # return the function unchanged
        return replacement
    return decorator
def apply_patches(module):
    """Apply all patches registered by a module."""
    for entry in patches.get(module, []):
        setattr(entry.cls, entry.name, entry.replacement)
def restore_patches(module):
    """Restore all original functions patched by a module."""
    # popping also removes the module's patches from the registry
    for entry in patches.pop(module, []):
        setattr(entry.cls, entry.name, entry.original)
| 26.029851
| 79
| 0.638188
|
e31273a31b362b993c01c799283e5b2ad7889958
| 184
|
py
|
Python
|
w2020/w10/w10_9/app/apps/main.py
|
abrance/mine
|
d4067bf6fb158ebaea3eb7a516ae372dcb8cf419
|
[
"MIT"
] | null | null | null |
w2020/w10/w10_9/app/apps/main.py
|
abrance/mine
|
d4067bf6fb158ebaea3eb7a516ae372dcb8cf419
|
[
"MIT"
] | null | null | null |
w2020/w10/w10_9/app/apps/main.py
|
abrance/mine
|
d4067bf6fb158ebaea3eb7a516ae372dcb8cf419
|
[
"MIT"
] | null | null | null |
from flask import Flask
app = Flask(__name__)
@app.route("/")
def index():
    """Serve the site root."""
    return "<h1> index </h1>"
def run():
    """Start the Flask development server.

    NOTE(review): debug=True is for local development only; disable it
    before deploying.
    """
    app.run(debug=True)
if __name__ == "__main__":
    run()
| 10.823529
| 29
| 0.592391
|
43e3a67c69e530a1ba128045bd80e196783a5b90
| 320
|
py
|
Python
|
DataTransformation.py
|
rohandatta/Web-App
|
20a22f93932550c76306ee933bed34c770ae450c
|
[
"BSD-2-Clause"
] | null | null | null |
DataTransformation.py
|
rohandatta/Web-App
|
20a22f93932550c76306ee933bed34c770ae450c
|
[
"BSD-2-Clause"
] | null | null | null |
DataTransformation.py
|
rohandatta/Web-App
|
20a22f93932550c76306ee933bed34c770ae450c
|
[
"BSD-2-Clause"
] | null | null | null |
import pandas as pd
from sklearn.preprocessing import StandardScaler
def scaler(X_test, train_path='C:/Users/acer/Desktop/Web App/data/heart.csv'):
    """Scale ``X_test`` with a StandardScaler fitted on the training features.

    Bug fix: the original fitted/transformed but then returned the raw,
    unscaled ``X_test``. It also shadowed its own name with the local
    ``scaler`` variable and hard-coded the CSV path; the path is now a
    backward-compatible keyword parameter.

    Parameters
    ----------
    X_test : array-like of shape (n_samples, n_features)
        Feature rows to scale.
    train_path : str, optional
        CSV whose columns (all but the last, assumed to be the target)
        define the fitted mean/variance.

    Returns
    -------
    ndarray
        ``X_test`` transformed into the training feature space.
    """
    df = pd.read_csv(train_path)
    X = df.iloc[:, :-1]
    std_scaler = StandardScaler()
    std_scaler.fit(X)
    return std_scaler.transform(X_test)
| 21.333333
| 68
| 0.69375
|
2295e43ee6b4221d86ce9b821cb1117aeaa81a26
| 22,262
|
py
|
Python
|
blink/main_dense.py
|
JimXiongGM/BLINK
|
4668bf0f1d25d9f3db4aae5fda0e8d00c7275a4d
|
[
"MIT"
] | null | null | null |
blink/main_dense.py
|
JimXiongGM/BLINK
|
4668bf0f1d25d9f3db4aae5fda0e8d00c7275a4d
|
[
"MIT"
] | null | null | null |
blink/main_dense.py
|
JimXiongGM/BLINK
|
4668bf0f1d25d9f3db4aae5fda0e8d00c7275a4d
|
[
"MIT"
] | null | null | null |
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
import argparse
import json
import sys
from tqdm import tqdm
import logging
import torch
import numpy as np
from colorama import init
from termcolor import colored
import ner as NER
from torch.utils.data import DataLoader, SequentialSampler, TensorDataset
from biencoder.biencoder import BiEncoderRanker, load_biencoder
from crossencoder.crossencoder import CrossEncoderRanker, load_crossencoder
from biencoder.data_process import (
process_mention_data,
get_candidate_representation,
)
import candidate_ranking.utils as utils
from crossencoder.train_cross import modify, evaluate
from crossencoder.data_process import prepare_crossencoder_data
from indexer.faiss_indexer import DenseFlatIndexer, DenseHNSWFlatIndexer
# termcolor background-color names cycled through so that successive
# mentions in the console output get visually distinct highlights.
HIGHLIGHTS = [
    "on_red",
    "on_green",
    "on_yellow",
    "on_blue",
    "on_magenta",
    "on_cyan",
]
def _print_colorful_text(input_sentence, samples):
    """Print ``input_sentence`` with every detected mention highlighted.

    Each ``samples`` entry must carry ``start_pos``/``end_pos`` character
    offsets into ``input_sentence``; mentions are colored with a rotating
    background from HIGHLIGHTS. When no mentions were found the sentence
    is printed plain, preceded by a failure notice.

    Bug fix: the "Failed to identify entity" notice used to print
    unconditionally, even when mentions WERE found; it belongs only to
    the empty-``samples`` branch.
    """
    init()  # colorama: enable ANSI colors on Windows consoles
    msg = ""
    if samples and (len(samples) > 0):
        # text before the first mention
        msg += input_sentence[0 : int(samples[0]["start_pos"])]
        for idx, sample in enumerate(samples):
            # the mention itself, highlighted
            msg += colored(
                input_sentence[int(sample["start_pos"]) : int(sample["end_pos"])],
                "grey",
                HIGHLIGHTS[idx % len(HIGHLIGHTS)],
            )
            if idx < len(samples) - 1:
                # gap between this mention and the next
                msg += input_sentence[
                    int(sample["end_pos"]) : int(samples[idx + 1]["start_pos"])
                ]
            else:
                # trailing text after the last mention
                msg += input_sentence[int(sample["end_pos"]) :]
    else:
        msg = input_sentence
        print("Failed to identify entity from text:")
    print("\n" + str(msg) + "\n")
def _print_colorful_prediction(
    idx, sample, e_id, e_title, e_text, e_url, show_url=False
):
    """Print one highlighted mention followed by its predicted entity summary."""
    highlight = HIGHLIGHTS[idx % len(HIGHLIGHTS)]
    print(colored(sample["mention"], "grey", highlight))
    summary = "id:{}\ntitle:{}\ntext:{}\n".format(e_id, e_title, e_text[:256])
    if show_url:
        summary += "url:{}\n".format(e_url)
    print(summary)
def _annotate(ner_model, input_sentences):
ner_output_data = ner_model.predict(input_sentences)
sentences = ner_output_data["sentences"]
mentions = ner_output_data["mentions"]
samples = []
for mention in mentions:
record = {}
record["label"] = "unknown"
record["label_id"] = -1
# LOWERCASE EVERYTHING !
record["context_left"] = sentences[mention["sent_idx"]][
: mention["start_pos"]
].lower()
record["context_right"] = sentences[mention["sent_idx"]][
mention["end_pos"] :
].lower()
record["mention"] = mention["text"].lower()
record["start_pos"] = int(mention["start_pos"])
record["end_pos"] = int(mention["end_pos"])
record["sent_idx"] = mention["sent_idx"]
samples.append(record)
return samples
def _load_candidates(
    entity_catalogue, entity_encoding, faiss_index=None, index_path=None, logger=None
):
    """Load the entity catalogue and either its dense encodings or a faiss index.

    Returns a 6-tuple (candidate_encoding, title2id, id2title, id2text,
    wikipedia_id2local_id, indexer). ``candidate_encoding`` is None when a
    faiss index is used; ``indexer`` is None otherwise.
    """
    # only load candidate encoding if not using faiss index
    if faiss_index is None:
        candidate_encoding = torch.load(entity_encoding)
        indexer = None
    else:
        if logger:
            logger.info("Using faiss index to retrieve entities.")
        candidate_encoding = None
        assert index_path is not None, "Error! Empty indexer path."
        if faiss_index == "flat":
            indexer = DenseFlatIndexer(1)
        elif faiss_index == "hnsw":
            indexer = DenseHNSWFlatIndexer(1)
        else:
            raise ValueError("Error! Unsupported indexer type! Choose from flat,hnsw.")
        indexer.deserialize_from(index_path)

    # load all the 5903527 entities
    title2id = {}
    id2title = {}
    id2text = {}
    wikipedia_id2local_id = {}
    local_idx = 0
    with open(entity_catalogue, "r") as fin:
        lines = fin.readlines()
        for line in lines:
            entity = json.loads(line)

            if "idx" in entity:
                # "idx" is expected to look like ".../?curid=<wikipedia id>";
                # fall back to the raw string when no curid marker exists
                split = entity["idx"].split("curid=")
                if len(split) > 1:
                    wikipedia_id = int(split[-1].strip())
                else:
                    wikipedia_id = entity["idx"].strip()

                assert wikipedia_id not in wikipedia_id2local_id
                wikipedia_id2local_id[wikipedia_id] = local_idx

            # local ids are assigned in catalogue order
            title2id[entity["title"]] = local_idx
            id2title[local_idx] = entity["title"]
            id2text[local_idx] = entity["text"]
            local_idx += 1
    return (
        candidate_encoding,
        title2id,
        id2title,
        id2text,
        wikipedia_id2local_id,
        indexer,
    )
def __map_test_entities(test_entities_path, title2id, logger):
    """Map a separate KB's entity ids onto local catalogue ids, by title.

    Entities whose title is not in ``title2id`` are counted as missing and
    skipped (only a log line reports them).
    """
    # load the 732859 tac_kbp_ref_know_base entities
    kb2id = {}
    missing_pages = 0
    n = 0
    with open(test_entities_path, "r") as fin:
        lines = fin.readlines()
        for line in lines:
            entity = json.loads(line)
            if entity["title"] not in title2id:
                missing_pages += 1
            else:
                kb2id[entity["entity_id"]] = title2id[entity["title"]]
            n += 1
    if logger:
        logger.info("missing {}/{} pages".format(missing_pages, n))
    return kb2id
def __load_test(test_filename, kb2id, wikipedia_id2local_id, logger):
    """Read a jsonl test file, resolving each label to a local entity id.

    Samples whose label cannot be resolved (against ``kb2id`` when given,
    otherwise against ``wikipedia_id2local_id``) are silently dropped;
    contexts and mentions are lowercased.
    """
    test_samples = []
    with open(test_filename, "r") as fin:
        lines = fin.readlines()
        for line in lines:
            record = json.loads(line)
            record["label"] = str(record["label_id"])

            # for tac kbp we should use a separate knowledge source to get the entity id (label_id)
            if kb2id and len(kb2id) > 0:
                if record["label"] in kb2id:
                    record["label_id"] = kb2id[record["label"]]
                else:
                    continue

            # check that each entity id (label_id) is in the entity collection
            elif wikipedia_id2local_id and len(wikipedia_id2local_id) > 0:
                try:
                    key = int(record["label"].strip())
                    if key in wikipedia_id2local_id:
                        record["label_id"] = wikipedia_id2local_id[key]
                    else:
                        continue
                except:
                    # non-numeric / malformed label -> drop the sample
                    continue

            # LOWERCASE EVERYTHING !
            record["context_left"] = record["context_left"].lower()
            record["context_right"] = record["context_right"].lower()
            record["mention"] = record["mention"].lower()
            test_samples.append(record)

    if logger:
        logger.info("{}/{} samples considered".format(len(test_samples), len(lines)))
    return test_samples
def _get_test_samples(
    test_filename, test_entities_path, title2id, wikipedia_id2local_id, logger
):
    """Load test mention samples, optionally remapping labels via a KB file."""
    kb2id = (
        __map_test_entities(test_entities_path, title2id, logger)
        if test_entities_path
        else None
    )
    return __load_test(test_filename, kb2id, wikipedia_id2local_id, logger)
def _process_biencoder_dataloader(samples, tokenizer, biencoder_params):
    """Tokenize mention samples and wrap them in a sequential DataLoader."""
    # process_mention_data returns (processed samples, tensor dataset);
    # only the tensors are needed here
    _, tensor_data = process_mention_data(
        samples,
        tokenizer,
        biencoder_params["max_context_length"],
        biencoder_params["max_cand_length"],
        silent=True,
        logger=None,
        debug=biencoder_params["debug"],
    )
    sampler = SequentialSampler(tensor_data)
    dataloader = DataLoader(
        tensor_data, sampler=sampler, batch_size=biencoder_params["eval_batch_size"]
    )
    return dataloader
def _run_biencoder(biencoder, dataloader, candidate_encoding, top_k=100, indexer=None):
    """Retrieve the top_k candidate entities for each mention batch.

    Returns (labels, nns, all_scores): gold label ids, per-mention arrays of
    retrieved candidate ids, and the matching scores.
    """
    biencoder.model.eval()
    labels = []
    nns = []
    all_scores = []
    for batch in tqdm(dataloader):
        context_input, _, label_ids = batch
        with torch.no_grad():
            if indexer is not None:
                # approximate nearest-neighbour search through the faiss index
                context_encoding = biencoder.encode_context(context_input).numpy()
                context_encoding = np.ascontiguousarray(context_encoding)
                scores, indicies = indexer.search_knn(context_encoding, top_k)
            else:
                # exact scoring against the full candidate-encoding matrix
                scores = biencoder.score_candidate(
                    context_input, None, cand_encs=candidate_encoding  # .to(device)
                )
                scores, indicies = scores.topk(top_k)
                scores = scores.data.numpy()
                indicies = indicies.data.numpy()

        labels.extend(label_ids.data.numpy())
        nns.extend(indicies)
        all_scores.extend(scores)
    return labels, nns, all_scores
def _process_crossencoder_dataloader(context_input, label_input, crossencoder_params):
tensor_data = TensorDataset(context_input, label_input)
sampler = SequentialSampler(tensor_data)
dataloader = DataLoader(
tensor_data, sampler=sampler, batch_size=crossencoder_params["eval_batch_size"]
)
return dataloader
def _run_crossencoder(crossencoder, dataloader, logger, context_len, device="cuda"):
    """Re-rank biencoder candidates with the crossencoder.

    Returns (accuracy, predictions, logits); ``predictions`` are candidate
    indices sorted ascending by logit, so the best candidate is LAST in
    each row (callers index with [-1]).
    """
    crossencoder.model.eval()
    accuracy = 0.0
    crossencoder.to(device)

    res = evaluate(
        crossencoder,
        dataloader,
        device,
        logger,
        context_len,
        zeshel=False,
        silent=False,
    )
    accuracy = res["normalized_accuracy"]
    logits = res["logits"]

    # evaluate() reports -1 accuracy when gold labels are unavailable;
    # in that case no ranking is produced
    if accuracy > -1:
        predictions = np.argsort(logits, axis=1)
    else:
        predictions = []
    return accuracy, predictions, logits
def load_models(args, logger=None):
    """Load the biencoder, optionally the crossencoder, and the entity catalogue.

    Returns the 10-tuple that run() consumes positionally:
    (biencoder, biencoder_params, crossencoder, crossencoder_params,
    candidate_encoding, title2id, id2title, id2text,
    wikipedia_id2local_id, faiss_indexer).
    """
    # load biencoder model
    if logger:
        logger.info("loading biencoder model")
    with open(args.biencoder_config) as json_file:
        biencoder_params = json.load(json_file)
        # override the config's BERT location/weights with local paths
        biencoder_params['bert_model'] = "pretrained_models/bert-large-uncased"
        biencoder_params["path_to_model"] = args.biencoder_model
    biencoder = load_biencoder(biencoder_params)

    crossencoder = None
    crossencoder_params = None
    # --fast skips the (expensive) crossencoder entirely
    if not args.fast:
        # load crossencoder model
        if logger:
            logger.info("loading crossencoder model")
        with open(args.crossencoder_config) as json_file:
            crossencoder_params = json.load(json_file)
            crossencoder_params['bert_model'] = "pretrained_models/bert-large-uncased"
            crossencoder_params["path_to_model"] = args.crossencoder_model
        crossencoder = load_crossencoder(crossencoder_params)  # yet another BERT-large

    # load candidate entities
    if logger:
        logger.info("loading candidate entities")
    (
        candidate_encoding,
        title2id,
        id2title,
        id2text,
        wikipedia_id2local_id,
        faiss_indexer,
    ) = _load_candidates(
        args.entity_catalogue,
        args.entity_encoding,
        faiss_index=getattr(args, "faiss_index", None),
        index_path=getattr(args, "index_path", None),
        logger=logger,
    )

    return (
        biencoder,
        biencoder_params,
        crossencoder,
        crossencoder_params,
        candidate_encoding,
        title2id,
        id2title,
        id2text,
        wikipedia_id2local_id,
        faiss_indexer,
    )
def run(
    args,
    logger,
    biencoder,
    biencoder_params,
    crossencoder,
    crossencoder_params,
    candidate_encoding,
    title2id,
    id2title,
    id2text,
    wikipedia_id2local_id,
    faiss_indexer=None,
    test_data=None,
):
    """Main linking loop: retrieve candidates with the biencoder, then
    (unless --fast) re-rank them with the crossencoder.

    In interactive mode the loop repeats, reading text from stdin and
    printing colored predictions; in dataset mode it runs once and returns
    (biencoder_accuracy, recall_at, crossencoder_normalized_accuracy,
    overall_unormalized_accuracy, num_samples, predictions, scores).
    """
    if not test_data and not args.test_mentions and not args.interactive:
        msg = (
            "ERROR: either you start BLINK with the "
            "interactive option (-i) or you pass in input test mentions (--test_mentions)"
            "and test entitied (--test_entities)"
        )
        raise ValueError(msg)

    id2url = {
        v: "https://en.wikipedia.org/wiki?curid=%s" % k
        for k, v in wikipedia_id2local_id.items()
    }

    stopping_condition = False
    while not stopping_condition:

        samples = None

        if args.interactive:
            logger.info("interactive mode")
            # biencoder_params["eval_batch_size"] = 1

            # Load NER model
            ner_model = NER.get_model()

            # Interactive
            text = input("insert text:")

            # Identify mentions
            samples = _annotate(ner_model, [text])

            _print_colorful_text(text, samples)
        else:
            if logger:
                logger.info("test dataset mode")

            if test_data:
                samples = test_data
            else:
                # Load test mentions
                samples = _get_test_samples(
                    args.test_mentions,
                    args.test_entities,
                    title2id,
                    wikipedia_id2local_id,
                    logger,
                )
            # dataset mode runs the loop body exactly once
            stopping_condition = True

        # don't look at labels
        keep_all = (
            args.interactive
            or samples[0]["label"] == "unknown"
            or samples[0]["label_id"] < 0
        )

        # prepare the data for biencoder
        if logger:
            logger.info("preparing data for biencoder")
        dataloader = _process_biencoder_dataloader(
            samples, biencoder.tokenizer, biencoder_params
        )

        # run biencoder
        if logger:
            logger.info("run biencoder")
        top_k = args.top_k
        labels, nns, scores = _run_biencoder(
            biencoder, dataloader, candidate_encoding, top_k, faiss_indexer
        )

        if args.interactive:

            print("\nfast (biencoder) predictions:")

            _print_colorful_text(text, samples)

            # print biencoder prediction: the top-1 candidate per mention
            idx = 0
            for entity_list, sample in zip(nns, samples):
                e_id = entity_list[0]
                e_title = id2title[e_id]
                e_text = id2text[e_id]
                e_url = id2url[e_id]
                _print_colorful_prediction(
                    idx, sample, e_id, e_title, e_text, e_url, args.show_url
                )
                idx += 1
            print()

            if args.fast:
                # use only biencoder
                continue

        else:

            biencoder_accuracy = -1
            recall_at = -1
            if not keep_all:
                # get recall values: y[i-1] = fraction of gold labels found
                # within the top-i retrieved candidates
                top_k = args.top_k
                x = []
                y = []
                for i in range(1, top_k):
                    temp_y = 0.0
                    for label, top in zip(labels, nns):
                        if label in top[:i]:
                            temp_y += 1
                    if len(labels) > 0:
                        temp_y /= len(labels)
                    x.append(i)
                    y.append(temp_y)
                # plt.plot(x, y)
                biencoder_accuracy = y[0]
                recall_at = y[-1]
                print("biencoder accuracy: %.4f" % biencoder_accuracy)
                print("biencoder recall@%d: %.4f" % (top_k, y[-1]))

            if args.fast:

                predictions = []
                for entity_list in nns:
                    sample_prediction = []
                    for e_id in entity_list:
                        e_title = id2title[e_id]
                        sample_prediction.append(e_title)
                    predictions.append(sample_prediction)

                # use only biencoder
                return (
                    biencoder_accuracy,
                    recall_at,
                    -1,
                    -1,
                    len(samples),
                    predictions,
                    scores,
                )

        # prepare crossencoder data
        context_input, candidate_input, label_input = prepare_crossencoder_data(
            crossencoder.tokenizer,
            samples,
            labels,
            nns,
            id2title,
            id2text,
            keep_all,
        )

        context_input = modify(
            context_input, candidate_input, crossencoder_params["max_seq_length"]
        )

        dataloader = _process_crossencoder_dataloader(
            context_input, label_input, crossencoder_params
        )

        # run crossencoder and get accuracy
        accuracy, index_array, unsorted_scores = _run_crossencoder(
            crossencoder,
            dataloader,
            logger,
            context_len=biencoder_params["max_context_length"],
        )

        if args.interactive:

            print("\naccurate (crossencoder) predictions:")

            _print_colorful_text(text, samples)

            # print crossencoder prediction: index_list is sorted ascending
            # by score, so [-1] is the best candidate
            idx = 0
            for entity_list, index_list, sample in zip(nns, index_array, samples):
                e_id = entity_list[index_list[-1]]
                e_title = id2title[e_id]
                e_text = id2text[e_id]
                e_url = id2url[e_id]
                _print_colorful_prediction(
                    idx, sample, e_id, e_title, e_text, e_url, args.show_url
                )
                idx += 1
            print()
        else:

            scores = []
            predictions = []
            for entity_list, index_list, scores_list in zip(
                nns, index_array, unsorted_scores
            ):

                index_list = index_list.tolist()

                # descending order
                index_list.reverse()

                sample_prediction = []
                sample_scores = []
                for index in index_list:
                    e_id = entity_list[index]
                    e_title = id2title[e_id]
                    sample_prediction.append(e_title)
                    sample_scores.append(scores_list[index])
                predictions.append(sample_prediction)
                scores.append(sample_scores)

            crossencoder_normalized_accuracy = -1
            overall_unormalized_accuracy = -1
            if not keep_all:
                crossencoder_normalized_accuracy = accuracy
                print(
                    "crossencoder normalized accuracy: %.4f"
                    % crossencoder_normalized_accuracy
                )

                if len(samples) > 0:
                    # rescale by the fraction of samples that survived
                    # candidate preparation (label_input rows / all samples)
                    overall_unormalized_accuracy = (
                        crossencoder_normalized_accuracy
                        * len(label_input)
                        / len(samples)
                    )
                print(
                    "overall unnormalized accuracy: %.4f" % overall_unormalized_accuracy
                )
            return (
                biencoder_accuracy,
                recall_at,
                crossencoder_normalized_accuracy,
                overall_unormalized_accuracy,
                len(samples),
                predictions,
                scores,
            )
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
"--interactive", "-i", action="store_true", help="Interactive mode."
)
parser.set_defaults(interactive=True)
# test_data
parser.add_argument(
"--test_mentions", dest="test_mentions", type=str, help="Test Dataset."
)
parser.add_argument(
"--test_entities", dest="test_entities", type=str, help="Test Entities."
)
# biencoder
parser.add_argument(
"--biencoder_model",
dest="biencoder_model",
type=str,
default="blink_elq_data/blink/models/biencoder_wiki_large.bin",
help="Path to the biencoder model.",
)
parser.add_argument(
"--biencoder_config",
dest="biencoder_config",
type=str,
default="blink_elq_data/blink/models/biencoder_wiki_large.json",
help="Path to the biencoder configuration.",
)
parser.add_argument(
"--entity_catalogue",
dest="entity_catalogue",
type=str,
# default="blink_elq_data/blink/models/tac_entity.jsonl", # TAC-KBP
default="blink_elq_data/blink/models/entity.jsonl", # ALL WIKIPEDIA!
help="Path to the entity catalogue.",
)
parser.add_argument(
"--entity_encoding",
dest="entity_encoding",
type=str,
# default="blink_elq_data/blink/models/tac_candidate_encode_large.t7", # TAC-KBP
default="blink_elq_data/blink/models/all_entities_large.t7", # ALL WIKIPEDIA!
help="Path to the entity catalogue.",
)
# crossencoder
parser.add_argument(
"--crossencoder_model",
dest="crossencoder_model",
type=str,
default="blink_elq_data/blink/models/crossencoder_wiki_large.bin",
help="Path to the crossencoder model.",
)
parser.add_argument(
"--crossencoder_config",
dest="crossencoder_config",
type=str,
default="blink_elq_data/blink/models/crossencoder_wiki_large.json",
help="Path to the crossencoder configuration.",
)
parser.add_argument(
"--top_k",
dest="top_k",
type=int,
default=10,
help="Number of candidates retrieved by biencoder.",
)
# output folder
parser.add_argument(
"--output_path",
dest="output_path",
type=str,
default="output",
help="Path to the output.",
)
parser.add_argument(
"--fast", dest="fast", action="store_true", help="only biencoder mode"
)
parser.add_argument(
"--show_url",
dest="show_url",
action="store_true",
help="whether to show entity url in interactive mode",
)
# 重要!否则要加载23G的矩阵 all_entities_large.t7 ,内存不足
parser.add_argument(
"--faiss_index",
type=str,
default="flat",
help="whether to use faiss index 支持 flat; hnsw",
)
# 重要
parser.add_argument(
"--index_path",
type=str,
default=None,
help="path to load indexer",
)
args = parser.parse_args()
logger = utils.get_logger(args.output_path)
models = load_models(args, logger)
run(args, logger, *models)
| 30.833795
| 99
| 0.574656
|
64bb0529b8dc6be179a44d4aae2be312c12e5984
| 270
|
py
|
Python
|
app/app.py
|
PaulRedmond94/Lab6Repo
|
1ed8398fac97c3272003f5179ecd93ae9f2bbde1
|
[
"MIT"
] | null | null | null |
app/app.py
|
PaulRedmond94/Lab6Repo
|
1ed8398fac97c3272003f5179ecd93ae9f2bbde1
|
[
"MIT"
] | null | null | null |
app/app.py
|
PaulRedmond94/Lab6Repo
|
1ed8398fac97c3272003f5179ecd93ae9f2bbde1
|
[
"MIT"
] | null | null | null |
from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello bubx"
@app.route("/user/<username>")
def show_user(username):
return "Hello user %s" % username
if __name__ == "__main__":
app.run(host="0.0.0.0", port=3000, debug=True)
| 18
| 48
| 0.666667
|
6c2f11f457d18010a30c62eae76b67b334cd255c
| 1,136
|
py
|
Python
|
endpoints/json.py
|
Kuzj/simple_rest
|
12f622bc7265471a0000aecdb505202921d4cb0e
|
[
"MIT"
] | null | null | null |
endpoints/json.py
|
Kuzj/simple_rest
|
12f622bc7265471a0000aecdb505202921d4cb0e
|
[
"MIT"
] | null | null | null |
endpoints/json.py
|
Kuzj/simple_rest
|
12f622bc7265471a0000aecdb505202921d4cb0e
|
[
"MIT"
] | null | null | null |
import logging
import json
from aiohttp.web_response import Response
from aiohttp.web import Request
import actions
from simple_rest import RestEndpoint
class JSONError(Exception):
    """Base class for exceptions in GrafanaEndpoint."""
    pass
class NoActionInMessage(JSONError):
    """Raised when a POSTed JSON body carries no top-level 'action' key."""
    pass
class Endpoint(RestEndpoint):
    '''
    REST endpoint mounted at /json; dispatches the "action" object of a
    POSTed JSON body to self.do_action. Expected payload shape:

    {"action":{
    "name":"http_request",
    "method":"post",
    "url":"http://127.0.0.1:8801",
    "data":"{'status': 'warning', 'host': 'host.alert.from', 'hostgroup': 'project', 'service': 'alert name or metric', 'text': 'text alert'}"
    }}
    '''
    def routes(self):
        # paths this endpoint registers on the aiohttp router
        return ['/json',]

    async def post(self, request: Request) -> Response:
        """Handle a POST: log the body, run its action, return 200.

        Raises NoActionInMessage when no 'action' key is present.
        """
        text = await request.text()
        logging.info(f'{text}')
        data = await request.json()
        logging.info(f'{self.__class__} {request.path} from {request.host} {request.method} request: {data}')
        if 'action' in data:
            # do_action is presumably defined on RestEndpoint — confirm
            await self.do_action(data['action'])
        else:
            # HTTPBadRequest
            # NOTE(review): this Response(status=400) is created but never
            # returned — the raise below wins. Likely dead code; confirm
            # whether a 400 reply or the exception is the intended behavior.
            Response(status=400)
            raise NoActionInMessage
        return Response(status=200)
| 28.4
| 142
| 0.62412
|
c6099f7f465a6902ff1c782af444ad33a806b14f
| 1,001
|
py
|
Python
|
trac/mimeview/tests/__init__.py
|
clubturbo/Trac-1.4.2
|
254ce54a3c2fb86b4f31810ddeabbd4ff8b54a78
|
[
"BSD-3-Clause"
] | null | null | null |
trac/mimeview/tests/__init__.py
|
clubturbo/Trac-1.4.2
|
254ce54a3c2fb86b4f31810ddeabbd4ff8b54a78
|
[
"BSD-3-Clause"
] | null | null | null |
trac/mimeview/tests/__init__.py
|
clubturbo/Trac-1.4.2
|
254ce54a3c2fb86b4f31810ddeabbd4ff8b54a78
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2006-2020 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at https://trac.edgewall.org/log/.
import unittest
from trac.mimeview.tests import api, patch, pygments, rst, txtl
from trac.mimeview.tests.functional import functionalSuite
def test_suite():
    """Aggregate every mimeview test module's suite into one TestSuite."""
    suite = unittest.TestSuite()
    for module in (api, patch, pygments, rst, txtl):
        suite.addTest(module.test_suite())
    return suite
# Run the aggregated suite when the module is executed directly.
if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
| 31.28125
| 68
| 0.723277
|
b2fa40332c1d5d9ff89c9d4f4ab8147f4183b21d
| 619
|
py
|
Python
|
quick_model.py
|
tfriedel/neural-image-assessment
|
b35e667e78ba9be01ef38e3d08983386851a8902
|
[
"MIT"
] | 25
|
2018-02-01T00:26:27.000Z
|
2021-06-07T09:21:47.000Z
|
quick_model.py
|
tfriedel/neural-image-assessment
|
b35e667e78ba9be01ef38e3d08983386851a8902
|
[
"MIT"
] | 2
|
2018-12-12T11:32:40.000Z
|
2019-07-02T12:33:40.000Z
|
quick_model.py
|
tfriedel/neural-image-assessment
|
b35e667e78ba9be01ef38e3d08983386851a8902
|
[
"MIT"
] | 6
|
2018-02-19T14:42:22.000Z
|
2021-04-01T06:35:30.000Z
|
from keras.layers import Conv2D, Dense, Dropout, Input, MaxPooling2D, Flatten
from keras.models import Model
class NimaModel(object):
    """Small CNN ending in a 10-way softmax (NIMA-style score distribution).

    Builds two Keras models over the same input: ``base_model`` (the
    convolutional feature extractor) and ``model`` (with dropout + the
    10-unit softmax head) and prints the latter's summary.
    """
    def __init__(self):
        image_input = Input(shape=(224, 224, 3))
        features = Conv2D(16, kernel_size=(3, 3), activation='relu')(image_input)
        features = Conv2D(8, (3, 3), activation='relu')(features)
        features = MaxPooling2D(pool_size=(2, 2))(features)
        features = Flatten()(features)
        # feature extractor without the classification head
        self.base_model = Model(image_input, features)
        head = Dropout(0.75)(features)
        head = Dense(10, activation='softmax', name='toplayer')(head)
        self.model = Model(image_input, head)
        self.model.summary()
| 34.388889
| 77
| 0.583199
|
49b6dd754564e68dae38a03815f713bb57286919
| 4,958
|
py
|
Python
|
JMDE/scripts/unpackClass.py
|
yarbroughw/JMDE
|
198a6bd3854d0b51998c7512765c5e6c23901648
|
[
"MIT"
] | 1
|
2015-08-04T23:22:53.000Z
|
2015-08-04T23:22:53.000Z
|
JMDE/scripts/unpackClass.py
|
yarbroughw/JMDE
|
198a6bd3854d0b51998c7512765c5e6c23901648
|
[
"MIT"
] | 3
|
2015-08-07T06:15:45.000Z
|
2015-08-15T05:12:34.000Z
|
JMDE/scripts/unpackClass.py
|
yarbroughw/ontologyproj
|
198a6bd3854d0b51998c7512765c5e6c23901648
|
[
"MIT"
] | null | null | null |
''' takes dbpedia CSV class file and splits it
into separate, cleaned entity files
'''
from __future__ import print_function
import json
import csv
import glob
from itertools import count
# URI prefixes used to recognize and strip DBpedia ontology/resource refs.
ontologyprefix = "http://dbpedia.org/ontology/"
resourceprefix = "http://dbpedia.org/resource/"
itemprefix = "http://www.w3.org/1999/02/22-rdf-syntax-ns#type"

# Load the flattened DBpedia ontology once at import time; its keys are the
# set of class labels an instance's ontology refs may be matched against.
with open("../data/flatontology.json", 'r') as flat:
    ontology = json.load(flat)
ontology_labels = list(ontology.keys())
def toName(url):
    """Strip the DBpedia ontology prefix from *url*, leaving the class name."""
    prefix_len = len(ontologyprefix)
    return url[prefix_len:]
def csvinstances(path, skip=1):
    """Yield cleaned instances from a DBpedia class CSV file.

    The first data row holds the field-label URIs; the next two rows
    (field types and type URIs) are discarded. Only every *skip*-th
    instance is yielded, allowing huge classes to be subsampled.
    """
    with open(path, 'r') as classfile:
        dicts = csv.DictReader(classfile)
        labelURIs = next(dicts)  # field label URIs
        next(dicts)  # field types (unneeded)
        next(dicts)  # field type URIs (unneeded)
        for i, instance in zip(count(start=0), dicts):
            if i % skip == 0:
                yield cleanCSVinstance(instance, labelURIs)
def cleanCSVinstance(instance, labeldict):
    """Reduce a raw CSV row to ontology properties plus identity fields.

    Only columns whose label URI lies in the DBpedia ontology namespace are
    kept; values of the form '{a|b|...}' are split into lists.
    """
    tolist = lambda x: x[1:-1].split('|')
    newinstance = {"properties": {}}
    for key, value in instance.items():
        if labeldict[key].startswith(ontologyprefix):
            newinstance["properties"][key] = value
            # NOTE(review): assumes value is non-empty here — an empty cell
            # would raise IndexError on value[0]; confirm upstream data.
            if value[0] == '{':
                newinstance["properties"][key] = tolist(value)
    typelabel = "22-rdf-syntax-ns#type"
    newinstance[typelabel] = tolist(instance[typelabel])
    uri = instance["URI"]
    newinstance["URI"] = uri
    # '/' in resource names would break file paths, so replace with '-'
    newinstance["name"] = uri[len(resourceprefix):].replace('/', '-')
    return newinstance
def writeInstances(instances, dest):
    """Write every instance in the stream out as an entity file under *dest*."""
    for record in instances:
        writeEntity(record, dest)
def writeEntity(instance, dest):
    """Persist one entity if it carries enough (> 2) properties.

    The entity file lands under the path of its deepest ontology class,
    and parent file indexes are updated to point at it.
    """
    entity = buildEntity(instance)
    if len(entity['properties']) > 2:
        deepest = getFinestOntology(entity['ontologies'])
        path = ontology[toName(deepest)]['fullpath']
        write(entity, dest, path)
        addToIndexes(entity, dest, path)
def buildEntity(instance):
    """Assemble the entity dict that gets serialized to disk."""
    entity = {
        'URI': instance["URI"],
        'name': instance["name"],
        'properties': getProperties(instance),
        'ontologies': getOntologies(instance),
    }
    return entity
def getProperties(instance):
    """Return the content-bearing properties of *instance* as a dict.

    Filters out bookkeeping columns (wiki page ids/redirects, rdf type,
    thumbnail), label columns (keys ending in ``_label``) and NULL values.

    Improvements over the original: the lambda-assigned predicate with a
    chained ``key != ...`` comparison is replaced by a proper nested
    function testing membership in a set of excluded keys.
    """
    excluded_keys = {
        "wikiPageRevisionID",
        "wikiPageID",
        "wikiPageRedirects",
        "22-rdf-syntax-ns#type",
        "thumbnail",
    }

    def validproperty(key, val):
        # keep only real data columns with a concrete value
        return (not key.endswith("_label")
                and key not in excluded_keys
                and val != "NULL")

    allprops = instance["properties"]
    return {key: val for key, val in allprops.items()
            if validproperty(key, val)}
def getOntologies(instance):
    """Return the instance's ontology refs that exist in our loaded ontology."""
    refs = instance["22-rdf-syntax-ns#type"]
    kept = []
    for ref in refs:
        if not ref.startswith(ontologyprefix):
            continue
        if ref[len(ontologyprefix):] in ontology_labels:
            kept.append(ref)
    return kept
def getFinestOntology(refs):
    """Of several ontology class refs, pick the one deepest in the hierarchy."""
    def depth(ref):
        return ontology[toName(ref)]['depth']
    return max(refs, key=depth)
def write(entity, dest, path):
    ''' write entity to JSON file at path '''
    name = entity['name']
    # dest is the data root, path the ontology-derived subfolder
    fullname = dest + path + name + ".json"
    with open(fullname, 'w') as fp:
        json.dump(entity, fp)
    print("wrote", name, "...", end='')
def addToIndexes(entity, dest, path):
    """Register the entity in the fileindex of every ancestor directory.

    ``path`` ends with a trailing '/', so [:-2] drops both the empty last
    split element and the entity's own (deepest) directory — only the
    ancestors get index entries.
    """
    paths = path.split('/')[:-2]
    # cumulative prefixes: 'A', 'A/B', 'A/B/C', ...
    fullpaths = ['/'.join(paths[:i+1]) for i, _ in enumerate(paths)]
    for p in fullpaths:
        addToIndex(entity, dest+p, path)
    print("added to parent fileindexes")
def addToIndex(entity, dest, path):
    ''' add name and filepath to file index at dest '''
    this_entity = entity['name']
    val = path + entity['name'] + ".json"
    # read-modify-write of the whole index file (not safe for concurrent runs)
    with open(dest+"/fileindex.json", 'r') as f:
        index = json.load(f)
        index[this_entity] = val
    with open(dest+"/fileindex.json", 'w') as f:
        json.dump(index, f, indent=4)
def unpackAll():
    """Unpack every class CSV not yet listed in the progress file.

    Progress is checkpointed after each class so an interrupted run can
    resume without re-processing. Every 500th instance is kept (skip=500).
    """
    # load progress file
    with open("../data/csv/progress.json", 'r') as f:
        progress = set(json.load(f))

    files = glob.glob("../data/csv/*.csv")
    files = set(map(lambda x: x.split('/')[-1], files))
    remaining = files - progress

    for x in remaining:
        print("Unpacking", x, "...")
        instances = csvinstances("../data/csv/" + x, skip=500)
        writeInstances(instances, "../data/")
        progress.add(x)
        # checkpoint after each finished class
        with open("../data/csv/progress.json", 'w') as f:
            json.dump(list(progress), f)
def unpack(x, skip=1):
    """Unpack a single class CSV (given by base name, without extension)."""
    csv_path = "../data/csv/" + x + ".csv"
    writeInstances(csvinstances(csv_path, skip=skip), "../data/")
if __name__ == "__main__":
unpack("Food", 1)
unpack("Sales", 1)
unpack("Holiday", 1)
unpack("Colour", 1)
unpack("Biomolecule", 10)
| 29.86747
| 69
| 0.615571
|
db3a2bf39741a8b7f008ae419d2a1cdcacec977f
| 9,814
|
py
|
Python
|
reports/configs/only_logds_dmpnn4_2/other_config.py
|
hengwei-chan/graph_network_demo
|
542f2a59b1b9708abdc718d77db7111f3ba2df96
|
[
"MIT"
] | 1
|
2021-10-18T03:44:53.000Z
|
2021-10-18T03:44:53.000Z
|
reports/configs/only_logds_dmpnn4_2/other_config.py
|
hengwei-chan/graph_network_demo
|
542f2a59b1b9708abdc718d77db7111f3ba2df96
|
[
"MIT"
] | null | null | null |
reports/configs/only_logds_dmpnn4_2/other_config.py
|
hengwei-chan/graph_network_demo
|
542f2a59b1b9708abdc718d77db7111f3ba2df96
|
[
"MIT"
] | 1
|
2022-02-22T08:32:01.000Z
|
2022-02-22T08:32:01.000Z
|
from dataclasses import dataclass, field
from typing import List
import tensorflow as tf
from graph_networks.utilities import *
import logging
import os
# Feature dimensions matching the DGIN4 featurization
# (constants imported from graph_networks.utilities).
ATOM_FEATURE_DIM = DGIN4_ATOM_FEATURE_DIM
EDGE_FEATURE_DIM = DGIN4_EDGE_FEATURE_DIM
@dataclass
class BasicModelConfig:
    """
    Config for model1/2/3 run file.
    General model parameters: experiment naming, data locations,
    train/test/retrain switches, and report/output directories.
    """
    # experiment identifier; also names every report sub-folder below
    model_name: str = 'only_logds_dmpnn4_2' # without h_w in DGIN gin part - added h_v_0 instead
    # whole train/eval split - no more double split within train data set
    # random train/test split in get_data_sd - only change overall_seed
    # CHANGES dgin3 10.02.2021:
    # *added new bondFeaturesDGIN2 and atomFeaturesDGIN2; DGIN2_ATOM_FEATURE_DIM; DGIN2_EDGE_FEATURE_DIM
    # *from project_path+'data/processed/lipo/pickled/train_frags3/' to project_path+'data/processed/lipo/pickled/test_frags3/'
    # CHANGES dgin3 16.02.2021:
    # *added new bondFeaturesDGIN3 and atomFeaturesDGIN3; DGIN3_ATOM_FEATURE_DIM; DGIN3_EDGE_FEATURE_DIM
    # *from project_path+'data/processed/lipo/pickled/train_frags_dgin3/' to project_path+'data/processed/lipo/pickled/test_frags_dgin3/'
    # CHANGES dgin4 16.02.2021:
    # *added add_species bool in model1 config - previously not there; for dgin2 featurization adds the species type after the dgin
    # encoding before logD prediction
    # test_frags_dgin4 was added for species inclusion in model2 call()
    batch_size: int =15
    override_if_exists: bool = True
    overall_seed: int = 2
    # path to the project folder
    project_path:str = "./"

    # --- retraining from a previous checkpoint ---
    retrain_model: bool = False
    retrain_model_name: str = ''
    retrain_model_epoch: str = ''
    retrain_model_weights_dir: str = project_path+'reports/model_weights/'+retrain_model_name+'/epoch_'+retrain_model_epoch+'/checkp_'+retrain_model_epoch

    # --- dataset locations (pickled, DGIN4 featurization) ---
    train_data_dir: str = project_path+'data/processed/lipo/pickled/train_dgin4_logd/'
    test_data_dir: str = project_path+'data/processed/lipo/pickled/test_dgin4_logd/'
    # when True, the logS set below is merged with the logD set above
    combined_dataset: bool = True
    add_train_data_dir: str = project_path+'data/processed/lipo/pickled/train_dgin4_logs/'
    add_test_data_dir: str = project_path+'data/processed/lipo/pickled/test_dgin4_logs/'

    # --- testing switches ---
    test_model: bool = False
    test_model_epoch: str = '887'
    # define the number or test runs for the CI.
    # the mean and std of the RMSE and r^2 of the combined runs are taken as the output.
    test_n_times: int = 1
    # do you want to test the model with consensus mode?
    # if yes, a defined ML model will be included in the consensus predictions during the testing.
    consensus: bool = False
    # include dropout during testing?
    include_dropout: bool = False
    test_model_weights_dir: str = project_path+'reports/model_weights/'+model_name+'/epoch_'+test_model_epoch+'/checkp_'+test_model_epoch
    # To save the prediction values for each property set to True
    # When this flag is True - the whole test dataset is taken an test_n_times is set to zero!
    save_predictions: bool = False
    # define the folder where you want to save the predictions.
    # For each property, a file is created under the property name ("./logd.txt","./logs.txt","./logp.txt","./others.txt")
    test_prediction_output_folder: str = project_path+"reports/predictions/"+model_name+"/"
    encode_hidden: bool = False

    # --- logging / reporting output paths ---
    log_dir: str = project_path+'reports/logs/'+model_name+'.log'
    # plain class attribute (no annotation), so NOT a dataclass field
    verbosity_level = logging.INFO

    model_type: str = 'DMPNN' # added 31.03.2021 to compare models like 'GIN' 'DMPNN' 'DGIN' 'MLP'
    plot_dir: str = project_path+'reports/figures/'+model_name+'/'
    tensorboard_log_dir: str = project_path+'reports/tensorboard/'+model_name+'/'
    config_log_dir: str = project_path+'reports/configs/'+model_name+'/'
    model_weights_dir: str = project_path+'reports/model_weights/'+model_name+'/'
    stats_log_dir: str = project_path+'reports/stats/'+model_name+'/'
@dataclass
class DGINConfig:
    """
    Config for directed-mpnn class: dropout/layernorm switches and sizes
    for the D-MPNN and GIN message-passing stages.
    """
    dropout_aggregate_dmpnn: bool = False
    layernorm_aggregate_dmpnn: bool = True
    dropout_passing_dmpnn: bool = False
    layernorm_passing_dmpnn: bool = True

    dropout_aggregate_gin: bool = False
    layernorm_aggregate_gin: bool = True
    dropout_passing_gin: bool = False
    layernorm_passing_gin: bool = True

    gin_aggregate_bias: bool = False
    dmpnn_passing_bias: bool = False
    init_bias: bool = False

    # NOTE(review): "massge" is a typo for "message", but the field name is
    # part of the public interface — renaming would break consumers.
    massge_iteration_dmpnn: int = 4
    message_iterations_gin: int = 4
    dropout_rate: float = 0.15
    input_size: int = (ATOM_FEATURE_DIM+EDGE_FEATURE_DIM) # combination of node feature len (33) and edge feature len (12)
    passing_hidden_size: int = 56 # this can be changed
    input_size_gin: int = (ATOM_FEATURE_DIM) # changed 31.03.2021

    return_hv: bool = True # model3 parameter
@dataclass
class Model1Config:
    """
    Config for model1 - training hyper-parameters, loss objects and
    endpoint switches. No subclass configs are defined here.
    """
    validation_split: float = 0.90
    learning_rate: float = 0.004
    clip_rate: float = 0.6
    # Unannotated class attributes: shared objects, NOT dataclass fields.
    optimizer = tf.keras.optimizers.Adam(learning_rate)
    lipo_loss_mse = tf.keras.losses.mse
    lipo_loss_mae = tf.keras.losses.mae
    logP_loss_mse = tf.keras.losses.mse
    logS_loss_mse = tf.keras.losses.mse
    other_loss_mse = tf.keras.losses.mse
    mw_loss_mse = tf.keras.losses.mse
    metric = tf.keras.losses.mae
    epochs: int = 1600
    # define the number of epochs for each test run.
    save_after_epoch: int = 3
    # dropout rate for the general model - mainly the MLP (readout functions)
    # for the different log predictions.
    dropout_rate: float = 0.15
    # Seed used to shuffle the training/validation dataset; for the same
    # dataset (even when combined_dataset is True) the same seed yields the
    # same training/validation instances.
    train_data_seed: int = 0
    # NOTE(review): the original file declared dropout_rate and
    # train_data_seed twice with identical values; the redundant duplicate
    # declarations were removed (field order and defaults are unchanged).
    hidden_readout_1: int = 32
    hidden_readout_2: int = 14
    activation_func_readout = tf.nn.relu
    # Switches selecting which endpoints the model is trained on.
    include_logD: bool = True
    include_logS: bool = True
    include_logP: bool = False
    include_other: bool = False
    include_mw: bool = False
    include_rot_bond: bool = False
    include_HBA: bool = False
    include_HBD: bool = False
    # Starting threshold for the combined RMSE: model weights are saved (and a
    # new threshold set) only when the combined RMSE drops below it, so that
    # not too many checkpoints are written. Depends on how many log endpoints
    # are used - three endpoints have a higher combined RMSE than one.
    best_evaluation_threshold: float = 2.45  # was introduced on the 25.03.2021
    # Individual per-endpoint thresholds. If one model is better, the
    # corresponding model weights are saved.
    best_evaluation_threshold_logd: float = 1.85
    best_evaluation_threshold_logp: float = 1.65
    best_evaluation_threshold_logs: float = 2.15
    best_evaluation_threshold_other: float = 2.15
    # Reference values: 2.45 all_logs; 0.70 logP; 0.75 logD; 1.00 logS;
    # 1.75 logSD; 1.70 logSP; 1.45 logDP.
    include_fragment_conv: bool = False  # was introduced on the 4.12.2020
    use_rmse: bool = True  # uses RMSE instead of MSE for only lipo_loss
    # Reshuffles the train/valid split in each epoch (generalizes better).
    shuffle_inside: bool = True
    # 16.02 introduction; for dgin3 adds the species type after the dgin
    # encoding, before the logD prediction.
    add_species: bool = False
@dataclass
class FrACConfig:
    """Configuration of the fragment-aggregation layer (no nested configs)."""

    input_size_gin: int = 28
    layernorm_aggregate: bool = True
    # When False, aggregation uses reduce_sum instead of reduce_mean.
    reduce_mean: bool = True
@dataclass
class MLConfig:
    """Configuration of the classical-ML consensus algorithm."""

    # Algorithm used for the consensus. Possibilities: "SVM", "RF", "KNN" or
    # "LR" - all are regression models! (Support Vector Machine, Random
    # Forest, K-Nearest Neighbors, Linear Regression.)
    algorithm: str = "SVM"
    # Fingerprint type - possibilities are: "ECFP" or "MACCS".
    fp_types: str = "ECFP"
    # For "ECFP" fingerprints: number of bits (maximum is 2048!).
    n_bits: int = 2048
    # For "ECFP" fingerprints: the radius.
    radius: int = 4
    # Include descriptors in the non-GNN molecular representation?
    include_descriptors: bool = True
    # Standardize the descriptors by scaling and centering (sklearn)?
    standardize: bool = True
@dataclass
class Config():
    """
    Top-level config for model2 and the run file.
    Aggregates all sub-model configs plus the run-level model selector.
    """

    basic_model_config: BasicModelConfig
    model1_config: Model1Config
    d_gin_config: DGINConfig
    frag_acc_config: FrACConfig
    ml_config: MLConfig
    # Name of the model variant to run.
    model: str = 'model11'
| 44.008969
| 169
| 0.669757
|
6b85943d3878dee3ba60cc43d6bfc670d3c293e6
| 773
|
py
|
Python
|
Dataset/Leetcode/train/2/134.py
|
kkcookies99/UAST
|
fff81885aa07901786141a71e5600a08d7cb4868
|
[
"MIT"
] | null | null | null |
Dataset/Leetcode/train/2/134.py
|
kkcookies99/UAST
|
fff81885aa07901786141a71e5600a08d7cb4868
|
[
"MIT"
] | null | null | null |
Dataset/Leetcode/train/2/134.py
|
kkcookies99/UAST
|
fff81885aa07901786141a71e5600a08d7cb4868
|
[
"MIT"
] | null | null | null |
class Solution:
    def XXX(self, l1: ListNode, l2: ListNode) -> ListNode:
        """Add two non-negative integers stored as little-endian digit lists.

        Each node holds one decimal digit, least-significant first. Returns
        the head of a newly-built list holding the digit-wise sum. A trailing
        zero node (no final carry) is trimmed before returning.
        """
        head = ListNode(0)
        cur, prev = head, None
        while l1 or l2:
            # cur.val already carries the overflow from the previous digit.
            total = cur.val
            if l1:
                total += l1.val
                l1 = l1.next
            if l2:
                total += l2.val
                l2 = l2.next
            cur.val = total % 10
            # Pre-create the next node seeded with the carry digit.
            cur.next = ListNode(total // 10)
            prev, cur = cur, cur.next
        # Drop the speculative last node when it holds no carry.
        if cur.val == 0:
            prev.next = None
        return head
| 27.607143
| 58
| 0.38163
|
a0057275d901efdf1f16044a43243443707eecd0
| 129,606
|
py
|
Python
|
pandas/plotting/_core.py
|
kimsey0/pandas
|
7fafb356549f1c52a9896429ce0241487f42bea9
|
[
"BSD-3-Clause"
] | null | null | null |
pandas/plotting/_core.py
|
kimsey0/pandas
|
7fafb356549f1c52a9896429ce0241487f42bea9
|
[
"BSD-3-Clause"
] | null | null | null |
pandas/plotting/_core.py
|
kimsey0/pandas
|
7fafb356549f1c52a9896429ce0241487f42bea9
|
[
"BSD-3-Clause"
] | null | null | null |
# being a bit too dynamic
from collections import namedtuple
import re
from typing import List, Optional, Type
import warnings
import numpy as np
from pandas._config import get_option
from pandas.compat import lrange
from pandas.errors import AbstractMethodError
from pandas.util._decorators import Appender, cache_readonly
from pandas.core.dtypes.common import (
is_hashable, is_integer, is_iterator, is_list_like, is_number)
from pandas.core.dtypes.generic import (
ABCDataFrame, ABCIndexClass, ABCMultiIndex, ABCPeriodIndex, ABCSeries)
from pandas.core.dtypes.missing import isna, notna, remove_na_arraylike
from pandas.core.base import PandasObject
import pandas.core.common as com
from pandas.core.generic import _shared_doc_kwargs, _shared_docs
from pandas.io.formats.printing import pprint_thing
from pandas.plotting._compat import _mpl_ge_3_0_0
from pandas.plotting._style import _get_standard_colors, plot_params
from pandas.plotting._tools import (
_flatten, _get_all_lines, _get_xlim, _handle_shared_axes, _set_ticks_props,
_subplots, format_date_labels, table)
# matplotlib is an optional dependency: probe for it once at import time and,
# when present, register pandas' datetime converters if the option asks for it.
try:
    from pandas.plotting import _converter
except ImportError:
    _HAS_MPL = False
else:
    _HAS_MPL = True
    if get_option('plotting.matplotlib.register_converters'):
        _converter.register(explicit=False)
def _raise_if_no_mpl():
    """Raise ImportError early when matplotlib could not be imported."""
    # TODO(mpl_converter): remove once converter is explicit
    if not _HAS_MPL:
        raise ImportError("matplotlib is required for plotting.")
def _get_standard_kind(kind):
return {'density': 'kde'}.get(kind, kind)
def _gca(rc=None):
    """Return the current matplotlib Axes, created under an rc context."""
    import matplotlib.pyplot as plt
    with plt.rc_context(rc):
        ax = plt.gca()
    return ax
def _gcf():
    """Return the current matplotlib Figure."""
    import matplotlib.pyplot as plt
    fig = plt.gcf()
    return fig
class MPLPlot:
"""
Base class for assembling a pandas plot using matplotlib
Parameters
----------
data :
"""
@property
def _kind(self):
"""Specify kind str. Must be overridden in child class"""
raise NotImplementedError
_layout_type = 'vertical'
_default_rot = 0
orientation = None # type: Optional[str]
_pop_attributes = ['label', 'style', 'logy', 'logx', 'loglog',
'mark_right', 'stacked']
_attr_defaults = {'logy': False, 'logx': False, 'loglog': False,
'mark_right': True, 'stacked': False}
def __init__(self, data, kind=None, by=None, subplots=False, sharex=None,
sharey=False, use_index=True,
figsize=None, grid=None, legend=True, rot=None,
ax=None, fig=None, title=None, xlim=None, ylim=None,
xticks=None, yticks=None,
sort_columns=False, fontsize=None,
secondary_y=False, colormap=None,
table=False, layout=None, **kwds):
_raise_if_no_mpl()
_converter._WARN = False
self.data = data
self.by = by
self.kind = kind
self.sort_columns = sort_columns
self.subplots = subplots
if sharex is None:
if ax is None:
self.sharex = True
else:
# if we get an axis, the users should do the visibility
# setting...
self.sharex = False
else:
self.sharex = sharex
self.sharey = sharey
self.figsize = figsize
self.layout = layout
self.xticks = xticks
self.yticks = yticks
self.xlim = xlim
self.ylim = ylim
self.title = title
self.use_index = use_index
self.fontsize = fontsize
if rot is not None:
self.rot = rot
# need to know for format_date_labels since it's rotated to 30 by
# default
self._rot_set = True
else:
self._rot_set = False
self.rot = self._default_rot
if grid is None:
grid = False if secondary_y else self.plt.rcParams['axes.grid']
self.grid = grid
self.legend = legend
self.legend_handles = []
self.legend_labels = []
for attr in self._pop_attributes:
value = kwds.pop(attr, self._attr_defaults.get(attr, None))
setattr(self, attr, value)
self.ax = ax
self.fig = fig
self.axes = None
# parse errorbar input if given
xerr = kwds.pop('xerr', None)
yerr = kwds.pop('yerr', None)
self.errors = {kw: self._parse_errorbars(kw, err)
for kw, err in zip(['xerr', 'yerr'], [xerr, yerr])}
if not isinstance(secondary_y, (bool, tuple, list,
np.ndarray, ABCIndexClass)):
secondary_y = [secondary_y]
self.secondary_y = secondary_y
# ugly TypeError if user passes matplotlib's `cmap` name.
# Probably better to accept either.
if 'cmap' in kwds and colormap:
raise TypeError("Only specify one of `cmap` and `colormap`.")
elif 'cmap' in kwds:
self.colormap = kwds.pop('cmap')
else:
self.colormap = colormap
self.table = table
self.kwds = kwds
self._validate_color_args()
def _validate_color_args(self):
if 'color' not in self.kwds and 'colors' in self.kwds:
warnings.warn(("'colors' is being deprecated. Please use 'color'"
"instead of 'colors'"))
colors = self.kwds.pop('colors')
self.kwds['color'] = colors
if ('color' in self.kwds and self.nseries == 1 and
not is_list_like(self.kwds['color'])):
# support series.plot(color='green')
self.kwds['color'] = [self.kwds['color']]
if ('color' in self.kwds and isinstance(self.kwds['color'], tuple) and
self.nseries == 1 and len(self.kwds['color']) in (3, 4)):
# support RGB and RGBA tuples in series plot
self.kwds['color'] = [self.kwds['color']]
if ('color' in self.kwds or 'colors' in self.kwds) and \
self.colormap is not None:
warnings.warn("'color' and 'colormap' cannot be used "
"simultaneously. Using 'color'")
if 'color' in self.kwds and self.style is not None:
if is_list_like(self.style):
styles = self.style
else:
styles = [self.style]
# need only a single match
for s in styles:
if re.match('^[a-z]+?', s) is not None:
raise ValueError(
"Cannot pass 'style' string with a color "
"symbol and 'color' keyword argument. Please"
" use one or the other or pass 'style' "
"without a color symbol")
def _iter_data(self, data=None, keep_index=False, fillna=None):
if data is None:
data = self.data
if fillna is not None:
data = data.fillna(fillna)
# TODO: unused?
# if self.sort_columns:
# columns = com.try_sort(data.columns)
# else:
# columns = data.columns
for col, values in data.iteritems():
if keep_index is True:
yield col, values
else:
yield col, values.values
@property
def nseries(self):
if self.data.ndim == 1:
return 1
else:
return self.data.shape[1]
def draw(self):
self.plt.draw_if_interactive()
def generate(self):
self._args_adjust()
self._compute_plot_data()
self._setup_subplots()
self._make_plot()
self._add_table()
self._make_legend()
self._adorn_subplots()
for ax in self.axes:
self._post_plot_logic_common(ax, self.data)
self._post_plot_logic(ax, self.data)
def _args_adjust(self):
pass
def _has_plotted_object(self, ax):
"""check whether ax has data"""
return (len(ax.lines) != 0 or
len(ax.artists) != 0 or
len(ax.containers) != 0)
def _maybe_right_yaxis(self, ax, axes_num):
    """Return the axes to draw series *axes_num* on, creating a twinx
    secondary (right) y-axis when that series is marked secondary."""
    if not self.on_right(axes_num):
        # secondary axes may be passed via ax kw
        return self._get_ax_layer(ax)

    if hasattr(ax, 'right_ax'):
        # if it has right_ax proparty, ``ax`` must be left axes
        return ax.right_ax
    elif hasattr(ax, 'left_ax'):
        # if it has left_ax proparty, ``ax`` must be right axes
        return ax
    else:
        # otherwise, create twin axes
        orig_ax, new_ax = ax, ax.twinx()
        # TODO: use Matplotlib public API when available
        new_ax._get_lines = orig_ax._get_lines
        new_ax._get_patches_for_fill = orig_ax._get_patches_for_fill
        # cross-link the pair so later calls take the hasattr branches above
        orig_ax.right_ax, new_ax.left_ax = new_ax, orig_ax

        if not self._has_plotted_object(orig_ax):  # no data on left y
            orig_ax.get_yaxis().set_visible(False)

        # mirror the log-scale settings onto the new secondary axis
        if self.logy is True or self.loglog is True:
            new_ax.set_yscale('log')
        elif self.logy == 'sym' or self.loglog == 'sym':
            new_ax.set_yscale('symlog')
        return new_ax
def _setup_subplots(self):
if self.subplots:
fig, axes = _subplots(naxes=self.nseries,
sharex=self.sharex, sharey=self.sharey,
figsize=self.figsize, ax=self.ax,
layout=self.layout,
layout_type=self._layout_type)
else:
if self.ax is None:
fig = self.plt.figure(figsize=self.figsize)
axes = fig.add_subplot(111)
else:
fig = self.ax.get_figure()
if self.figsize is not None:
fig.set_size_inches(self.figsize)
axes = self.ax
axes = _flatten(axes)
valid_log = {False, True, 'sym', None}
input_log = {self.logx, self.logy, self.loglog}
if input_log - valid_log:
invalid_log = next(iter((input_log - valid_log)))
raise ValueError(
"Boolean, None and 'sym' are valid options,"
" '{}' is given.".format(invalid_log)
)
if self.logx is True or self.loglog is True:
[a.set_xscale('log') for a in axes]
elif self.logx == 'sym' or self.loglog == 'sym':
[a.set_xscale('symlog') for a in axes]
if self.logy is True or self.loglog is True:
[a.set_yscale('log') for a in axes]
elif self.logy == 'sym' or self.loglog == 'sym':
[a.set_yscale('symlog') for a in axes]
self.fig = fig
self.axes = axes
@property
def result(self):
"""
Return result axes
"""
if self.subplots:
if self.layout is not None and not is_list_like(self.ax):
return self.axes.reshape(*self.layout)
else:
return self.axes
else:
sec_true = isinstance(self.secondary_y, bool) and self.secondary_y
all_sec = (is_list_like(self.secondary_y) and
len(self.secondary_y) == self.nseries)
if (sec_true or all_sec):
# if all data is plotted on secondary, return right axes
return self._get_ax_layer(self.axes[0], primary=False)
else:
return self.axes[0]
def _compute_plot_data(self):
    """Normalize ``self.data`` to a numeric-only DataFrame ready to plot.

    Promotes a Series to a single-column frame, keeps only plottable dtypes
    (numeric, datetime, datetimetz, timedelta), raises TypeError when nothing
    plottable remains, and converts extension arrays to plain ndarrays.
    """
    data = self.data

    if isinstance(data, ABCSeries):
        label = self.label
        if label is None and data.name is None:
            label = 'None'
        data = data.to_frame(name=label)

    # GH16953, _convert is needed as fallback, for ``Series``
    # with ``dtype == object``
    data = data._convert(datetime=True, timedelta=True)
    numeric_data = data.select_dtypes(include=[np.number,
                                               "datetime",
                                               "datetimetz",
                                               "timedelta"])

    try:
        is_empty = numeric_data.empty
    except AttributeError:
        is_empty = not len(numeric_data)

    # no non-numeric frames or series allowed
    if is_empty:
        raise TypeError('no numeric data to plot')

    # GH25587: cast ExtensionArray of pandas (IntegerArray, etc.) to
    # np.ndarray before plot.
    numeric_data = numeric_data.copy()
    for col in numeric_data:
        numeric_data[col] = np.asarray(numeric_data[col])

    self.data = numeric_data
def _make_plot(self):
raise AbstractMethodError(self)
def _add_table(self):
if self.table is False:
return
elif self.table is True:
data = self.data.transpose()
else:
data = self.table
ax = self._get_ax(0)
table(ax, data)
def _post_plot_logic_common(self, ax, data):
"""Common post process for each axes"""
from matplotlib.ticker import FixedLocator, FixedFormatter
def get_label(i):
try:
return pprint_thing(data.index[i])
except Exception:
return ''
if self.orientation == 'vertical' or self.orientation is None:
if self._need_to_set_index:
xticks = ax.get_xticks()
xticklabels = [get_label(x) for x in xticks]
ax.set_xticklabels(xticklabels)
ax.xaxis.set_major_locator(FixedLocator(xticks))
ax.xaxis.set_major_formatter(FixedFormatter(xticklabels))
self._apply_axis_properties(ax.xaxis, rot=self.rot,
fontsize=self.fontsize)
self._apply_axis_properties(ax.yaxis, fontsize=self.fontsize)
if hasattr(ax, 'right_ax'):
self._apply_axis_properties(ax.right_ax.yaxis,
fontsize=self.fontsize)
elif self.orientation == 'horizontal':
if self._need_to_set_index:
yticks = ax.get_yticks()
yticklabels = [get_label(y) for y in yticks]
ax.set_yticklabels(yticklabels)
ax.xaxis.set_major_locator(FixedLocator(yticks))
ax.xaxis.set_major_formatter(FixedFormatter(yticklabels))
self._apply_axis_properties(ax.yaxis, rot=self.rot,
fontsize=self.fontsize)
self._apply_axis_properties(ax.xaxis, fontsize=self.fontsize)
if hasattr(ax, 'right_ax'):
self._apply_axis_properties(ax.right_ax.yaxis,
fontsize=self.fontsize)
else: # pragma no cover
raise ValueError
def _post_plot_logic(self, ax, data):
"""Post process for each axes. Overridden in child classes"""
pass
def _adorn_subplots(self):
"""Common post process unrelated to data"""
if len(self.axes) > 0:
all_axes = self._get_subplots()
nrows, ncols = self._get_axes_layout()
_handle_shared_axes(axarr=all_axes, nplots=len(all_axes),
naxes=nrows * ncols, nrows=nrows,
ncols=ncols, sharex=self.sharex,
sharey=self.sharey)
for ax in self.axes:
if self.yticks is not None:
ax.set_yticks(self.yticks)
if self.xticks is not None:
ax.set_xticks(self.xticks)
if self.ylim is not None:
ax.set_ylim(self.ylim)
if self.xlim is not None:
ax.set_xlim(self.xlim)
ax.grid(self.grid)
if self.title:
if self.subplots:
if is_list_like(self.title):
if len(self.title) != self.nseries:
msg = ('The length of `title` must equal the number '
'of columns if using `title` of type `list` '
'and `subplots=True`.\n'
'length of title = {}\n'
'number of columns = {}').format(
len(self.title), self.nseries)
raise ValueError(msg)
for (ax, title) in zip(self.axes, self.title):
ax.set_title(title)
else:
self.fig.suptitle(self.title)
else:
if is_list_like(self.title):
msg = ('Using `title` of type `list` is not supported '
'unless `subplots=True` is passed')
raise ValueError(msg)
self.axes[0].set_title(self.title)
def _apply_axis_properties(self, axis, rot=None, fontsize=None):
""" Tick creation within matplotlib is reasonably expensive and is
internally deferred until accessed as Ticks are created/destroyed
multiple times per draw. It's therefore beneficial for us to avoid
accessing unless we will act on the Tick.
"""
if rot is not None or fontsize is not None:
# rot=0 is a valid setting, hence the explicit None check
labels = axis.get_majorticklabels() + axis.get_minorticklabels()
for label in labels:
if rot is not None:
label.set_rotation(rot)
if fontsize is not None:
label.set_fontsize(fontsize)
@property
def legend_title(self):
if not isinstance(self.data.columns, ABCMultiIndex):
name = self.data.columns.name
if name is not None:
name = pprint_thing(name)
return name
else:
stringified = map(pprint_thing,
self.data.columns.names)
return ','.join(stringified)
def _add_legend_handle(self, handle, label, index=None):
if label is not None:
if self.mark_right and index is not None:
if self.on_right(index):
label = label + ' (right)'
self.legend_handles.append(handle)
self.legend_labels.append(label)
def _make_legend(self):
ax, leg = self._get_ax_legend(self.axes[0])
handles = []
labels = []
title = ''
if not self.subplots:
if leg is not None:
title = leg.get_title().get_text()
handles = leg.legendHandles
labels = [x.get_text() for x in leg.get_texts()]
if self.legend:
if self.legend == 'reverse':
self.legend_handles = reversed(self.legend_handles)
self.legend_labels = reversed(self.legend_labels)
handles += self.legend_handles
labels += self.legend_labels
if self.legend_title is not None:
title = self.legend_title
if len(handles) > 0:
ax.legend(handles, labels, loc='best', title=title)
elif self.subplots and self.legend:
for ax in self.axes:
if ax.get_visible():
ax.legend(loc='best')
def _get_ax_legend(self, ax):
leg = ax.get_legend()
other_ax = (getattr(ax, 'left_ax', None) or
getattr(ax, 'right_ax', None))
other_leg = None
if other_ax is not None:
other_leg = other_ax.get_legend()
if leg is None and other_leg is not None:
leg = other_leg
ax = other_ax
return ax, leg
@cache_readonly
def plt(self):
import matplotlib.pyplot as plt
return plt
_need_to_set_index = False
def _get_xticks(self, convert_period=False):
index = self.data.index
is_datetype = index.inferred_type in ('datetime', 'date',
'datetime64', 'time')
if self.use_index:
if convert_period and isinstance(index, ABCPeriodIndex):
self.data = self.data.reindex(index=index.sort_values())
x = self.data.index.to_timestamp()._mpl_repr()
elif index.is_numeric():
"""
Matplotlib supports numeric values or datetime objects as
xaxis values. Taking LBYL approach here, by the time
matplotlib raises exception when using non numeric/datetime
values for xaxis, several actions are already taken by plt.
"""
x = index._mpl_repr()
elif is_datetype:
self.data = self.data[notna(self.data.index)]
self.data = self.data.sort_index()
x = self.data.index._mpl_repr()
else:
self._need_to_set_index = True
x = lrange(len(index))
else:
x = lrange(len(index))
return x
@classmethod
def _plot(cls, ax, x, y, style=None, is_errorbar=False, **kwds):
mask = isna(y)
if mask.any():
y = np.ma.array(y)
y = np.ma.masked_where(mask, y)
if isinstance(x, ABCIndexClass):
x = x._mpl_repr()
if is_errorbar:
if 'xerr' in kwds:
kwds['xerr'] = np.array(kwds.get('xerr'))
if 'yerr' in kwds:
kwds['yerr'] = np.array(kwds.get('yerr'))
return ax.errorbar(x, y, **kwds)
else:
# prevent style kwarg from going to errorbar, where it is
# unsupported
if style is not None:
args = (x, y, style)
else:
args = (x, y)
return ax.plot(*args, **kwds)
def _get_index_name(self):
if isinstance(self.data.index, ABCMultiIndex):
name = self.data.index.names
if com._any_not_none(*name):
name = ','.join(pprint_thing(x) for x in name)
else:
name = None
else:
name = self.data.index.name
if name is not None:
name = pprint_thing(name)
return name
@classmethod
def _get_ax_layer(cls, ax, primary=True):
    """Return the primary (left) or secondary (right) layer of *ax*,
    falling back to *ax* itself when no such layer exists."""
    attr = 'left_ax' if primary else 'right_ax'
    return getattr(ax, attr, ax)
def _get_ax(self, i):
# get the twinx ax if appropriate
if self.subplots:
ax = self.axes[i]
ax = self._maybe_right_yaxis(ax, i)
self.axes[i] = ax
else:
ax = self.axes[0]
ax = self._maybe_right_yaxis(ax, i)
ax.get_yaxis().set_visible(True)
return ax
def on_right(self, i):
if isinstance(self.secondary_y, bool):
return self.secondary_y
if isinstance(self.secondary_y, (tuple, list,
np.ndarray, ABCIndexClass)):
return self.data.columns[i] in self.secondary_y
def _apply_style_colors(self, colors, kwds, col_num, label):
"""
Manage style and color based on column number and its label.
Returns tuple of appropriate style and kwds which "color" may be added.
"""
style = None
if self.style is not None:
if isinstance(self.style, list):
try:
style = self.style[col_num]
except IndexError:
pass
elif isinstance(self.style, dict):
style = self.style.get(label, style)
else:
style = self.style
has_color = 'color' in kwds or self.colormap is not None
nocolor_style = style is None or re.match('[a-z]+', style) is None
if (has_color or self.subplots) and nocolor_style:
kwds['color'] = colors[col_num % len(colors)]
return style, kwds
def _get_colors(self, num_colors=None, color_kwds='color'):
if num_colors is None:
num_colors = self.nseries
return _get_standard_colors(num_colors=num_colors,
colormap=self.colormap,
color=self.kwds.get(color_kwds))
def _parse_errorbars(self, label, err):
    """
    Look for error keyword arguments and return the actual errorbar data
    or return the error DataFrame/dict

    Error bars can be specified in several ways:
        Series: the user provides a pandas.Series object of the same
                length as the data
        ndarray: provides a np.ndarray of the same length as the data
        DataFrame/dict: error values are paired with keys matching the
                key in the plotted DataFrame
        str: the name of the column within the plotted DataFrame

    Non-keyed forms are tiled to shape (nseries, len(data)) so every
    plotted series gets an error row.
    """
    if err is None:
        return None

    def match_labels(data, e):
        # align user-supplied errors with the plotted data's index
        e = e.reindex(data.index)
        return e

    # key-matched DataFrame
    if isinstance(err, ABCDataFrame):
        err = match_labels(self.data, err)
    # key-matched dict
    elif isinstance(err, dict):
        pass
    # Series of error values
    elif isinstance(err, ABCSeries):
        # broadcast error series across data
        err = match_labels(self.data, err)
        err = np.atleast_2d(err)
        err = np.tile(err, (self.nseries, 1))
    # errors are a column in the dataframe
    elif isinstance(err, str):
        evalues = self.data[err].values
        # drop the error column so it is not plotted as data
        self.data = self.data[self.data.columns.drop(err)]
        err = np.atleast_2d(evalues)
        err = np.tile(err, (self.nseries, 1))
    elif is_list_like(err):
        if is_iterator(err):
            err = np.atleast_2d(list(err))
        else:
            # raw error values
            err = np.atleast_2d(err)

        err_shape = err.shape

        # asymmetrical error bars: must be (nseries, 2, len(data))
        if err.ndim == 3:
            if (err_shape[0] != self.nseries) or \
                    (err_shape[1] != 2) or \
                    (err_shape[2] != len(self.data)):
                msg = "Asymmetrical error bars should be provided " + \
                    "with the shape (%u, 2, %u)" % \
                    (self.nseries, len(self.data))
                raise ValueError(msg)

        # broadcast errors to each data series
        if len(err) == 1:
            err = np.tile(err, (self.nseries, 1))

    elif is_number(err):
        # scalar error applied uniformly to every point of every series
        err = np.tile([err], (self.nseries, len(self.data)))

    else:
        msg = "No valid {label} detected".format(label=label)
        raise ValueError(msg)

    return err
def _get_errorbars(self, label=None, index=None, xerr=True, yerr=True):
    """Select the error values for one plotted series.

    Pulls from the pre-parsed ``self.errors`` dict; *label* keys into
    DataFrame/dict-shaped errors, *index* rows into array-shaped errors.
    Returns a dict with only the requested, present 'xerr'/'yerr' entries.
    """
    errors = {}

    for kw, flag in zip(['xerr', 'yerr'], [xerr, yerr]):
        if flag:
            err = self.errors[kw]

            # user provided label-matched dataframe of errors
            if isinstance(err, (ABCDataFrame, dict)):
                if label is not None and label in err.keys():
                    err = err[label]
                else:
                    err = None
            elif index is not None and err is not None:
                err = err[index]

            if err is not None:
                errors[kw] = err
    return errors
def _get_subplots(self):
from matplotlib.axes import Subplot
return [ax for ax in self.axes[0].get_figure().get_axes()
if isinstance(ax, Subplot)]
def _get_axes_layout(self):
axes = self._get_subplots()
x_set = set()
y_set = set()
for ax in axes:
# check axes coordinates to estimate layout
points = ax.get_position().get_points()
x_set.add(points[0][0])
y_set.add(points[0][1])
return (len(y_set), len(x_set))
class PlanePlot(MPLPlot):
    """
    Abstract class for plotting on plane, currently scatter and hexbin.
    """

    _layout_type = 'single'

    def __init__(self, data, x, y, **kwargs):
        MPLPlot.__init__(self, data, **kwargs)
        if x is None or y is None:
            raise ValueError(self._kind + ' requires an x and y column')
        # integer x/y are positional column indices, unless the frame
        # actually has integer column labels
        if is_integer(x) and not self.data.columns.holds_integer():
            x = self.data.columns[x]
        if is_integer(y) and not self.data.columns.holds_integer():
            y = self.data.columns[y]
        if len(self.data[x]._get_numeric_data()) == 0:
            raise ValueError(self._kind + ' requires x column to be numeric')
        if len(self.data[y]._get_numeric_data()) == 0:
            raise ValueError(self._kind + ' requires y column to be numeric')

        self.x = x
        self.y = y

    @property
    def nseries(self):
        # a plane plot always draws a single series
        return 1

    def _post_plot_logic(self, ax, data):
        # label the axes with the plotted column names
        x, y = self.x, self.y
        ax.set_ylabel(pprint_thing(y))
        ax.set_xlabel(pprint_thing(x))

    def _plot_colorbar(self, ax, **kwds):
        # Addresses issues #10611 and #10678:
        # When plotting scatterplots and hexbinplots in IPython
        # inline backend the colorbar axis height tends not to
        # exactly match the parent axis height.
        # The difference is due to small fractional differences
        # in floating points with similar representation.
        # To deal with this, this method forces the colorbar
        # height to take the height of the parent axes.
        # For a more detailed description of the issue
        # see the following link:
        # https://github.com/ipython/ipython/issues/11215
        img = ax.collections[0]
        cbar = self.fig.colorbar(img, ax=ax, **kwds)

        if _mpl_ge_3_0_0():
            # The workaround below is no longer necessary.
            return

        points = ax.get_position().get_points()
        cbar_points = cbar.ax.get_position().get_points()

        # force the colorbar to span exactly the parent axes' height
        cbar.ax.set_position([cbar_points[0, 0],
                              points[0, 1],
                              cbar_points[1, 0] - cbar_points[0, 0],
                              points[1, 1] - points[0, 1]])
        # To see the discrepancy in axis heights uncomment
        # the following two lines:
        # print(points[1, 1] - points[0, 1])
        # print(cbar_points[1, 1] - cbar_points[0, 1])
class ScatterPlot(PlanePlot):
    """Scatter plot of x against y with optional per-point color (c)
    and marker size (s)."""

    _kind = 'scatter'

    def __init__(self, data, x, y, s=None, c=None, **kwargs):
        if s is None:
            # hide the matplotlib default for size, in case we want to change
            # the handling of this argument later
            s = 20
        super().__init__(data, x, y, s=s, **kwargs)
        # an integer c is a positional column index, unless the frame
        # actually has integer column labels
        if is_integer(c) and not self.data.columns.holds_integer():
            c = self.data.columns[c]
        self.c = c

    def _make_plot(self):
        x, y, c, data = self.x, self.y, self.c, self.data
        ax = self.axes[0]

        c_is_column = is_hashable(c) and c in self.data.columns

        # plot a colorbar only if a colormap is provided or necessary
        cb = self.kwds.pop('colorbar', self.colormap or c_is_column)

        # pandas uses colormap, matplotlib uses cmap.
        cmap = self.colormap or 'Greys'
        cmap = self.plt.cm.get_cmap(cmap)
        color = self.kwds.pop("color", None)
        # resolve point colors: exactly one of c/color, else rc default
        if c is not None and color is not None:
            raise TypeError('Specify exactly one of `c` and `color`')
        elif c is None and color is None:
            c_values = self.plt.rcParams['patch.facecolor']
        elif color is not None:
            c_values = color
        elif c_is_column:
            c_values = self.data[c].values
        else:
            c_values = c

        if self.legend and hasattr(self, 'label'):
            label = self.label
        else:
            label = None
        scatter = ax.scatter(data[x].values, data[y].values, c=c_values,
                             label=label, cmap=cmap, **self.kwds)
        if cb:
            cbar_label = c if c_is_column else ''
            self._plot_colorbar(ax, label=cbar_label)

        if label is not None:
            self._add_legend_handle(scatter, label)
        else:
            self.legend = False

        # draw error bars (if any) on top of the scatter points
        errors_x = self._get_errorbars(label=x, index=0, yerr=False)
        errors_y = self._get_errorbars(label=y, index=0, xerr=False)
        if len(errors_x) > 0 or len(errors_y) > 0:
            err_kwds = dict(errors_x, **errors_y)
            err_kwds['ecolor'] = scatter.get_facecolor()[0]
            ax.errorbar(data[x].values, data[y].values,
                        linestyle='none', **err_kwds)
class HexBinPlot(PlanePlot):
    """Hexagonal-binning plot of x against y, optionally weighted by C."""

    _kind = 'hexbin'

    def __init__(self, data, x, y, C=None, **kwargs):
        super().__init__(data, x, y, **kwargs)
        # an integer C is a positional column index, unless the frame
        # actually has integer column labels
        if is_integer(C) and not self.data.columns.holds_integer():
            C = self.data.columns[C]
        self.C = C

    def _make_plot(self):
        x, y, data, C = self.x, self.y, self.data, self.C
        ax = self.axes[0]
        # pandas uses colormap, matplotlib uses cmap.
        cmap = self.plt.cm.get_cmap(self.colormap or 'BuGn')
        cb = self.kwds.pop('colorbar', True)

        c_values = None if C is None else data[C].values

        ax.hexbin(data[x].values, data[y].values, C=c_values, cmap=cmap,
                  **self.kwds)
        if cb:
            self._plot_colorbar(ax)

    def _make_legend(self):
        # hexbin conveys values through the colorbar; a legend adds nothing
        pass
class LinePlot(MPLPlot):
    """Line plot; supports stacked columns and dynamic (frequency-aware)
    x-axes for time series data."""
    _kind = 'line'
    _default_rot = 0
    orientation = 'vertical'
    def __init__(self, data, **kwargs):
        MPLPlot.__init__(self, data, **kwargs)
        if self.stacked:
            # stacking accumulates values, so missing entries become zero
            self.data = self.data.fillna(value=0)
        self.x_compat = plot_params['x_compat']
        if 'x_compat' in self.kwds:
            # per-call override of the global x_compat plot parameter
            self.x_compat = bool(self.kwds.pop('x_compat'))
    def _is_ts_plot(self):
        # this is slightly deceptive
        return not self.x_compat and self.use_index and self._use_dynamic_x()
    def _use_dynamic_x(self):
        # Delegate to the timeseries helper to decide whether the x-axis
        # should use dynamic period-based coordinates.
        from pandas.plotting._timeseries import _use_dynamic_x
        return _use_dynamic_x(self._get_ax(0), self.data)
    def _make_plot(self):
        """Draw one line per column, dispatching to the ts-plot path when
        the index supports dynamic x coordinates."""
        if self._is_ts_plot():
            from pandas.plotting._timeseries import _maybe_convert_index
            data = _maybe_convert_index(self._get_ax(0), self.data)
            x = data.index  # dummy, not used
            plotf = self._ts_plot
            it = self._iter_data(data=data, keep_index=True)
        else:
            x = self._get_xticks(convert_period=True)
            plotf = self._plot
            it = self._iter_data()
        stacking_id = self._get_stacking_id()
        is_errorbar = com._any_not_none(*self.errors.values())
        colors = self._get_colors()
        for i, (label, y) in enumerate(it):
            ax = self._get_ax(i)
            kwds = self.kwds.copy()
            style, kwds = self._apply_style_colors(colors, kwds, i, label)
            errors = self._get_errorbars(label=label, index=i)
            kwds = dict(kwds, **errors)
            label = pprint_thing(label)  # .encode('utf-8')
            kwds['label'] = label
            newlines = plotf(ax, x, y, style=style, column_num=i,
                             stacking_id=stacking_id,
                             is_errorbar=is_errorbar,
                             **kwds)
            self._add_legend_handle(newlines[0], label, index=i)
        # rescale x-limits to span all drawn lines on the last axes
        lines = _get_all_lines(ax)
        left, right = _get_xlim(lines)
        ax.set_xlim(left, right)
    @classmethod
    def _plot(cls, ax, x, y, style=None, column_num=None,
              stacking_id=None, **kwds):
        # column_num is used to get the target column from protf in line and
        # area plots
        if column_num == 0:
            cls._initialize_stacker(ax, stacking_id, len(y))
        y_values = cls._get_stacked_values(ax, stacking_id, y, kwds['label'])
        lines = MPLPlot._plot(ax, x, y_values, style=style, **kwds)
        cls._update_stacker(ax, stacking_id, y)
        return lines
    @classmethod
    def _ts_plot(cls, ax, x, data, style=None, **kwds):
        from pandas.plotting._timeseries import (_maybe_resample,
                                                 _decorate_axes,
                                                 format_dateaxis)
        # accept x to be consistent with normal plot func,
        # x is not passed to tsplot as it uses data.index as x coordinate
        # column_num must be in kwds for stacking purpose
        freq, data = _maybe_resample(data, ax, kwds)
        # Set ax with freq info
        _decorate_axes(ax, freq, kwds)
        # digging deeper
        if hasattr(ax, 'left_ax'):
            _decorate_axes(ax.left_ax, freq, kwds)
        if hasattr(ax, 'right_ax'):
            _decorate_axes(ax.right_ax, freq, kwds)
        ax._plot_data.append((data, cls._kind, kwds))
        lines = cls._plot(ax, data.index, data.values, style=style, **kwds)
        # set date formatter, locators and rescale limits
        format_dateaxis(ax, ax.freq, data.index)
        return lines
    def _get_stacking_id(self):
        # The stacking id (keyed on the data object) groups the per-axes
        # stacker state; None disables stacking entirely.
        if self.stacked:
            return id(self.data)
        else:
            return None
    @classmethod
    def _initialize_stacker(cls, ax, stacking_id, n):
        # Lazily create per-axes running totals (positive and negative
        # accumulated separately) used to offset stacked columns.
        if stacking_id is None:
            return
        if not hasattr(ax, '_stacker_pos_prior'):
            ax._stacker_pos_prior = {}
        if not hasattr(ax, '_stacker_neg_prior'):
            ax._stacker_neg_prior = {}
        ax._stacker_pos_prior[stacking_id] = np.zeros(n)
        ax._stacker_neg_prior[stacking_id] = np.zeros(n)
    @classmethod
    def _get_stacked_values(cls, ax, stacking_id, values, label):
        # Offset `values` by the running positive or negative totals;
        # a mixed-sign column cannot be stacked and raises.
        if stacking_id is None:
            return values
        if not hasattr(ax, '_stacker_pos_prior'):
            # stacker may not be initialized for subplots
            cls._initialize_stacker(ax, stacking_id, len(values))
        if (values >= 0).all():
            return ax._stacker_pos_prior[stacking_id] + values
        elif (values <= 0).all():
            return ax._stacker_neg_prior[stacking_id] + values
        raise ValueError('When stacked is True, each column must be either '
                         'all positive or negative.'
                         '{0} contains both positive and negative values'
                         .format(label))
    @classmethod
    def _update_stacker(cls, ax, stacking_id, values):
        # Fold the just-plotted column into the running totals.
        if stacking_id is None:
            return
        if (values >= 0).all():
            ax._stacker_pos_prior[stacking_id] += values
        elif (values <= 0).all():
            ax._stacker_neg_prior[stacking_id] += values
    def _post_plot_logic(self, ax, data):
        condition = (not self._use_dynamic_x() and
                     data.index.is_all_dates and
                     not self.subplots or
                     (self.subplots and self.sharex))
        index_name = self._get_index_name()
        if condition:
            # irregular TS rotated 30 deg. by default
            # probably a better place to check / set this.
            if not self._rot_set:
                self.rot = 30
            format_date_labels(ax, rot=self.rot)
        if index_name is not None and self.use_index:
            ax.set_xlabel(index_name)
class AreaPlot(LinePlot):
    """Filled area plot; stacked by default, in which case every column
    must be uniformly non-negative or non-positive."""
    _kind = 'area'
    def __init__(self, data, **kwargs):
        kwargs.setdefault('stacked', True)
        # treat missing values as zero before plotting
        data = data.fillna(value=0)
        LinePlot.__init__(self, data, **kwargs)
        if not self.stacked:
            # use smaller alpha to distinguish overlap
            self.kwds.setdefault('alpha', 0.5)
        if self.logy or self.loglog:
            raise ValueError("Log-y scales are not supported in area plot")
    @classmethod
    def _plot(cls, ax, x, y, style=None, column_num=None,
              stacking_id=None, is_errorbar=False, **kwds):
        if column_num == 0:
            cls._initialize_stacker(ax, stacking_id, len(y))
        y_values = cls._get_stacked_values(ax, stacking_id, y, kwds['label'])
        # need to remove label, because subplots uses mpl legend as it is
        line_kwds = kwds.copy()
        line_kwds.pop('label')
        lines = MPLPlot._plot(ax, x, y_values, style=style, **line_kwds)
        # get data from the line to get coordinates for fill_between
        xdata, y_values = lines[0].get_data(orig=False)
        # unable to use ``_get_stacked_values`` here to get starting point
        if stacking_id is None:
            start = np.zeros(len(y))
        elif (y >= 0).all():
            start = ax._stacker_pos_prior[stacking_id]
        elif (y <= 0).all():
            start = ax._stacker_neg_prior[stacking_id]
        else:
            start = np.zeros(len(y))
        if 'color' not in kwds:
            # reuse the line's color so line and fill match
            kwds['color'] = lines[0].get_color()
        rect = ax.fill_between(xdata, start, y_values, **kwds)
        cls._update_stacker(ax, stacking_id, y)
        # LinePlot expects list of artists
        res = [rect]
        return res
    def _post_plot_logic(self, ax, data):
        LinePlot._post_plot_logic(self, ax, data)
        # pin the y-limit at zero when the data never crosses it
        if self.ylim is None:
            if (data >= 0).all().all():
                ax.set_ylim(0, None)
            elif (data <= 0).all().all():
                ax.set_ylim(None, 0)
class BarPlot(MPLPlot):
    """Vertical bar plot; supports grouped, stacked and subplot layouts
    via precomputed tick positions and per-series bar offsets."""
    _kind = 'bar'
    _default_rot = 90
    orientation = 'vertical'
    def __init__(self, data, **kwargs):
        # we have to treat a series differently than a
        # 1-column DataFrame w.r.t. color handling
        self._is_series = isinstance(data, ABCSeries)
        self.bar_width = kwargs.pop('width', 0.5)
        pos = kwargs.pop('position', 0.5)
        kwargs.setdefault('align', 'center')
        self.tick_pos = np.arange(len(data))
        self.bottom = kwargs.pop('bottom', 0)
        self.left = kwargs.pop('left', 0)
        self.log = kwargs.pop('log', False)
        MPLPlot.__init__(self, data, **kwargs)
        # stacked/subplots draw full-width bars; otherwise each series
        # gets a slice of the width, so offsets differ per alignment mode
        if self.stacked or self.subplots:
            self.tickoffset = self.bar_width * pos
            if kwargs['align'] == 'edge':
                self.lim_offset = self.bar_width / 2
            else:
                self.lim_offset = 0
        else:
            if kwargs['align'] == 'edge':
                w = self.bar_width / self.nseries
                self.tickoffset = self.bar_width * (pos - 0.5) + w * 0.5
                self.lim_offset = w * 0.5
            else:
                self.tickoffset = self.bar_width * pos
                self.lim_offset = 0
        self.ax_pos = self.tick_pos - self.tickoffset
    def _args_adjust(self):
        # allow list-like per-bar bottom/left offsets
        if is_list_like(self.bottom):
            self.bottom = np.array(self.bottom)
        if is_list_like(self.left):
            self.left = np.array(self.left)
    @classmethod
    def _plot(cls, ax, x, y, w, start=0, log=False, **kwds):
        return ax.bar(x, y, w, bottom=start, log=log, **kwds)
    @property
    def _start_base(self):
        # vertical bars grow from the `bottom` offset
        return self.bottom
    def _make_plot(self):
        """Draw bars per column, accumulating positive/negative totals
        separately when stacked."""
        import matplotlib as mpl
        colors = self._get_colors()
        ncolors = len(colors)
        pos_prior = neg_prior = np.zeros(len(self.data))
        K = self.nseries
        for i, (label, y) in enumerate(self._iter_data(fillna=0)):
            ax = self._get_ax(i)
            kwds = self.kwds.copy()
            if self._is_series:
                kwds['color'] = colors
            else:
                kwds['color'] = colors[i % ncolors]
            errors = self._get_errorbars(label=label, index=i)
            kwds = dict(kwds, **errors)
            label = pprint_thing(label)
            if (('yerr' in kwds) or ('xerr' in kwds)) \
                    and (kwds.get('ecolor') is None):
                kwds['ecolor'] = mpl.rcParams['xtick.color']
            start = 0
            if self.log and (y >= 1).all():
                # bars on a log scale start at 1, not 0
                start = 1
            start = start + self._start_base
            if self.subplots:
                w = self.bar_width / 2
                rect = self._plot(ax, self.ax_pos + w, y, self.bar_width,
                                  start=start, label=label,
                                  log=self.log, **kwds)
                ax.set_title(label)
            elif self.stacked:
                mask = y > 0
                start = np.where(mask, pos_prior, neg_prior) + self._start_base
                w = self.bar_width / 2
                rect = self._plot(ax, self.ax_pos + w, y, self.bar_width,
                                  start=start, label=label,
                                  log=self.log, **kwds)
                pos_prior = pos_prior + np.where(mask, y, 0)
                neg_prior = neg_prior + np.where(mask, 0, y)
            else:
                # grouped: each series takes a 1/K slice of the bar width
                w = self.bar_width / K
                rect = self._plot(ax, self.ax_pos + (i + 0.5) * w, y, w,
                                  start=start, label=label,
                                  log=self.log, **kwds)
            self._add_legend_handle(rect, label, index=i)
    def _post_plot_logic(self, ax, data):
        if self.use_index:
            str_index = [pprint_thing(key) for key in data.index]
        else:
            str_index = [pprint_thing(key) for key in range(data.shape[0])]
        name = self._get_index_name()
        # pad the axis a quarter-unit beyond the outermost bars
        s_edge = self.ax_pos[0] - 0.25 + self.lim_offset
        e_edge = self.ax_pos[-1] + 0.25 + self.bar_width + self.lim_offset
        self._decorate_ticks(ax, name, str_index, s_edge, e_edge)
    def _decorate_ticks(self, ax, name, ticklabels, start_edge, end_edge):
        ax.set_xlim((start_edge, end_edge))
        ax.set_xticks(self.tick_pos)
        ax.set_xticklabels(ticklabels)
        if name is not None and self.use_index:
            ax.set_xlabel(name)
class BarhPlot(BarPlot):
    """Horizontal bar plot: mirrors BarPlot with the x/y roles swapped."""
    _kind = 'barh'
    _default_rot = 0
    orientation = 'horizontal'

    @classmethod
    def _plot(cls, ax, x, y, w, start=0, log=False, **kwds):
        # For horizontal bars `start` maps to the bars' left edge.
        return ax.barh(x, y, w, left=start, log=log, **kwds)

    @property
    def _start_base(self):
        # Horizontal bars grow from the `left` offset.
        return self.left

    def _decorate_ticks(self, ax, name, ticklabels, start_edge, end_edge):
        # Ticks and labels live on the y-axis for horizontal bars.
        ax.set_ylim((start_edge, end_edge))
        ax.set_yticks(self.tick_pos)
        ax.set_yticklabels(ticklabels)
        if name is not None and self.use_index:
            ax.set_ylabel(name)
class HistPlot(LinePlot):
    """Histogram plot; reuses LinePlot's stacking machinery but draws
    binned counts via ``Axes.hist``."""
    _kind = 'hist'
    def __init__(self, data, bins=10, bottom=0, **kwargs):
        self.bins = bins  # use mpl default
        self.bottom = bottom
        # Do not call LinePlot.__init__ which may fill nan
        MPLPlot.__init__(self, data, **kwargs)
    def _args_adjust(self):
        if is_integer(self.bins):
            # create common bin edge
            values = (self.data._convert(datetime=True)._get_numeric_data())
            values = np.ravel(values)
            values = values[~isna(values)]
            hist, self.bins = np.histogram(
                values, bins=self.bins,
                range=self.kwds.get('range', None),
                weights=self.kwds.get('weights', None))
        if is_list_like(self.bottom):
            self.bottom = np.array(self.bottom)
    @classmethod
    def _plot(cls, ax, y, style=None, bins=None, bottom=0, column_num=0,
              stacking_id=None, **kwds):
        if column_num == 0:
            # stacker length is the number of bins, not of observations
            cls._initialize_stacker(ax, stacking_id, len(bins) - 1)
        y = y[~isna(y)]
        base = np.zeros(len(bins) - 1)
        bottom = bottom + \
            cls._get_stacked_values(ax, stacking_id, base, kwds['label'])
        # ignore style
        n, bins, patches = ax.hist(y, bins=bins, bottom=bottom, **kwds)
        cls._update_stacker(ax, stacking_id, n)
        return patches
    def _make_plot(self):
        colors = self._get_colors()
        stacking_id = self._get_stacking_id()
        for i, (label, y) in enumerate(self._iter_data()):
            ax = self._get_ax(i)
            kwds = self.kwds.copy()
            label = pprint_thing(label)
            kwds['label'] = label
            style, kwds = self._apply_style_colors(colors, kwds, i, label)
            if style is not None:
                kwds['style'] = style
            kwds = self._make_plot_keywords(kwds, y)
            artists = self._plot(ax, y, column_num=i,
                                 stacking_id=stacking_id, **kwds)
            self._add_legend_handle(artists[0], label, index=i)
    def _make_plot_keywords(self, kwds, y):
        """merge BoxPlot/KdePlot properties to passed kwds"""
        # y is required for KdePlot
        kwds['bottom'] = self.bottom
        kwds['bins'] = self.bins
        return kwds
    def _post_plot_logic(self, ax, data):
        # the count axis depends on orientation
        if self.orientation == 'horizontal':
            ax.set_xlabel('Frequency')
        else:
            ax.set_ylabel('Frequency')
    @property
    def orientation(self):
        if self.kwds.get('orientation', None) == 'horizontal':
            return 'horizontal'
        else:
            return 'vertical'
_kde_docstring = """
Generate Kernel Density Estimate plot using Gaussian kernels.
In statistics, `kernel density estimation`_ (KDE) is a non-parametric
way to estimate the probability density function (PDF) of a random
variable. This function uses Gaussian kernels and includes automatic
bandwidth determination.
.. _kernel density estimation:
https://en.wikipedia.org/wiki/Kernel_density_estimation
Parameters
----------
bw_method : str, scalar or callable, optional
The method used to calculate the estimator bandwidth. This can be
'scott', 'silverman', a scalar constant or a callable.
If None (default), 'scott' is used.
See :class:`scipy.stats.gaussian_kde` for more information.
ind : NumPy array or integer, optional
Evaluation points for the estimated PDF. If None (default),
1000 equally spaced points are used. If `ind` is a NumPy array, the
KDE is evaluated at the points passed. If `ind` is an integer,
`ind` number of equally spaced points are used.
**kwds : optional
Additional keyword arguments are documented in
:meth:`pandas.%(this-datatype)s.plot`.
Returns
-------
matplotlib.axes.Axes or numpy.ndarray of them
See Also
--------
scipy.stats.gaussian_kde : Representation of a kernel-density
estimate using Gaussian kernels. This is the function used
internally to estimate the PDF.
%(sibling-datatype)s.plot.kde : Generate a KDE plot for a
%(sibling-datatype)s.
Examples
--------
%(examples)s
"""
class KdePlot(HistPlot):
    """Kernel density estimate plot backed by ``scipy.stats.gaussian_kde``."""
    _kind = 'kde'
    orientation = 'vertical'

    def __init__(self, data, bw_method=None, ind=None, **kwargs):
        # Bypass HistPlot.__init__: bin handling is irrelevant for a KDE.
        MPLPlot.__init__(self, data, **kwargs)
        self.bw_method = bw_method
        self.ind = ind

    def _args_adjust(self):
        # Nothing to adjust: no bins, no bottom offsets.
        pass

    def _get_ind(self, y):
        """Return the grid of points at which the density is evaluated."""
        if self.ind is not None and not is_integer(self.ind):
            # explicit evaluation points supplied by the caller
            return self.ind
        # np.nanmax() and np.nanmin() ignore missing values
        num_points = 1000 if self.ind is None else self.ind
        lo, hi = np.nanmin(y), np.nanmax(y)
        half_span = 0.5 * (hi - lo)
        return np.linspace(lo - half_span, hi + half_span, num_points)

    @classmethod
    def _plot(cls, ax, y, style=None, bw_method=None, ind=None,
              column_num=None, stacking_id=None, **kwds):
        from scipy.stats import gaussian_kde
        sample = remove_na_arraylike(y)
        density = gaussian_kde(sample, bw_method=bw_method).evaluate(ind)
        return MPLPlot._plot(ax, ind, density, style=style, **kwds)

    def _make_plot_keywords(self, kwds, y):
        kwds['bw_method'] = self.bw_method
        kwds['ind'] = self._get_ind(y)
        return kwds

    def _post_plot_logic(self, ax, data):
        ax.set_ylabel('Density')
class PiePlot(MPLPlot):
    """Pie plot; one wedge per row, one pie per column/series."""
    _kind = 'pie'
    _layout_type = 'horizontal'
    def __init__(self, data, kind=None, **kwargs):
        data = data.fillna(value=0)
        if (data < 0).any().any():
            raise ValueError("{0} doesn't allow negative values".format(kind))
        MPLPlot.__init__(self, data, kind=kind, **kwargs)
    def _args_adjust(self):
        # grids and log scales are meaningless for pies
        self.grid = False
        self.logy = False
        self.logx = False
        self.loglog = False
    def _validate_color_args(self):
        # colors are handled via the 'colors' kwd in _make_plot instead
        pass
    def _make_plot(self):
        colors = self._get_colors(
            num_colors=len(self.data), color_kwds='colors')
        self.kwds.setdefault('colors', colors)
        for i, (label, y) in enumerate(self._iter_data()):
            ax = self._get_ax(i)
            if label is not None:
                label = pprint_thing(label)
                ax.set_ylabel(label)
            kwds = self.kwds.copy()
            def blank_labeler(label, value):
                # suppress the label of zero-sized wedges
                if value == 0:
                    return ''
                else:
                    return label
            idx = [pprint_thing(v) for v in self.data.index]
            labels = kwds.pop('labels', idx)
            # labels is used for each wedge's labels
            # Blank out labels for values of 0 so they don't overlap
            # with nonzero wedges
            if labels is not None:
                blabels = [blank_labeler(l, value) for
                           l, value in zip(labels, y)]
            else:
                blabels = None
            results = ax.pie(y, labels=blabels, **kwds)
            # Axes.pie returns a third element (autopct texts) only when
            # autopct is given
            if kwds.get('autopct', None) is not None:
                patches, texts, autotexts = results
            else:
                patches, texts = results
                autotexts = []
            if self.fontsize is not None:
                for t in texts + autotexts:
                    t.set_fontsize(self.fontsize)
            # leglabels is used for legend labels
            leglabels = labels if labels is not None else idx
            for p, l in zip(patches, leglabels):
                self._add_legend_handle(p, l)
class BoxPlot(LinePlot):
    """Box-and-whisker plot; the shape of the returned object is chosen
    by ``return_type`` (None, 'axes', 'dict' or 'both')."""
    _kind = 'box'
    _layout_type = 'horizontal'
    _valid_return_types = (None, 'axes', 'dict', 'both')
    # namedtuple to hold results
    BP = namedtuple("Boxplot", ['ax', 'lines'])
    def __init__(self, data, return_type='axes', **kwargs):
        # Do not call LinePlot.__init__ which may fill nan
        if return_type not in self._valid_return_types:
            raise ValueError(
                "return_type must be {None, 'axes', 'dict', 'both'}")
        self.return_type = return_type
        MPLPlot.__init__(self, data, **kwargs)
    def _args_adjust(self):
        if self.subplots:
            # Disable label ax sharing. Otherwise, all subplots shows last
            # column label
            if self.orientation == 'vertical':
                self.sharex = False
            else:
                self.sharey = False
    @classmethod
    def _plot(cls, ax, y, column_num=None, return_type='axes', **kwds):
        if y.ndim == 2:
            y = [remove_na_arraylike(v) for v in y]
            # Boxplot fails with empty arrays, so need to add a NaN
            # if any cols are empty
            # GH 8181
            y = [v if v.size > 0 else np.array([np.nan]) for v in y]
        else:
            y = remove_na_arraylike(y)
        bp = ax.boxplot(y, **kwds)
        # always return (result, bp): the raw bp dict is needed by the
        # caller for coloring regardless of return_type
        if return_type == 'dict':
            return bp, bp
        elif return_type == 'both':
            return cls.BP(ax=ax, lines=bp), bp
        else:
            return ax, bp
    def _validate_color_args(self):
        """Resolve the 'color' kwd into per-element colors, falling back
        to the standard color cycle."""
        if 'color' in self.kwds:
            if self.colormap is not None:
                warnings.warn("'color' and 'colormap' cannot be used "
                              "simultaneously. Using 'color'")
            self.color = self.kwds.pop('color')
            if isinstance(self.color, dict):
                valid_keys = ['boxes', 'whiskers', 'medians', 'caps']
                for key, values in self.color.items():
                    if key not in valid_keys:
                        raise ValueError("color dict contains invalid "
                                         "key '{0}' "
                                         "The key must be either {1}"
                                         .format(key, valid_keys))
        else:
            self.color = None
        # get standard colors for default
        colors = _get_standard_colors(num_colors=3,
                                      colormap=self.colormap,
                                      color=None)
        # use 2 colors by default, for box/whisker and median
        # flier colors isn't needed here
        # because it can be specified by ``sym`` kw
        self._boxes_c = colors[0]
        self._whiskers_c = colors[0]
        self._medians_c = colors[2]
        self._caps_c = 'k'  # mpl default
    def _get_colors(self, num_colors=None, color_kwds='color'):
        # color resolution already happened in _validate_color_args
        pass
    def maybe_color_bp(self, bp):
        # Apply the box/whisker/median/cap colors resolved earlier.
        if isinstance(self.color, dict):
            boxes = self.color.get('boxes', self._boxes_c)
            whiskers = self.color.get('whiskers', self._whiskers_c)
            medians = self.color.get('medians', self._medians_c)
            caps = self.color.get('caps', self._caps_c)
        else:
            # Other types are forwarded to matplotlib
            # If None, use default colors
            boxes = self.color or self._boxes_c
            whiskers = self.color or self._whiskers_c
            medians = self.color or self._medians_c
            caps = self.color or self._caps_c
        from matplotlib.artist import setp
        setp(bp['boxes'], color=boxes, alpha=1)
        setp(bp['whiskers'], color=whiskers, alpha=1)
        setp(bp['medians'], color=medians, alpha=1)
        setp(bp['caps'], color=caps, alpha=1)
    def _make_plot(self):
        if self.subplots:
            from pandas.core.series import Series
            # one result object per column, collected in a Series
            self._return_obj = Series()
            for i, (label, y) in enumerate(self._iter_data()):
                ax = self._get_ax(i)
                kwds = self.kwds.copy()
                ret, bp = self._plot(ax, y, column_num=i,
                                     return_type=self.return_type, **kwds)
                self.maybe_color_bp(bp)
                self._return_obj[label] = ret
                label = [pprint_thing(label)]
                self._set_ticklabels(ax, label)
        else:
            # all columns go into a single boxplot call
            y = self.data.values.T
            ax = self._get_ax(0)
            kwds = self.kwds.copy()
            ret, bp = self._plot(ax, y, column_num=0,
                                 return_type=self.return_type, **kwds)
            self.maybe_color_bp(bp)
            self._return_obj = ret
            labels = [l for l, _ in self._iter_data()]
            labels = [pprint_thing(l) for l in labels]
            if not self.use_index:
                labels = [pprint_thing(key) for key in range(len(labels))]
            self._set_ticklabels(ax, labels)
    def _set_ticklabels(self, ax, labels):
        if self.orientation == 'vertical':
            ax.set_xticklabels(labels)
        else:
            ax.set_yticklabels(labels)
    def _make_legend(self):
        # boxplots carry no legend
        pass
    def _post_plot_logic(self, ax, data):
        pass
    @property
    def orientation(self):
        # matplotlib's `vert` kwd decides the orientation
        if self.kwds.get('vert', True):
            return 'vertical'
        else:
            return 'horizontal'
    @property
    def result(self):
        # honor return_type; None falls back to the default (axes array)
        if self.return_type is None:
            return super().result
        else:
            return self._return_obj
# kinds supported by both dataframe and series
_common_kinds = ['line', 'bar', 'barh',
                 'kde', 'density', 'area', 'hist', 'box']
# kinds supported by dataframe
_dataframe_kinds = ['scatter', 'hexbin']
# kinds supported only by series or dataframe single column
_series_kinds = ['pie']
_all_kinds = _common_kinds + _dataframe_kinds + _series_kinds
# registry mapping each plot `kind` string to its implementing class
_klasses = [LinePlot, BarPlot, BarhPlot, KdePlot, HistPlot, BoxPlot,
            ScatterPlot, HexBinPlot, AreaPlot, PiePlot] \
    # type: List[Type[MPLPlot]]
_plot_klass = {klass._kind: klass for klass in _klasses}
def _plot(data, x=None, y=None, subplots=False,
          ax=None, kind='line', **kwds):
    """Shared Series/DataFrame plot dispatcher.

    Resolves ``kind`` to its plot class, normalizes x/y/label selection
    on the data, then instantiates the class, generates/draws the plot
    and returns its result object.
    """
    kind = _get_standard_kind(kind.lower().strip())
    if kind in _all_kinds:
        klass = _plot_klass[kind]
    else:
        raise ValueError("%r is not a valid plot kind" % kind)
    if kind in _dataframe_kinds:
        # scatter/hexbin need two columns, hence DataFrame-only
        if isinstance(data, ABCDataFrame):
            plot_obj = klass(data, x=x, y=y, subplots=subplots, ax=ax,
                             kind=kind, **kwds)
        else:
            raise ValueError("plot kind %r can only be used for data frames"
                             % kind)
    elif kind in _series_kinds:
        if isinstance(data, ABCDataFrame):
            if y is None and subplots is False:
                msg = "{0} requires either y column or 'subplots=True'"
                raise ValueError(msg.format(kind))
            elif y is not None:
                if is_integer(y) and not data.columns.holds_integer():
                    # positional y: translate to the column label
                    y = data.columns[y]
                # converted to series actually. copy to not modify
                data = data[y].copy()
                data.index.name = y
        plot_obj = klass(data, subplots=subplots, ax=ax, kind=kind, **kwds)
    else:
        if isinstance(data, ABCDataFrame):
            data_cols = data.columns
            if x is not None:
                if is_integer(x) and not data.columns.holds_integer():
                    x = data_cols[x]
                elif not isinstance(data[x], ABCSeries):
                    raise ValueError("x must be a label or position")
                data = data.set_index(x)
            if y is not None:
                # check if we have y as int or list of ints
                int_ylist = is_list_like(y) and all(is_integer(c) for c in y)
                int_y_arg = is_integer(y) or int_ylist
                if int_y_arg and not data.columns.holds_integer():
                    y = data_cols[y]
                label_kw = kwds['label'] if 'label' in kwds else False
                # resolve string/positional xerr/yerr references against
                # the frame before it is subset to `y`
                for kw in ['xerr', 'yerr']:
                    if (kw in kwds) and \
                        (isinstance(kwds[kw], str) or
                            is_integer(kwds[kw])):
                        try:
                            kwds[kw] = data[kwds[kw]]
                        except (IndexError, KeyError, TypeError):
                            pass
                # don't overwrite
                data = data[y].copy()
                if isinstance(data, ABCSeries):
                    label_name = label_kw or y
                    data.name = label_name
                else:
                    match = is_list_like(label_kw) and len(label_kw) == len(y)
                    if label_kw and not match:
                        raise ValueError(
                            "label should be list-like and same length as y"
                        )
                    label_name = label_kw or data.columns
                    data.columns = label_name
        plot_obj = klass(data, subplots=subplots, ax=ax, kind=kind, **kwds)
    plot_obj.generate()
    plot_obj.draw()
    return plot_obj.result
df_kind = """- 'scatter' : scatter plot
- 'hexbin' : hexbin plot"""
series_kind = ""
df_coord = """x : label or position, default None
y : label, position or list of label, positions, default None
Allows plotting of one column versus another"""
series_coord = ""
df_unique = """stacked : bool, default False in line and
bar plots, and True in area plot. If True, create stacked plot.
sort_columns : bool, default False
Sort column names to determine plot ordering
secondary_y : bool or sequence, default False
Whether to plot on the secondary y-axis
If a list/tuple, which columns to plot on secondary y-axis"""
series_unique = """label : label argument to provide to plot
secondary_y : bool or sequence of ints, default False
If True then y-axis will be on the right"""
df_ax = """ax : matplotlib axes object, default None
subplots : bool, default False
Make separate subplots for each column
sharex : bool, default True if ax is None else False
In case subplots=True, share x axis and set some x axis labels to
invisible; defaults to True if ax is None otherwise False if an ax
is passed in; Be aware, that passing in both an ax and sharex=True
will alter all x axis labels for all axis in a figure!
sharey : bool, default False
In case subplots=True, share y axis and set some y axis labels to
invisible
layout : tuple (optional)
(rows, columns) for the layout of subplots"""
series_ax = """ax : matplotlib axes object
If not passed, uses gca()"""
df_note = """- If `kind` = 'scatter' and the argument `c` is the name of a dataframe
column, the values of that column are used to color each point.
- If `kind` = 'hexbin', you can control the size of the bins with the
`gridsize` argument. By default, a histogram of the counts around each
`(x, y)` point is computed. You can specify alternative aggregations
by passing values to the `C` and `reduce_C_function` arguments.
`C` specifies the value at each `(x, y)` point and `reduce_C_function`
is a function of one argument that reduces all the values in a bin to
a single number (e.g. `mean`, `max`, `sum`, `std`)."""
series_note = ""
_shared_doc_df_kwargs = dict(klass='DataFrame', klass_obj='df',
klass_kind=df_kind, klass_coord=df_coord,
klass_ax=df_ax, klass_unique=df_unique,
klass_note=df_note)
_shared_doc_series_kwargs = dict(klass='Series', klass_obj='s',
klass_kind=series_kind,
klass_coord=series_coord, klass_ax=series_ax,
klass_unique=series_unique,
klass_note=series_note)
_shared_docs['plot'] = """
Make plots of %(klass)s using matplotlib / pylab.
*New in version 0.17.0:* Each plot kind has a corresponding method on the
``%(klass)s.plot`` accessor:
``%(klass_obj)s.plot(kind='line')`` is equivalent to
``%(klass_obj)s.plot.line()``.
Parameters
----------
data : %(klass)s
%(klass_coord)s
kind : str
- 'line' : line plot (default)
- 'bar' : vertical bar plot
- 'barh' : horizontal bar plot
- 'hist' : histogram
- 'box' : boxplot
- 'kde' : Kernel Density Estimation plot
- 'density' : same as 'kde'
- 'area' : area plot
- 'pie' : pie plot
%(klass_kind)s
%(klass_ax)s
figsize : a tuple (width, height) in inches
use_index : bool, default True
Use index as ticks for x axis
title : string or list
Title to use for the plot. If a string is passed, print the string at
the top of the figure. If a list is passed and `subplots` is True,
print each item in the list above the corresponding subplot.
grid : bool, default None (matlab style default)
Axis grid lines
legend : False/True/'reverse'
Place legend on axis subplots
style : list or dict
matplotlib line style per column
logx : bool or 'sym', default False
Use log scaling or symlog scaling on x axis
.. versionchanged:: 0.25.0
logy : bool or 'sym' default False
Use log scaling or symlog scaling on y axis
.. versionchanged:: 0.25.0
loglog : bool or 'sym', default False
Use log scaling or symlog scaling on both x and y axes
.. versionchanged:: 0.25.0
xticks : sequence
Values to use for the xticks
yticks : sequence
Values to use for the yticks
xlim : 2-tuple/list
ylim : 2-tuple/list
rot : int, default None
Rotation for ticks (xticks for vertical, yticks for horizontal plots)
fontsize : int, default None
Font size for xticks and yticks
colormap : str or matplotlib colormap object, default None
Colormap to select colors from. If string, load colormap with that name
from matplotlib.
colorbar : bool, optional
If True, plot colorbar (only relevant for 'scatter' and 'hexbin' plots)
position : float
Specify relative alignments for bar plot layout.
From 0 (left/bottom-end) to 1 (right/top-end). Default is 0.5 (center)
table : bool, Series or DataFrame, default False
If True, draw a table using the data in the DataFrame and the data will
be transposed to meet matplotlib's default layout.
If a Series or DataFrame is passed, use passed data to draw a table.
yerr : DataFrame, Series, array-like, dict and str
See :ref:`Plotting with Error Bars <visualization.errorbars>` for
detail.
xerr : same types as yerr.
%(klass_unique)s
mark_right : bool, default True
When using a secondary_y axis, automatically mark the column
labels with "(right)" in the legend
`**kwds` : keywords
Options to pass to matplotlib plotting method
Returns
-------
:class:`matplotlib.axes.Axes` or numpy.ndarray of them
Notes
-----
- See matplotlib documentation online for more on this subject
- If `kind` = 'bar' or 'barh', you can specify relative alignments
for bar plot layout by `position` keyword.
From 0 (left/bottom-end) to 1 (right/top-end). Default is 0.5 (center)
%(klass_note)s
"""
# DataFrame plotting entry point; its public docstring is attached by
# the @Appender decorator from the shared template above.
@Appender(_shared_docs['plot'] % _shared_doc_df_kwargs)
def plot_frame(data, x=None, y=None, kind='line', ax=None,
               subplots=False, sharex=None, sharey=False, layout=None,
               figsize=None, use_index=True, title=None, grid=None,
               legend=True, style=None, logx=False, logy=False, loglog=False,
               xticks=None, yticks=None, xlim=None, ylim=None,
               rot=None, fontsize=None, colormap=None, table=False,
               yerr=None, xerr=None,
               secondary_y=False, sort_columns=False,
               **kwds):
    # Pure pass-through to the shared _plot dispatcher.
    return _plot(data, kind=kind, x=x, y=y, ax=ax,
                 subplots=subplots, sharex=sharex, sharey=sharey,
                 layout=layout, figsize=figsize, use_index=use_index,
                 title=title, grid=grid, legend=legend,
                 style=style, logx=logx, logy=logy, loglog=loglog,
                 xticks=xticks, yticks=yticks, xlim=xlim, ylim=ylim,
                 rot=rot, fontsize=fontsize, colormap=colormap, table=table,
                 yerr=yerr, xerr=xerr,
                 secondary_y=secondary_y, sort_columns=sort_columns,
                 **kwds)
# Series plotting entry point; docstring comes from the @Appender decorator.
@Appender(_shared_docs['plot'] % _shared_doc_series_kwargs)
def plot_series(data, kind='line', ax=None,  # Series unique
                figsize=None, use_index=True, title=None, grid=None,
                legend=False, style=None, logx=False, logy=False, loglog=False,
                xticks=None, yticks=None, xlim=None, ylim=None,
                rot=None, fontsize=None, colormap=None, table=False,
                yerr=None, xerr=None,
                label=None, secondary_y=False,  # Series unique
                **kwds):
    import matplotlib.pyplot as plt
    if ax is None and len(plt.get_fignums()) > 0:
        # reuse the current axes (unwrapped to its primary layer) when a
        # figure already exists
        ax = _gca()
        ax = MPLPlot._get_ax_layer(ax)
    return _plot(data, kind=kind, ax=ax,
                 figsize=figsize, use_index=use_index, title=title,
                 grid=grid, legend=legend,
                 style=style, logx=logx, logy=logy, loglog=loglog,
                 xticks=xticks, yticks=yticks, xlim=xlim, ylim=ylim,
                 rot=rot, fontsize=fontsize, colormap=colormap, table=table,
                 yerr=yerr, xerr=xerr,
                 label=label, secondary_y=secondary_y,
                 **kwds)
_shared_docs['boxplot'] = """
Make a box plot from DataFrame columns.
Make a box-and-whisker plot from DataFrame columns, optionally grouped
by some other columns. A box plot is a method for graphically depicting
groups of numerical data through their quartiles.
The box extends from the Q1 to Q3 quartile values of the data,
with a line at the median (Q2). The whiskers extend from the edges
of box to show the range of the data. The position of the whiskers
is set by default to `1.5 * IQR (IQR = Q3 - Q1)` from the edges of the box.
Outlier points are those past the end of the whiskers.
For further details see
Wikipedia's entry for `boxplot <https://en.wikipedia.org/wiki/Box_plot>`_.
Parameters
----------
column : str or list of str, optional
Column name or list of names, or vector.
Can be any valid input to :meth:`pandas.DataFrame.groupby`.
by : str or array-like, optional
Column in the DataFrame to :meth:`pandas.DataFrame.groupby`.
One box-plot will be done per value of columns in `by`.
ax : object of class matplotlib.axes.Axes, optional
The matplotlib axes to be used by boxplot.
fontsize : float or str
Tick label font size in points or as a string (e.g., `large`).
rot : int or float, default 0
The rotation angle of labels (in degrees)
with respect to the screen coordinate system.
grid : bool, default True
Setting this to True will show the grid.
figsize : A tuple (width, height) in inches
The size of the figure to create in matplotlib.
layout : tuple (rows, columns), optional
For example, (3, 5) will display the subplots
using 3 columns and 5 rows, starting from the top-left.
return_type : {'axes', 'dict', 'both'} or None, default 'axes'
The kind of object to return. The default is ``axes``.
* 'axes' returns the matplotlib axes the boxplot is drawn on.
* 'dict' returns a dictionary whose values are the matplotlib
Lines of the boxplot.
* 'both' returns a namedtuple with the axes and dict.
* when grouping with ``by``, a Series mapping columns to
``return_type`` is returned.
If ``return_type`` is `None`, a NumPy array
of axes with the same shape as ``layout`` is returned.
**kwds
All other plotting keyword arguments to be passed to
:func:`matplotlib.pyplot.boxplot`.
Returns
-------
result
See Notes.
See Also
--------
Series.plot.hist: Make a histogram.
matplotlib.pyplot.boxplot : Matplotlib equivalent plot.
Notes
-----
The return type depends on the `return_type` parameter:
* 'axes' : object of class matplotlib.axes.Axes
* 'dict' : dict of matplotlib.lines.Line2D objects
* 'both' : a namedtuple with structure (ax, lines)
For data grouped with ``by``, return a Series of the above or a numpy
array:
* :class:`~pandas.Series`
* :class:`~numpy.array` (for ``return_type = None``)
Use ``return_type='dict'`` when you want to tweak the appearance
of the lines after plotting. In this case a dict containing the Lines
making up the boxes, caps, fliers, medians, and whiskers is returned.
Examples
--------
Boxplots can be created for every column in the dataframe
by ``df.boxplot()`` or indicating the columns to be used:
.. plot::
:context: close-figs
>>> np.random.seed(1234)
>>> df = pd.DataFrame(np.random.randn(10,4),
... columns=['Col1', 'Col2', 'Col3', 'Col4'])
>>> boxplot = df.boxplot(column=['Col1', 'Col2', 'Col3'])
Boxplots of variables distributions grouped by the values of a third
variable can be created using the option ``by``. For instance:
.. plot::
:context: close-figs
>>> df = pd.DataFrame(np.random.randn(10, 2),
... columns=['Col1', 'Col2'])
>>> df['X'] = pd.Series(['A', 'A', 'A', 'A', 'A',
... 'B', 'B', 'B', 'B', 'B'])
>>> boxplot = df.boxplot(by='X')
A list of strings (i.e. ``['X', 'Y']``) can be passed to boxplot
in order to group the data by combination of the variables in the x-axis:
.. plot::
:context: close-figs
>>> df = pd.DataFrame(np.random.randn(10,3),
... columns=['Col1', 'Col2', 'Col3'])
>>> df['X'] = pd.Series(['A', 'A', 'A', 'A', 'A',
... 'B', 'B', 'B', 'B', 'B'])
>>> df['Y'] = pd.Series(['A', 'B', 'A', 'B', 'A',
... 'B', 'A', 'B', 'A', 'B'])
>>> boxplot = df.boxplot(column=['Col1', 'Col2'], by=['X', 'Y'])
The layout of boxplot can be adjusted giving a tuple to ``layout``:
.. plot::
:context: close-figs
>>> boxplot = df.boxplot(column=['Col1', 'Col2'], by='X',
... layout=(2, 1))
Additional formatting can be done to the boxplot, like suppressing the grid
(``grid=False``), rotating the labels in the x-axis (i.e. ``rot=45``)
or changing the fontsize (i.e. ``fontsize=15``):
.. plot::
:context: close-figs
>>> boxplot = df.boxplot(grid=False, rot=45, fontsize=15)
The parameter ``return_type`` can be used to select the type of element
returned by `boxplot`. When ``return_type='axes'`` is selected,
the matplotlib axes on which the boxplot is drawn are returned:
>>> boxplot = df.boxplot(column=['Col1','Col2'], return_type='axes')
>>> type(boxplot)
<class 'matplotlib.axes._subplots.AxesSubplot'>
When grouping with ``by``, a Series mapping columns to ``return_type``
is returned:
>>> boxplot = df.boxplot(column=['Col1', 'Col2'], by='X',
... return_type='axes')
>>> type(boxplot)
<class 'pandas.core.series.Series'>
If ``return_type`` is `None`, a NumPy array of axes with the same shape
as ``layout`` is returned:
>>> boxplot = df.boxplot(column=['Col1', 'Col2'], by='X',
... return_type=None)
>>> type(boxplot)
<class 'numpy.ndarray'>
"""
@Appender(_shared_docs['boxplot'] % _shared_doc_kwargs)
def boxplot(data, column=None, by=None, ax=None, fontsize=None,
            rot=0, grid=True, figsize=None, layout=None, return_type=None,
            **kwds):
    # validate return_type:
    if return_type not in BoxPlot._valid_return_types:
        raise ValueError("return_type must be {'axes', 'dict', 'both'}")

    # Promote a Series to a one-column frame so the DataFrame code path
    # below can be shared.
    if isinstance(data, ABCSeries):
        data = data.to_frame('x')
        column = 'x'

    def _get_colors():
        # num_colors=3 is required as method maybe_color_bp takes the colors
        # in positions 0 and 2.
        return _get_standard_colors(color=kwds.get('color'), num_colors=3)

    def maybe_color_bp(bp):
        # Only recolor the artists when the caller did not pass an explicit
        # ``color`` keyword through to matplotlib.
        if 'color' not in kwds:
            from matplotlib.artist import setp
            setp(bp['boxes'], color=colors[0], alpha=1)
            setp(bp['whiskers'], color=colors[0], alpha=1)
            setp(bp['medians'], color=colors[2], alpha=1)

    def plot_group(keys, values, ax):
        # Draw the boxplots for one group of columns on a single axes;
        # NaNs are dropped per column before handing off to matplotlib.
        keys = [pprint_thing(x) for x in keys]
        values = [np.asarray(remove_na_arraylike(v)) for v in values]
        bp = ax.boxplot(values, **kwds)
        if fontsize is not None:
            ax.tick_params(axis='both', labelsize=fontsize)
        # Tick labels go on whichever axis holds the categories
        # (horizontal boxplots use the y axis).
        if kwds.get('vert', 1):
            ax.set_xticklabels(keys, rotation=rot)
        else:
            ax.set_yticklabels(keys, rotation=rot)
        maybe_color_bp(bp)

        # Return axes in multiplot case, maybe revisit later # 985
        if return_type == 'dict':
            return bp
        elif return_type == 'both':
            return BoxPlot.BP(ax=ax, lines=bp)
        else:
            return ax

    colors = _get_colors()
    # Normalize ``column`` to a list of column labels (or None for "all").
    if column is None:
        columns = None
    else:
        if isinstance(column, (list, tuple)):
            columns = column
        else:
            columns = [column]

    if by is not None:
        # Prefer array return type for 2-D plots to match the subplot layout
        # https://github.com/pandas-dev/pandas/pull/12216#issuecomment-241175580
        result = _grouped_plot_by_column(plot_group, data, columns=columns,
                                         by=by, grid=grid, figsize=figsize,
                                         ax=ax, layout=layout,
                                         return_type=return_type)
    else:
        if return_type is None:
            return_type = 'axes'
        if layout is not None:
            raise ValueError("The 'layout' keyword is not supported when "
                             "'by' is None")

        if ax is None:
            rc = {'figure.figsize': figsize} if figsize is not None else {}
            ax = _gca(rc)
        data = data._get_numeric_data()
        if columns is None:
            columns = data.columns
        else:
            data = data[columns]

        result = plot_group(columns, data.values.T, ax)
        ax.grid(grid)

    return result
@Appender(_shared_docs['boxplot'] % _shared_doc_kwargs)
def boxplot_frame(self, column=None, by=None, ax=None, fontsize=None, rot=0,
                  grid=True, figsize=None, layout=None,
                  return_type=None, **kwds):
    # DataFrame.boxplot entry point: delegate all work to the module-level
    # ``boxplot`` and refresh interactive matplotlib backends afterwards.
    # NOTE(review): _converter._WARN = False appears to silence the
    # datetime-converter registration warning — confirm against _converter.
    import matplotlib.pyplot as plt

    _converter._WARN = False
    result = boxplot(self, column=column, by=by, ax=ax, fontsize=fontsize,
                     grid=grid, rot=rot, figsize=figsize, layout=layout,
                     return_type=return_type, **kwds)
    plt.draw_if_interactive()
    return result
def scatter_plot(data, x, y, by=None, ax=None, figsize=None, grid=False,
                 **kwargs):
    """
    Make a scatter plot from two DataFrame columns.

    Parameters
    ----------
    data : DataFrame
    x : str or int
        Column to use for the x-axis values.
    y : str or int
        Column to use for the y-axis values.
    by : object, optional
        If passed, draw one scatter panel per group.
    ax : Matplotlib axis object, optional
    figsize : tuple, optional
        A tuple (width, height) in inches.
    grid : bool, default False
        Setting this to True will show the grid.
    **kwargs
        Other plotting keyword arguments, passed to the scatter call.

    Returns
    -------
    matplotlib.Figure
    """
    import matplotlib.pyplot as plt

    # Hollow markers by default, unless the caller chose otherwise.
    kwargs.setdefault('edgecolors', 'none')

    def _draw(group, axis):
        axis.scatter(group[x].values, group[y].values, **kwargs)
        axis.grid(grid)

    if by is None:
        if ax is not None:
            fig = ax.get_figure()
        else:
            fig = plt.figure()
            ax = fig.add_subplot(111)
        _draw(data, ax)
        ax.set_ylabel(pprint_thing(y))
        ax.set_xlabel(pprint_thing(x))
        ax.grid(grid)
    else:
        fig = _grouped_plot(_draw, data, by=by, figsize=figsize, ax=ax)

    return fig
def hist_frame(data, column=None, by=None, grid=True, xlabelsize=None,
               xrot=None, ylabelsize=None, yrot=None, ax=None, sharex=False,
               sharey=False, figsize=None, layout=None, bins=10, **kwds):
    """
    Make a histogram of the DataFrame's columns.

    A `histogram`_ is a representation of the distribution of data.
    This function calls :meth:`matplotlib.pyplot.hist`, on each series in
    the DataFrame, resulting in one histogram per column.

    .. _histogram: https://en.wikipedia.org/wiki/Histogram

    Parameters
    ----------
    data : DataFrame
        The pandas object holding the data.
    column : string or sequence
        If passed, will be used to limit data to a subset of columns.
    by : object, optional
        If passed, then used to form histograms for separate groups.
    grid : bool, default True
        Whether to show axis grid lines.
    xlabelsize : int, default None
        If specified changes the x-axis label size.
    xrot : float, default None
        Rotation of x axis labels. For example, a value of 90 displays the
        x labels rotated 90 degrees clockwise.
    ylabelsize : int, default None
        If specified changes the y-axis label size.
    yrot : float, default None
        Rotation of y axis labels. For example, a value of 90 displays the
        y labels rotated 90 degrees clockwise.
    ax : Matplotlib axes object, default None
        The axes to plot the histogram on.
    sharex : bool, default True if ax is None else False
        In case subplots=True, share x axis and set some x axis labels to
        invisible; defaults to True if ax is None otherwise False if an ax
        is passed in.
        Note that passing in both an ax and sharex=True will alter all x axis
        labels for all subplots in a figure.
    sharey : bool, default False
        In case subplots=True, share y axis and set some y axis labels to
        invisible.
    figsize : tuple
        The size in inches of the figure to create. Uses the value in
        `matplotlib.rcParams` by default.
    layout : tuple, optional
        Tuple of (rows, columns) for the layout of the histograms.
    bins : integer or sequence, default 10
        Number of histogram bins to be used. If an integer is given, bins + 1
        bin edges are calculated and returned. If bins is a sequence, gives
        bin edges, including left edge of first bin and right edge of last
        bin. In this case, bins is returned unmodified.
    **kwds
        All other plotting keyword arguments to be passed to
        :meth:`matplotlib.pyplot.hist`.

    Returns
    -------
    matplotlib.AxesSubplot or numpy.ndarray of them

    See Also
    --------
    matplotlib.pyplot.hist : Plot a histogram using matplotlib.

    Examples
    --------

    .. plot::
        :context: close-figs

        This example draws a histogram based on the length and width of
        some animals, displayed in three bins

        >>> df = pd.DataFrame({
        ...     'length': [1.5, 0.5, 1.2, 0.9, 3],
        ...     'width': [0.7, 0.2, 0.15, 0.2, 1.1]
        ...     }, index= ['pig', 'rabbit', 'duck', 'chicken', 'horse'])
        >>> hist = df.hist(bins=3)
    """
    _raise_if_no_mpl()
    _converter._WARN = False
    # Grouped histograms are handled entirely by the grouped_hist helper.
    if by is not None:
        axes = grouped_hist(data, column=column, by=by, ax=ax, grid=grid,
                            figsize=figsize, sharex=sharex, sharey=sharey,
                            layout=layout, bins=bins, xlabelsize=xlabelsize,
                            xrot=xrot, ylabelsize=ylabelsize,
                            yrot=yrot, **kwds)
        return axes

    # Restrict to the requested columns, then drop non-numeric ones.
    if column is not None:
        if not isinstance(column, (list, np.ndarray, ABCIndexClass)):
            column = [column]
        data = data[column]
    data = data._get_numeric_data()
    naxes = len(data.columns)

    fig, axes = _subplots(naxes=naxes, ax=ax, squeeze=False,
                          sharex=sharex, sharey=sharey, figsize=figsize,
                          layout=layout)
    _axes = _flatten(axes)

    # One histogram per (sorted) column; NaNs are dropped per column.
    for i, col in enumerate(com.try_sort(data.columns)):
        ax = _axes[i]
        ax.hist(data[col].dropna().values, bins=bins, **kwds)
        ax.set_title(col)
        ax.grid(grid)

    _set_ticks_props(axes, xlabelsize=xlabelsize, xrot=xrot,
                     ylabelsize=ylabelsize, yrot=yrot)
    fig.subplots_adjust(wspace=0.3, hspace=0.3)

    return axes
def hist_series(self, by=None, ax=None, grid=True, xlabelsize=None,
                xrot=None, ylabelsize=None, yrot=None, figsize=None,
                bins=10, **kwds):
    """
    Draw histogram of the input series using matplotlib.

    Parameters
    ----------
    by : object, optional
        If passed, then used to form histograms for separate groups
    ax : matplotlib axis object
        If not passed, uses gca()
    grid : bool, default True
        Whether to show axis grid lines
    xlabelsize : int, default None
        If specified changes the x-axis label size
    xrot : float, default None
        rotation of x axis labels
    ylabelsize : int, default None
        If specified changes the y-axis label size
    yrot : float, default None
        rotation of y axis labels
    figsize : tuple, default None
        figure size in inches by default
    bins : integer or sequence, default 10
        Number of histogram bins to be used. If an integer is given, bins + 1
        bin edges are calculated and returned. If bins is a sequence, gives
        bin edges, including left edge of first bin and right edge of last
        bin. In this case, bins is returned unmodified.
    `**kwds` : keywords
        To be passed to the actual plotting function

    Returns
    -------
    matplotlib.AxesSubplot
        A single axes when ``by`` is None; otherwise the axes produced by
        the grouped histogram (a 1-element result is unwrapped).

    See Also
    --------
    matplotlib.axes.Axes.hist : Plot a histogram using matplotlib.
    """
    import matplotlib.pyplot as plt

    if by is None:
        if kwds.get('layout', None) is not None:
            raise ValueError("The 'layout' keyword is not supported when "
                             "'by' is None")
        # hack until the plotting interface is a bit more unified
        fig = kwds.pop('figure', plt.gcf() if plt.get_fignums() else
                       plt.figure(figsize=figsize))
        # Resize an existing figure only when the requested size differs.
        if (figsize is not None and tuple(figsize) !=
                tuple(fig.get_size_inches())):
            fig.set_size_inches(*figsize, forward=True)
        if ax is None:
            ax = fig.gca()
        elif ax.get_figure() != fig:
            raise AssertionError('passed axis not bound to passed figure')
        values = self.dropna().values

        ax.hist(values, bins=bins, **kwds)
        ax.grid(grid)
        axes = np.array([ax])

        _set_ticks_props(axes, xlabelsize=xlabelsize, xrot=xrot,
                         ylabelsize=ylabelsize, yrot=yrot)

    else:
        if 'figure' in kwds:
            raise ValueError("Cannot pass 'figure' when using the "
                             "'by' argument, since a new 'Figure' instance "
                             "will be created")
        axes = grouped_hist(self, by=by, ax=ax, grid=grid, figsize=figsize,
                            bins=bins, xlabelsize=xlabelsize, xrot=xrot,
                            ylabelsize=ylabelsize, yrot=yrot, **kwds)

    # Unwrap a 1-element array of axes for caller convenience.
    if hasattr(axes, 'ndim'):
        if axes.ndim == 1 and len(axes) == 1:
            return axes[0]
    return axes
def grouped_hist(data, column=None, by=None, ax=None, bins=50, figsize=None,
                 layout=None, sharex=False, sharey=False, rot=90, grid=True,
                 xlabelsize=None, xrot=None, ylabelsize=None, yrot=None,
                 **kwargs):
    """
    Grouped histogram

    Parameters
    ----------
    data : Series/DataFrame
    column : object, optional
    by : object, optional
    ax : axes, optional
    bins : int, default 50
    figsize : tuple, optional
    layout : optional
    sharex : bool, default False
    sharey : bool, default False
    rot : int, default 90
    grid : bool, default True
    xlabelsize : int, optional
        If specified changes the x-axis tick label size.
    xrot : float, optional
        Rotation of x-axis tick labels; falls back to ``rot`` when unset.
    ylabelsize : int, optional
        If specified changes the y-axis tick label size.
    yrot : float, optional
        Rotation of y-axis tick labels.
    kwargs : dict, keyword arguments passed to matplotlib.Axes.hist

    Returns
    -------
    collection of Matplotlib Axes
    """
    _raise_if_no_mpl()
    _converter._WARN = False

    def plot_group(group, ax):
        # One histogram per group, dropping NaNs first.
        ax.hist(group.dropna().values, bins=bins, **kwargs)

    # An explicit ``xrot`` wins over the legacy ``rot`` spelling.
    xrot = xrot or rot

    fig, axes = _grouped_plot(plot_group, data, column=column,
                              by=by, sharex=sharex, sharey=sharey, ax=ax,
                              figsize=figsize, layout=layout, rot=rot)

    _set_ticks_props(axes, xlabelsize=xlabelsize, xrot=xrot,
                     ylabelsize=ylabelsize, yrot=yrot)

    fig.subplots_adjust(bottom=0.15, top=0.9, left=0.1, right=0.9,
                        hspace=0.5, wspace=0.3)
    return axes
def boxplot_frame_groupby(grouped, subplots=True, column=None, fontsize=None,
                          rot=0, grid=True, ax=None, figsize=None,
                          layout=None, sharex=False, sharey=True, **kwds):
    """
    Make box plots from DataFrameGroupBy data.

    Parameters
    ----------
    grouped : Grouped DataFrame
    subplots : bool
        * ``False`` - no subplots will be used
        * ``True`` - create a subplot for each group
    column : column name or list of names, or vector
        Can be any valid input to groupby
    fontsize : int or string
    rot : label rotation angle
    grid : Setting this to True will show the grid
    ax : Matplotlib axis object, default None
    figsize : A tuple (width, height) in inches
    layout : tuple (optional)
        (rows, columns) for the layout of the plot
    sharex : bool, default False
        Whether x-axes will be shared among subplots

        .. versionadded:: 0.23.1
    sharey : bool, default True
        Whether y-axes will be shared among subplots

        .. versionadded:: 0.23.1
    `**kwds` : Keyword Arguments
        All other plotting keyword arguments to be passed to
        matplotlib's boxplot function

    Returns
    -------
    dict of key/value = group key/DataFrame.boxplot return value
    or DataFrame.boxplot return value in case subplots=figures=False

    Examples
    --------
    >>> import itertools
    >>> tuples = [t for t in itertools.product(range(1000), range(4))]
    >>> index = pd.MultiIndex.from_tuples(tuples, names=['lvl0', 'lvl1'])
    >>> data = np.random.randn(len(index),4)
    >>> df = pd.DataFrame(data, columns=list('ABCD'), index=index)
    >>>
    >>> grouped = df.groupby(level='lvl1')
    >>> boxplot_frame_groupby(grouped)
    >>>
    >>> grouped = df.unstack(level='lvl1').groupby(level=0, axis=1)
    >>> boxplot_frame_groupby(grouped, subplots=False)
    """
    _raise_if_no_mpl()
    _converter._WARN = False
    if subplots is True:
        # One subplot per group; each group's boxplot result is collected
        # in a Series keyed by the group key.
        naxes = len(grouped)
        fig, axes = _subplots(naxes=naxes, squeeze=False,
                              ax=ax, sharex=sharex, sharey=sharey,
                              figsize=figsize, layout=layout)
        axes = _flatten(axes)

        from pandas.core.series import Series
        ret = Series()
        for (key, group), ax in zip(grouped, axes):
            d = group.boxplot(ax=ax, column=column, fontsize=fontsize,
                              rot=rot, grid=grid, **kwds)
            ax.set_title(pprint_thing(key))
            ret.loc[key] = d
        fig.subplots_adjust(bottom=0.15, top=0.9, left=0.1,
                            right=0.9, wspace=0.2)
    else:
        # Recombine the groups into a single frame and draw one boxplot.
        from pandas.core.reshape.concat import concat
        keys, frames = zip(*grouped)
        if grouped.axis == 0:
            df = concat(frames, keys=keys, axis=1)
        else:
            if len(frames) > 1:
                df = frames[0].join(frames[1::])
            else:
                df = frames[0]
        ret = df.boxplot(column=column, fontsize=fontsize, rot=rot,
                         grid=grid, ax=ax, figsize=figsize,
                         layout=layout, **kwds)
    return ret
def _grouped_plot(plotf, data, column=None, by=None, numeric_only=True,
                  figsize=None, sharex=True, sharey=True, layout=None,
                  rot=0, ax=None, **kwargs):
    """
    Group ``data`` by ``by`` and draw one subplot per group via ``plotf``.

    Returns
    -------
    (matplotlib.Figure, axes) tuple as produced by ``_subplots``.
    """
    if figsize == 'default':
        # allowed to specify mpl default with 'default'
        warnings.warn("figsize='default' is deprecated. Specify figure"
                      "size by tuple instead", FutureWarning, stacklevel=4)
        figsize = None

    grouped = data.groupby(by)
    if column is not None:
        grouped = grouped[column]

    naxes = len(grouped)
    fig, axes = _subplots(naxes=naxes, figsize=figsize,
                          sharex=sharex, sharey=sharey, ax=ax,
                          layout=layout)

    _axes = _flatten(axes)

    for i, (key, group) in enumerate(grouped):
        ax = _axes[i]
        if numeric_only and isinstance(group, ABCDataFrame):
            # Only numeric columns can be plotted; drop the rest per group.
            group = group._get_numeric_data()
        plotf(group, ax, **kwargs)
        ax.set_title(pprint_thing(key))

    return fig, axes
def _grouped_plot_by_column(plotf, data, columns=None, by=None,
                            numeric_only=True, grid=False,
                            figsize=None, ax=None, layout=None,
                            return_type=None, **kwargs):
    """
    Draw one subplot per column, each showing ``plotf`` applied to that
    column's values grouped by ``by``.

    Returns the per-column plot results as a Series, or the raw axes when
    ``return_type`` is None.
    """
    grouped = data.groupby(by)
    if columns is None:
        if not isinstance(by, (list, tuple)):
            by = [by]
        # Default to every numeric column that is not a grouping key.
        columns = data._get_numeric_data().columns.difference(by)
    naxes = len(columns)
    fig, axes = _subplots(naxes=naxes, sharex=True, sharey=True,
                          figsize=figsize, ax=ax, layout=layout)

    _axes = _flatten(axes)

    ax_values = []

    for i, col in enumerate(columns):
        ax = _axes[i]
        gp_col = grouped[col]
        keys, values = zip(*gp_col)
        re_plotf = plotf(keys, values, ax, **kwargs)
        ax.set_title(col)
        ax.set_xlabel(pprint_thing(by))
        ax_values.append(re_plotf)
        ax.grid(grid)

    from pandas.core.series import Series
    result = Series(ax_values, index=columns)

    # Return axes in multiplot case, maybe revisit later # 985
    if return_type is None:
        result = axes

    byline = by[0] if len(by) == 1 else by
    fig.suptitle('Boxplot grouped by {byline}'.format(byline=byline))
    fig.subplots_adjust(bottom=0.15, top=0.9, left=0.1, right=0.9, wspace=0.2)

    return result
class BasePlotMethods(PandasObject):
    """Common base class for the Series/DataFrame ``.plot`` accessors."""

    def __init__(self, data):
        self._parent = data  # can be Series or DataFrame

    def __call__(self, *args, **kwargs):
        # Subclasses implement the actual plotting dispatch.
        raise NotImplementedError
class SeriesPlotMethods(BasePlotMethods):
    """
    Series plotting accessor and method.

    Examples
    --------
    >>> s.plot.line()
    >>> s.plot.bar()
    >>> s.plot.hist()

    Plotting methods can also be accessed by calling the accessor as a method
    with the ``kind`` argument:
    ``s.plot(kind='line')`` is equivalent to ``s.plot.line()``
    """

    def __call__(self, kind='line', ax=None,
                 figsize=None, use_index=True, title=None, grid=None,
                 legend=False, style=None, logx=False, logy=False,
                 loglog=False, xticks=None, yticks=None,
                 xlim=None, ylim=None,
                 rot=None, fontsize=None, colormap=None, table=False,
                 yerr=None, xerr=None,
                 label=None, secondary_y=False, **kwds):
        # The accessor only forwards its parent Series; all real work
        # happens in the module-level ``plot_series``.
        return plot_series(self._parent, kind=kind, ax=ax, figsize=figsize,
                           use_index=use_index, title=title, grid=grid,
                           legend=legend, style=style, logx=logx, logy=logy,
                           loglog=loglog, xticks=xticks, yticks=yticks,
                           xlim=xlim, ylim=ylim, rot=rot, fontsize=fontsize,
                           colormap=colormap, table=table, yerr=yerr,
                           xerr=xerr, label=label, secondary_y=secondary_y,
                           **kwds)
    __call__.__doc__ = plot_series.__doc__

    def line(self, **kwds):
        """
        Line plot.

        Parameters
        ----------
        `**kwds` : optional
            Additional keyword arguments are documented in
            :meth:`pandas.Series.plot`.

        Returns
        -------
        :class:`matplotlib.axes.Axes` or numpy.ndarray of them

        Examples
        --------

        .. plot::
            :context: close-figs

            >>> s = pd.Series([1, 3, 2])
            >>> s.plot.line()
        """
        return self(kind='line', **kwds)

    def bar(self, **kwds):
        """
        Vertical bar plot.

        Parameters
        ----------
        `**kwds` : optional
            Additional keyword arguments are documented in
            :meth:`pandas.Series.plot`.

        Returns
        -------
        :class:`matplotlib.axes.Axes` or numpy.ndarray of them
        """
        return self(kind='bar', **kwds)

    def barh(self, **kwds):
        """
        Horizontal bar plot.

        Parameters
        ----------
        `**kwds` : optional
            Additional keyword arguments are documented in
            :meth:`pandas.Series.plot`.

        Returns
        -------
        :class:`matplotlib.axes.Axes` or numpy.ndarray of them
        """
        return self(kind='barh', **kwds)

    def box(self, **kwds):
        """
        Boxplot.

        Parameters
        ----------
        `**kwds` : optional
            Additional keyword arguments are documented in
            :meth:`pandas.Series.plot`.

        Returns
        -------
        :class:`matplotlib.axes.Axes` or numpy.ndarray of them
        """
        return self(kind='box', **kwds)

    def hist(self, bins=10, **kwds):
        """
        Histogram.

        Parameters
        ----------
        bins : integer, default 10
            Number of histogram bins to be used
        `**kwds` : optional
            Additional keyword arguments are documented in
            :meth:`pandas.Series.plot`.

        Returns
        -------
        :class:`matplotlib.axes.Axes` or numpy.ndarray of them
        """
        return self(kind='hist', bins=bins, **kwds)

    # The shared KDE docstring template is filled in per data type; the
    # examples string below becomes part of the rendered documentation.
    @Appender(_kde_docstring % {
        'this-datatype': 'Series',
        'sibling-datatype': 'DataFrame',
        'examples': """
        Given a Series of points randomly sampled from an unknown
        distribution, estimate its PDF using KDE with automatic
        bandwidth determination and plot the results, evaluating them at
        1000 equally spaced points (default):

        .. plot::
            :context: close-figs

            >>> s = pd.Series([1, 2, 2.5, 3, 3.5, 4, 5])
            >>> ax = s.plot.kde()

        A scalar bandwidth can be specified. Using a small bandwidth value can
        lead to over-fitting, while using a large bandwidth value may result
        in under-fitting:

        .. plot::
            :context: close-figs

            >>> ax = s.plot.kde(bw_method=0.3)

        .. plot::
            :context: close-figs

            >>> ax = s.plot.kde(bw_method=3)

        Finally, the `ind` parameter determines the evaluation points for the
        plot of the estimated PDF:

        .. plot::
            :context: close-figs

            >>> ax = s.plot.kde(ind=[1, 2, 3, 4, 5])
        """.strip()
    })
    def kde(self, bw_method=None, ind=None, **kwds):
        return self(kind='kde', bw_method=bw_method, ind=ind, **kwds)

    density = kde  # ``density`` is an alias of ``kde``

    def area(self, **kwds):
        """
        Area plot.

        Parameters
        ----------
        `**kwds` : optional
            Additional keyword arguments are documented in
            :meth:`pandas.Series.plot`.

        Returns
        -------
        :class:`matplotlib.axes.Axes` or numpy.ndarray of them
        """
        return self(kind='area', **kwds)

    def pie(self, **kwds):
        """
        Pie chart.

        Parameters
        ----------
        `**kwds` : optional
            Additional keyword arguments are documented in
            :meth:`pandas.Series.plot`.

        Returns
        -------
        :class:`matplotlib.axes.Axes` or numpy.ndarray of them
        """
        return self(kind='pie', **kwds)
class FramePlotMethods(BasePlotMethods):
"""DataFrame plotting accessor and method
Examples
--------
>>> df.plot.line()
>>> df.plot.scatter('x', 'y')
>>> df.plot.hexbin()
These plotting methods can also be accessed by calling the accessor as a
method with the ``kind`` argument:
``df.plot(kind='line')`` is equivalent to ``df.plot.line()``
"""
    def __call__(self, x=None, y=None, kind='line', ax=None,
                 subplots=False, sharex=None, sharey=False, layout=None,
                 figsize=None, use_index=True, title=None, grid=None,
                 legend=True, style=None, logx=False, logy=False, loglog=False,
                 xticks=None, yticks=None, xlim=None, ylim=None,
                 rot=None, fontsize=None, colormap=None, table=False,
                 yerr=None, xerr=None,
                 secondary_y=False, sort_columns=False, **kwds):
        # The accessor only forwards its parent DataFrame; all real work
        # happens in the module-level ``plot_frame``.
        return plot_frame(self._parent, kind=kind, x=x, y=y, ax=ax,
                          subplots=subplots, sharex=sharex, sharey=sharey,
                          layout=layout, figsize=figsize, use_index=use_index,
                          title=title, grid=grid, legend=legend, style=style,
                          logx=logx, logy=logy, loglog=loglog, xticks=xticks,
                          yticks=yticks, xlim=xlim, ylim=ylim, rot=rot,
                          fontsize=fontsize, colormap=colormap, table=table,
                          yerr=yerr, xerr=xerr, secondary_y=secondary_y,
                          sort_columns=sort_columns, **kwds)
    __call__.__doc__ = plot_frame.__doc__
    def line(self, x=None, y=None, **kwds):
        """
        Plot DataFrame columns as lines.

        This function is useful to plot lines using DataFrame's values
        as coordinates.

        Parameters
        ----------
        x : int or str, optional
            Columns to use for the horizontal axis.
            Either the location or the label of the columns to be used.
            By default, it will use the DataFrame indices.
        y : int, str, or list of them, optional
            The values to be plotted.
            Either the location or the label of the columns to be used.
            By default, it will use the remaining DataFrame numeric columns.
        **kwds
            Keyword arguments to pass on to :meth:`DataFrame.plot`.

        Returns
        -------
        :class:`matplotlib.axes.Axes` or :class:`numpy.ndarray`
            Return an ndarray when ``subplots=True``.

        See Also
        --------
        matplotlib.pyplot.plot : Plot y versus x as lines and/or markers.

        Examples
        --------

        .. plot::
            :context: close-figs

            The following example shows the populations for some animals
            over the years.

            >>> df = pd.DataFrame({
            ...    'pig': [20, 18, 489, 675, 1776],
            ...    'horse': [4, 25, 281, 600, 1900]
            ...    }, index=[1990, 1997, 2003, 2009, 2014])
            >>> lines = df.plot.line()

        .. plot::
           :context: close-figs

           An example with subplots, so an array of axes is returned.

           >>> axes = df.plot.line(subplots=True)
           >>> type(axes)
           <class 'numpy.ndarray'>

        .. plot::
            :context: close-figs

            The following example shows the relationship between both
            populations.

            >>> lines = df.plot.line(x='pig', y='horse')
        """
        # Delegate to the accessor's __call__ with kind='line'.
        return self(kind='line', x=x, y=y, **kwds)
    def bar(self, x=None, y=None, **kwds):
        """
        Vertical bar plot.

        A bar plot is a plot that presents categorical data with
        rectangular bars with lengths proportional to the values that they
        represent. A bar plot shows comparisons among discrete categories. One
        axis of the plot shows the specific categories being compared, and the
        other axis represents a measured value.

        Parameters
        ----------
        x : label or position, optional
            Allows plotting of one column versus another. If not specified,
            the index of the DataFrame is used.
        y : label or position, optional
            Allows plotting of one column versus another. If not specified,
            all numerical columns are used.
        **kwds
            Additional keyword arguments are documented in
            :meth:`DataFrame.plot`.

        Returns
        -------
        matplotlib.axes.Axes or np.ndarray of them
            An ndarray is returned with one :class:`matplotlib.axes.Axes`
            per column when ``subplots=True``.

        See Also
        --------
        DataFrame.plot.barh : Horizontal bar plot.
        DataFrame.plot : Make plots of a DataFrame.
        matplotlib.pyplot.bar : Make a bar plot with matplotlib.

        Examples
        --------
        Basic plot.

        .. plot::
            :context: close-figs

            >>> df = pd.DataFrame({'lab':['A', 'B', 'C'], 'val':[10, 30, 20]})
            >>> ax = df.plot.bar(x='lab', y='val', rot=0)

        Plot a whole dataframe to a bar plot. Each column is assigned a
        distinct color, and each row is nested in a group along the
        horizontal axis.

        .. plot::
            :context: close-figs

            >>> speed = [0.1, 17.5, 40, 48, 52, 69, 88]
            >>> lifespan = [2, 8, 70, 1.5, 25, 12, 28]
            >>> index = ['snail', 'pig', 'elephant',
            ...          'rabbit', 'giraffe', 'coyote', 'horse']
            >>> df = pd.DataFrame({'speed': speed,
            ...                    'lifespan': lifespan}, index=index)
            >>> ax = df.plot.bar(rot=0)

        Instead of nesting, the figure can be split by column with
        ``subplots=True``. In this case, a :class:`numpy.ndarray` of
        :class:`matplotlib.axes.Axes` are returned.

        .. plot::
            :context: close-figs

            >>> axes = df.plot.bar(rot=0, subplots=True)
            >>> axes[1].legend(loc=2)  # doctest: +SKIP

        Plot a single column.

        .. plot::
            :context: close-figs

            >>> ax = df.plot.bar(y='speed', rot=0)

        Plot only selected categories for the DataFrame.

        .. plot::
            :context: close-figs

            >>> ax = df.plot.bar(x='lifespan', rot=0)
        """
        # Delegate to the accessor's __call__ with kind='bar'.
        return self(kind='bar', x=x, y=y, **kwds)
    def barh(self, x=None, y=None, **kwds):
        """
        Make a horizontal bar plot.

        A horizontal bar plot is a plot that presents quantitative data with
        rectangular bars with lengths proportional to the values that they
        represent. A bar plot shows comparisons among discrete categories. One
        axis of the plot shows the specific categories being compared, and the
        other axis represents a measured value.

        Parameters
        ----------
        x : label or position, default DataFrame.index
            Column to be used for categories.
        y : label or position, default All numeric columns in dataframe
            Columns to be plotted from the DataFrame.
        **kwds
            Keyword arguments to pass on to :meth:`DataFrame.plot`.

        Returns
        -------
        :class:`matplotlib.axes.Axes` or numpy.ndarray of them

        See Also
        --------
        DataFrame.plot.bar: Vertical bar plot.
        DataFrame.plot : Make plots of DataFrame using matplotlib.
        matplotlib.axes.Axes.bar : Plot a vertical bar plot using matplotlib.

        Examples
        --------
        Basic example

        .. plot::
            :context: close-figs

            >>> df = pd.DataFrame({'lab':['A', 'B', 'C'], 'val':[10, 30, 20]})
            >>> ax = df.plot.barh(x='lab', y='val')

        Plot a whole DataFrame to a horizontal bar plot

        .. plot::
            :context: close-figs

            >>> speed = [0.1, 17.5, 40, 48, 52, 69, 88]
            >>> lifespan = [2, 8, 70, 1.5, 25, 12, 28]
            >>> index = ['snail', 'pig', 'elephant',
            ...          'rabbit', 'giraffe', 'coyote', 'horse']
            >>> df = pd.DataFrame({'speed': speed,
            ...                    'lifespan': lifespan}, index=index)
            >>> ax = df.plot.barh()

        Plot a column of the DataFrame to a horizontal bar plot

        .. plot::
            :context: close-figs

            >>> speed = [0.1, 17.5, 40, 48, 52, 69, 88]
            >>> lifespan = [2, 8, 70, 1.5, 25, 12, 28]
            >>> index = ['snail', 'pig', 'elephant',
            ...          'rabbit', 'giraffe', 'coyote', 'horse']
            >>> df = pd.DataFrame({'speed': speed,
            ...                    'lifespan': lifespan}, index=index)
            >>> ax = df.plot.barh(y='speed')

        Plot DataFrame versus the desired column

        .. plot::
            :context: close-figs

            >>> speed = [0.1, 17.5, 40, 48, 52, 69, 88]
            >>> lifespan = [2, 8, 70, 1.5, 25, 12, 28]
            >>> index = ['snail', 'pig', 'elephant',
            ...          'rabbit', 'giraffe', 'coyote', 'horse']
            >>> df = pd.DataFrame({'speed': speed,
            ...                    'lifespan': lifespan}, index=index)
            >>> ax = df.plot.barh(x='lifespan')
        """
        # Delegate to the accessor's __call__ with kind='barh'.
        return self(kind='barh', x=x, y=y, **kwds)
    def box(self, by=None, **kwds):
        r"""
        Make a box plot of the DataFrame columns.

        A box plot is a method for graphically depicting groups of numerical
        data through their quartiles.
        The box extends from the Q1 to Q3 quartile values of the data,
        with a line at the median (Q2). The whiskers extend from the edges
        of box to show the range of the data. The position of the whiskers
        is set by default to 1.5*IQR (IQR = Q3 - Q1) from the edges of the
        box. Outlier points are those past the end of the whiskers.

        For further details see Wikipedia's
        entry for `boxplot <https://en.wikipedia.org/wiki/Box_plot>`__.

        A consideration when using this chart is that the box and the whiskers
        can overlap, which is very common when plotting small sets of data.

        Parameters
        ----------
        by : string or sequence
            Column in the DataFrame to group by.
        **kwds : optional
            Additional keywords are documented in
            :meth:`DataFrame.plot`.

        Returns
        -------
        :class:`matplotlib.axes.Axes` or numpy.ndarray of them

        See Also
        --------
        DataFrame.boxplot: Another method to draw a box plot.
        Series.plot.box: Draw a box plot from a Series object.
        matplotlib.pyplot.boxplot: Draw a box plot in matplotlib.

        Examples
        --------
        Draw a box plot from a DataFrame with four columns of randomly
        generated data.

        .. plot::
            :context: close-figs

            >>> data = np.random.randn(25, 4)
            >>> df = pd.DataFrame(data, columns=list('ABCD'))
            >>> ax = df.plot.box()
        """
        # Delegate to the accessor's __call__ with kind='box'.
        return self(kind='box', by=by, **kwds)
    def hist(self, by=None, bins=10, **kwds):
        """
        Draw one histogram of the DataFrame's columns.

        A histogram is a representation of the distribution of data.
        This function groups the values of all given Series in the DataFrame
        into bins and draws all bins in one :class:`matplotlib.axes.Axes`.
        This is useful when the DataFrame's Series are in a similar scale.

        Parameters
        ----------
        by : str or sequence, optional
            Column in the DataFrame to group by.
        bins : int, default 10
            Number of histogram bins to be used.
        **kwds
            Additional keyword arguments are documented in
            :meth:`DataFrame.plot`.

        Returns
        -------
        :class:`matplotlib.AxesSubplot`
            Return a histogram plot.

        See Also
        --------
        DataFrame.hist : Draw histograms per DataFrame's Series.
        Series.hist : Draw a histogram with Series' data.

        Examples
        --------
        When we draw a dice 6000 times, we expect to get each value around 1000
        times. But when we draw two dice and sum the result, the distribution
        is going to be quite different. A histogram illustrates those
        distributions.

        .. plot::
            :context: close-figs

            >>> df = pd.DataFrame(
            ...     np.random.randint(1, 7, 6000),
            ...     columns = ['one'])
            >>> df['two'] = df['one'] + np.random.randint(1, 7, 6000)
            >>> ax = df.plot.hist(bins=12, alpha=0.5)
        """
        # Delegate to the accessor's __call__ with kind='hist'.
        return self(kind='hist', by=by, bins=bins, **kwds)
    # The shared KDE docstring template is filled in per data type; the
    # examples string below becomes part of the rendered documentation.
    @Appender(_kde_docstring % {
        'this-datatype': 'DataFrame',
        'sibling-datatype': 'Series',
        'examples': """
        Given several Series of points randomly sampled from unknown
        distributions, estimate their PDFs using KDE with automatic
        bandwidth determination and plot the results, evaluating them at
        1000 equally spaced points (default):

        .. plot::
            :context: close-figs

            >>> df = pd.DataFrame({
            ...     'x': [1, 2, 2.5, 3, 3.5, 4, 5],
            ...     'y': [4, 4, 4.5, 5, 5.5, 6, 6],
            ... })
            >>> ax = df.plot.kde()

        A scalar bandwidth can be specified. Using a small bandwidth value can
        lead to over-fitting, while using a large bandwidth value may result
        in under-fitting:

        .. plot::
            :context: close-figs

            >>> ax = df.plot.kde(bw_method=0.3)

        .. plot::
            :context: close-figs

            >>> ax = df.plot.kde(bw_method=3)

        Finally, the `ind` parameter determines the evaluation points for the
        plot of the estimated PDF:

        .. plot::
            :context: close-figs

            >>> ax = df.plot.kde(ind=[1, 2, 3, 4, 5, 6])
        """.strip()
    })
    def kde(self, bw_method=None, ind=None, **kwds):
        return self(kind='kde', bw_method=bw_method, ind=ind, **kwds)

    density = kde  # ``density`` is an alias of ``kde``
    def area(self, x=None, y=None, **kwds):
        """
        Draw a stacked area plot.

        An area plot displays quantitative data visually.
        This function wraps the matplotlib area function.

        Parameters
        ----------
        x : label or position, optional
            Coordinates for the X axis. By default uses the index.
        y : label or position, optional
            Column to plot. By default uses all columns.
        stacked : bool, default True
            Area plots are stacked by default. Set to False to create a
            unstacked plot.
        **kwds : optional
            Additional keyword arguments are documented in
            :meth:`DataFrame.plot`.

        Returns
        -------
        matplotlib.axes.Axes or numpy.ndarray
            Area plot, or array of area plots if subplots is True.

        See Also
        --------
        DataFrame.plot : Make plots of DataFrame using matplotlib / pylab.

        Examples
        --------
        Draw an area plot based on basic business metrics:

        .. plot::
            :context: close-figs

            >>> df = pd.DataFrame({
            ...     'sales': [3, 2, 3, 9, 10, 6],
            ...     'signups': [5, 5, 6, 12, 14, 13],
            ...     'visits': [20, 42, 28, 62, 81, 50],
            ... }, index=pd.date_range(start='2018/01/01', end='2018/07/01',
            ...                        freq='M'))
            >>> ax = df.plot.area()

        Area plots are stacked by default. To produce an unstacked plot,
        pass ``stacked=False``:

        .. plot::
            :context: close-figs

            >>> ax = df.plot.area(stacked=False)

        Draw an area plot for a single column:

        .. plot::
            :context: close-figs

            >>> ax = df.plot.area(y='sales')

        Draw with a different `x`:

        .. plot::
            :context: close-figs

            >>> df = pd.DataFrame({
            ...     'sales': [3, 2, 3],
            ...     'visits': [20, 42, 28],
            ...     'day': [1, 2, 3],
            ... })
            >>> ax = df.plot.area(x='day')
        """
        # Delegate to the accessor's __call__ with kind='area'.
        return self(kind='area', x=x, y=y, **kwds)
def pie(self, y=None, **kwds):
    """
    Generate a pie plot.

    A pie plot is a proportional representation of the numerical data in
    a column; this wraps :meth:`matplotlib.pyplot.pie` for the specified
    column. If no column reference is passed and ``subplots=True``, a pie
    plot is drawn for each numerical column independently.

    Parameters
    ----------
    y : int or label, optional
        Label or position of the column to plot. If not provided, the
        ``subplots=True`` argument must be passed.
    **kwds
        Keyword arguments to pass on to :meth:`DataFrame.plot`.

    Returns
    -------
    matplotlib.axes.Axes or np.ndarray of them
        A NumPy array is returned when `subplots` is True.

    See Also
    --------
    Series.plot.pie : Generate a pie plot for a Series.
    DataFrame.plot : Make plots of a DataFrame.
    """
    # Delegate to the plot accessor's __call__ with the 'pie' kind.
    return self(kind='pie', y=y, **kwds)
def scatter(self, x, y, s=None, c=None, **kwds):
    """
    Create a scatter plot with varying marker point size and color.

    The coordinates of each point are defined by two dataframe columns
    and filled circles represent each point — useful to see complex
    correlations between two variables.

    Parameters
    ----------
    x : int or str
        Column name or position used as horizontal coordinates.
    y : int or str
        Column name or position used as vertical coordinates.
    s : scalar or array_like, optional
        Size of each point: a single scalar for all points, or a
        sequence of scalars applied to the points cyclically.
    c : str, int or array_like, optional
        Color of each point: a single color string (name, RGB or RGBA
        code), a sequence of color strings applied cyclically, or a
        column name/position whose values color the markers via a
        colormap.
    **kwds
        Keyword arguments to pass on to :meth:`DataFrame.plot`.

    Returns
    -------
    :class:`matplotlib.axes.Axes` or numpy.ndarray of them

    See Also
    --------
    matplotlib.pyplot.scatter : Scatter plot using multiple input data
        formats.
    """
    # Delegate to the plot accessor's __call__ with the 'scatter' kind.
    return self(kind='scatter', x=x, y=y, c=c, s=s, **kwds)
def hexbin(self, x, y, C=None, reduce_C_function=None, gridsize=None,
           **kwds):
    """
    Generate a hexagonal binning plot.

    Generate a hexagonal binning plot of `x` versus `y`. If `C` is `None`
    (the default), this is a histogram of the number of occurrences of
    the observations at ``(x[i], y[i])``. If `C` is specified, its values
    at ``(x[i], y[i])`` are accumulated per hexagonal bin and reduced by
    `reduce_C_function` (default :meth:`numpy.mean`). When given, `C`
    must be a 1-D sequence of the same length as `x` and `y`, or a
    column label.

    Parameters
    ----------
    x : int or str
        The column label or position for x points.
    y : int or str
        The column label or position for y points.
    C : int or str, optional
        The column label or position for the value of `(x, y)` point.
    reduce_C_function : callable, default `np.mean`
        Function of one argument reducing all values in a bin to a single
        number (e.g. `np.mean`, `np.max`, `np.sum`, `np.std`).
    gridsize : int or tuple of (int, int), default 100
        Number of hexagons in the x-direction; the y-direction count is
        chosen so hexagons are approximately regular. May also be a
        (nx, ny) tuple.
    **kwds
        Additional keyword arguments are documented in
        :meth:`DataFrame.plot`.

    Returns
    -------
    matplotlib.AxesSubplot
        The matplotlib ``Axes`` on which the hexbin is plotted.

    See Also
    --------
    DataFrame.plot : Make plots of a DataFrame.
    matplotlib.pyplot.hexbin : Hexagonal binning plot using matplotlib,
        the matplotlib function that is used under the hood.
    """
    # Forward the optional tuning knobs only when the caller supplied
    # them, so matplotlib's own defaults apply otherwise.
    optional = (('reduce_C_function', reduce_C_function),
                ('gridsize', gridsize))
    for key, value in optional:
        if value is not None:
            kwds[key] = value
    return self(kind='hexbin', x=x, y=y, C=C, **kwds)
| 35.606044
| 84
| 0.557883
|
acb68663901a1662fc8f7e996e8515259d2bd885
| 2,055
|
py
|
Python
|
src/command_modules/azure-cli-network/setup.py
|
noelbundick/azure-cli
|
b56636230e3fd3162b9b144f969175641dc230a8
|
[
"MIT"
] | null | null | null |
src/command_modules/azure-cli-network/setup.py
|
noelbundick/azure-cli
|
b56636230e3fd3162b9b144f969175641dc230a8
|
[
"MIT"
] | null | null | null |
src/command_modules/azure-cli-network/setup.py
|
noelbundick/azure-cli
|
b56636230e3fd3162b9b144f969175641dc230a8
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python

# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------

# Packaging script for the azure-cli-network command module.

from codecs import open
from setuptools import setup

# Prefer the Azure-specific bdist_wheel command class when available;
# otherwise fall back to plain setuptools behaviour with no extra commands.
try:
    from azure_bdist_wheel import cmdclass
except ImportError:
    from distutils import log as logger
    logger.warn("Wheel is not available, disabling bdist_wheel hook")
    cmdclass = {}

# Version of this command module package.
VERSION = "2.2.12"

# Trove classifiers: supported Python versions and license.
CLASSIFIERS = [
    'Development Status :: 5 - Production/Stable',
    'Intended Audience :: Developers',
    'Intended Audience :: System Administrators',
    'Programming Language :: Python',
    'Programming Language :: Python :: 2',
    'Programming Language :: Python :: 2.7',
    'Programming Language :: Python :: 3',
    'Programming Language :: Python :: 3.4',
    'Programming Language :: Python :: 3.5',
    'Programming Language :: Python :: 3.6',
    'License :: OSI Approved :: MIT License',
]

# Runtime dependencies: pinned Azure SDK management packages plus the CLI core.
DEPENDENCIES = [
    'azure-mgmt-network==2.4.0',
    'azure-mgmt-trafficmanager==0.51.0',
    'azure-mgmt-dns==2.1.0',
    'azure-cli-core',
]

# The long description shown on PyPI is the README followed by the changelog.
with open('README.rst', 'r', encoding='utf-8') as f:
    README = f.read()
with open('HISTORY.rst', 'r', encoding='utf-8') as f:
    HISTORY = f.read()

setup(
    name='azure-cli-network',
    version=VERSION,
    description='Microsoft Azure Command-Line Tools Network Command Module',
    long_description=README + '\n\n' + HISTORY,
    license='MIT',
    author='Microsoft Corporation',
    author_email='azpycli@microsoft.com',
    url='https://github.com/Azure/azure-cli',
    classifiers=CLASSIFIERS,
    packages=[
        'azure',
        'azure.cli',
        'azure.cli.command_modules',
        'azure.cli.command_modules.network',
        'azure.cli.command_modules.network.zone_file'
    ],
    install_requires=DEPENDENCIES,
    cmdclass=cmdclass
)
| 32.109375
| 94
| 0.603893
|
9beb0ca3bf10bbeac168a78470d246a4f8efb35a
| 933
|
bzl
|
Python
|
build/test.bzl
|
ktprime/quiche-1
|
abf85ce22e1409a870b1bf470cb5a68cbdb28e50
|
[
"BSD-3-Clause"
] | null | null | null |
build/test.bzl
|
ktprime/quiche-1
|
abf85ce22e1409a870b1bf470cb5a68cbdb28e50
|
[
"BSD-3-Clause"
] | null | null | null |
build/test.bzl
|
ktprime/quiche-1
|
abf85ce22e1409a870b1bf470cb5a68cbdb28e50
|
[
"BSD-3-Clause"
] | null | null | null |
"""Tools for building QUICHE tests."""
load("@bazel_skylib//lib:paths.bzl", "paths")
def test_suite_from_source_list(name, srcs, **kwargs):
"""
Generates a test target for every individual test source file specified.
Args:
name: the name of the resulting test_suite target.
srcs: the list of source files from which the test targets are generated.
**kwargs: other arguments that are passed to the cc_test rule directly.s
"""
tests = []
for sourcefile in srcs:
if not sourcefile.endswith("_test.cc"):
fail("All source files passed to test_suite_from_source_list() must end with _test.cc")
test_name, _ = paths.split_extension(paths.basename(sourcefile))
native.cc_test(
name = test_name,
srcs = [sourcefile],
**kwargs
)
tests.append(test_name)
native.test_suite(name = name, tests = tests)
| 34.555556
| 99
| 0.647374
|
adc6027c9a744d04f2e4c92374e9d436a2fbda60
| 3,642
|
py
|
Python
|
st2common/st2common/util/greenpooldispatch.py
|
saucetray/st2
|
8f507d6c8d9483c8371e386fe2b7998596856fd7
|
[
"Apache-2.0"
] | 2
|
2021-08-04T01:04:06.000Z
|
2021-08-04T01:04:08.000Z
|
st2common/st2common/util/greenpooldispatch.py
|
saucetray/st2
|
8f507d6c8d9483c8371e386fe2b7998596856fd7
|
[
"Apache-2.0"
] | 1
|
2022-03-31T03:53:22.000Z
|
2022-03-31T03:53:22.000Z
|
st2common/st2common/util/greenpooldispatch.py
|
saucetray/st2
|
8f507d6c8d9483c8371e386fe2b7998596856fd7
|
[
"Apache-2.0"
] | 1
|
2019-10-11T14:42:28.000Z
|
2019-10-11T14:42:28.000Z
|
# Copyright 2019 Extreme Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import time
import eventlet
import six.moves.queue
from st2common import log as logging
__all__ = [
'BufferedDispatcher'
]
# If the thread pool has been occupied with no empty threads for more than this number of seconds
# a message will be logged
POOL_BUSY_THRESHOLD_SECONDS = 60
POOL_BUSY_LOG_MESSAGE = """
BufferedDispatcher pool "%s" has been busy with no free threads for more than %s seconds. If there \
are server resources available, consider increasing the dispatcher pool size in the config.
""".strip()
LOG = logging.getLogger(__name__)
class BufferedDispatcher(object):
    """Dispatch work items to an eventlet GreenPool via a buffer queue.

    Work submitted through :meth:`dispatch` is queued and flushed to the
    green thread pool immediately when free workers exist; otherwise a
    background monitor greenthread flushes it once workers free up.
    """

    def __init__(self, dispatch_pool_size=50, monitor_thread_empty_q_sleep_time=5,
                 monitor_thread_no_workers_sleep_time=1, name=None):
        self._pool_limit = dispatch_pool_size
        self._dispatcher_pool = eventlet.GreenPool(dispatch_pool_size)
        # Background greenthread that drains the buffer when workers free up.
        self._dispatch_monitor_thread = eventlet.greenthread.spawn(self._flush)
        self._monitor_thread_empty_q_sleep_time = monitor_thread_empty_q_sleep_time
        self._monitor_thread_no_workers_sleep_time = monitor_thread_no_workers_sleep_time
        self._name = name
        self._work_buffer = six.moves.queue.Queue()

        # Internal attributes we use to track how long the pool is busy without any free workers
        self._pool_last_free_ts = time.time()

    @property
    def name(self):
        # Fall back to the object id when no explicit name was given.
        return self._name or id(self)

    def dispatch(self, handler, *args):
        """Queue ``handler(*args)`` for execution and attempt to flush now."""
        self._work_buffer.put((handler, args), block=True, timeout=1)
        self._flush_now()

    def shutdown(self):
        """Kill the background monitor greenthread."""
        self._dispatch_monitor_thread.kill()

    def _flush(self):
        # Monitor loop: wait for queued work, wait for a free worker,
        # then flush as much as possible.
        while True:
            while self._work_buffer.empty():
                eventlet.greenthread.sleep(self._monitor_thread_empty_q_sleep_time)
            while self._dispatcher_pool.free() <= 0:
                eventlet.greenthread.sleep(self._monitor_thread_no_workers_sleep_time)
            self._flush_now()

    def _flush_now(self):
        if self._dispatcher_pool.free() <= 0:
            # Pool is saturated: log (at most informationally) once the pool
            # has been busy longer than POOL_BUSY_THRESHOLD_SECONDS.
            now = time.time()
            if (now - self._pool_last_free_ts) >= POOL_BUSY_THRESHOLD_SECONDS:
                LOG.info(POOL_BUSY_LOG_MESSAGE % (self.name, POOL_BUSY_THRESHOLD_SECONDS))
            return

        # Update the time of when there were free threads available
        self._pool_last_free_ts = time.time()

        # Drain the buffer while there is both work and worker capacity.
        while not self._work_buffer.empty() and self._dispatcher_pool.free() > 0:
            (handler, args) = self._work_buffer.get_nowait()
            self._dispatcher_pool.spawn(handler, *args)

    def __repr__(self):
        free_count = self._dispatcher_pool.free()
        values = (self.name, self._pool_limit, free_count, self._monitor_thread_empty_q_sleep_time,
                  self._monitor_thread_no_workers_sleep_time)
        return ('<BufferedDispatcher name=%s,dispatch_pool_size=%s,free_threads=%s,'
                'monitor_thread_empty_q_sleep_time=%s,monitor_thread_no_workers_sleep_time=%s>' %
                values)
| 37.546392
| 100
| 0.711148
|
a59828de93da069fd168af5f100521e3706399f6
| 1,753
|
py
|
Python
|
eva.py
|
Ashwin1934/eva
|
53c1172a0f8a7409cf0ef97efea957979a8290a0
|
[
"Apache-2.0"
] | 1
|
2019-12-08T03:48:44.000Z
|
2019-12-08T03:48:44.000Z
|
eva.py
|
Ashwin1934/eva
|
53c1172a0f8a7409cf0ef97efea957979a8290a0
|
[
"Apache-2.0"
] | null | null | null |
eva.py
|
Ashwin1934/eva
|
53c1172a0f8a7409cf0ef97efea957979a8290a0
|
[
"Apache-2.0"
] | 1
|
2019-12-04T17:09:40.000Z
|
2019-12-04T17:09:40.000Z
|
# coding=utf-8
# Copyright 2018-2020 EVA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
from src.server.server import start_server
from src.configuration.configuration_manager import ConfigurationManager
from src.utils.logging_manager import LoggingManager
from src.utils.logging_manager import LoggingLevel
def eva():
    """
    Start the eva system
    """
    # Read the network settings for the server from the configuration file.
    settings = ConfigurationManager()
    hostname = settings.get_value('server', 'hostname')
    port = settings.get_value('server', 'port')
    socket_timeout = settings.get_value('server', 'socket_timeout')

    event_loop = asyncio.new_event_loop()
    shutdown_future = event_loop.create_future()

    # Launch the server; any startup failure is logged as critical.
    try:
        asyncio.run(start_server(host=hostname,
                                 port=port,
                                 loop=event_loop,
                                 socket_timeout=socket_timeout,
                                 stop_server_future=shutdown_future))
    except Exception as e:
        LoggingManager().log(e, LoggingLevel.CRITICAL)


if __name__ == '__main__':
    # execute only if run as the entry point into the program
    eva()
| 31.872727
| 74
| 0.676554
|
e7359dd4de93861222dfee6c80a75b22e88ba56d
| 969
|
py
|
Python
|
grow/translators/translators.py
|
denmojo/pygrow
|
95d32eed8c13a6beb900effa4d18d9fe1e37d2b4
|
[
"MIT"
] | null | null | null |
grow/translators/translators.py
|
denmojo/pygrow
|
95d32eed8c13a6beb900effa4d18d9fe1e37d2b4
|
[
"MIT"
] | null | null | null |
grow/translators/translators.py
|
denmojo/pygrow
|
95d32eed8c13a6beb900effa4d18d9fe1e37d2b4
|
[
"MIT"
] | null | null | null |
from . import google_translator_toolkit
from grow.common import utils
_kinds_to_classes = {}
_builtins = (
google_translator_toolkit.GoogleTranslatorToolkitTranslator,
)
def install_translator(translator):
    # Register a translator class in the module registry, keyed by its KIND.
    _kinds_to_classes[translator.KIND] = translator
def install_builtins():
    """Register every built-in translator class in the module registry."""
    # Fix: the original declared ``global _destination_kinds_to_classes`` —
    # a name that does not exist anywhere in this module (copy-paste
    # leftover). The declaration was a no-op and has been removed; this
    # function only calls install_translator() and assigns no globals.
    for builtin in _builtins:
        install_translator(builtin)
def create_translator(pod, kind, config, project_title=None,
                      instructions=None):
    """Instantiate the translator class registered under ``kind``.

    Raises ValueError when no translator with that kind is registered.
    """
    # Make sure the built-in translators are in the registry first.
    install_builtins()
    if kind not in _kinds_to_classes:
        raise ValueError('No translator exists: "{}"'.format(kind))
    translator_cls = _kinds_to_classes[kind]
    return translator_cls(pod=pod, config=config,
                          project_title=project_title,
                          instructions=instructions)
def register_extensions(extension_paths, pod_root):
    # Import each dotted-path translator class (resolved relative to the
    # pod root) and add it to the registry.
    for path in extension_paths:
        cls = utils.import_string(path, [pod_root])
        install_translator(cls)
| 27.685714
| 74
| 0.729618
|
bdcb8d1d2038561088859d056b6818ac7bf9c196
| 1,374
|
py
|
Python
|
AnalyzeDatabase.py
|
AdamRSterling/USS_StatsBot
|
c715fa2282eb3a2b9005096a642b08bbc92b1900
|
[
"MIT"
] | 1
|
2015-01-04T02:12:27.000Z
|
2015-01-04T02:12:27.000Z
|
AnalyzeDatabase.py
|
AdamRSterling/USS_StatsBot
|
c715fa2282eb3a2b9005096a642b08bbc92b1900
|
[
"MIT"
] | null | null | null |
AnalyzeDatabase.py
|
AdamRSterling/USS_StatsBot
|
c715fa2282eb3a2b9005096a642b08bbc92b1900
|
[
"MIT"
] | null | null | null |
import unicodecsv
import FileDetails
import os
from bokeh.charts import TimeSeries
import datetime
def analyze_databases():
    # Entry point: run each analysis pass. The channel pass is currently
    # disabled (left commented out).
    analyze_USS_database()
    # analyze_channel_database()
def analyze_USS_database():
file_path = FileDetails.DATA_DIR + '/' + FileDetails.USS_DATABASE_FILENAME
if not os.path.exists(file_path):
print "Cannot find database!"
return
uss_database = unicodecsv.DictReader(open(file_path, 'rb'))
plot_submissions_over_time(uss_database)
def plot_submissions_over_time(uss_database):
    # Build parallel lists of submission counts and event dates from the
    # database rows. Assumes each row has '# of Submissions' and a 'Date'
    # column formatted YYYY-MM-DD — TODO confirm against the CSV writer.
    sub_counts = []
    events = []
    for event in uss_database:
        sub_counts.append(int(event['# of Submissions']))
        date = event['Date'].split('-')
        events.append(datetime.date(int(date[0]), int(date[1]), int(date[2])))
    # Put down data
    plot = TimeSeries(sub_counts, events)
    # label the plot
    plot.title("Submissions over Time").xlabel("Date").ylabel("# of Submissions")
    # Save the plot
    # NOTE(review): bokeh's charts API normally saves via output_file()/save();
    # verify that this ``filename`` call actually writes test.html.
    plot.filename('test.html')
def analyze_channel_database():
file_path = FileDetails.DATA_DIR + '/' + FileDetails.CHANNEL_DATABASE_FILENAME
if not os.path.exists(file_path):
print "Cannot find database!"
return
channel_database = unicodecsv.DictReader(open(file_path, 'rb'))
for channel in channel_database:
print channel['Channel']
| 29.234043
| 82
| 0.684134
|
2fa205d163d1e9e7722cbe86305217b589d77d0d
| 2,439
|
py
|
Python
|
azkv/main.py
|
undp/azkv
|
8099380b6e1972f6f6c4982c2f1da87538fec24d
|
[
"MIT"
] | 1
|
2021-05-23T17:20:38.000Z
|
2021-05-23T17:20:38.000Z
|
azkv/main.py
|
undp/azkv
|
8099380b6e1972f6f6c4982c2f1da87538fec24d
|
[
"MIT"
] | 1
|
2020-10-20T02:57:08.000Z
|
2020-10-20T07:26:54.000Z
|
azkv/main.py
|
undp/azkv
|
8099380b6e1972f6f6c4982c2f1da87538fec24d
|
[
"MIT"
] | 2
|
2021-05-23T17:20:44.000Z
|
2022-03-25T01:18:11.000Z
|
"""Main app module."""
from cement import App, TestApp, init_defaults
from cement.core.exc import CaughtSignal
from .controllers.base import Base
from .controllers.keyvaults import Keyvaults
from .controllers.secrets import Secrets
from .core.exc import AzKVError
from .core.hooks import extend_vault_creds, log_app_version
from .core.log import AzKVLogHandler
# configuration defaults
CONFIG = init_defaults("azkv", "azkv.credentials", "azkv.keyvaults")
# Default to credentials sourced from environment variables.
CONFIG["azkv"]["credentials"] = {"type": "EnvironmentVariables"}
# No Key Vaults are configured out of the box.
CONFIG["azkv"]["keyvaults"] = []
class AzKV(App):
    """AzKV primary application."""

    class Meta:
        """Application meta-data."""

        label = "azkv"

        # configuration defaults
        config_defaults = CONFIG

        # call sys.exit() on close
        exit_on_close = True

        # register functions to hooks
        hooks = [
            ("post_setup", log_app_version),
            ("post_setup", extend_vault_creds),
        ]

        # load additional framework extensions
        extensions = [
            "colorlog",
            "jinja2",
            "yaml",
        ]

        # configuration handler
        config_handler = "yaml"

        # configuration file suffix
        config_file_suffix = ".yaml"

        # set log handler (custom colored format provided by AzKVLogHandler)
        log_handler = "colorlog_custom_format"

        # set the output handler
        output_handler = "jinja2"

        # register handlers
        handlers = [Base, AzKVLogHandler, Keyvaults, Secrets]
class AzKVTest(TestApp, AzKV):
    """A sub-class of AzKV that is better suited for testing."""

    class Meta:
        """Test application meta-data."""

        label = "azkv"
def main():
    """App entry point."""
    with AzKV() as app:

        def _print_debug_traceback():
            # Shared debug handling: surface the full traceback when the
            # app runs in debug mode.
            if app.debug is True:
                import traceback
                traceback.print_exc()

        try:
            app.run()

        except AssertionError as e:
            print("AssertionError > %s" % e.args[0])
            app.exit_code = 1
            _print_debug_traceback()

        except AzKVError as e:
            print("AzKVError > %s" % e.args[0])
            app.exit_code = 1
            _print_debug_traceback()

        except CaughtSignal as e:
            # Default Cement signals are SIGINT and SIGTERM, exit 0 (non-error)
            print("\n%s" % e)
            app.exit_code = 0


if __name__ == "__main__":
    main()
| 23.911765
| 79
| 0.586306
|
8b57a787b0b1b4a2208d9ee6b1acdef6b26dcac0
| 208,098
|
py
|
Python
|
lib/pwiki/customtreectrl.py
|
dszmaj/wikidpad
|
1127375665935524ddb623da8dd5137038c7e53e
|
[
"Apache-2.0",
"MIT"
] | 16
|
2015-02-05T17:32:04.000Z
|
2022-01-14T13:46:36.000Z
|
lib/pwiki/customtreectrl.py
|
dszmaj/wikidpad
|
1127375665935524ddb623da8dd5137038c7e53e
|
[
"Apache-2.0",
"MIT"
] | 8
|
2015-06-20T20:02:41.000Z
|
2016-02-23T14:52:32.000Z
|
lib/pwiki/customtreectrl.py
|
dszmaj/wikidpad
|
1127375665935524ddb623da8dd5137038c7e53e
|
[
"Apache-2.0",
"MIT"
] | 11
|
2015-05-19T09:17:16.000Z
|
2017-09-14T00:43:13.000Z
|
# --------------------------------------------------------------------------------- #
# CUSTOMTREECTRL wxPython IMPLEMENTATION
# Inspired By And Heavily Based On wxGenericTreeCtrl.
#
# Andrea Gavana, @ 17 May 2006
# Latest Revision: 16 Apr 2007, 11.00 CET
#
#
# TODO List
#
# Almost All The Features Of wx.TreeCtrl Are Available, And There Is Practically
# No Limit In What Could Be Added To This Class. The First Things That Comes
# To My Mind Are:
#
# 1. Implement The Style TR_EXTENDED (I Have Never Used It, But It May Be Useful).
#
# 2. Add Support For 3-State CheckBoxes (Is That Really Useful?).
#
# 3. Try To Implement A More Flicker-Free Background Image In Cases Like
# Centered Or Stretched Image (Now CustomTreeCtrl Supports Only Tiled
# Background Images).
#
# 4. Try To Mimic Windows wx.TreeCtrl Expanding/Collapsing behaviour: CustomTreeCtrl
# Suddenly Expands/Collapses The Nodes On Mouse Click While The Native Control
# Has Some Kind Of "Smooth" Expanding/Collapsing, Like A Wave. I Don't Even
# Know Where To Start To Do That.
#
# 5. Speed Up General OnPaint Things? I Have No Idea, Here CustomTreeCtrl Is Quite
# Fast, But We Should See On Slower Machines.
#
#
# For All Kind Of Problems, Requests Of Enhancements And Bug Reports, Please
# Write To Me At:
#
# gavana@kpo.kz
# andrea.gavana@gmail.com
#
# Or, Obviously, To The wxPython Mailing List!!!
#
#
# Modifications by Michael Butscher (mbutscher@gmx.de) based on
# rev. 1.14 in wxWidgets repository
#
# Modifications by Michael Butscher Jan. 2007:
#
# - Expand buttons at the same place where they are on Windows tree
# - No button for root element
# - Expansion near the bottom scrolls tree appropriately
# - Flicker-free expansion/collapse (not tested with background image)
# - Unselect also works on single-select tree
# - Option to set image list without generation of grayed icons (faster)
#
# Modifications by Michael Butscher May 2007:
# - Tooltip if label is broader than window
#
# Modifications by Michael Butscher May 2010:
# - Parameter in SelectItem() to suppress event generation
#
#
# End Of Comments
# --------------------------------------------------------------------------------- #
"""
Description
===========
CustomTreeCtrl is a class that mimics the behaviour of wx.TreeCtrl, with almost the
same base functionalities plus some more enhancements. This class does not rely on
the native control, as it is a full owner-drawn tree control.
Apart of the base functionalities of CustomTreeCtrl (described below), in addition
to the standard wx.TreeCtrl behaviour this class supports:
* CheckBox-type items: checkboxes are easy to handle, just selected or unselected
state with no particular issues in handling the item's children;
* RadioButton-type items: since I elected to put radiobuttons in CustomTreeCtrl, I
needed some way to handle them, that made sense. So, I used the following approach:
- All peer-nodes that are radiobuttons will be mutually exclusive. In other words,
only one of a set of radiobuttons that share a common parent can be checked at
once. If a radiobutton node becomes checked, then all of its peer radiobuttons
must be unchecked.
- If a radiobutton node becomes unchecked, then all of its child nodes will become
inactive.
* Hyperlink-type items: they look like a hyperlink, with the proper mouse cursor
on hovering.
* Multiline text items.
* Enabling/disabling items (together with their plain or grayed out icons).
* Whatever non-toplevel widget can be attached next to an item.
* Default selection style, gradient (horizontal/vertical) selection style and Windows
Vista selection style.
* Customized drag and drop images built on the fly.
* Setting the CustomTreeCtrl item buttons to a personalized imagelist.
* Setting the CustomTreeCtrl check/radio item icons to a personalized imagelist.
* Changing the style of the lines that connect the items (in terms of wx.Pen styles).
* Using an image as a CustomTreeCtrl background (currently only in "tile" mode).
And a lot more. Check the demo for an almost complete review of the functionalities.
Base Functionalities
====================
CustomTreeCtrl supports all the wx.TreeCtrl styles, except:
- TR_EXTENDED: supports for this style is on the todo list (Am I sure of this?).
Plus it has 3 more styles to handle checkbox-type items:
- TR_AUTO_CHECK_CHILD : automatically checks/unchecks the item children;
- TR_AUTO_CHECK_PARENT : automatically checks/unchecks the item parent;
- TR_AUTO_TOGGLE_CHILD: automatically toggles the item children.
All the methods available in wx.TreeCtrl are also available in CustomTreeCtrl.
Events
======
All the events supported by wx.TreeCtrl are also available in CustomTreeCtrl, with
a few exceptions:
- EVT_TREE_GET_INFO (don't know what this means);
- EVT_TREE_SET_INFO (don't know what this means);
- EVT_TREE_ITEM_MIDDLE_CLICK (not implemented, but easy to add);
- EVT_TREE_STATE_IMAGE_CLICK: no need for that, look at the checking events below.
Plus, CustomTreeCtrl supports the events related to the checkbutton-type items:
- EVT_TREE_ITEM_CHECKING: an item is being checked;
- EVT_TREE_ITEM_CHECKED: an item has been checked.
And to hyperlink-type items:
- EVT_TREE_ITEM_HYPERLINK: an hyperlink item has been clicked (this event is sent
after the EVT_TREE_SEL_CHANGED event).
Supported Platforms
===================
CustomTreeCtrl has been tested on the following platforms:
* Windows (Windows XP);
* GTK (Thanks to Michele Petrazzo);
* Mac OS (Thanks to John Jackson).
Latest Revision: Andrea Gavana @ 16 Apr 2007, 11.00 CET
Version 1.0
"""
import wx
import zlib
import cStringIO
import types
import traceback
# ----------------------------------------------------------------------------
# Constants
# ----------------------------------------------------------------------------
_NO_IMAGE = -1
_PIXELS_PER_UNIT = 10
# Bug workaround: In wxPython 2.6 these constants weren't defined
# in 2.8 they are defined under a different name and with different values
try:
wxWINDOWS_NT = wx.OS_WINDOWS_NT
except AttributeError:
wxWINDOWS_NT = 18 # For wxGetOsVersion(), this includes NT 4.0, 2000, XP
try:
wxWIN95 = wx.OS_WINDOWS_9X
except AttributeError:
wxWIN95 = 20 # For wx.GetOsVersion(), this includes also Win 98 and ME
# Start editing the current item after half a second (if the mouse hasn't
# been clicked/moved)
_DELAY = 500
# ----------------------------------------------------------------------------
# Constants
# ----------------------------------------------------------------------------
# Enum for different images associated with a treectrl item
TreeItemIcon_Normal = 0 # not selected, not expanded
TreeItemIcon_Selected = 1 # selected, not expanded
TreeItemIcon_Expanded = 2 # not selected, expanded
TreeItemIcon_SelectedExpanded = 3 # selected, expanded
TreeItemIcon_Checked = 0 # check button, checked
TreeItemIcon_NotChecked = 1 # check button, not checked
TreeItemIcon_Flagged = 2 # radio button, selected
TreeItemIcon_NotFlagged = 3 # radio button, not selected
# ----------------------------------------------------------------------------
# CustomTreeCtrl flags
# ----------------------------------------------------------------------------
TR_NO_BUTTONS = wx.TR_NO_BUTTONS # for convenience
TR_HAS_BUTTONS = wx.TR_HAS_BUTTONS # draw collapsed/expanded btns
TR_NO_LINES = wx.TR_NO_LINES # don't draw lines at all
TR_LINES_AT_ROOT = wx.TR_LINES_AT_ROOT # connect top-level nodes
TR_TWIST_BUTTONS = wx.TR_TWIST_BUTTONS # still used by wxTreeListCtrl
TR_SINGLE = wx.TR_SINGLE # for convenience
TR_MULTIPLE = wx.TR_MULTIPLE # can select multiple items
TR_EXTENDED = wx.TR_EXTENDED # TODO: allow extended selection
TR_HAS_VARIABLE_ROW_HEIGHT = wx.TR_HAS_VARIABLE_ROW_HEIGHT # what it says
TR_EDIT_LABELS = wx.TR_EDIT_LABELS # can edit item labels
TR_ROW_LINES = wx.TR_ROW_LINES # put border around items
TR_HIDE_ROOT = wx.TR_HIDE_ROOT # don't display root node
TR_FULL_ROW_HIGHLIGHT = wx.TR_FULL_ROW_HIGHLIGHT # highlight full horz space
TR_AUTO_CHECK_CHILD = 0x04000 # only meaningful for checkboxes
TR_AUTO_TOGGLE_CHILD = 0x08000 # only meaningful for checkboxes
TR_AUTO_CHECK_PARENT = 0x10000 # only meaningful for checkboxes
TR_DEFAULT_STYLE = wx.TR_DEFAULT_STYLE # default style for the tree control
# Values for the `flags' parameter of CustomTreeCtrl.HitTest() which determine
# where exactly the specified point is situated:
TREE_HITTEST_ABOVE = wx.TREE_HITTEST_ABOVE
TREE_HITTEST_BELOW = wx.TREE_HITTEST_BELOW
TREE_HITTEST_NOWHERE = wx.TREE_HITTEST_NOWHERE
# on the button associated with an item.
TREE_HITTEST_ONITEMBUTTON = wx.TREE_HITTEST_ONITEMBUTTON
# on the bitmap associated with an item.
TREE_HITTEST_ONITEMICON = wx.TREE_HITTEST_ONITEMICON
# on the indent associated with an item.
TREE_HITTEST_ONITEMINDENT = wx.TREE_HITTEST_ONITEMINDENT
# on the label (string) associated with an item.
TREE_HITTEST_ONITEMLABEL = wx.TREE_HITTEST_ONITEMLABEL
# on the right of the label associated with an item.
TREE_HITTEST_ONITEMRIGHT = wx.TREE_HITTEST_ONITEMRIGHT
# on the label (string) associated with an item.
TREE_HITTEST_ONITEMSTATEICON = wx.TREE_HITTEST_ONITEMSTATEICON
# on the left of the CustomTreeCtrl.
TREE_HITTEST_TOLEFT = wx.TREE_HITTEST_TOLEFT
# on the right of the CustomTreeCtrl.
TREE_HITTEST_TORIGHT = wx.TREE_HITTEST_TORIGHT
# on the upper part (first half) of the item.
TREE_HITTEST_ONITEMUPPERPART = wx.TREE_HITTEST_ONITEMUPPERPART
# on the lower part (second half) of the item.
TREE_HITTEST_ONITEMLOWERPART = wx.TREE_HITTEST_ONITEMLOWERPART
# on the check icon, if present
TREE_HITTEST_ONITEMCHECKICON = 0x4000
# anywhere on the item
TREE_HITTEST_ONITEM = TREE_HITTEST_ONITEMICON | TREE_HITTEST_ONITEMLABEL | TREE_HITTEST_ONITEMCHECKICON
# Background Image Style
_StyleTile = 0
_StyleStretch = 1
# Windows Vista Colours
_rgbSelectOuter = wx.Colour(170, 200, 245)
_rgbSelectInner = wx.Colour(230, 250, 250)
_rgbSelectTop = wx.Colour(210, 240, 250)
_rgbSelectBottom = wx.Colour(185, 215, 250)
_rgbNoFocusTop = wx.Colour(250, 250, 250)
_rgbNoFocusBottom = wx.Colour(235, 235, 235)
_rgbNoFocusOuter = wx.Colour(220, 220, 220)
_rgbNoFocusInner = wx.Colour(245, 245, 245)
# Flags for wx.RendererNative
_CONTROL_EXPANDED = 8
_CONTROL_CURRENT = 16
# Version Info
__version__ = "0.8"
# ----------------------------------------------------------------------------
# CustomTreeCtrl events and binding for handling them
# ----------------------------------------------------------------------------
wxEVT_TREE_BEGIN_DRAG = wx.wxEVT_COMMAND_TREE_BEGIN_DRAG
wxEVT_TREE_BEGIN_RDRAG = wx.wxEVT_COMMAND_TREE_BEGIN_RDRAG
wxEVT_TREE_BEGIN_LABEL_EDIT = wx.wxEVT_COMMAND_TREE_BEGIN_LABEL_EDIT
wxEVT_TREE_END_LABEL_EDIT = wx.wxEVT_COMMAND_TREE_END_LABEL_EDIT
wxEVT_TREE_DELETE_ITEM = wx.wxEVT_COMMAND_TREE_DELETE_ITEM
wxEVT_TREE_GET_INFO = wx.wxEVT_COMMAND_TREE_GET_INFO
wxEVT_TREE_SET_INFO = wx.wxEVT_COMMAND_TREE_SET_INFO
wxEVT_TREE_ITEM_EXPANDED = wx.wxEVT_COMMAND_TREE_ITEM_EXPANDED
wxEVT_TREE_ITEM_EXPANDING = wx.wxEVT_COMMAND_TREE_ITEM_EXPANDING
wxEVT_TREE_ITEM_COLLAPSED = wx.wxEVT_COMMAND_TREE_ITEM_COLLAPSED
wxEVT_TREE_ITEM_COLLAPSING = wx.wxEVT_COMMAND_TREE_ITEM_COLLAPSING
wxEVT_TREE_SEL_CHANGED = wx.wxEVT_COMMAND_TREE_SEL_CHANGED
wxEVT_TREE_SEL_CHANGING = wx.wxEVT_COMMAND_TREE_SEL_CHANGING
wxEVT_TREE_KEY_DOWN = wx.wxEVT_COMMAND_TREE_KEY_DOWN
wxEVT_TREE_ITEM_ACTIVATED = wx.wxEVT_COMMAND_TREE_ITEM_ACTIVATED
wxEVT_TREE_ITEM_RIGHT_CLICK = wx.wxEVT_COMMAND_TREE_ITEM_RIGHT_CLICK
wxEVT_TREE_ITEM_MIDDLE_CLICK = wx.wxEVT_COMMAND_TREE_ITEM_MIDDLE_CLICK
wxEVT_TREE_END_DRAG = wx.wxEVT_COMMAND_TREE_END_DRAG
wxEVT_TREE_STATE_IMAGE_CLICK = wx.wxEVT_COMMAND_TREE_STATE_IMAGE_CLICK
wxEVT_TREE_ITEM_GETTOOLTIP = wx.wxEVT_COMMAND_TREE_ITEM_GETTOOLTIP
wxEVT_TREE_ITEM_MENU = wx.wxEVT_COMMAND_TREE_ITEM_MENU
wxEVT_TREE_ITEM_CHECKING = wx.NewEventType()
wxEVT_TREE_ITEM_CHECKED = wx.NewEventType()
wxEVT_TREE_ITEM_HYPERLINK = wx.NewEventType()
EVT_TREE_BEGIN_DRAG = wx.EVT_TREE_BEGIN_DRAG
EVT_TREE_BEGIN_RDRAG = wx.EVT_TREE_BEGIN_RDRAG
EVT_TREE_BEGIN_LABEL_EDIT = wx.EVT_TREE_BEGIN_LABEL_EDIT
EVT_TREE_END_LABEL_EDIT = wx.EVT_TREE_END_LABEL_EDIT
EVT_TREE_DELETE_ITEM = wx.EVT_TREE_DELETE_ITEM
EVT_TREE_GET_INFO = wx.EVT_TREE_GET_INFO
EVT_TREE_SET_INFO = wx.EVT_TREE_SET_INFO
EVT_TREE_ITEM_EXPANDED = wx.EVT_TREE_ITEM_EXPANDED
EVT_TREE_ITEM_EXPANDING = wx.EVT_TREE_ITEM_EXPANDING
EVT_TREE_ITEM_COLLAPSED = wx.EVT_TREE_ITEM_COLLAPSED
EVT_TREE_ITEM_COLLAPSING = wx.EVT_TREE_ITEM_COLLAPSING
EVT_TREE_SEL_CHANGED = wx.EVT_TREE_SEL_CHANGED
EVT_TREE_SEL_CHANGING = wx.EVT_TREE_SEL_CHANGING
EVT_TREE_KEY_DOWN = wx.EVT_TREE_KEY_DOWN
EVT_TREE_ITEM_ACTIVATED = wx.EVT_TREE_ITEM_ACTIVATED
EVT_TREE_ITEM_RIGHT_CLICK = wx.EVT_TREE_ITEM_RIGHT_CLICK
EVT_TREE_ITEM_MIDDLE_CLICK = wx.EVT_TREE_ITEM_MIDDLE_CLICK
EVT_TREE_END_DRAG = wx.EVT_TREE_END_DRAG
EVT_TREE_STATE_IMAGE_CLICK = wx.EVT_TREE_STATE_IMAGE_CLICK
EVT_TREE_ITEM_GETTOOLTIP = wx.EVT_TREE_ITEM_GETTOOLTIP
EVT_TREE_ITEM_MENU = wx.EVT_TREE_ITEM_MENU
EVT_TREE_ITEM_CHECKING = wx.PyEventBinder(wxEVT_TREE_ITEM_CHECKING, 1)
EVT_TREE_ITEM_CHECKED = wx.PyEventBinder(wxEVT_TREE_ITEM_CHECKED, 1)
EVT_TREE_ITEM_HYPERLINK = wx.PyEventBinder(wxEVT_TREE_ITEM_HYPERLINK, 1)
def GetFlaggedData():
    """
    Return the zlib-compressed PNG bytes for the 'flagged' check image
    (used by GetFlaggedImage/GetFlaggedBitmap below; per
    GenericTreeItem.GetCurrentCheckedImage this is the checked-radio-button
    state).
    """
    # The literal below is binary image data; the backslashes at end of line
    # are continuations *inside* the string, so these lines must stay at
    # column 0 and must not be edited.
    return zlib.decompress(
'x\xda\x012\x02\xcd\xfd\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\r\x00\
\x00\x00\r\x08\x06\x00\x00\x00r\xeb\xe4|\x00\x00\x00\x04sBIT\x08\x08\x08\x08\
|\x08d\x88\x00\x00\x01\xe9IDAT(\x91u\x92\xd1K\xd3a\x14\x86\x9f\xef|J2J\xc3%\
\x85\x8e\x1cb\x93Hl\xd9,\x06F]4\x10\tD3\x83\x88\xc8\xbf\xc0\xb4\xaeBP1\xe9\
\xa2(\xec\xaan\xc3\x82pD\xa1\x84\xb0\x88@3\x8c\xc9\xa2bT\xa2^\x8c\x81V3\xb6\
\xb5\x9c\xce9\xbe.j\xb20\xdf\xeb\xf7\xe19\x05\xa5D\x93\x9f\x9ea\xbf\t\x04\
\xbf\x12\x8b[\xd8Kl\xf8<.\xeet\xb5\xab\xfc\x8e\xca\x87*ZzM\xf3\xb1j|G\xab\
\xf0\xd4\x94\x13\x9a_&0\xbb\xc8\xd8\xf4g\xa2\xcfo\xa8-P\xc7\xf5\x07\xa6\xedD\
\r\x8d\xb5\xfb\x11\x11\xb4\xd6\x88h\xb4\xd6L}\x8a\xf0\xe4\xd5G\x1e\rt*\x00\
\xc9\x19\xb6\x03D4\xa7\xdcU\\8\xed\xa6\xa2\xa5\xd7\x00\xe8\xab\xf7\x9e\x9a\
\xca\xb2\x9d\\\xf2\xd5!"dT\x86\xc9\xe4\x14\x83s\x83HF\xe3\xdc\xe5\xa4\xa8\
\xb0\x88\xaa\xf2=D\x7f$il>\xdf\xafSe\xf5\xfd\x9dM\x87\xa9\xdc\xb7\x1b\xad5\
\x93\xc9)\xfc\xe9Q\x12\xe9\x04\x13\x0b\x13\x94\xaaR\xdc{\x8f "\xec(,\xe0\xfe\
\xb3\xb7H,a\xe1\xa9)\xdf<e$2Ble\x85\x94e\xb1\x96\xcep\xfb\xdd-D\x04\xa5\x14\
\xdeZ\'\xb1\x84\x85\xd8\x8bm\x84\xe6\x977\x7f8kog)\xba\xc4\xb7\xe5\xef$\xe2?\
\xe9\xa9\xbf\x86R\n\x11a&\x1c\xc1^lC|\r.\x02\xb3\x8b\x9b\xa6&G\x13W\xaa\xbb\
\x91_\x05\x0c\x1d\xbfI\xc7\xa1\x8e\xbf&a|:\x8c\xaf\xc1\x05J4\x8e\xd6>36\x192\
\xc9d\xdc\xa4RI\xb3\xbaj\x99tz\xcd\xac\xaf\xa7\xcd\xc6F\xc6d\xb3Y\xf32\xf8\
\xc58Z\xfb\x8c\x12\xfd\x07R\xa2\xb98\xf0\xd0\xbcx\xf3a[\xe0\xf2\xd0c\x93\xeb\
nYD\xdb\xc9:\xcex\x0f\xe2\xadu2\x13\x8e0>\x1d\xc6\xff\xfa\xfd\xff\x17\x91K\
\xf7\xf0\xa8\t\x04\xe7X\x89[\x94\x96\xd8\xf0y\x0ep\xb7\xeb\xdc?\xdb\xfb\r|\
\xd0\xd1]\x98\xbdm\xdc\x00\x00\x00\x00IEND\xaeB`\x82\x91\xe2\x08\x8f' )
def GetFlaggedBitmap():
    """Return the 'flagged' (checked radio button) artwork as a wx.Bitmap."""
    flagged = GetFlaggedImage()
    return wx.BitmapFromImage(flagged)
def GetFlaggedImage():
    """Return the 'flagged' (checked radio button) artwork as a wx.Image."""
    return wx.ImageFromStream(cStringIO.StringIO(GetFlaggedData()))
#----------------------------------------------------------------------
def GetNotFlaggedData():
    """
    Return the zlib-compressed PNG bytes for the 'not flagged' check image
    (per GenericTreeItem.GetCurrentCheckedImage, the unchecked radio-button
    state).
    """
    # Binary image data: trailing backslashes are continuations inside the
    # string literal, so these lines must stay at column 0 untouched.
    return zlib.decompress(
'x\xda\x01\xad\x01R\xfe\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\r\x00\
\x00\x00\r\x08\x06\x00\x00\x00r\xeb\xe4|\x00\x00\x00\x04sBIT\x08\x08\x08\x08\
|\x08d\x88\x00\x00\x01dIDAT(\x91\x95\xd21K\x82a\x14\x86\xe1\xe7=\xef798\xb8\
\x89\x0e"|Cd\x94\x88\x83\x065\x88\x108\x88Q\x8b-\xd1\x1f\x88\x9a\n\x04\x11j\
\x8eh\x08\xdaZ\x84(\x82\xc2 0\xc1 $\xb4P\xa1\x10\x11D\xb061\xd4\xd4\xcc\xe44\
\x84 \xa8Hg~.\xcer\x0bA\x12\x83\xb7ux\xce\xd1T\x01\xd5z\x0b:\xad\x06n\xbb\
\x8a\x83\xcdU1\xb8\x11\x83\xc8\xe0\r\xf0\x92\xdd\x0c\x97\xd5\x04\x9b\xaaG\
\xb6XA,]B\xe41\x8f\xf7\xab=1\x84Vv\x8e\xd97\xaf\xc29m\x04\x91\x84\x94\n\xa4\
\x94P\x14\x05\x89\xd77\x9c\xc5_\x10\x0em\x08\x00\xa0\xfe\x87q@J\x89\xc593\
\xfc\xaeY\x18\xbc\x01\x06\x00\xb1}t\xc9\xf5F\x03\x01\xbfs$ \x92 "\x10I\xec\
\x9e\xdcBQ\x08\x14M\x15\xe0\xb2\x9a&\x02"\x82\xc71\x85h\xaa\x00\xaa\xd6[\xb0\
\xa9\xfa\x89\x80\x88\xe0\xb0\x98P\xad\xb7@:\xad\x06\xd9be" "$se\xe8\xb4\x1a\
\x90\xdb\xae"\x96.M\x04D\x84H"\x07\xb7]\x05\x04I\x18}A\xbe\xbe\x7f\xe6Z\xed\
\x83\x1b\x8d\x1a7\x9b\x9f\xdcn\xb7\xb8\xd3\xf9\xe2n\xf7\x9b{\xbd\x1f\xbe{\
\xca\xb3\xd1\x17dA\xf2\x0f\t\x92X\x0b\x9d\xf2\xcdCf,X\xdf\x0fs\x7f;T\xc4\xf2\
\xc2\x0c<\x8e)8,&$seD\x129\\\xc43\xa3\x8b\xf8O{\xbf\xf1\xb5\xa5\x990\x0co\
\xd6\x00\x00\x00\x00IEND\xaeB`\x82&\x11\xab!' )
def GetNotFlaggedBitmap():
    """Return the 'not flagged' (unchecked radio button) artwork as a wx.Bitmap."""
    unflagged = GetNotFlaggedImage()
    return wx.BitmapFromImage(unflagged)
def GetNotFlaggedImage():
    """Return the 'not flagged' (unchecked radio button) artwork as a wx.Image."""
    return wx.ImageFromStream(cStringIO.StringIO(GetNotFlaggedData()))
#----------------------------------------------------------------------
def GetCheckedData():
    """
    Return the zlib-compressed PNG bytes for the checked checkbox image
    (per GenericTreeItem.GetCurrentCheckedImage, the checked state of a
    checkbox item).
    """
    # Binary image data: trailing backslashes are continuations inside the
    # string literal, so these lines must stay at column 0 untouched.
    return zlib.decompress(
"x\xda\xeb\x0c\xf0s\xe7\xe5\x92\xe2b``\xe0\xf5\xf4p\t\x02\xd1 \xcc\xc1\x06$\
\x8b^?\xa9\x01R,\xc5N\x9e!\x1c@P\xc3\x91\xd2\x01\xe4\xaf\xf4tq\x0c\xd1\x98\
\x98<\x853\xe7\xc7y\x07\xa5\x84\xc4\x84\x84\x04\x0b3C1\xbd\x03'N\x1c9p\x84\
\xe5\xe0\x993gx||\xce\x14\xcc\xea\xec\xect4^7\xbf\x91\xf3&\x8b\x93\xd4\x8c\
\x19\n\xa7fv\\L\xd8p\x90C\xebx\xcf\x05\x17\x0ff \xb8c\xb6Cm\x06\xdb\xea\xd8\
\xb2\x08\xd3\x03W\x0c\x8c\x8c\x16e%\xa5\xb5E\xe4\xee\xba\xca\xe4|\xb8\xb7\
\xe35OOO\xcf\n\xb3\x83>m\x8c1R\x12\x92\x81s\xd8\x0b/\xb56\x14k|l\\\xc7x\xb4\
\xf2\xc4\xc1*\xd5'B~\xbc\x19uNG\x98\x85\x85\x8d\xe3x%\x16\xb2_\xee\xf1\x07\
\x99\xcb\xacl\x99\xc9\xcf\xb0\xc0_.\x87+\xff\x99\x05\xd0\xd1\x0c\x9e\xae~.\
\xeb\x9c\x12\x9a\x00\x92\xccS\x9f" )
def GetCheckedBitmap():
    """Return the checked-checkbox artwork as a wx.Bitmap."""
    checked = GetCheckedImage()
    return wx.BitmapFromImage(checked)
def GetCheckedImage():
    """Return the checked-checkbox artwork as a wx.Image."""
    return wx.ImageFromStream(cStringIO.StringIO(GetCheckedData()))
#----------------------------------------------------------------------
def GetNotCheckedData():
    """
    Return the zlib-compressed PNG bytes for the unchecked checkbox image
    (per GenericTreeItem.GetCurrentCheckedImage, the unchecked state of a
    checkbox item).
    """
    # Binary image data: trailing backslashes are continuations inside the
    # string literal, so these lines must stay at column 0 untouched.
    return zlib.decompress(
"x\xda\xeb\x0c\xf0s\xe7\xe5\x92\xe2b``\xe0\xf5\xf4p\t\x02\xd1 \xcc\xc1\x06$\
\x8b^?\xa9\x01R,\xc5N\x9e!\x1c@P\xc3\x91\xd2\x01\xe4\xe7z\xba8\x86hL\x9c{\
\xe9 o\x83\x01\x07\xeb\x85\xf3\xed\x86w\x0ed\xdaT\x96\x8a\xbc\x9fw\xe7\xc4\
\xd9/\x01\x8b\x97\x8a\xd7\xab*\xfar\xf0Ob\x93^\xf6\xd5%\x9d\x85A\xe6\xf6\x1f\
\x11\x8f{/\x0b\xf8wX+\x9d\xf2\xb6:\x96\xca\xfe\x9a3\xbeA\xe7\xed\x1b\xc6%\
\xfb=X3'sI-il\t\xb9\xa0\xc0;#\xd4\x835m\x9a\xf9J\x85\xda\x16.\x86\x03\xff\
\xee\xdcc\xdd\xc0\xce\xf9\xc8\xcc(\xbe\x1bh1\x83\xa7\xab\x9f\xcb:\xa7\x84&\
\x00\x87S=\xbe" )
def GetNotCheckedBitmap():
    """Return the unchecked-checkbox artwork as a wx.Bitmap."""
    unchecked = GetNotCheckedImage()
    return wx.BitmapFromImage(unchecked)
def GetNotCheckedImage():
    """Return the unchecked-checkbox artwork as a wx.Image."""
    return wx.ImageFromStream(cStringIO.StringIO(GetNotCheckedData()))
def GrayOut(anImage):
    """
    Convert the given image (in place) to a grayed-out version,
    appropriate for a 'disabled' appearance, and return it.
    """
    factor = 0.7  # 0 < f < 1. Higher Is Grayer

    # Pixels matching the mask colour (if any) must be left untouched.
    maskColor = None
    if anImage.HasMask():
        maskColor = (anImage.GetMaskRed(), anImage.GetMaskGreen(), anImage.GetMaskBlue())

    # Raw RGB triplets, one byte per channel.
    data = map(ord, list(anImage.GetData()))

    for i in range(0, len(data), 3):
        grayed = MakeGray((data[i], data[i+1], data[i+2]), factor, maskColor)
        data[i], data[i+1], data[i+2] = grayed[0], grayed[1], grayed[2]

    anImage.SetData(''.join(map(chr, data)))
    return anImage
def MakeGray(rgb, factor, maskColor):
    """
    Make a pixel grayed-out. If the pixel matches the maskColor, it won't be
    changed.

    rgb: an (r, g, b) tuple of channel values.
    factor: gray factor, 0 < factor < 1 (higher is grayer).
    maskColor: an (r, g, b) tuple to leave untouched, or None.

    Returns a list of the three grayed channels, or the original tuple when
    it equals maskColor.
    """
    # The original signature used tuple parameter unpacking
    # (``def MakeGray((r, g, b), ...)``), a Python-2-only feature removed by
    # PEP 3113; unpack explicitly instead.  Callers are unaffected: they
    # already pass the pixel as a single tuple.
    r, g, b = rgb
    if (r, g, b) != maskColor:
        # Pull each channel towards 230 by the given factor.
        return [int((230 - x) * factor) + x for x in (r, g, b)]
    else:
        return (r, g, b)
def DrawTreeItemButton(win, dc, rect, flags):
    """
    A simple replacement of wx.RendererNative.DrawTreeItemButton: draws a
    boxed "+" (collapsed) or "-" (expanded) button inside rect on dc.
    """
    # White box with a grey outline.
    dc.SetPen(wx.GREY_PEN)
    dc.SetBrush(wx.WHITE_BRUSH)
    dc.DrawRectangleRect(rect)

    xCentre = rect.x + rect.width/2
    yCentre = rect.y + rect.height/2
    halfW = rect.width/2 - 2   # half length of the horizontal stroke

    # The horizontal stroke is common to both "-" and "+".
    dc.SetPen(wx.BLACK_PEN)
    dc.DrawLine(xCentre - halfW, yCentre, xCentre + halfW + 1, yCentre)

    if not flags & _CONTROL_EXPANDED:
        # Collapsed: add the vertical stroke, turning "-" into "+".
        halfH = rect.height/2 - 2
        dc.DrawLine(xCentre, yCentre - halfH, xCentre, yCentre + halfH + 1)
#---------------------------------------------------------------------------
# DragImage Implementation
# This Class Handles The Creation Of A Custom Image In Case Of Item Drag
# And Drop.
#---------------------------------------------------------------------------
class DragImage(wx.DragImage):
    """
    This class handles the creation of a custom image in case of item drag
    and drop: it renders the dragged item's check image, normal image and
    label into a single bitmap.
    """

    def __init__(self, treeCtrl, item):
        """
        Default class constructor.
        For internal use: do not call it in your code!

        treeCtrl: the owning tree control (supplies fonts, colours, image lists).
        item: the GenericTreeItem being dragged.
        """
        text = item.GetText()
        font = item.Attr().GetFont()
        colour = item.Attr().GetTextColour()
        if not colour:
            colour = wx.BLACK
        if not font:
            font = treeCtrl._normalFont

        # Background: halved-and-lightened version of the control background.
        backcolour = treeCtrl.GetBackgroundColour()
        r, g, b = int(backcolour.Red()), int(backcolour.Green()), int(backcolour.Blue())
        backcolour = ((r >> 1) + 20, (g >> 1) + 20, (b >> 1) + 20)
        backcolour = wx.Colour(backcolour[0], backcolour[1], backcolour[2])
        self._backgroundColour = backcolour

        # Measure the label (plus an "M" of slack) with the item font.
        tempdc = wx.ClientDC(treeCtrl)
        tempdc.SetFont(font)
        width, height, dummy = tempdc.GetMultiLineTextExtent(text + "M")

        image = item.GetCurrentImage()

        image_w, image_h = 0, 0
        wcheck, hcheck = 0, 0
        itemcheck = None
        itemimage = None
        ximagepos = 0
        yimagepos = 0
        xcheckpos = 0
        ycheckpos = 0

        if image != _NO_IMAGE:
            if treeCtrl._imageListNormal:
                image_w, image_h = treeCtrl._imageListNormal.GetSize(image)
                image_w += 4  # 4px padding after the image
                itemimage = treeCtrl._imageListNormal.GetBitmap(image)

        checkimage = item.GetCurrentCheckedImage()

        if checkimage is not None:
            if treeCtrl._imageListCheck:
                wcheck, hcheck = treeCtrl._imageListCheck.GetSize(checkimage)
                wcheck += 4  # 4px padding after the check image
                itemcheck = treeCtrl._imageListCheck.GetBitmap(checkimage)

        total_h = max(hcheck, height)
        total_h = max(image_h, total_h)

        # Layout order left-to-right: check image, normal image, label;
        # each vertically centred in total_h (the and/or idiom below is the
        # pre-ternary Python spelling of "a if cond else b").
        if image_w:
            ximagepos = wcheck
            yimagepos = ((total_h > image_h) and [(total_h-image_h)/2] or [0])[0]

        if checkimage is not None:
            xcheckpos = 2
            ycheckpos = ((total_h > image_h) and [(total_h-image_h)/2] or [0])[0] + 2

        extraH = ((total_h > height) and [(total_h - height)/2] or [0])[0]

        xtextpos = wcheck + image_w
        ytextpos = extraH

        total_h = max(image_h, hcheck)
        total_h = max(total_h, height)

        if total_h < 30:
            total_h += 2            # at least 2 pixels
        else:
            total_h += total_h/10   # otherwise 10% extra spacing

        total_w = image_w + wcheck + width

        self._total_w = total_w
        self._total_h = total_h
        self._itemimage = itemimage
        self._itemcheck = itemcheck
        self._text = text
        self._colour = colour
        self._font = font
        self._xtextpos = xtextpos
        self._ytextpos = ytextpos
        self._ximagepos = ximagepos
        self._yimagepos = yimagepos
        self._xcheckpos = xcheckpos
        self._ycheckpos = ycheckpos
        self._textwidth = width
        self._textheight = height
        self._extraH = extraH

        self._bitmap = self.CreateBitmap()

        wx.DragImage.__init__(self, self._bitmap)

    def CreateBitmap(self):
        """Actually creates the dnd bitmap by drawing into a memory DC."""
        memory = wx.MemoryDC()

        bitmap = wx.EmptyBitmap(self._total_w, self._total_h)
        memory.SelectObject(bitmap)

        memory.SetTextBackground(self._backgroundColour)
        memory.SetBackground(wx.Brush(self._backgroundColour))
        memory.SetFont(self._font)
        memory.SetTextForeground(self._colour)
        memory.Clear()

        if self._itemimage:
            memory.DrawBitmap(self._itemimage, self._ximagepos, self._yimagepos, True)

        if self._itemcheck:
            memory.DrawBitmap(self._itemcheck, self._xcheckpos, self._ycheckpos, True)

        # NOTE(review): _ytextpos was already set to extraH in __init__, so
        # adding _extraH again looks like a double offset — confirm against
        # upstream before changing.
        textrect = wx.Rect(self._xtextpos, self._ytextpos+self._extraH, self._textwidth, self._textheight)
        memory.DrawLabel(self._text, textrect)

        memory.SelectObject(wx.NullBitmap)

        return bitmap
# ----------------------------------------------------------------------------
# TreeItemAttr: a structure containing the visual attributes of an item
# ----------------------------------------------------------------------------
class TreeItemAttr:
    """
    Bundles the visual attributes of a tree item: text colour, background
    colour and font.  An attribute is "unset" while it still holds the
    corresponding wx null value.
    """

    def __init__(self, colText=wx.NullColour, colBack=wx.NullColour, font=wx.NullFont):
        """
        Default class constructor.
        For internal use: do not call it in your code!
        """
        self._colText = colText
        self._colBack = colBack
        self._font = font

    # -- text colour ---------------------------------------------------
    def SetTextColour(self, colText):
        """Sets the attribute text colour."""
        self._colText = colText

    def GetTextColour(self):
        """Returns the attribute text colour."""
        return self._colText

    def HasTextColour(self):
        """Returns True when an explicit text colour has been set."""
        return self._colText != wx.NullColour

    # -- background colour ---------------------------------------------
    def SetBackgroundColour(self, colBack):
        """Sets the attribute background colour."""
        self._colBack = colBack

    def GetBackgroundColour(self):
        """Returns the attribute background colour."""
        return self._colBack

    def HasBackgroundColour(self):
        """Returns True when an explicit background colour has been set."""
        return self._colBack != wx.NullColour

    # -- font ----------------------------------------------------------
    def SetFont(self, font):
        """Sets the attribute font."""
        self._font = font

    def GetFont(self):
        """Returns the attribute font."""
        return self._font

    def HasFont(self):
        """Returns True when an explicit font has been set."""
        return self._font != wx.NullFont
# ----------------------------------------------------------------------------
# CommandTreeEvent Is A Special Subclassing Of wx.PyCommandEvent
#
# NB: Note That Not All The Accessors Make Sense For All The Events, See The
# Event Description Below.
# ----------------------------------------------------------------------------
class CommandTreeEvent(wx.PyCommandEvent):
    """
    CommandTreeEvent is a special subclassing of wx.PyCommandEvent.
    NB: note that not all the accessors make sense for all the events, see the
    event description for every method in this class.
    """

    def __init__(self, type, id, item=None, evtKey=None, point=None,
                 label=None, **kwargs):
        """
        Default class constructor.
        For internal use: do not call it in your code!
        """
        wx.PyCommandEvent.__init__(self, type, id, **kwargs)
        self._item = item
        self._evtKey = evtKey
        self._pointDrag = point
        self._label = label
        # BUGFIX: these two were previously only created by SetOldItem() /
        # SetEditCanceled(), so calling GetOldItem() or IsEditCancelled()
        # before the setter raised AttributeError.  Give them sane defaults.
        self._itemOld = None
        self._editCancelled = False

    def GetItem(self):
        """
        Gets the item on which the operation was performed or the newly selected
        item for EVT_TREE_SEL_CHANGED/ING events.
        """
        return self._item

    def SetItem(self, item):
        """
        Sets the item on which the operation was performed or the newly selected
        item for EVT_TREE_SEL_CHANGED/ING events.
        """
        self._item = item

    def GetOldItem(self):
        """For EVT_TREE_SEL_CHANGED/ING events, gets the previously selected item."""
        return self._itemOld

    def SetOldItem(self, item):
        """For EVT_TREE_SEL_CHANGED/ING events, sets the previously selected item."""
        self._itemOld = item

    def GetPoint(self):
        """
        Returns the point where the mouse was when the drag operation started
        (for EVT_TREE_BEGIN(R)DRAG events only) or the click position.
        """
        return self._pointDrag

    def SetPoint(self, pt):
        """
        Sets the point where the mouse was when the drag operation started
        (for EVT_TREE_BEGIN(R)DRAG events only) or the click position.
        """
        self._pointDrag = pt

    def GetKeyEvent(self):
        """Keyboard data (for EVT_TREE_KEY_DOWN only)."""
        return self._evtKey

    def GetKeyCode(self):
        """Returns the integer key code (for EVT_TREE_KEY_DOWN only)."""
        return self._evtKey.GetKeyCode()

    def SetKeyEvent(self, evt):
        """Keyboard data (for EVT_TREE_KEY_DOWN only)."""
        self._evtKey = evt

    def GetLabel(self):
        """Returns the label-itemtext (for EVT_TREE_BEGIN|END_LABEL_EDIT only)."""
        return self._label

    def SetLabel(self, label):
        """Sets the label-itemtext (for EVT_TREE_BEGIN|END_LABEL_EDIT only)."""
        self._label = label

    def IsEditCancelled(self):
        """Returns the edit cancel flag (for EVT_TREE_BEGIN|END_LABEL_EDIT only)."""
        return self._editCancelled

    def SetEditCanceled(self, editCancelled):
        """Sets the edit cancel flag (for EVT_TREE_BEGIN|END_LABEL_EDIT only)."""
        self._editCancelled = editCancelled

    def SetToolTip(self, toolTip):
        """Sets the tooltip for the item (for EVT_TREE_ITEM_GETTOOLTIP events)."""
        self._label = toolTip

    def GetToolTip(self):
        """Gets the tooltip for the item (for EVT_TREE_ITEM_GETTOOLTIP events)."""
        return self._label
# ----------------------------------------------------------------------------
# TreeEvent is a special class for all events associated with tree controls
#
# NB: note that not all accessors make sense for all events, see the event
# descriptions below
# ----------------------------------------------------------------------------
class TreeEvent(CommandTreeEvent):
    """
    A CommandTreeEvent that additionally wraps a wx.NotifyEvent, so that
    handlers can veto or allow the action being notified.
    """

    def __init__(self, type, id, item=None, evtKey=None, point=None,
                 label=None, **kwargs):
        """
        Default class constructor.
        For internal use: do not call it in your code!
        """
        CommandTreeEvent.__init__(self, type, id, item, evtKey, point, label, **kwargs)
        self.notify = wx.NotifyEvent(type, id)

    def GetNotifyEvent(self):
        """Returns the actual wx.NotifyEvent."""
        return self.notify

    def IsAllowed(self):
        """Returns whether the event is allowed or not."""
        return self.notify.IsAllowed()

    def Allow(self):
        """Explicitly allows the event."""
        self.notify.Allow()

    def Veto(self):
        """Vetos the event."""
        self.notify.Veto()
# -----------------------------------------------------------------------------
# Auxiliary Classes: TreeRenameTimer
# -----------------------------------------------------------------------------
class TreeRenameTimer(wx.Timer):
    """Timer used for enabling in-place edit of an item label."""

    def __init__(self, owner):
        """
        Default class constructor.
        For internal use: do not call it in your code!
        """
        wx.Timer.__init__(self)
        self._owner = owner

    def Notify(self):
        """Called by wx when the timer expires: start the rename."""
        self._owner.OnRenameTimer()
# -----------------------------------------------------------------------------
# Auxiliary Classes: TreeTextCtrl
# This Is The Temporary wx.TextCtrl Created When You Edit The Text Of An Item
# -----------------------------------------------------------------------------
class TreeTextCtrl(wx.TextCtrl):
    """
    Control used for in-place edit of an item label.  It positions itself over
    the edited item, grows as the user types, and notifies the owner tree when
    editing is accepted or cancelled.
    """

    def __init__(self, owner, item=None):
        """
        Default class constructor.
        For internal use: do not call it in your code!

        owner: the tree control hosting the edit.
        item: the GenericTreeItem whose label is being edited.
        """
        self._owner = owner
        self._itemEdited = item
        self._startValue = item.GetText()
        self._finished = False
        self._aboutToFinish = False

        w = self._itemEdited.GetWidth()
        h = self._itemEdited.GetHeight()

        wnd = self._itemEdited.GetWindow()
        if wnd:
            # Leave room for the item's associated window.
            w = w - self._itemEdited.GetWindowSize()[0]
            h = 0

        x, y = self._owner.CalcScrolledPosition(item.GetX(), item.GetY())

        image_h = 0
        image_w = 0

        image = item.GetCurrentImage()

        if image != _NO_IMAGE:
            if self._owner._imageListNormal:
                image_w, image_h = self._owner._imageListNormal.GetSize(image)
                image_w += 4
            else:
                raise Exception("\n ERROR: You Must Create An Image List To Use Images!")

        checkimage = item.GetCurrentCheckedImage()

        if checkimage is not None:
            wcheck, hcheck = self._owner._imageListCheck.GetSize(checkimage)
            wcheck += 4
        else:
            # BUGFIX: hcheck was left undefined on this branch, making the
            # max(hcheck, image_h) below raise NameError whenever the edited
            # item had an associated window but no check image.
            wcheck = hcheck = 0

        if wnd:
            h = max(hcheck, image_h)
            dc = wx.ClientDC(self._owner)
            h = max(h, dc.GetTextExtent("Aq")[1])
            h = h + 2

        # FIXME: what are all these hardcoded 4, 8 and 11s really?
        x += image_w + wcheck
        w -= image_w + 4 + wcheck

        wx.TextCtrl.__init__(self, self._owner, wx.ID_ANY, self._startValue,
                             wx.Point(x - 4, y), wx.Size(w + 15, h))

        if wx.Platform == "__WXMAC__":
            self.SetFont(owner.GetFont())
            bs = self.GetBestSize()
            self.SetSize((-1, bs.height))

        self.Bind(wx.EVT_CHAR, self.OnChar)
        self.Bind(wx.EVT_KEY_UP, self.OnKeyUp)
        self.Bind(wx.EVT_KILL_FOCUS, self.OnKillFocus)

    def AcceptChanges(self):
        """
        Accepts/refuses the changes made by the user.  Returns True when the
        edit was accepted (or nothing changed), False when vetoed.
        """
        value = self.GetValue()

        if value == self._startValue:
            # nothing changed, always accept
            # when an item remains unchanged, the owner
            # needs to be notified that the user decided
            # not to change the tree item label, and that
            # the edit has been cancelled
            self._owner.OnRenameCancelled(self._itemEdited)
            return True

        if not self._owner.OnRenameAccept(self._itemEdited, value):
            # vetoed by the user
            return False

        # accepted, do rename the item
        self._owner.SetItemText(self._itemEdited, value)

        return True

    def Finish(self):
        """Finish editing and hand focus back to the owner tree."""
        if not self._finished:
            ## wxPendingDelete.Append(this)
            self._finished = True
            self._owner.SetFocusIgnoringChildren()
            self._owner.ResetTextControl()

    def OnChar(self, event):
        """Handles the wx.EVT_CHAR event for TreeTextCtrl."""
        keycode = event.GetKeyCode()

        if keycode == wx.WXK_RETURN:
            self._aboutToFinish = True
            # Notify the owner about the changes
            self.AcceptChanges()
            # Even if vetoed, close the control (consistent with MSW)
            wx.CallAfter(self.Finish)

        elif keycode == wx.WXK_ESCAPE:
            self.StopEditing()

        else:
            event.Skip()

    def OnKeyUp(self, event):
        """Handles the wx.EVT_KEY_UP event for TreeTextCtrl."""
        if not self._finished:
            # auto-grow the textctrl:
            parentSize = self._owner.GetSize()
            myPos = self.GetPosition()
            mySize = self.GetSize()

            sx, sy = self.GetTextExtent(self.GetValue() + "M")
            if myPos.x + sx > parentSize.x:
                sx = parentSize.x - myPos.x
            if mySize.x > sx:
                sx = mySize.x

            self.SetSize((sx, -1))

        event.Skip()

    def OnKillFocus(self, event):
        """Handles the wx.EVT_KILL_FOCUS event for TreeTextCtrl."""
        # I commented out those lines, and everything seems to work fine.
        # But why in the world are these lines of code here? Maybe GTK
        # or MAC give troubles?

        ## if not self._finished and not self._aboutToFinish:
        ##
        ##     # We must finish regardless of success, otherwise we'll get
        ##     # focus problems:
        ##
        ##     if not self.AcceptChanges():
        ##         self._owner.OnRenameCancelled(self._itemEdited)

        # We must let the native text control handle focus, too, otherwise
        # it could have problems with the cursor (e.g., in wxGTK).
        event.Skip()

    def StopEditing(self):
        """Suddenly stops the editing."""
        self._owner.OnRenameCancelled(self._itemEdited)
        self.Finish()

    def item(self):
        """Returns the item currently edited."""
        return self._itemEdited
# -----------------------------------------------------------------------------
# Auxiliary Classes: TreeFindTimer
# Timer Used To Clear CustomTreeCtrl._findPrefix If No Key Was Pressed For A
# Sufficiently Long Time.
# -----------------------------------------------------------------------------
class TreeFindTimer(wx.Timer):
    """
    Timer used to clear CustomTreeCtrl._findPrefix if no key was pressed
    for a sufficiently long time.
    """

    def __init__(self, owner):
        """
        Default class constructor.
        For internal use: do not call it in your code!
        """
        wx.Timer.__init__(self)
        self._owner = owner

    def Notify(self):
        """The incremental-search prefix has gone stale: discard it."""
        self._owner._findPrefix = ""
# -----------------------------------------------------------------------------
# GenericTreeItem Implementation.
# This Class Holds All The Information And Methods For Every Single Item In
# CustomTreeCtrl.
# -----------------------------------------------------------------------------
class GenericTreeItem:
    """
    This class holds all the information and methods for every single item in
    CustomTreeCtrl. No wx based.
    """

    def __init__(self, parent, text="", ct_type=0, wnd=None, image=-1, selImage=-1, data=None):
        """
        Default class constructor.
        For internal use: do not call it in your code!

        parent: the parent GenericTreeItem (or None for the root).
        text: item label.
        ct_type: 0=normal, 1=checkbox, 2=radiobutton.
        wnd: optional wx window associated with the item.
        image/selImage: image-list indices for normal/selected states.
        data: arbitrary user data.
        """
        # since there can be very many of these, we save size by chosing
        # the smallest representation for the elements and by ordering
        # the members to avoid padding.
        assert isinstance(text, types.StringTypes)
        self._text = text       # label to be rendered for item
        self._data = data       # user-provided data
        self._children = []     # list of children
        self._parent = parent   # parent of this item
        self._attr = None       # attributes???

        # tree ctrl images for the normal, selected, expanded and
        # expanded+selected states
        self._images = [-1, -1, -1, -1]
        self._images[TreeItemIcon_Normal] = image
        self._images[TreeItemIcon_Selected] = selImage
        self._images[TreeItemIcon_Expanded] = _NO_IMAGE
        self._images[TreeItemIcon_SelectedExpanded] = _NO_IMAGE

        self._checkedimages = [None, None, None, None]

        self._x = 0             # (virtual) offset from top
        self._y = 0             # (virtual) offset from left
        self._width = 0         # width of this item
        self._height = 0        # height of this item

        self._isCollapsed = True
        self._hasHilight = False    # same as focused
        self._hasPlus = False       # used for item which doesn't have
                                    # children but has a [+] button
        self._isBold = False        # render the label in bold font
        self._isItalic = False      # render the label in italic font
        self._ownsAttr = False      # delete attribute when done
        self._type = ct_type        # item type: 0=normal, 1=check, 2=radio
        self._checked = False       # only meaningful for check and radio
        self._enabled = True        # flag to enable/disable an item
        self._hypertext = False     # indicates if the item is hypertext
        self._visited = False       # visited state for an hypertext item

        if self._type > 0:
            # do not construct the array for normal items
            # (indices into the tree's check-image list)
            self._checkedimages[TreeItemIcon_Checked] = 0
            self._checkedimages[TreeItemIcon_NotChecked] = 1
            self._checkedimages[TreeItemIcon_Flagged] = 2
            self._checkedimages[TreeItemIcon_NotFlagged] = 3

        if parent:
            if parent.GetType() == 2 and not parent.IsChecked():
                # if the node parent is a radio not enabled, we are disabled
                self._enabled = False

        self._wnd = wnd             # are we holding a window?

        if wnd:
            self.SetWindow(wnd)

    def IsOk(self):
        """
        Returns whether the item is ok or not. Useless on Python, but added for
        backward compatibility with the C++ implementation.
        """
        return True

    def GetChildren(self):
        """Returns the item's children."""
        return self._children

    def GetText(self):
        """Returns the item text."""
        return self._text

    def GetImage(self, which=TreeItemIcon_Normal):
        """Returns the item image for a particular state."""
        return self._images[which]

    def GetCheckedImage(self, which=TreeItemIcon_Checked):
        """Returns the item check image. Meaningful only for radio & check items."""
        return self._checkedimages[which]

    def GetData(self):
        """Returns the data associated to this item."""
        return self._data

    def SetImage(self, image, which):
        """Sets the item image."""
        self._images[which] = image

    def SetData(self, data):
        """Sets the data associated to this item."""
        self._data = data

    def SetHasPlus(self, has=True):
        """Sets whether an item has the 'plus' button."""
        self._hasPlus = has

    def SetBold(self, bold):
        """Sets the item font bold."""
        self._isBold = bold

    def SetItalic(self, italic):
        """Sets the item font italic."""
        self._isItalic = italic

    def GetX(self):
        """Returns the x position on an item in the ScrolledWindow."""
        return self._x

    def GetY(self):
        """Returns the y position on an item in the ScrolledWindow."""
        return self._y

    def SetX(self, x):
        """Sets the x position on an item in the ScrolledWindow."""
        self._x = x

    def SetY(self, y):
        """Sets the y position on an item in the ScrolledWindow."""
        self._y = y

    def GetHeight(self):
        """Returns the height of the item."""
        return self._height

    def GetWidth(self):
        """Returns the width of the item."""
        return self._width

    def SetHeight(self, h):
        """Sets the height of the item."""
        self._height = h

    def SetWidth(self, w):
        """Sets the width of the item."""
        self._width = w

    def SetWindow(self, wnd):
        """Sets the window associated to the item."""
        self._wnd = wnd

        if wnd.GetSizer():      # the window is a complex one hold by a sizer
            size = wnd.GetBestSize()
        else:                   # simple window, without sizers
            size = wnd.GetSize()

        # We have to bind the wx.EVT_SET_FOCUS for the associated window
        # No other solution to handle the focus changing from an item in
        # CustomTreeCtrl and the window associated to an item
        # Do better strategies exist?
        self._wnd.Bind(wx.EVT_SET_FOCUS, self.OnSetFocus)

        self._height = size.GetHeight() + 2
        self._width = size.GetWidth()
        self._windowsize = size

        # We don't show the window if the item is collapsed
        if self._isCollapsed:
            self._wnd.Show(False)

        # The window is enabled only if the item is enabled
        self._wnd.Enable(self._enabled)
        self._windowenabled = self._enabled

    def GetWindow(self):
        """Returns the window associated to the item."""
        return self._wnd

    def DeleteWindow(self):
        """Deletes the window associated to the item (if any)."""
        if self._wnd:
            self._wnd.Destroy()
            self._wnd = None

    def GetWindowEnabled(self):
        """Returns whether the associated window is enabled or not."""
        if not self._wnd:
            raise Exception("\nERROR: This Item Has No Window Associated")

        return self._windowenabled

    def SetWindowEnabled(self, enable=True):
        """Sets whether the associated window is enabled or not."""
        if not self._wnd:
            raise Exception("\nERROR: This Item Has No Window Associated")

        self._windowenabled = enable
        self._wnd.Enable(enable)

    def GetWindowSize(self):
        """Returns the associated window size."""
        return self._windowsize

    def OnSetFocus(self, event):
        """Handles the wx.EVT_SET_FOCUS event for the associated window."""
        treectrl = self._wnd.GetParent()
        select = treectrl.GetSelection()

        # If the window is associated to an item that currently is selected
        # (has focus) we don't kill the focus. Otherwise we do it.
        if select != self:
            treectrl._hasFocus = False
        else:
            treectrl._hasFocus = True

        event.Skip()

    def GetType(self):
        """
        Returns the item type. It should be one of:
        0: normal items
        1: checkbox item
        2: radiobutton item
        """
        return self._type

    def SetHyperText(self, hyper=True):
        """Sets whether the item is hypertext or not."""
        self._hypertext = hyper

    def SetVisited(self, visited=True):
        """Sets whether an hypertext item was visited or not."""
        self._visited = visited

    def GetVisited(self):
        """Returns whether an hypertext item was visited or not."""
        return self._visited

    def IsHyperText(self):
        """Returns whether the item is hypetext or not."""
        return self._hypertext

    def GetParent(self):
        """Gets the item parent."""
        return self._parent

    def Insert(self, child, index):
        """Inserts an item in the item children."""
        self._children.insert(index, child)

    def Expand(self):
        """Expand the item."""
        self._isCollapsed = False

    def Collapse(self):
        """Collapse the item."""
        self._isCollapsed = True

    def SetHilight(self, set=True):
        """Sets the item focus/unfocus."""
        self._hasHilight = set

    def HasChildren(self):
        """Returns whether the item has children or not."""
        return len(self._children) > 0

    def IsSelected(self):
        """Returns whether the item is selected or not."""
        return self._hasHilight != 0

    def IsExpanded(self):
        """Returns whether the item is expanded or not."""
        return not self._isCollapsed

    def IsChecked(self):
        """Returns whether the item is checked or not."""
        return self._checked

    def Check(self, checked=True):
        """Check an item. Meaningful only for check and radio items."""
        self._checked = checked

    def HasPlus(self):
        """Returns whether the item has the plus button or not."""
        return self._hasPlus or self.HasChildren()

    def IsBold(self):
        """Returns whether the item font is bold or not."""
        return self._isBold != 0

    def IsItalic(self):
        """Returns whether the item font is italic or not."""
        return self._isItalic != 0

    def Enable(self, enable=True):
        """Enables/disables the item."""
        self._enabled = enable

    def IsEnabled(self):
        """Returns whether the item is enabled or not."""
        return self._enabled

    def GetAttributes(self):
        """Returns the item attributes (font, colours), possibly None."""
        return self._attr

    def Attr(self):
        """Creates a new attribute (font, colours) on demand and returns it."""
        if not self._attr:
            self._attr = TreeItemAttr()
            self._ownsAttr = True

        return self._attr

    def SetAttributes(self, attr):
        """Sets the item attributes (font, colours); ownership stays with the caller."""
        if self._ownsAttr:
            del self._attr

        self._attr = attr
        self._ownsAttr = False

    def AssignAttributes(self, attr):
        """Assigns the item attributes (font, colours), taking ownership."""
        self.SetAttributes(attr)
        self._ownsAttr = True

    def DeleteChildren(self, tree):
        """Deletes the item children, recursively, notifying *tree* of each deletion."""
        for child in self._children:
            if tree:
                tree.SendDeleteEvent(child)

            child.DeleteChildren(tree)

            if child == tree._select_me:
                tree._select_me = None

            # We have to destroy the associated window
            wnd = child.GetWindow()
            if wnd:
                wnd.Destroy()
                child._wnd = None

            if child in tree._itemWithWindow:
                tree._itemWithWindow.remove(child)

            del child

        self._children = []

    def SetText(self, text):
        """Sets the item text."""
        assert isinstance(text, types.StringTypes)
        self._text = text

    def GetChildrenCount(self, recursively=True):
        """Gets the number of children (all descendants when recursively=True)."""
        count = len(self._children)

        if not recursively:
            return count

        total = count

        for n in xrange(count):
            total += self._children[n].GetChildrenCount()

        return total

    def GetSize(self, x, y, theButton):
        """
        Returns the item size: grows (x, y) to cover this item and, when
        expanded, all its children.  theButton is the tree control (used for
        line-height queries).
        """
        bottomY = self._y + theButton.GetLineHeight(self)

        if y < bottomY:
            y = bottomY

        width = self._x + self._width

        if x < width:
            x = width

        if self.IsExpanded():
            for child in self._children:
                x, y = child.GetSize(x, y, theButton)

        return x, y

    def HitTest(self, point, theCtrl, flags=0, level=0):
        """
        HitTest method for an item. Called from the main window HitTest.
        see the CustomTreeCtrl HitTest method for the flags explanation.
        Returns (item, flags) or (None, 0) when nothing was hit.
        """
        # for a hidden root node, don't evaluate it, but do evaluate children
        if not (level == 0 and theCtrl.HasFlag(TR_HIDE_ROOT)):

            # evaluate the item
            h = theCtrl.GetLineHeight(self)

            if point.y > self._y and point.y < self._y + h:

                y_mid = self._y + h/2

                if point.y < y_mid:
                    flags |= TREE_HITTEST_ONITEMUPPERPART
                else:
                    flags |= TREE_HITTEST_ONITEMLOWERPART

                xCross = self._x - theCtrl.GetSpacing()

                if xCross > theCtrl.GetIndent():
                    xCross -= theCtrl.GetIndent()

                if wx.Platform == "__WXMAC__":
                    # according to the drawing code the triangels are drawn
                    # at -4 , -4  from the position up to +10/+10 max
                    if point.x > xCross-4 and point.x < xCross+10 and point.y > y_mid-4 and \
                       point.y < y_mid+10 and self.HasPlus() and theCtrl.HasButtons():

                        flags |= TREE_HITTEST_ONITEMBUTTON
                        return self, flags
                else:
                    # 5 is the size of the plus sign
                    if point.x > xCross-6 and point.x < xCross+6 and point.y > y_mid-6 and \
                       point.y < y_mid+6 and self.HasPlus() and theCtrl.HasButtons():

                        flags |= TREE_HITTEST_ONITEMBUTTON
                        return self, flags

                if point.x >= self._x and point.x <= self._x + self._width:

                    image_w = -1
                    wcheck = 0

                    # assuming every image (normal and selected) has the same size!
                    if self.GetImage() != _NO_IMAGE and theCtrl._imageListNormal:
                        image_w, image_h = theCtrl._imageListNormal.GetSize(self.GetImage())

                    if self.GetCheckedImage() is not None:
                        wcheck, hcheck = theCtrl._imageListCheck.GetSize(self.GetCheckedImage())

                    if wcheck and point.x <= self._x + wcheck + 1:
                        flags |= TREE_HITTEST_ONITEMCHECKICON
                        return self, flags

                    if image_w != -1 and point.x <= self._x + wcheck + image_w + 1:
                        flags |= TREE_HITTEST_ONITEMICON
                    else:
                        flags |= TREE_HITTEST_ONITEMLABEL

                    return self, flags

                if point.x < self._x:
                    flags |= TREE_HITTEST_ONITEMINDENT
                if point.x > self._x + self._width:
                    flags |= TREE_HITTEST_ONITEMRIGHT

                return self, flags

            # if children are expanded, fall through to evaluate them

        if self._isCollapsed:
            return None, 0

        # evaluate children
        for child in self._children:
            res, flags = child.HitTest(point, theCtrl, flags, level + 1)
            if res != None:
                return res, flags

        return None, 0

    def GetCurrentImage(self):
        """Returns the current item image, based on selected/expanded state."""
        image = _NO_IMAGE

        if self.IsExpanded():

            if self.IsSelected():
                image = self.GetImage(TreeItemIcon_SelectedExpanded)

            if image == _NO_IMAGE:

                # we usually fall back to the normal item, but try just the
                # expanded one (and not selected) first in this case
                image = self.GetImage(TreeItemIcon_Expanded)

        else:   # not expanded

            if self.IsSelected():
                image = self.GetImage(TreeItemIcon_Selected)

        # maybe it doesn't have the specific image we want,
        # try the default one instead
        if image == _NO_IMAGE:
            image = self.GetImage()

        return image

    def GetCurrentCheckedImage(self):
        """Returns the current item check image, or None for normal items."""
        if self._type == 0:
            return None

        if self.IsChecked():
            if self._type == 1:     # Checkbox
                return self._checkedimages[TreeItemIcon_Checked]
            else:                   # Radiobutton
                return self._checkedimages[TreeItemIcon_Flagged]
        else:
            if self._type == 1:     # Checkbox
                return self._checkedimages[TreeItemIcon_NotChecked]
            else:                   # Radiobutton
                return self._checkedimages[TreeItemIcon_NotFlagged]
def EventFlagsToSelType(style, shiftDown=False, ctrlDown=False):
    """
    Translate key/mouse modifier flags into the selection mode triple
    (is_multiple, extended_select, unselect_others).
    """
    is_multiple = (style & TR_MULTIPLE) != 0
    extended_select = shiftDown and is_multiple
    # De Morgan of the original: others stay selected only for an
    # extended (shift) or additive (ctrl) multi-selection gesture
    unselect_others = (not extended_select) and not (ctrlDown and is_multiple)
    return is_multiple, extended_select, unselect_others
# -----------------------------------------------------------------------------
# CustomTreeCtrl Main Implementation.
# This Is The Main Class.
# -----------------------------------------------------------------------------
class CustomTreeCtrl(wx.PyScrolledWindow):
    def __init__(self, parent, id=wx.ID_ANY, pos=wx.DefaultPosition, size=wx.DefaultSize,
                 style=TR_DEFAULT_STYLE, ctstyle=0, validator=wx.DefaultValidator,
                 name="CustomTreeCtrl"):
        """
        Default class constructor.

        parent: parent window. Must not be none.

        id: window identifier. A value of -1 indicates a default value.

        pos: window position.

        size: window size. If the default size (-1, -1) is specified then the window is sized appropriately.

        style: the underlying wx.ScrolledWindow style + CustomTreeCtrl window style. This can be one of:

            TR_NO_BUTTONS
            TR_HAS_BUTTONS                          # draw collapsed/expanded btns
            TR_NO_LINES                             # don't draw lines at all
            TR_LINES_AT_ROOT                        # connect top-level nodes
            TR_TWIST_BUTTONS                        # draw mac-like twist buttons
            TR_SINGLE                               # single selection mode
            TR_MULTIPLE                             # can select multiple items
            TR_EXTENDED                             # todo: allow extended selection
            TR_HAS_VARIABLE_ROW_HEIGHT              # allows rows to have variable height
            TR_EDIT_LABELS                          # can edit item labels
            TR_ROW_LINES                            # put border around items
            TR_HIDE_ROOT                            # don't display root node
            TR_FULL_ROW_HIGHLIGHT                   # highlight full horizontal space
            TR_AUTO_CHECK_CHILD                     # only meaningful for checkboxes
            TR_AUTO_CHECK_PARENT                    # only meaningful for checkboxes
            TR_AUTO_TOGGLE_CHILD                    # only meaningful for checkboxes

        ctstyle: kept for backward compatibility.

        validator: window validator.

        name: window name.
        """
        # merge the legacy ctstyle bits into the main window style
        style = style | ctstyle
        # selection/anchor bookkeeping: no current item, no root yet
        self._current = self._key_current = self._anchor = self._select_me = None
        self._hasFocus = False
        self._dirty = False
        # Default line height: it will soon be changed
        self._lineHeight = 10
        # Item indent wrt parent
        self._indent = 15
        # item horizontal spacing between the start and the text
        self._spacing = 18
        # Brushes for focused/unfocused items (also gradient type)
        self._hilightBrush = wx.Brush(wx.SystemSettings_GetColour(wx.SYS_COLOUR_HIGHLIGHT))
        btnshadow = wx.SystemSettings_GetColour(wx.SYS_COLOUR_BTNSHADOW)
        self._hilightUnfocusedBrush = wx.Brush(btnshadow)
        r, g, b = btnshadow.Red(), btnshadow.Green(), btnshadow.Blue()
        # a clamped darker shade of the button shadow, used by the second
        # unfocused-selection brush
        backcolour = (max((r >> 1) - 20, 0),
                      max((g >> 1) - 20, 0),
                      max((b >> 1) - 20, 0))
        backcolour = wx.Colour(backcolour[0], backcolour[1], backcolour[2])
        self._hilightUnfocusedBrush2 = wx.Brush(backcolour)
        # image list for icons
        self._imageListNormal = self._imageListButtons = self._imageListState = self._imageListCheck = None
        self._ownsImageListNormal = self._ownsImageListButtons = self._ownsImageListState = False
        # Drag and drop initial settings
        self._dragCount = 0
        self._countDrag = 0
        self._isDragging = False
        self._dropTarget = self._oldSelection = None
        self._dragImage = None
        self._underMouse = None
        self._selectedNodeWhileMousePressed = None
        # TextCtrl initial settings for editable items
        self._textCtrl = None
        self._renameTimer = None
        # This one allows us to handle Freeze() and Thaw() calls
        self._freezeCount = 0
        # incremental keyboard search state
        self._findPrefix = ""
        self._findTimer = None
        self._dropEffectAboveItem = False
        self._lastOnSame = False
        # Default normal and bold fonts for an item
        self._hasFont = True
        self._normalFont = wx.SystemSettings_GetFont(wx.SYS_DEFAULT_GUI_FONT)
        self._boldFont = wx.Font(self._normalFont.GetPointSize(), self._normalFont.GetFamily(),
                                 self._normalFont.GetStyle(), wx.BOLD, self._normalFont.GetUnderlined(),
                                 self._normalFont.GetFaceName(), self._normalFont.GetEncoding())
        # Hyperlinks things: underlined variant of the normal font
        self._hypertextfont = wx.Font(self._normalFont.GetPointSize(), self._normalFont.GetFamily(),
                                      self._normalFont.GetStyle(), wx.NORMAL, True,
                                      self._normalFont.GetFaceName(), self._normalFont.GetEncoding())
        self._hypertextnewcolour = wx.BLUE
        self._hypertextvisitedcolour = wx.Colour(200, 47, 200)
        self._isonhyperlink = False
        # Default CustomTreeCtrl background colour.
        # self._backgroundColour = wx.WHITE
        # self._backgroundColour = wx.SystemSettings.GetColour(
        #     wx.SYS_COLOUR_WINDOW)
        self._backgroundColour = wx.NullColour
        # Background image settings
        self._backgroundImage = None
        self._imageStretchStyle = _StyleTile
        # Disabled items colour
        self._disabledColour = wx.Colour(180, 180, 180)
        # Gradient selection colours
        self._firstcolour = color= wx.SystemSettings_GetColour(wx.SYS_COLOUR_HIGHLIGHT)
        self._secondcolour = wx.WHITE
        self._usegradients = False
        self._gradientstyle = 0   # Horizontal Gradient
        # Vista Selection Styles
        self._vistaselection = False
        self._defaultScrollVisiblePos = "auto"  # Other possibility: "middle"
        # Connection lines style
        # if wx.Platform != "__WXMAC__":
        if wx.GetOsVersion()[0] == wxWINDOWS_NT:
            self._dottedPen = wx.Pen("grey", 1, wx.USER_DASH)
            self._dottedPen.SetDashes([1,1])
            self._dottedPen.SetCap(wx.CAP_BUTT)
        else:
            self._dottedPen = wx.Pen("light grey", 1)
        # Pen Used To Draw The Border Around Selected Items
        self._borderPen = wx.BLACK_PEN
        self._cursor = wx.StockCursor(wx.CURSOR_ARROW)
        # For Appended Windows
        self._hasWindows = False
        self._itemWithWindow = []
        if wx.Platform == "__WXMAC__":
            style &= ~TR_LINES_AT_ROOT
            style |= TR_NO_LINES
            platform, major, minor = wx.GetOsVersion()
            if major < 10:
                style |= TR_ROW_LINES
        self._windowStyle = style
        # Create the default check image list
        self.SetImageListCheck(13, 13)
        # A constant to use my translation of RendererNative.DrawTreeItemButton
        # if the wxPython version is less or equal 2.6.3.2.
##        if wx.VERSION_STRING < "2.6.2.1":
        if wx.VERSION_STRING <= "2.6.3.2":
            self._drawingfunction = DrawTreeItemButton
        else:
            self._drawingfunction = wx.RendererNative.Get().DrawTreeItemButton
        # Create our container... at last!
        wx.PyScrolledWindow.__init__(self, parent, id, pos, size, style|wx.HSCROLL|wx.VSCROLL, name)
        # If the tree display has no buttons, but does have
        # connecting lines, we can use a narrower layout.
        # It may not be a good idea to force this...
        if not self.HasButtons() and not self.HasFlag(TR_NO_LINES):
            self._indent= 10
            self._spacing = 10
        self.SetValidator(validator)
        attr = self.GetDefaultAttributes()
        self.SetOwnForegroundColour(attr.colFg)
        self.SetOwnBackgroundColour(wx.WHITE)
        if not self._hasFont:
            self.SetOwnFont(attr.font)
        self.SetSize(size)
        # Bind the events
        self.Bind(wx.EVT_PAINT, self.OnPaint)
        self.Bind(wx.EVT_ERASE_BACKGROUND, self.OnEraseBackground)
        self.Bind(wx.EVT_SYS_COLOUR_CHANGED, self.OnSysColourChanged)
        self.Bind(wx.EVT_MOUSE_EVENTS, self.OnMouse)
        self.Bind(wx.EVT_KEY_DOWN, self.OnKeyDown)
        self.Bind(wx.EVT_SET_FOCUS, self.OnSetFocus)
        self.Bind(wx.EVT_KILL_FOCUS, self.OnKillFocus)
        self.Bind(EVT_TREE_ITEM_GETTOOLTIP, self.OnGetToolTip)
        self.Bind(wx.EVT_WINDOW_DESTROY, self.OnDestroy)
        # Sets the focus to ourselves: this is useful if you have items
        # with associated widgets.
        self.SetFocus()
def AcceptsFocus(self):
# overridden base class method, allows this ctrl to
# participate in the tab-order, etc. It's overridable because
# of deriving this class from wx.PyScrolledWindow...
return True
def OnDestroy(self, event):
"""Handles the wx.EVT_WINDOW_DESTROY event."""
# Here there may be something I miss... do I have to destroy
# something else?
if self._renameTimer and self._renameTimer.IsRunning():
self._renameTimer.Stop()
del self._renameTimer
if self._findTimer and self._findTimer.IsRunning():
self._findTimer.Stop()
del self._findTimer
event.Skip()
def GetCount(self):
"""Returns the global number of items in the tree."""
if not self._anchor:
# the tree is empty
return 0
count = self._anchor.GetChildrenCount()
if not self.HasFlag(TR_HIDE_ROOT):
# take the root itself into account
count = count + 1
return count
def GetIndent(self):
"""Returns the item indentation."""
return self._indent
def GetSpacing(self):
"""Returns the spacing between the start and the text."""
return self._spacing
def GetRootItem(self):
"""Returns the root item."""
return self._anchor
def GetSelection(self):
"""Returns the current selection: TR_SINGLE only."""
return self._current
def ToggleItemSelection(self, item):
"""Toggles the item selection."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
self.SelectItem(item, not self.IsSelected(item))
def EnableChildren(self, item, enable=True):
"""Enables/disables item children. Used internally."""
torefresh = False
if item.IsExpanded():
torefresh = True
if item.GetType() == 2 and enable and not item.IsChecked():
# We hit a radiobutton item not checked, we don't want to
# enable the children
return
child, cookie = self.GetFirstChild(item)
while child:
self.EnableItem(child, enable, torefresh=torefresh)
# Recurse on tree
if child.GetType != 2 or (child.GetType() == 2 and item.IsChecked()):
self.EnableChildren(child, enable)
(child, cookie) = self.GetNextChild(item, cookie)
    def EnableItem(self, item, enable=True, torefresh=True):
        """
        Enables/disables an item.

        item: the tree item to act on; enable: the new enabled state;
        torefresh: whether to re-measure and repaint the item's row.
        """
        if not item:
            raise Exception("\nERROR: Invalid Tree Item. ")
        if item.IsEnabled() == enable:
            # nothing to do
            return
        if not enable and item.IsSelected():
            # disabled items may not remain selected
            self.SelectItem(item, False)
        item.Enable(enable)
        wnd = item.GetWindow()
        # Handles the eventual window associated to the item
        if wnd:
            wndenable = item.GetWindowEnabled()
            if enable:
                # only re-enable the widget if its own enabled flag allows it
                if wndenable:
                    wnd.Enable(enable)
            else:
                wnd.Enable(enable)
        if torefresh:
            # We have to refresh the item line
            dc = wx.ClientDC(self)
            self.CalculateSize(item, dc)
            self.RefreshLine(item)
def IsItemEnabled(self, item):
"""Returns whether an item is enabled or disabled."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
return item.IsEnabled()
def SetDisabledColour(self, colour):
"""Sets the items disabled colour."""
self._disabledColour = colour
self._dirty = True
def GetDisabledColour(self):
"""Returns the items disabled colour."""
return self._disabledColour
def IsItemChecked(self, item):
"""Returns whether an item is checked or not."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
return item.IsChecked()
def CheckItem2(self, item, checked=True, torefresh=False):
"""Used internally to avoid EVT_TREE_ITEM_CHECKED events."""
if item.GetType() == 0:
return
item.Check(checked)
if torefresh:
dc = wx.ClientDC(self)
self.CalculateSize(item, dc)
self.RefreshLine(item)
def UnCheckRadioParent(self, item, checked=False):
"""Used internally to handle radio node parent correctly."""
e = TreeEvent(wxEVT_TREE_ITEM_CHECKING, self.GetId())
e.SetItem(item)
e.SetEventObject(self)
if self.GetEventHandler().ProcessEvent(e):
return False
item.Check(checked)
self.RefreshLine(item)
self.EnableChildren(item, checked)
e = TreeEvent(wxEVT_TREE_ITEM_CHECKED, self.GetId())
e.SetItem(item)
e.SetEventObject(self)
self.GetEventHandler().ProcessEvent(e)
return True
def CheckItem(self, item, checked=True):
"""
Actually checks/uncheks an item, sending (eventually) the two
events EVT_TREE_ITEM_CHECKING/EVT_TREE_ITEM_CHECKED.
"""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
# Should we raise an error here?!?
if item.GetType() == 0:
return
if item.GetType() == 2: # it's a radio button
if not checked and item.IsChecked(): # Try To Unckeck?
if item.HasChildren():
self.UnCheckRadioParent(item, checked)
return
else:
if not self.UnCheckRadioParent(item, checked):
return
self.CheckSameLevel(item, False)
return
# Radiobuttons are done, let's handle checkbuttons...
e = TreeEvent(wxEVT_TREE_ITEM_CHECKING, self.GetId())
e.SetItem(item)
e.SetEventObject(self)
if self.GetEventHandler().ProcessEvent(e):
# Blocked by user
return
item.Check(checked)
dc = wx.ClientDC(self)
self.RefreshLine(item)
if self._windowStyle & TR_AUTO_CHECK_CHILD:
ischeck = self.IsItemChecked(item)
self.AutoCheckChild(item, ischeck)
if self._windowStyle & TR_AUTO_CHECK_PARENT:
ischeck = self.IsItemChecked(item)
self.AutoCheckParent(item, ischeck)
elif self._windowStyle & TR_AUTO_TOGGLE_CHILD:
self.AutoToggleChild(item)
e = TreeEvent(wxEVT_TREE_ITEM_CHECKED, self.GetId())
e.SetItem(item)
e.SetEventObject(self)
self.GetEventHandler().ProcessEvent(e)
def AutoToggleChild(self, item):
"""Transverses the tree and toggles the items. Meaningful only for check items."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
child, cookie = self.GetFirstChild(item)
torefresh = False
if item.IsExpanded():
torefresh = True
# Recurse on tree
while child:
if child.GetType() == 1 and child.IsEnabled():
self.CheckItem2(child, not child.IsChecked(), torefresh=torefresh)
self.AutoToggleChild(child)
(child, cookie) = self.GetNextChild(item, cookie)
def AutoCheckChild(self, item, checked):
"""Transverses the tree and checks/unchecks the items. Meaningful only for check items."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
(child, cookie) = self.GetFirstChild(item)
torefresh = False
if item.IsExpanded():
torefresh = True
while child:
if child.GetType() == 1 and child.IsEnabled():
self.CheckItem2(child, checked, torefresh=torefresh)
self.AutoCheckChild(child, checked)
(child, cookie) = self.GetNextChild(item, cookie)
def AutoCheckParent(self, item, checked):
"""Traverses up the tree and checks/unchecks parent items.
Meaningful only for check items."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
parent = item.GetParent()
if not parent or parent.GetType() != 1:
return
(child, cookie) = self.GetFirstChild(parent)
while child:
if child.GetType() == 1 and child.IsEnabled():
if checked != child.IsChecked():
return
(child, cookie) = self.GetNextChild(parent, cookie)
self.CheckItem2(parent, checked, torefresh=True)
self.AutoCheckParent(parent, checked)
def CheckChilds(self, item, checked=True):
"""Programatically check/uncheck item children. Does not generate EVT_TREE_CHECK* events."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
if checked == None:
self.AutoToggleChild(item)
else:
self.AutoCheckChild(item, checked)
def CheckSameLevel(self, item, checked=False):
"""
Uncheck radio items which are on the same level of the checked one.
Used internally.
"""
parent = item.GetParent()
if not parent:
return
torefresh = False
if parent.IsExpanded():
torefresh = True
(child, cookie) = self.GetFirstChild(parent)
while child:
if child.GetType() == 2 and child != item:
self.CheckItem2(child, checked, torefresh=torefresh)
if child.GetType != 2 or (child.GetType() == 2 and child.IsChecked()):
self.EnableChildren(child, checked)
(child, cookie) = self.GetNextChild(parent, cookie)
def EditLabel(self, item):
"""Starts editing an item label."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
self.Edit(item)
def ShouldInheritColours(self):
"""We don't inherit colours from anyone."""
return False
def SetIndent(self, indent):
"""Sets item indentation."""
self._indent = indent
self._dirty = True
def SetSpacing(self, spacing):
"""Sets item spacing."""
self._spacing = spacing
self._dirty = True
def HasFlag(self, flag):
"""Returns whether CustomTreeCtrl has a flag."""
return self._windowStyle & flag
def HasChildren(self, item):
"""Returns whether an item has children or not."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
return len(item.GetChildren()) > 0
def GetChildrenCount(self, item, recursively=True):
"""Gets the item children count."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
return item.GetChildrenCount(recursively)
    def SetTreeStyle(self, styles):
        """Sets the CustomTreeCtrl style. See __init__ method for the styles explanation."""
        # Do not try to expand the root node if it hasn't been created yet
        if self._anchor and not self.HasFlag(TR_HIDE_ROOT) and styles & TR_HIDE_ROOT:
            # if we will hide the root, make sure children are visible
            self._anchor.SetHasPlus()
            self._anchor.Expand()
            self.CalculatePositions()
        # right now, just sets the styles. Eventually, we may
        # want to update the inherited styles, but right now
        # none of the parents has updatable styles
        if self._windowStyle & TR_MULTIPLE and not (styles & TR_MULTIPLE):
            # leaving multi-selection mode: keep only the last selected item
            selections = self.GetSelections()
            for select in selections[0:-1]:
                self.SelectItem(select, False)
        self._windowStyle = styles
        self._dirty = True
def GetTreeStyle(self):
"""Returns the CustomTreeCtrl style."""
return self._windowStyle
def HasButtons(self):
"""Returns whether CustomTreeCtrl has the TR_AHS_BUTTONS flag."""
return self.HasFlag(TR_HAS_BUTTONS)
# -----------------------------------------------------------------------------
# functions to work with tree items
# -----------------------------------------------------------------------------
def GetItemText(self, item):
"""Returns the item text."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
return item.GetText()
def GetItemImage(self, item, which=TreeItemIcon_Normal):
"""Returns the item image."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
return item.GetImage(which)
def GetPyData(self, item):
"""Returns the data associated to an item."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
return item.GetData()
GetItemPyData = GetPyData
def GetItemTextColour(self, item):
"""Returns the item text colour."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
return item.Attr().GetTextColour()
def GetItemBackgroundColour(self, item):
"""Returns the item background colour."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
return item.Attr().GetBackgroundColour()
def GetItemFont(self, item):
"""Returns the item font."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
return item.Attr().GetFont()
def IsItemHyperText(self, item):
"""Returns whether an item is hypertext or not."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
return item.IsHyperText()
def SetItemText(self, item, text, recalcSize=True):
"""Sets the item text."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
# dc = wx.ClientDC(self)
# item.SetText(text)
# self.CalculateSize(item, dc)
# self.RefreshLine(item)
item.SetText(text)
if recalcSize:
dc = wx.ClientDC(self)
self.CalculateSize(item, dc)
self.RefreshLine(item)
def SetItemImage(self, item, image, which=TreeItemIcon_Normal):
"""Sets the item image, depending on the item state."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
item.SetImage(image, which)
dc = wx.ClientDC(self)
self.CalculateSize(item, dc)
self.RefreshLine(item)
def SetPyData(self, item, data):
"""Sets the data associated to an item."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
item.SetData(data)
SetItemPyData = SetPyData
def SetItemHasChildren(self, item, has=True):
"""Forces the appearance of the button next to the item."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
item.SetHasPlus(has)
self.RefreshLine(item)
def SetItemBold(self, item, bold=True):
"""Sets the item font bold/unbold."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
# avoid redrawing the tree if no real change
if item.IsBold() != bold:
item.SetBold(bold)
self._dirty = True
def SetItemItalic(self, item, italic=True):
"""Sets the item font italic/non-italic."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
if item.IsItalic() != italic:
itemFont = self.GetItemFont(item)
if itemFont != wx.NullFont:
style = wx.ITALIC
if not italic:
style = ~style
item.SetItalic(italic)
itemFont.SetStyle(style)
self.SetItemFont(item, itemFont)
self._dirty = True
def SetItemDropHighlight(self, item, highlight=True):
"""
Gives the item the visual feedback for drag and drop operations.
This is useful when something is dragged from outside the CustomTreeCtrl.
"""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
if highlight:
bg = wx.SystemSettings_GetColour(wx.SYS_COLOUR_HIGHLIGHT)
fg = wx.SystemSettings_GetColour(wx.SYS_COLOUR_HIGHLIGHTTEXT)
item.Attr().SetTextColour(fg)
item.Attr.SetBackgroundColour(bg)
self.RefreshLine(item)
def SetItemTextColour(self, item, col):
"""Sets the item text colour."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
if self.GetItemTextColour(item) == col:
return
item.Attr().SetTextColour(col)
self.RefreshLine(item)
def SetItemBackgroundColour(self, item, col):
"""Sets the item background colour."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
item.Attr().SetBackgroundColour(col)
self.RefreshLine(item)
def SetItemHyperText(self, item, hyper=True):
"""Sets whether the item is hypertext or not."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
item.SetHyperText(hyper)
self.RefreshLine(item)
def SetItemFont(self, item, font):
"""Sets the item font."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
if self.GetItemFont(item) == font:
return
item.Attr().SetFont(font)
self._dirty = True
def SetFont(self, font):
"""Sets the CustomTreeCtrl font."""
if font is None or not font.IsOk():
font = wx.SystemSettings_GetFont(wx.SYS_DEFAULT_GUI_FONT)
wx.ScrolledWindow.SetFont(self, font)
self._normalFont = font
self._boldFont = wx.Font(self._normalFont.GetPointSize(), self._normalFont.GetFamily(),
self._normalFont.GetStyle(), wx.BOLD, self._normalFont.GetUnderlined(),
self._normalFont.GetFaceName(), self._normalFont.GetEncoding())
return True
def GetHyperTextFont(self):
"""Returns the font used to render an hypertext item."""
return self._hypertextfont
def SetHyperTextFont(self, font):
"""Sets the font used to render an hypertext item."""
self._hypertextfont = font
self._dirty = True
def SetHyperTextNewColour(self, colour):
"""Sets the colour used to render a non-visited hypertext item."""
self._hypertextnewcolour = colour
self._dirty = True
def GetHyperTextNewColour(self):
"""Returns the colour used to render a non-visited hypertext item."""
return self._hypertextnewcolour
def SetHyperTextVisitedColour(self, colour):
"""Sets the colour used to render a visited hypertext item."""
self._hypertextvisitedcolour = colour
self._dirty = True
def GetHyperTextVisitedColour(self):
"""Returns the colour used to render a visited hypertext item."""
return self._hypertextvisitedcolour
def SetItemVisited(self, item, visited=True):
"""Sets whether an hypertext item was visited."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
item.SetVisited(visited)
self.RefreshLine(item)
def GetItemVisited(self, item):
"""Returns whether an hypertext item was visited."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
return item.GetVisited()
def SetHilightFocusColour(self, colour):
"""
Sets the colour used to highlight focused selected items.
This is applied only if gradient and Windows Vista styles are disabled.
"""
self._hilightBrush = wx.Brush(colour)
self.RefreshSelected()
def SetHilightNonFocusColour(self, colour):
"""
Sets the colour used to highlight unfocused selected items.
This is applied only if gradient and Windows Vista styles are disabled.
"""
self._hilightUnfocusedBrush = wx.Brush(colour)
self.RefreshSelected()
def GetHilightFocusColour(self):
"""
Returns the colour used to highlight focused selected items.
This is applied only if gradient and Windows Vista styles are disabled.
"""
return self._hilightBrush.GetColour()
def GetHilightNonFocusColour(self):
"""
Returns the colour used to highlight unfocused selected items.
This is applied only if gradient and Windows Vista styles are disabled.
"""
return self._hilightUnfocusedBrush.GetColour()
def SetFirstGradientColour(self, colour=None):
"""Sets the first gradient colour."""
if colour is None:
colour = wx.SystemSettings_GetColour(wx.SYS_COLOUR_HIGHLIGHT)
self._firstcolour = colour
if self._usegradients:
self.RefreshSelected()
def SetSecondGradientColour(self, colour=None):
"""Sets the second gradient colour."""
if colour is None:
# No colour given, generate a slightly darker from the
# CustomTreeCtrl background colour
color = self.GetBackgroundColour()
r, g, b = int(color.Red()), int(color.Green()), int(color.Blue())
color = ((r >> 1) + 20, (g >> 1) + 20, (b >> 1) + 20)
colour = wx.Colour(color[0], color[1], color[2])
self._secondcolour = colour
if self._usegradients:
self.RefreshSelected()
def GetFirstGradientColour(self):
"""Returns the first gradient colour."""
return self._firstcolour
def GetSecondGradientColour(self):
"""Returns the second gradient colour."""
return self._secondcolour
def EnableSelectionGradient(self, enable=True):
"""Globally enables/disables drawing of gradient selection."""
self._usegradients = enable
self._vistaselection = False
self.RefreshSelected()
def SetGradientStyle(self, vertical=0):
"""
Sets the gradient style:
0: horizontal gradient
1: vertical gradient
"""
# 0 = Horizontal, 1 = Vertical
self._gradientstyle = vertical
if self._usegradients:
self.RefreshSelected()
def GetGradientStyle(self):
"""
Returns the gradient style:
0: horizontal gradient
1: vertical gradient
"""
return self._gradientstyle
def EnableSelectionVista(self, enable=True):
"""Globally enables/disables drawing of Windows Vista selection."""
self._usegradients = False
self._vistaselection = enable
self.RefreshSelected()
def SetBorderPen(self, pen):
"""
Sets the pen used to draw the selected item border.
The border pen is not used if the Windows Vista style is applied.
"""
self._borderPen = pen
self.RefreshSelected()
def GetBorderPen(self):
"""
Returns the pen used to draw the selected item border.
The border pen is not used if the Windows Vista style is applied.
"""
return self._borderPen
def SetConnectionPen(self, pen):
"""Sets the pen used to draw the connecting lines between items."""
self._dottedPen = pen
self._dirty = True
def GetConnectionPen(self):
"""Returns the pen used to draw the connecting lines between items."""
return self._dottedPen
def SetBackgroundImage(self, image):
"""Sets the CustomTreeCtrl background image (can be none)."""
self._backgroundImage = image
self.Refresh()
def GetBackgroundImage(self):
"""Returns the CustomTreeCtrl background image (can be none)."""
return self._backgroundImage
def GetItemWindow(self, item):
"""Returns the window associated to the item (if any)."""
if not item:
raise Exception("\nERROR: Invalid Item")
return item.GetWindow()
def GetItemWindowEnabled(self, item):
"""Returns whether the window associated to the item is enabled."""
if not item:
raise Exception("\nERROR: Invalid Item")
return item.GetWindowEnabled()
def SetItemWindowEnabled(self, item, enable=True):
"""Enables/disables the window associated to the item."""
if not item:
raise Exception("\nERROR: Invalid Item")
item.SetWindowEnabled(enable)
def GetItemType(self, item):
"""
Returns the item type:
0: normal
1: checkbox item
2: radiobutton item
"""
if not item:
raise Exception("\nERROR: Invalid Item")
return item.GetType()
# -----------------------------------------------------------------------------
# item status inquiries
# -----------------------------------------------------------------------------
def IsVisible(self, item):
"""Returns whether the item is visible or not."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
# An item is only visible if it's not a descendant of a collapsed item
parent = item.GetParent()
while parent:
if not parent.IsExpanded():
return False
parent = parent.GetParent()
startX, startY = self.GetViewStart()
clientSize = self.GetClientSize()
rect = self.GetBoundingRect(item)
if not rect:
return False
if rect.GetWidth() == 0 or rect.GetHeight() == 0:
return False
if rect.GetBottom() < 0 or rect.GetTop() > clientSize.y:
return False
if rect.GetRight() < 0 or rect.GetLeft() > clientSize.x:
return False
return True
def ItemHasChildren(self, item):
"""Returns whether the item has children or not."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
# consider that the item does have children if it has the "+" button: it
# might not have them (if it had never been expanded yet) but then it
# could have them as well and it's better to err on this side rather than
# disabling some operations which are restricted to the items with
# children for an item which does have them
return item.HasPlus()
def IsExpanded(self, item):
"""Returns whether the item is expanded or not."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
return item.IsExpanded()
def IsSelected(self, item):
"""Returns whether the item is selected or not."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
return item.IsSelected()
def IsBold(self, item):
"""Returns whether the item font is bold or not."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
return item.IsBold()
def IsItalic(self, item):
"""Returns whether the item font is italic or not."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
return item.IsItalic()
# -----------------------------------------------------------------------------
# navigation
# -----------------------------------------------------------------------------
def GetItemParent(self, item):
"""Gets the item parent."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
return item.GetParent()
def GetFirstChild(self, item):
"""Gets the item first child."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
cookie = 0
return self.GetNextChild(item, cookie)
def GetNextChild(self, item, cookie):
"""
Gets the item next child based on the 'cookie' parameter.
This method has no sense if you do not call GetFirstChild() before.
"""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
children = item.GetChildren()
# it's ok to cast cookie to size_t, we never have indices big enough to
# overflow "void *"
if cookie < len(children):
return children[cookie], cookie+1
else:
# there are no more of them
return None, cookie
def GetLastChild(self, item):
"""Gets the item last child."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
children = item.GetChildren()
return (len(children) == 0 and [None] or [children[-1]])[0]
def GetNextSibling(self, item):
"""Gets the next sibling of an item."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
i = item
parent = i.GetParent()
if parent == None:
# root item doesn't have any siblings
return None
siblings = parent.GetChildren()
index = siblings.index(i)
n = index + 1
return (n == len(siblings) and [None] or [siblings[n]])[0]
def GetPrevSibling(self, item):
"""Gets the previous sibling of an item."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
i = item
parent = i.GetParent()
if parent == None:
# root item doesn't have any siblings
return None
siblings = parent.GetChildren()
index = siblings.index(i)
return (index == 0 and [None] or [siblings[index-1]])[0]
def GetNext(self, item):
"""Gets the next item. Only for internal use right now."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
i = item
# First see if there are any children.
children = i.GetChildren()
if len(children) > 0:
return children[0]
else:
# Try a sibling of this or ancestor instead
p = item
toFind = None
while p and not toFind:
toFind = self.GetNextSibling(p)
p = self.GetItemParent(p)
return toFind
def GetFirstVisibleItem(self):
    """Return the first visible item in tree order, or None."""
    node = self.GetRootItem()
    if not node:
        return node
    # Scan forward in depth-first order until something visible turns up.
    while node:
        if self.IsVisible(node):
            return node
        node = self.GetNext(node)
    return None
def GetNextVisible(self, item):
    """Return the next visible item after *item*, or None."""
    if not item:
        raise Exception("\nERROR: Invalid Tree Item. ")
    node = item
    # Keep advancing through the tree until a visible item is found.
    while node:
        node = self.GetNext(node)
        if node and self.IsVisible(node):
            return node
    return None
def GetPrevVisible(self, item):
    """Return the previous visible item. Not implemented: always raises."""
    if not item:
        raise Exception("\nERROR: Invalid Tree Item. ")
    raise Exception("\nERROR: Not Implemented")
    # NOTE(review): unreachable; kept only to mirror the other navigation methods.
    return None
def ResetTextControl(self):
    """Called by TreeTextCtrl when it marks itself for deletion."""
    # Destroy the inline label editor and drop the reference so a later
    # edit operation can create a fresh one.
    self._textCtrl.Destroy()
    self._textCtrl = None
def FindItem(self, idParent, prefixOrig):
    """Finds the first item starting with the given prefix after the given item.

    Returns the matching item, or the item we wrapped back to (idParent)
    when nothing matches.
    """
    # match is case insensitive as this is more convenient to the user: having
    # to press Shift-letter to go to the item starting with a capital letter
    # would be too bothersome
    prefix = prefixOrig.lower()
    # determine the starting point: we shouldn't take the current item (this
    # allows to switch between two items starting with the same letter just by
    # pressing it) but we shouldn't jump to the next one if the user is
    # continuing to type as otherwise he might easily skip the item he wanted
    id = idParent
    if len(prefix) == 1:
        id = self.GetNext(id)
    # look for the item starting with the given prefix after it
    while id and not self.GetItemText(id).lower().startswith(prefix):
        id = self.GetNext(id)
    # if we haven't found anything...
    if not id:
        # ... wrap to the beginning
        id = self.GetRootItem()
        if self.HasFlag(TR_HIDE_ROOT):
            # can't select virtual root
            id = self.GetNext(id)
        # and try all the items (stop when we get to the one we started from)
        while id != idParent and not self.GetItemText(id).lower().startswith(prefix):
            id = self.GetNext(id)
    return id
# -----------------------------------------------------------------------------
# operations
# -----------------------------------------------------------------------------
def DoInsertItem(self, parentId, previous, text, ct_type=0, wnd=None, image=-1, selImage=-1, data=None):
    """Actually inserts an item in the tree.

    *previous* is the index at which the new item is inserted among the
    parent's children. Returns the new GenericTreeItem. Raises when a
    window or multiline label is requested without
    TR_HAS_VARIABLE_ROW_HEIGHT, or when ct_type is out of range.
    """
    if wnd is not None and not (self._windowStyle & TR_HAS_VARIABLE_ROW_HEIGHT):
        raise Exception("\nERROR: In Order To Append/Insert Controls You Have To Use The Style TR_HAS_VARIABLE_ROW_HEIGHT")
    if text.find("\n") >= 0 and not (self._windowStyle & TR_HAS_VARIABLE_ROW_HEIGHT):
        raise Exception("\nERROR: In Order To Append/Insert A MultiLine Text You Have To Use The Style TR_HAS_VARIABLE_ROW_HEIGHT")
    if ct_type < 0 or ct_type > 2:
        raise Exception("\nERROR: Item Type Should Be 0 (Normal), 1 (CheckBox) or 2 (RadioButton). ")
    parent = parentId
    if not parent:
        # should we give a warning here?
        return self.AddRoot(text, ct_type, wnd, image, selImage, data)
    self._dirty = True  # do this first so stuff below doesn't cause flicker
    item = GenericTreeItem(parent, text, ct_type, wnd, image, selImage, data)
    if wnd is not None:
        # Items carrying a window need extra bookkeeping for show/hide.
        self._hasWindows = True
        self._itemWithWindow.append(item)
    parent.Insert(item, previous)
    return item
def AddRoot(self, text, ct_type=0, wnd=None, image=-1, selImage=-1, data=None):
    """Adds a root to the CustomTreeCtrl. Only one root must exist.

    Returns the new root item; raises if a root already exists or the
    window/multiline/ct_type preconditions are violated.
    """
    if self._anchor:
        raise Exception("\nERROR: Tree Can Have Only One Root")
    if wnd is not None and not (self._windowStyle & TR_HAS_VARIABLE_ROW_HEIGHT):
        raise Exception("\nERROR: In Order To Append/Insert Controls You Have To Use The Style TR_HAS_VARIABLE_ROW_HEIGHT")
    if text.find("\n") >= 0 and not (self._windowStyle & TR_HAS_VARIABLE_ROW_HEIGHT):
        raise Exception("\nERROR: In Order To Append/Insert A MultiLine Text You Have To Use The Style TR_HAS_VARIABLE_ROW_HEIGHT")
    if ct_type < 0 or ct_type > 2:
        raise Exception("\nERROR: Item Type Should Be 0 (Normal), 1 (CheckBox) or 2 (RadioButton). ")
    self._dirty = True  # do this first so stuff below doesn't cause flicker
    self._anchor = GenericTreeItem(None, text, ct_type, wnd, image, selImage, data)
    if wnd is not None:
        self._hasWindows = True
        self._itemWithWindow.append(self._anchor)
    if self.HasFlag(TR_HIDE_ROOT):
        # if root is hidden, make sure we can navigate
        # into children
        self._anchor.SetHasPlus()
        self._anchor.Expand()
        self.CalculatePositions()
    if not self.HasFlag(TR_MULTIPLE):
        # Single-selection trees select the root right away.
        self._current = self._key_current = self._anchor
        self._current.SetHilight(True)
    return self._anchor
def PrependItem(self, parent, text, ct_type=0, wnd=None, image=-1, selImage=-1, data=None):
    """Insert *text* as the very first child of *parent*."""
    variable_height = self._windowStyle & TR_HAS_VARIABLE_ROW_HEIGHT
    if wnd is not None and not variable_height:
        raise Exception("\nERROR: In Order To Append/Insert Controls You Have To Use The Style TR_HAS_VARIABLE_ROW_HEIGHT")
    if "\n" in text and not variable_height:
        raise Exception("\nERROR: In Order To Append/Insert A MultiLine Text You Have To Use The Style TR_HAS_VARIABLE_ROW_HEIGHT")
    # Index 0 == first position among the parent's children.
    return self.DoInsertItem(parent, 0, text, ct_type, wnd, image, selImage, data)
def InsertItemByItem(self, parentId, idPrevious, text, ct_type=0, wnd=None, image=-1, selImage=-1, data=None):
    """Insert *text* after the sibling item *idPrevious* under *parentId*.

    Auxiliary function to cope with the C++ hideous multifunction.
    Raises if *idPrevious* is not actually a child of *parentId*.
    """
    if wnd is not None and not (self._windowStyle & TR_HAS_VARIABLE_ROW_HEIGHT):
        raise Exception("\nERROR: In Order To Append/Insert Controls You Have To Use The Style TR_HAS_VARIABLE_ROW_HEIGHT")
    if text.find("\n") >= 0 and not (self._windowStyle & TR_HAS_VARIABLE_ROW_HEIGHT):
        raise Exception("\nERROR: In Order To Append/Insert A MultiLine Text You Have To Use The Style TR_HAS_VARIABLE_ROW_HEIGHT")
    parent = parentId
    if not parent:
        # should we give a warning here?
        return self.AddRoot(text, ct_type, wnd, image, selImage, data)
    index = -1
    if idPrevious:
        try:
            index = parent.GetChildren().index(idPrevious)
        except ValueError:
            # list.index() is the only thing that can fail here; a bare
            # except previously masked unrelated errors too.
            raise Exception("ERROR: Previous Item In CustomTreeCtrl.InsertItem() Is Not A Sibling")
    return self.DoInsertItem(parentId, index+1, text, ct_type, wnd, image, selImage, data)
def InsertItemByIndex(self, parentId, before, text, ct_type=0, wnd=None, image=-1, selImage=-1, data=None):
    """Insert *text* at child position *before* under *parentId*.

    Auxiliary function to cope with the C++ hideous multifunction.
    """
    variable_height = self._windowStyle & TR_HAS_VARIABLE_ROW_HEIGHT
    if wnd is not None and not variable_height:
        raise Exception("\nERROR: In Order To Append/Insert Controls You Have To Use The Style TR_HAS_VARIABLE_ROW_HEIGHT")
    if "\n" in text and not variable_height:
        raise Exception("\nERROR: In Order To Append/Insert A MultiLine Text You Have To Use The Style TR_HAS_VARIABLE_ROW_HEIGHT")
    if not parentId:
        # should we give a warning here?
        return self.AddRoot(text, ct_type, wnd, image, selImage, data)
    return self.DoInsertItem(parentId, before, text, ct_type, wnd, image, selImage, data)
def InsertItem(self, parentId, input, text, ct_type=0, wnd=None, image=-1, selImage=-1, data=None):
    """Inserts an item after the given previous.

    *input* selects the overload: an int is treated as a child index,
    anything else as the previous sibling item. Both overloads repeat
    the window/multiline validation, so it is not duplicated here.
    """
    # `type(...) is int` (rather than isinstance) preserves the original
    # `type(input) == type(1)` dispatch exactly: bools and other int
    # subclasses still go through the by-item overload.
    if type(input) is int:
        return self.InsertItemByIndex(parentId, input, text, ct_type, wnd, image, selImage, data)
    else:
        return self.InsertItemByItem(parentId, input, text, ct_type, wnd, image, selImage, data)
def InsertItemBefore(self, parentId, before, text, ct_type=0, wnd=None, image=-1, selImage=-1, data=None):
    """Insert an item before the child at index *before* (alias of InsertItemByIndex)."""
    return self.InsertItemByIndex(parentId, before, text, ct_type, wnd, image, selImage, data)
def AppendItem(self, parentId, text, ct_type=0, wnd=None, image=-1, selImage=-1, data=None):
    """Append *text* as the last child of *parentId*."""
    variable_height = self._windowStyle & TR_HAS_VARIABLE_ROW_HEIGHT
    if wnd is not None and not variable_height:
        raise Exception("\nERROR: In Order To Append/Insert Controls You Have To Use The Style TR_HAS_VARIABLE_ROW_HEIGHT")
    if "\n" in text and not variable_height:
        raise Exception("\nERROR: In Order To Append/Insert A MultiLine Text You Have To Use The Style TR_HAS_VARIABLE_ROW_HEIGHT")
    parent = parentId
    if not parent:
        # should we give a warning here?
        return self.AddRoot(text, ct_type, wnd, image, selImage, data)
    # Appending == inserting at the current child count.
    return self.DoInsertItem(parent, len(parent.GetChildren()), text, ct_type, wnd, image, selImage, data)
def SendDeleteEvent(self, item):
    """Actually sends the EVT_TREE_DELETE_ITEM event for *item*."""
    event = TreeEvent(wxEVT_TREE_DELETE_ITEM, self.GetId())
    event._item = item
    event.SetEventObject(self)
    self.GetEventHandler().ProcessEvent(event)
def IsDescendantOf(self, parent, item):
    """Return True if *item* is *parent* itself or lies under it."""
    node = item
    # Walk the parent chain from *item* up to the root.
    while node:
        if node == parent:
            # item is a descendant of parent
            return True
        node = node.GetParent()
    return False
# Don't leave edit or selection on a child which is about to disappear
def ChildrenClosing(self, item):
    """We are about to destroy the item children.

    Clears any edit control, keyboard-current, pending-selection and
    current-selection references that point into *item*'s subtree so no
    stale pointers survive the deletion.
    """
    # Don't leave an inline editor open on an item that is going away.
    if self._textCtrl != None and item != self._textCtrl.item() and self.IsDescendantOf(item, self._textCtrl.item()):
        self._textCtrl.StopEditing()
    if item != self._key_current and self.IsDescendantOf(item, self._key_current):
        self._key_current = None
    if self.IsDescendantOf(item, self._select_me):
        # Re-target the deferred selection at the surviving ancestor.
        self._select_me = item
    if item != self._current and self.IsDescendantOf(item, self._current):
        self._current.SetHilight(False)
        self._current = None
        self._select_me = item
def DeleteChildren(self, item):
    """Delete all of *item*'s children (but not *item* itself)."""
    if not item:
        raise Exception("\nERROR: Invalid Tree Item. ")
    self._dirty = True  # do this first so stuff below doesn't cause flicker
    # Drop selection/edit references into the subtree before destroying it.
    self.ChildrenClosing(item)
    item.DeleteChildren(self)
def Delete(self, item):
    """Delete an item, its subtree, and every stale reference to them."""
    if not item:
        raise Exception("\nERROR: Invalid Tree Item. ")
    self._dirty = True  # do this first so stuff below doesn't cause flicker
    if self._textCtrl != None and self.IsDescendantOf(item, self._textCtrl.item()):
        # can't delete the item being edited, cancel editing it first
        self._textCtrl.StopEditing()
    parent = item.GetParent()
    # don't keep stale pointers around!
    if self.IsDescendantOf(item, self._key_current):
        # Don't silently change the selection:
        # do it properly in idle time, so event
        # handlers get called.
        # self._key_current = parent
        self._key_current = None
    # self._select_me records whether we need to select
    # a different item, in idle time.
    if self._select_me and self.IsDescendantOf(item, self._select_me):
        self._select_me = parent
    if self.IsDescendantOf(item, self._current):
        # Don't silently change the selection:
        # do it properly in idle time, so event
        # handlers get called.
        # self._current = parent
        self._current = None
        self._select_me = parent
    # remove the item from the tree
    if parent:
        parent.GetChildren().remove(item)  # remove by value # Can throw ValueError, catch?
    else:  # deleting the root
        # nothing will be left in the tree
        self._anchor = None
    # and delete all of its children and the item itself now
    item.DeleteChildren(self)
    self.SendDeleteEvent(item)
    if item == self._select_me:
        self._select_me = None
    # Remove the item with window
    if item in self._itemWithWindow:
        wnd = item.GetWindow()
        wnd.Hide()
        wnd.Destroy()
        item._wnd = None
        self._itemWithWindow.remove(item)
    del item
def DeleteAllItems(self):
    """Delete every item in the CustomTreeCtrl by deleting the root."""
    root = self._anchor
    if root:
        self.Delete(root)
def Expand(self, item):
    """
    Expands an item, sending a EVT_TREE_ITEM_EXPANDING and
    EVT_TREE_ITEM_EXPANDED events.

    After expanding it also scrolls so that as many of the new children
    as possible are visible. A handler may veto the EXPANDING event.
    """
    if not item:
        raise Exception("\nERROR: Invalid Tree Item. ")
    if self.HasFlag(TR_HIDE_ROOT) and item == self.GetRootItem():
        raise Exception("\nERROR: Can't Expand An Hidden Root. ")
    if not item.HasPlus():
        return
    if item.IsExpanded():
        return
    event = TreeEvent(wxEVT_TREE_ITEM_EXPANDING, self.GetId())
    event._item = item
    event.SetEventObject(self)
    if self.GetEventHandler().ProcessEvent(event) and not event.IsAllowed():
        # cancelled by program
        return
    item.Expand()
    self.CalculatePositions()
    self.RefreshSubtree(item)
    self.AdjustMyScrollbars()
    clientHeight = self.GetClientSize()[1] - 15
    children = item.GetChildren()
    # Get total height of all children of the item in pixels
    if len(children) == 0:
        totalHeight = 0  # NOTE(review): unused; childrenHeight is the value consumed below
        childrenHeight = 0
    else:
        childrenHeight = children[-1].GetY() + self.GetLineHeight(children[-1])
        childrenHeight -= children[0].GetY()
    rect = self.GetBoundingRect(item)
    if childrenHeight > clientHeight:
        # Childrens have in sum a larger height than the tree ctrl
        # -> scroll parent item to top
        scrollYBy = rect.GetTop()
        if scrollYBy > 0:
            x_pos = self.GetScrollPos(wx.HORIZONTAL)
            # Round down so parent is definitely visible
            y_pos = self.GetScrollPos(wx.VERTICAL) + \
                    scrollYBy // _PIXELS_PER_UNIT
            self.Scroll(x_pos, y_pos)
    else:
        # Childrens are not as high as the tree ctrl
        # -> scroll so that all children are visible
        scrollYBy = rect.GetTop() + rect.GetHeight() + childrenHeight - \
                clientHeight
        if scrollYBy > 0:
            x_pos = self.GetScrollPos(wx.HORIZONTAL)
            # Round up so last child is definitely visible
            y_pos = self.GetScrollPos(wx.VERTICAL) + \
                    (scrollYBy + _PIXELS_PER_UNIT) // _PIXELS_PER_UNIT
            self.Scroll(x_pos, y_pos)
    if self._hasWindows:
        # We hide the associated window here, we may show it after
        self.HideWindows()
    event.SetEventType(wxEVT_TREE_ITEM_EXPANDED)
    self.GetEventHandler().ProcessEvent(event)
def ExpandAllChildren(self, item):
    """Recursively expand *item* (unless it is a hidden root) and its subtree."""
    if not item:
        raise Exception("\nERROR: Invalid Tree Item. ")
    hidden_root = self.HasFlag(TR_HIDE_ROOT) and item == self.GetRootItem()
    if not hidden_root:
        self.Expand(item)
    if not self.IsExpanded(item):
        # Expansion was vetoed or impossible: don't descend.
        return
    child, cookie = self.GetFirstChild(item)
    while child:
        self.ExpandAllChildren(child)
        child, cookie = self.GetNextChild(item, cookie)
def ExpandAll(self):
    """Expand every item in the CustomTreeCtrl."""
    root = self._anchor
    if root:
        self.ExpandAllChildren(root)
def Collapse(self, item):
    """
    Collapse an item, sending a EVT_TREE_ITEM_COLLAPSING and
    EVT_TREE_ITEM_COLLAPSED events.

    A handler may veto the COLLAPSING event, in which case nothing changes.
    """
    if not item:
        raise Exception("\nERROR: Invalid Tree Item. ")
    if self.HasFlag(TR_HIDE_ROOT) and item == self.GetRootItem():
        raise Exception("\nERROR: Can't Collapse An Hidden Root. ")
    if not item.IsExpanded():
        return
    event = TreeEvent(wxEVT_TREE_ITEM_COLLAPSING, self.GetId())
    event._item = item
    event.SetEventObject(self)
    if self.GetEventHandler().ProcessEvent(event) and not event.IsAllowed():
        # cancelled by program
        return
    # Drop selection/edit references into the soon-hidden subtree.
    self.ChildrenClosing(item)
    item.Collapse()
    self.CalculatePositions()
    self.RefreshSubtree(item)
    if self._hasWindows:
        self.HideWindows()
    event.SetEventType(wxEVT_TREE_ITEM_COLLAPSED)
    self.GetEventHandler().ProcessEvent(event)
def CollapseAndReset(self, item):
    """Collapse *item*, then delete all of its children."""
    self.Collapse(item)
    self.DeleteChildren(item)
def Toggle(self, item):
    """Collapse *item* if it is expanded, otherwise expand it."""
    action = self.Collapse if item.IsExpanded() else self.Expand
    action(item)
def HideWindows(self):
    """Hide the windows of items that are currently scrolled out of view. Used internally."""
    for node in self._itemWithWindow:
        if not self.IsVisible(node):
            node.GetWindow().Hide()
def Unselect(self):
    """Clear the current (single) selection, if any."""
    current = self._current
    if current:
        current.SetHilight(False)
        self.RefreshLine(current)
        self._current = None
        self._select_me = None
def UnselectAllChildren(self, item):
    """Recursively remove the selection highlight from *item* and its subtree."""
    if item.IsSelected():
        item.SetHilight(False)
        self.RefreshLine(item)
    if item.HasChildren():
        for kid in item.GetChildren():
            self.UnselectAllChildren(kid)
def UnselectAll(self):
    """Remove the selection highlight from every item in the tree."""
    root = self.GetRootItem()
    # the tree might not have the root item at all
    if root:
        self.UnselectAllChildren(root)
    self.Unselect()
# Recursive function !
# To stop we must have crt_item<last_item
# Algorithm :
# Tag all next children, when no more children,
# Move to parent (not to tag)
# Keep going... if we found last_item, we stop.
def TagNextChildren(self, crt_item, last_item, select):
    """Used internally by SelectItemRange().

    Tags all siblings after *crt_item* (and their subtrees), then climbs to
    the parent and continues, until *last_item* has been tagged. Recursion
    stops at the root. Returns True once *last_item* was reached.
    """
    parent = crt_item.GetParent()
    if parent == None:  # This is root item
        return self.TagAllChildrenUntilLast(crt_item, last_item, select)
    children = parent.GetChildren()
    index = children.index(crt_item)
    count = len(children)
    # Tag the siblings that follow crt_item in order.
    for n in xrange(index+1, count):
        if self.TagAllChildrenUntilLast(children[n], last_item, select):
            return True
    return self.TagNextChildren(parent, last_item, select)
def TagAllChildrenUntilLast(self, crt_item, last_item, select):
    """Used internally by SelectItemRange().

    Set the highlight of *crt_item* and its subtree (depth-first) and stop
    as soon as *last_item* has been processed. Returns True when
    *last_item* was reached, which terminates the recursion.
    """
    crt_item.SetHilight(select)
    self.RefreshLine(crt_item)
    if crt_item == last_item:
        return True
    if crt_item.HasChildren():
        # Lazily recurse; any() stops at the first subtree containing last_item.
        return any(self.TagAllChildrenUntilLast(kid, last_item, select)
                   for kid in crt_item.GetChildren())
    return False
def SelectItemRange(self, item1, item2):
    """Apply the current item's selection state to every item between *item1* and *item2*."""
    self._select_me = None
    # The endpoints may come in either vertical order; normalise them.
    if item1.GetY() < item2.GetY():
        first, last = item1, item2
    else:
        first, last = item2, item1
    select = self._current.IsSelected()
    if self.TagAllChildrenUntilLast(first, last, select):
        return
    self.TagNextChildren(first, last, select)
def DoSelectItem(self, item, unselect_others=True, extended_select=False,
        expand_if_necessary=True, send_events=True):
    """Actually selects/unselects an item, sending a EVT_TREE_SEL_CHANGED event.

    *unselect_others* models a plain click, *extended_select* a shift-click
    (range selection). *send_events* suppresses both the CHANGING (vetoable)
    and CHANGED events when False. Collapsed ancestors are expanded unless
    *expand_if_necessary* is False, in which case the selection is aborted.
    """
    if not item:
        raise Exception("\nERROR: Invalid Tree Item. ")
    self._select_me = None
    is_single = not (self.GetTreeStyle() & TR_MULTIPLE)
    # to keep going anyhow !!!
    if is_single:
        if item.IsSelected():
            return  # nothing to do
        unselect_others = True
        extended_select = False
    elif unselect_others and item.IsSelected():
        # selection change if there is more than one item currently selected
        if len(self.GetSelections()) == 1:
            return
    if send_events:
        event = TreeEvent(wxEVT_TREE_SEL_CHANGING, self.GetId())
        event._item = item
        event._itemOld = self._current
        event.SetEventObject(self)
        # TODO : Here we don't send any selection mode yet !
        if self.GetEventHandler().ProcessEvent(event) and not event.IsAllowed():
            return
    # Make sure the target item is not hidden inside a collapsed branch.
    parent = self.GetItemParent(item)
    while parent:
        if not self.IsExpanded(parent):
            if expand_if_necessary:
                self.Expand(parent)
            else:
                return  # TODO Better reaction?
        parent = self.GetItemParent(parent)
    # ctrl press
    if unselect_others:
        if is_single:
            self.Unselect()  # to speed up thing
        else:
            self.UnselectAll()
    # shift press
    if extended_select:
        if not self._current:
            self._current = self._key_current = self.GetRootItem()
        # don't change the mark (self._current)
        self.SelectItemRange(self._current, item)
    else:
        select = True  # the default
        # Check if we need to toggle hilight (ctrl mode)
        if not unselect_others:
            select = not item.IsSelected()
        self._current = self._key_current = item
        self._current.SetHilight(select)
        self.RefreshLine(self._current)
    # This can cause idle processing to select the root
    # if no item is selected, so it must be after the
    # selection is set
    self.EnsureVisible(item)
    if send_events:
        event.SetEventType(wxEVT_TREE_SEL_CHANGED)
        self.GetEventHandler().ProcessEvent(event)
    # Handles hypertext items
    if self.IsItemHyperText(item):
        event = TreeEvent(wxEVT_TREE_ITEM_HYPERLINK, self.GetId())
        event._item = item
        self.GetEventHandler().ProcessEvent(event)
def SelectItem(self, item, select=True, expand_if_necessary=True,
        send_events=True):
    """Select or deselect *item*.

    Deselection is purely visual; selection runs the full logic in
    DoSelectItem(), including event emission.
    """
    if not item:
        raise Exception("\nERROR: Invalid Tree Item. ")
    if not select:
        # deselect: just remove the highlight
        item.SetHilight(False)
        self.RefreshLine(item)
        return
    self.DoSelectItem(item, not self.HasFlag(TR_MULTIPLE),
                      expand_if_necessary=expand_if_necessary,
                      send_events=send_events)
def FillArray(self, item, array=None):
    """
    Internal function. Used to populate an array of selected items when
    the style TR_MULTIPLE is used.

    Recursively collects the selected items below *item* into *array* and
    returns the (possibly freshly created) list.
    """
    # The default was a mutable [] — the classic shared-default pitfall.
    # None keeps the observable behavior: a falsy (missing or empty)
    # argument is replaced by a fresh list, exactly as before.
    if not array:
        array = []
    if item.IsSelected():
        array.append(item)
    if item.HasChildren() and item.IsExpanded():
        for child in item.GetChildren():
            array = self.FillArray(child, array)
    return array
def GetSelections(self):
    """
    Return the list of selected items. This can be used only if CustomTreeCtrl
    has the TR_MULTIPLE style set.
    """
    root = self.GetRootItem()
    if not root:
        # the tree is empty, so no selections
        return []
    return self.FillArray(root, [])
def SetDefaultScrollVisiblePos(self, dpos):
    """Set the default scroll target used by EnsureVisible().

    When *dpos* is "middle" items are scrolled to the vertical middle of
    the view; any other value just scrolls them into view.
    """
    self._defaultScrollVisiblePos = dpos
def GetDefaultScrollVisiblePos(self):
    """Return the default scroll target set by SetDefaultScrollVisiblePos()."""
    return self._defaultScrollVisiblePos
def EnsureVisible(self, item, toMiddle=None):
    """Ensure that an item is visible in CustomTreeCtrl.

    Expands all collapsed ancestors, then scrolls. *toMiddle* overrides
    the control default; None means "use the configured default".
    """
    if not item:
        raise Exception("\nERROR: Invalid Tree Item. ")
    # first expand all parent branches
    parent = item.GetParent()
    if self.HasFlag(TR_HIDE_ROOT):
        # Never try to expand the invisible root itself.
        while parent and parent != self._anchor:
            self.Expand(parent)
            parent = parent.GetParent()
    else:
        while parent:
            self.Expand(parent)
            parent = parent.GetParent()
    if toMiddle is None:
        toMiddle = self._defaultScrollVisiblePos == "middle"
    if toMiddle:
        self.ScrollToMiddle(item)
    else:
        self.ScrollTo(item)
def ScrollTo(self, item):
    """Scrolls the specified item into view (top when above, bottom when below)."""
    if not item:
        return
    # We have to call this here because the label in
    # question might just have been added and no screen
    # update taken place.
    if self._dirty:
        if wx.Platform in ["__WXMSW__", "__WXMAC__"]:
            self.Update()
        else:
            wx.YieldIfNeeded()
    # now scroll to the item
    item_y = item.GetY()
    start_x, start_y = self.GetViewStart()
    start_y *= _PIXELS_PER_UNIT
    client_w, client_h = self.GetClientSize()
    x, y = 0, 0
    if item_y < start_y+3:
        # going down
        x, y = self._anchor.GetSize(x, y, self)
        y += _PIXELS_PER_UNIT + 2  # one more scrollbar unit + 2 pixels
        x += _PIXELS_PER_UNIT + 2  # one more scrollbar unit + 2 pixels
        x_pos = self.GetScrollPos(wx.HORIZONTAL)
        # Item should appear at top
        self.SetScrollbars(_PIXELS_PER_UNIT, _PIXELS_PER_UNIT, x/_PIXELS_PER_UNIT, y/_PIXELS_PER_UNIT, x_pos, item_y/_PIXELS_PER_UNIT)
    elif item_y+self.GetLineHeight(item) > start_y+client_h:
        # going up
        x, y = self._anchor.GetSize(x, y, self)
        y += _PIXELS_PER_UNIT + 2  # one more scrollbar unit + 2 pixels
        x += _PIXELS_PER_UNIT + 2  # one more scrollbar unit + 2 pixels
        item_y += _PIXELS_PER_UNIT+2
        x_pos = self.GetScrollPos(wx.HORIZONTAL)
        # Item should appear at bottom
        self.SetScrollbars(_PIXELS_PER_UNIT, _PIXELS_PER_UNIT, x/_PIXELS_PER_UNIT, y/_PIXELS_PER_UNIT, x_pos, (item_y+self.GetLineHeight(item)-client_h)/_PIXELS_PER_UNIT )
def ScrollToMiddle(self, item):
    """Scrolls the specified item into the vertical middle of the view."""
    if not item:
        return
    # We have to call this here because the label in
    # question might just have been added and no screen
    # update taken place.
    if self._dirty:
        if wx.Platform in ["__WXMSW__", "__WXMAC__"]:
            self.Update()
        else:
            wx.YieldIfNeeded()
    # now scroll to the item
    item_y = item.GetY()
    start_x, start_y = self.GetViewStart()
    start_y *= _PIXELS_PER_UNIT
    client_w, client_h = self.GetClientSize()
    # Centre the item: half the leftover client height above it, clamped at 0.
    target_y = item_y - (client_h - self.GetLineHeight(item))// 2
    target_y = max(0, target_y)
    x, y = 0, 0
    x, y = self._anchor.GetSize(x, y, self)
    y += _PIXELS_PER_UNIT + 2  # one more scrollbar unit + 2 pixels
    x += _PIXELS_PER_UNIT + 2  # one more scrollbar unit + 2 pixels
    x_pos = self.GetScrollPos(wx.HORIZONTAL)
    self.SetScrollbars(_PIXELS_PER_UNIT, _PIXELS_PER_UNIT, x/_PIXELS_PER_UNIT, y/_PIXELS_PER_UNIT, x_pos, target_y//_PIXELS_PER_UNIT)
def OnCompareItems(self, item1, item2):
    """
    Compare two items by their label text, cmp-style.

    Override this function in the derived class to change the sort order of
    the items in the CustomTreeCtrl. The function should return a negative,
    zero or positive value if the first item is less than, equal to or
    greater than the second one. The base class version compares items
    alphabetically (case-sensitive).
    """
    # The previous implementation returned an equality test (True/False),
    # which can never be negative and therefore broke the documented
    # cmp contract relied on by SortChildren()'s children.sort(...).
    text1 = self.GetItemText(item1)
    text2 = self.GetItemText(item2)
    return (text1 > text2) - (text1 < text2)
def SortChildren(self, item):
    """
    Sorts the children of the given item using OnCompareItems method of CustomTreeCtrl.
    You should override that method to change the sort order (the default is ascending
    case-sensitive alphabetical order).
    """
    if not item:
        raise Exception("\nERROR: Invalid Tree Item. ")
    children = item.GetChildren()
    if len(children) > 1:
        self._dirty = True
        # Python 2 cmp-style sort: OnCompareItems must return neg/zero/pos.
        children.sort(self.OnCompareItems)
def GetImageList(self):
    """Returns the normal image list (item icons)."""
    return self._imageListNormal
def GetButtonsImageList(self):
    """Returns the buttons image list (from which application-defined button images are taken)."""
    return self._imageListButtons
def GetStateImageList(self):
    """Returns the state image list (from which application-defined state images are taken)."""
    return self._imageListState
def GetImageListCheck(self):
    """Returns the image list used to build the check/radio buttons."""
    return self._imageListCheck
def CalculateLineHeight(self):
    """Calculates the height of a line.

    Starts from the character height of the control's font and raises it to
    the tallest image found in the normal, buttons and check image lists.
    May be toggled off; then CustomTreeCtrl will spread when necessary
    (which might look ugly).
    """
    dc = wx.ClientDC(self)
    self._lineHeight = dc.GetCharHeight()
    # The three loops over the image lists were verbatim duplicates;
    # fold them into one pass over the lists that exist.
    for imglist in (self._imageListNormal, self._imageListButtons,
                    self._imageListCheck):
        if imglist:
            n = imglist.GetImageCount()
            for i in xrange(n):
                width, height = imglist.GetSize(i)
                if height > self._lineHeight:
                    self._lineHeight = height
def SetImageList(self, imageList):
    """Sets the normal image list and rebuilds the grayed-out copy for disabled items."""
    if self._ownsImageListNormal:
        del self._imageListNormal
    self._imageListNormal = imageList
    self._ownsImageListNormal = False
    self._dirty = True
    # Don't do any drawing if we're setting the list to NULL,
    # since we may be in the process of deleting the tree control.
    if imageList:
        self.CalculateLineHeight()
        # We gray out the image list to use the grayed icons with disabled items
        sz = imageList.GetSize(0)
        self._grayedImageList = wx.ImageList(sz[0], sz[1], True, 0)
        for ii in xrange(imageList.GetImageCount()):
            bmp = imageList.GetBitmap(ii)
            image = wx.ImageFromBitmap(bmp)
            image = GrayOut(image)
            newbmp = wx.BitmapFromImage(image)
            self._grayedImageList.Add(newbmp)
def SetImageListNoGrayedItems(self, imageList):
    """
    Sets the normal image list, but not the grayed image list.

    Identical to SetImageList() except that no grayed-out copy is built.
    """
    if self._ownsImageListNormal:
        del self._imageListNormal
    self._imageListNormal = imageList
    self._ownsImageListNormal = False
    self._dirty = True
    # Don't do any drawing if we're setting the list to NULL,
    # since we may be in the process of deleting the tree control.
    if imageList:
        self.CalculateLineHeight()
def SetStateImageList(self, imageList):
    """Sets the state image list (from which application-defined state images are taken)."""
    if self._ownsImageListState:
        # Release the previously owned list before replacing it.
        del self._imageListState
    self._imageListState = imageList
    self._ownsImageListState = False
def SetButtonsImageList(self, imageList):
    """Sets the buttons image list (from which application-defined button images are taken)."""
    if self._ownsImageListButtons:
        # Release the previously owned list before replacing it.
        del self._imageListButtons
    self._imageListButtons = imageList
    self._ownsImageListButtons = False
    self._dirty = True
    self.CalculateLineHeight()
def SetImageListCheck(self, sizex, sizey, imglist=None):
    """Sets the check image list.

    With *imglist* None a default list (checked/unchecked/flagged/unflagged)
    of the given size is built; otherwise *imglist* is used and its own
    size wins. A grayed-out copy is always rebuilt for disabled items.
    """
    if imglist is None:
        self._imageListCheck = wx.ImageList(sizex, sizey)
        self._imageListCheck.Add(GetCheckedBitmap())
        self._imageListCheck.Add(GetNotCheckedBitmap())
        self._imageListCheck.Add(GetFlaggedBitmap())
        self._imageListCheck.Add(GetNotFlaggedBitmap())
    else:
        sizex, sizey = imglist.GetSize(0)
        self._imageListCheck = imglist
    # We gray out the image list to use the grayed icons with disabled items
    self._grayedCheckList = wx.ImageList(sizex, sizey, True, 0)
    for ii in xrange(self._imageListCheck.GetImageCount()):
        bmp = self._imageListCheck.GetBitmap(ii)
        image = wx.ImageFromBitmap(bmp)
        image = GrayOut(image)
        newbmp = wx.BitmapFromImage(image)
        self._grayedCheckList.Add(newbmp)
    self._dirty = True
    if imglist:
        self.CalculateLineHeight()
def AssignImageList(self, imageList):
    """Set the normal image list and take ownership of it."""
    self.SetImageList(imageList)
    # Ownership must be claimed after SetImageList(), which resets it.
    self._ownsImageListNormal = True
def AssignStateImageList(self, imageList):
    """Set the state image list and take ownership of it."""
    self.SetStateImageList(imageList)
    # Ownership must be claimed after SetStateImageList(), which resets it.
    self._ownsImageListState = True
def AssignButtonsImageList(self, imageList):
    """Set the buttons image list and take ownership of it."""
    self.SetButtonsImageList(imageList)
    # Ownership must be claimed after SetButtonsImageList(), which resets it.
    self._ownsImageListButtons = True
# -----------------------------------------------------------------------------
# helpers
# -----------------------------------------------------------------------------
def AdjustMyScrollbars(self):
    """Adjust the wx.ScrolledWindow scrollbars to fit the whole tree."""
    if self._anchor:
        # Virtual size = size of the root's subtree plus one scroll unit
        # of margin; keep the current scroll position.
        x, y = self._anchor.GetSize(0, 0, self)
        y += _PIXELS_PER_UNIT + 2  # one more scrollbar unit + 2 pixels
        x += _PIXELS_PER_UNIT + 2  # one more scrollbar unit + 2 pixels
        x_pos = self.GetScrollPos(wx.HORIZONTAL)
        y_pos = self.GetScrollPos(wx.VERTICAL)
        self.SetScrollbars(_PIXELS_PER_UNIT, _PIXELS_PER_UNIT, x/_PIXELS_PER_UNIT, y/_PIXELS_PER_UNIT, x_pos, y_pos)
    else:
        # Empty tree: no scrollbars at all.
        self.SetScrollbars(0, 0, 0, 0)
def GetLineHeight(self, item):
    """Return the pixel height of *item*'s row."""
    if self.GetTreeStyle() & TR_HAS_VARIABLE_ROW_HEIGHT:
        # Rows may differ: ask the item itself.
        return item.GetHeight()
    # Uniform rows: use the precomputed control-wide height.
    return self._lineHeight
def DrawVerticalGradient(self, dc, rect, hasfocus):
    """Gradient fill from colour 1 to colour 2 from top to bottom."""
    oldpen = dc.GetPen()
    oldbrush = dc.GetBrush()
    dc.SetPen(wx.TRANSPARENT_PEN)
    # calculate gradient coefficients
    if hasfocus:
        col2 = self._secondcolour
        col1 = self._firstcolour
    else:
        col2 = self._hilightUnfocusedBrush.GetColour()
        col1 = self._hilightUnfocusedBrush2.GetColour()
    r1, g1, b1 = int(col1.Red()), int(col1.Green()), int(col1.Blue())
    r2, g2, b2 = int(col2.Red()), int(col2.Green()), int(col2.Blue())
    flrect = float(rect.height)
    rstep = float((r2 - r1)) / flrect
    gstep = float((g2 - g1)) / flrect
    bstep = float((b2 - b1)) / flrect
    rf, gf, bf = 0, 0, 0
    for y in xrange(rect.y, rect.y + rect.height):
        # int(): colour components must be integers; DrawHorizontalGradient
        # already did this, this method passed floats to wx.Brush.
        currCol = (int(r1 + rf), int(g1 + gf), int(b1 + bf))
        dc.SetBrush(wx.Brush(currCol, wx.SOLID))
        dc.DrawRectangle(rect.x, y, rect.width, 1)
        rf = rf + rstep
        gf = gf + gstep
        bf = bf + bstep
    dc.SetPen(oldpen)
    dc.SetBrush(wx.TRANSPARENT_BRUSH)
    dc.DrawRectangleRect(rect)
    dc.SetBrush(oldbrush)
def DrawHorizontalGradient(self, dc, rect, hasfocus):
    """Gradient fill from colour 1 to colour 2 from left to right."""
    oldpen = dc.GetPen()
    oldbrush = dc.GetBrush()
    dc.SetPen(wx.TRANSPARENT_PEN)
    # calculate gradient coefficients
    if hasfocus:
        col2 = self._secondcolour
        col1 = self._firstcolour
    else:
        col2 = self._hilightUnfocusedBrush.GetColour()
        col1 = self._hilightUnfocusedBrush2.GetColour()
    r1, g1, b1 = int(col1.Red()), int(col1.Green()), int(col1.Blue())
    r2, g2, b2 = int(col2.Red()), int(col2.Green()), int(col2.Blue())
    flrect = float(rect.width)
    # Per-column colour increment across the rectangle's width.
    rstep = float((r2 - r1)) / flrect
    gstep = float((g2 - g1)) / flrect
    bstep = float((b2 - b1)) / flrect
    rf, gf, bf = 0, 0, 0
    # Paint one 1-pixel-wide column per step, left to right.
    for x in xrange(rect.x, rect.x + rect.width):
        currCol = (int(r1 + rf), int(g1 + gf), int(b1 + bf))
        dc.SetBrush(wx.Brush(currCol, wx.SOLID))
        dc.DrawRectangle(x, rect.y, 1, rect.height)
        rf = rf + rstep
        gf = gf + gstep
        bf = bf + bstep
    dc.SetPen(oldpen)
    dc.SetBrush(wx.TRANSPARENT_BRUSH)
    dc.DrawRectangleRect(rect)
    dc.SetBrush(oldbrush)
def DrawVistaRectangle(self, dc, rect, hasfocus):
"""Draw the selected item(s) with the Windows Vista style."""
if hasfocus:
outer = _rgbSelectOuter
inner = _rgbSelectInner
top = _rgbSelectTop
bottom = _rgbSelectBottom
else:
outer = _rgbNoFocusOuter
inner = _rgbNoFocusInner
top = _rgbNoFocusTop
bottom = _rgbNoFocusBottom
oldpen = dc.GetPen()
oldbrush = dc.GetBrush()
bdrRect = wx.Rect(*rect.Get())
filRect = wx.Rect(*rect.Get())
filRect.Deflate(1,1)
r1, g1, b1 = int(top.Red()), int(top.Green()), int(top.Blue())
r2, g2, b2 = int(bottom.Red()), int(bottom.Green()), int(bottom.Blue())
flrect = float(filRect.height)
if flrect < 1:
flrect = self._lineHeight
rstep = float((r2 - r1)) / flrect
gstep = float((g2 - g1)) / flrect
bstep = float((b2 - b1)) / flrect
rf, gf, bf = 0, 0, 0
dc.SetPen(wx.TRANSPARENT_PEN)
for y in xrange(filRect.y, filRect.y + filRect.height):
currCol = (r1 + rf, g1 + gf, b1 + bf)
dc.SetBrush(wx.Brush(currCol, wx.SOLID))
dc.DrawRectangle(filRect.x, y, filRect.width, 1)
rf = rf + rstep
gf = gf + gstep
bf = bf + bstep
dc.SetBrush(wx.TRANSPARENT_BRUSH)
dc.SetPen(wx.Pen(outer))
dc.DrawRoundedRectangleRect(bdrRect, 3)
bdrRect.Deflate(1, 1)
dc.SetPen(wx.Pen(inner))
dc.DrawRoundedRectangleRect(bdrRect, 2)
dc.SetPen(oldpen)
dc.SetBrush(oldbrush)
    def PaintItem(self, item, dc):
        """Actually paint an item: background/selection, state images,
        check/radio icon, label text and any associated window.

        :param item: the tree item to paint;
        :param dc: the device context to draw on. The caller is expected to
         have already set the text foreground colour.
        """
        # font: per-item attribute wins, then bold, then hypertext styling
        attr = item.GetAttributes()
        if attr and attr.HasFont():
            dc.SetFont(attr.GetFont())
        elif item.IsBold():
            dc.SetFont(self._boldFont)
        if item.IsHyperText():
            dc.SetFont(self.GetHyperTextFont())
            if item.GetVisited():
                dc.SetTextForeground(self.GetHyperTextVisitedColour())
            else:
                dc.SetTextForeground(self.GetHyperTextNewColour())
        text_w, text_h, dummy = dc.GetMultiLineTextExtent(item.GetText())
        image = item.GetCurrentImage()
        checkimage = item.GetCurrentCheckedImage()
        image_w, image_h = 0, 0
        if image != _NO_IMAGE:
            if self._imageListNormal:
                image_w, image_h = self._imageListNormal.GetSize(image)
                image_w += 4  # 4px gap between image and label
            else:
                image = _NO_IMAGE
        # non-zero item type means a check/radio item
        if item.GetType() != 0:
            wcheck, hcheck = self._imageListCheck.GetSize(item.GetType())
            wcheck += 4  # 4px gap after the check icon
        else:
            wcheck, hcheck = 0, 0
        total_h = self.GetLineHeight(item)
        drawItemBackground = False
        if item.IsSelected():
            # under mac selections are only a rectangle in case they don't have the focus
            if wx.Platform == "__WXMAC__":
                if not self._hasFocus:
                    dc.SetBrush(wx.TRANSPARENT_BRUSH)
                    dc.SetPen(wx.Pen(wx.SystemSettings_GetColour(wx.SYS_COLOUR_HIGHLIGHT), 1, wx.SOLID))
                else:
                    dc.SetBrush(self._hilightBrush)
            else:
                dc.SetBrush((self._hasFocus and [self._hilightBrush] or [self._hilightUnfocusedBrush])[0])
                drawItemBackground = True
        else:
            if attr and attr.HasBackgroundColour():
                drawItemBackground = True
                colBg = attr.GetBackgroundColour()
            else:
                colBg = self._backgroundColour
            dc.SetBrush(wx.Brush(colBg, wx.SOLID))
            dc.SetPen(wx.TRANSPARENT_PEN)
        # leave one extra pixel for the row separator line when enabled
        offset = (self.HasFlag(TR_ROW_LINES) and [1] or [0])[0]
        if self.HasFlag(TR_FULL_ROW_HIGHLIGHT):
            # highlight spans the full client width
            x = 0
            w, h = self.GetClientSize()
            itemrect = wx.Rect(x, item.GetY()+offset, w, total_h-offset)
            if item.IsSelected():
                if self._usegradients:
                    if self._gradientstyle == 0:   # Horizontal
                        self.DrawHorizontalGradient(dc, itemrect, self._hasFocus)
                    else:   # Vertical
                        self.DrawVerticalGradient(dc, itemrect, self._hasFocus)
                elif self._vistaselection:
                    self.DrawVistaRectangle(dc, itemrect, self._hasFocus)
                else:
                    if wx.Platform in ["__WXGTK2__", "__WXMAC__"]:
                        flags = wx.CONTROL_SELECTED
                        if self._hasFocus: flags = flags | wx.CONTROL_FOCUSED
                        wx.RendererNative.Get().DrawItemSelectionRect(self, dc, itemrect, flags)
                    else:
                        dc.DrawRectangleRect(itemrect)
        else:
            if item.IsSelected():
                # If it's selected, and there's an image, then we should
                # take care to leave the area under the image painted in the
                # background colour.
                wnd = item.GetWindow()
                wndx = 0
                if wnd:
                    wndx, wndy = item.GetWindowSize()
                itemrect = wx.Rect(item.GetX() + wcheck + image_w - 2,
                                   item.GetY()+offset,
                                   item.GetWidth() - image_w - wcheck + 2 - wndx,
                                   total_h-offset)
                if self._usegradients:
                    if self._gradientstyle == 0:   # Horizontal
                        self.DrawHorizontalGradient(dc, itemrect, self._hasFocus)
                    else:   # Vertical
                        self.DrawVerticalGradient(dc, itemrect, self._hasFocus)
                elif self._vistaselection:
                    self.DrawVistaRectangle(dc, itemrect, self._hasFocus)
                else:
                    if wx.Platform in ["__WXGTK2__", "__WXMAC__"]:
                        flags = wx.CONTROL_SELECTED
                        if self._hasFocus: flags = flags | wx.CONTROL_FOCUSED
                        wx.RendererNative.Get().DrawItemSelectionRect(self, dc, itemrect, flags)
                    else:
                        dc.DrawRectangleRect(itemrect)
            # On GTK+ 2, drawing a 'normal' background is wrong for themes that
            # don't allow backgrounds to be customized. Not drawing the background,
            # except for custom item backgrounds, works for both kinds of theme.
            elif drawItemBackground:
                minusicon = wcheck + image_w - 2
                itemrect = wx.Rect(item.GetX()+minusicon,
                                   item.GetY()+offset,
                                   item.GetWidth()-minusicon,
                                   total_h-offset)
                if self._usegradients and self._hasFocus:
                    if self._gradientstyle == 0:   # Horizontal
                        self.DrawHorizontalGradient(dc, itemrect, self._hasFocus)
                    else:   # Vertical
                        self.DrawVerticalGradient(dc, itemrect, self._hasFocus)
                else:
                    dc.DrawRectangleRect(itemrect)
        if image != _NO_IMAGE:
            # clip so the image never bleeds into the label area
            dc.SetClippingRegion(item.GetX(), item.GetY(), wcheck+image_w-2, total_h)
            if item.IsEnabled():
                imglist = self._imageListNormal
            else:
                imglist = self._grayedImageList
            imglist.Draw(image, dc,
                         item.GetX() + wcheck,
                         item.GetY() + ((total_h > image_h) and [(total_h-image_h)/2] or [0])[0],
                         wx.IMAGELIST_DRAW_TRANSPARENT)
            dc.DestroyClippingRegion()
        if wcheck:
            if item.IsEnabled():
                imglist = self._imageListCheck
            else:
                imglist = self._grayedCheckList
            imglist.Draw(checkimage, dc,
                         item.GetX(),
                         item.GetY() + ((total_h > hcheck) and [(total_h-hcheck)/2] or [0])[0],
                         wx.IMAGELIST_DRAW_TRANSPARENT)
        dc.SetBackgroundMode(wx.TRANSPARENT)
        # vertically centre the label within the line height
        extraH = ((total_h > text_h) and [(total_h - text_h)/2] or [0])[0]
        textrect = wx.Rect(wcheck + image_w + item.GetX(), item.GetY() + extraH, text_w, text_h)
        if not item.IsEnabled():
            foreground = dc.GetTextForeground()
            dc.SetTextForeground(self._disabledColour)
            dc.DrawLabel(item.GetText(), textrect)
            dc.SetTextForeground(foreground)
        else:
            if wx.Platform == "__WXMAC__" and item.IsSelected() and self._hasFocus:
                dc.SetTextForeground(wx.WHITE)
            dc.DrawLabel(item.GetText(), textrect)
        # position (and show) the item's associated window, if any
        wnd = item.GetWindow()
        if wnd:
            wndx = wcheck + image_w + item.GetX() + text_w + 4
            xa, ya = self.CalcScrolledPosition((0, item.GetY()))
            wndx += xa
            if item.GetHeight() > item.GetWindowSize()[1]:
                ya += (item.GetHeight() - item.GetWindowSize()[1])/2
            if not wnd.IsShown():
                wnd.Show()
            if wnd.GetPosition() != (wndx, ya):
                wnd.SetPosition((wndx, ya))
        # restore normal font
        dc.SetFont(self._normalFont)
# Now y stands for the top of the item, whereas it used to stand for middle !
def PaintLevel(self, item, dc, level, y):
y = self._RecurPaintLevel(item, dc, level, y)
self.PaintButtons(item, dc, level)
return y
# Now y stands for the top of the item, whereas it used to stand for middle !
    def _RecurPaintLevel(self, item, dc, level, y):
        """Paint a level of CustomTreeCtrl.

        Recursively paints *item* and, when expanded, its children,
        together with the connecting tree lines. *y* is the top of the
        item; the updated *y* (bottom of the painted subtree) is returned.
        """
        x = level * self._indent
        # print "PaintLevel1", repr(level)
        if not self.HasFlag(TR_HIDE_ROOT):
            x += self._indent
        elif level == 0:
            # always expand hidden root
            origY = y
            children = item.GetChildren()
            count = len(children)
            if count > 0:
                n = 0
                while n < count:
                    oldY = y
                    y = self._RecurPaintLevel(children[n], dc, 1, y)
                    n = n + 1
                if not self.HasFlag(TR_NO_LINES) and self.HasFlag(TR_LINES_AT_ROOT) and count > 0:
                    # draw line down to last child
                    origY += self.GetLineHeight(children[0])>>1
                    oldY += self.GetLineHeight(children[n-1])>>1
                    oldPen = dc.GetPen()
                    dc.SetPen(self._dottedPen)
                    dc.DrawLine(3, origY, 3, oldY)
                    dc.SetPen(oldPen)
            # hidden root itself is never painted
            return y
        item.SetX(x+self._spacing)
        item.SetY(y)
        h = self.GetLineHeight(item)
        y_top = y
        y_mid = y_top + (h>>1)
        y += h
        exposed_x = dc.LogicalToDeviceX(0)
        exposed_y = dc.LogicalToDeviceY(y_top)
        if self.IsExposed(exposed_x, exposed_y, 10000, h):  # 10000 = very much
            if wx.Platform == "__WXMAC__":
                # don't draw rect outline if we already have the
                # background color under Mac
                pen = ((item.IsSelected() and self._hasFocus) and [self._borderPen] or [wx.TRANSPARENT_PEN])[0]
            else:
                pen = self._borderPen
            if item.IsSelected():
                # NOTE(review): both branches assign the same colour; the
                # platform distinction appears to be vestigial here.
                if (wx.Platform == "__WXMAC__" and self._hasFocus):
                    colText = wx.SystemSettings_GetColour(wx.SYS_COLOUR_HIGHLIGHTTEXT)
                else:
                    colText = wx.SystemSettings_GetColour(wx.SYS_COLOUR_HIGHLIGHTTEXT)
            else:
                attr = item.GetAttributes()
                if attr and attr.HasTextColour():
                    colText = attr.GetTextColour()
                else:
                    colText = self.GetForegroundColour()
            if self._vistaselection:
                colText = wx.BLACK
            # prepare to draw
            dc.SetTextForeground(colText)
            dc.SetPen(pen)
            oldpen = pen
            # draw
            self.PaintItem(item, dc)
            if self.HasFlag(TR_ROW_LINES):
                # if the background colour is white, choose a
                # contrasting color for the lines
                medium_grey = wx.Pen(wx.Colour(200, 200, 200))
                dc.SetPen(((self.GetBackgroundColour() == wx.WHITE) and [medium_grey] or [wx.WHITE_PEN])[0])
                dc.DrawLine(0, y_top, 10000, y_top)
                dc.DrawLine(0, y, 10000, y)
            # restore DC objects
            dc.SetBrush(wx.WHITE_BRUSH)
            dc.SetTextForeground(wx.BLACK)
            if not self.HasFlag(TR_NO_LINES):
                # draw the horizontal line here
                dc.SetPen(self._dottedPen)
                x_start = x
                if x > self._indent:
                    x_start -= self._indent
                elif self.HasFlag(TR_LINES_AT_ROOT):
                    x_start = 3
                dc.DrawLine(x_start, y_mid, x + self._spacing, y_mid)
                dc.SetPen(oldpen)
        origlevel = level
        if item.IsExpanded():
            children = item.GetChildren()
            count = len(children)
            if count > 0:
                n = 0
                level = level + 1
                while n < count:
                    oldY = y
                    y = self._RecurPaintLevel(children[n], dc, level, y)
                    n = n + 1
                if not self.HasFlag(TR_NO_LINES) and count > 0:
                    # draw line down to last child
                    oldY += self.GetLineHeight(children[n-1])>>1
                    if self.HasButtons():
                        y_mid += 5
                    # Only draw the portion of the line that is visible, in case it is huge
                    xOrigin, yOrigin = dc.GetDeviceOrigin()
                    yOrigin = abs(yOrigin)
                    width, height = self.GetClientSize()
                    # Move end points to the begining/end of the view?
                    if y_mid < yOrigin:
                        y_mid = yOrigin
                    if oldY > yOrigin + height:
                        oldY = yOrigin + height
                    # after the adjustments if y_mid is larger than oldY then the line
                    # isn't visible at all so don't draw anything
                    if y_mid < oldY:
                        dc.SetPen(self._dottedPen)
                        dc.DrawLine(x, y_mid, x, oldY)
                # buttons are painted after the items so they end up on top
                for c in children:
                    self.PaintButtons(c, dc, level)
        return y
    def PaintButtons(self, item, dc, level):
        """
        Draw the expand/collapse button for *item* at indent *level*:
        either an image from `_imageListButtons`, a Mac-like twist
        triangle (TR_TWIST_BUTTONS) or the native renderer's button.
        """
        x = level * self._indent
        if not self.HasFlag(TR_HIDE_ROOT):
            x += self._indent
        h = self.GetLineHeight(item)
        y_mid = item.GetY() + (h>>1)
        # only items with children and past the first indent get a button
        if item.HasPlus() and self.HasButtons() and x > self._indent:
            x_start = x
            # if x > self._indent:
            x_start -= self._indent
            if self._imageListButtons:
                # draw the image button here
                image_h = 0
                image_w = 0
                image = (item.IsExpanded() and [TreeItemIcon_Expanded] or [TreeItemIcon_Normal])[0]
                if item.IsSelected():
                    image += TreeItemIcon_Selected - TreeItemIcon_Normal
                image_w, image_h = self._imageListButtons.GetSize(image)
                # xx = x - image_w/2
                xx = x_start - image_w/2
                yy = y_mid - image_h/2
                dc.SetClippingRegion(xx, yy, image_w, image_h)
                self._imageListButtons.Draw(image, dc, xx, yy,
                                            wx.IMAGELIST_DRAW_TRANSPARENT)
                dc.DestroyClippingRegion()
            else: # no custom buttons
                if self._windowStyle & TR_TWIST_BUTTONS:
                    # We draw something like the Mac twist buttons
                    dc.SetPen(wx.BLACK_PEN)
                    dc.SetBrush(self._hilightBrush)
                    button = [wx.Point(), wx.Point(), wx.Point()]
                    if item.IsExpanded():
                        # downward-pointing triangle
                        button[0].x = x_start - 5
                        button[0].y = y_mid - 3
                        button[1].x = x_start + 5
                        button[1].y = button[0].y
                        button[2].x = x_start
                        button[2].y = button[0].y + 6
                    else:
                        # rightward-pointing triangle
                        button[0].x = x_start - 3
                        button[0].y = y_mid - 5
                        button[1].x = button[0].x
                        button[1].y = y_mid + 5
                        button[2].x = button[0].x + 5
                        button[2].y = y_mid
                    dc.DrawPolygon(button)
                else:
                    # These are the standard wx.TreeCtrl buttons as wx.RendererNative knows
                    wImage = 9
                    hImage = 9
                    flag = 0
                    if item.IsExpanded():
                        flag |= _CONTROL_EXPANDED
                    if item == self._underMouse:
                        flag |= _CONTROL_CURRENT
                    self._drawingfunction(self, dc, wx.Rect(x_start - wImage/2, y_mid - hImage/2, wImage, hImage), flag)
# -----------------------------------------------------------------------------
# wxWidgets callbacks
# -----------------------------------------------------------------------------
    def OnPaint(self, event):
        """Handles the wx.EVT_PAINT event: clears the buffered DC and
        repaints the whole tree starting at the root anchor."""
        # dc = wx.PaintDC(self)
        dc = wx.BufferedPaintDC(self)
        # fall back to the system window colour when none was set
        if self._backgroundColour == wx.NullColour:
            bgBrush = wx.Brush(wx.SystemSettings.GetColour(wx.SYS_COLOUR_WINDOW))
        else:
            bgBrush = wx.Brush(self._backgroundColour)
        dc.SetBackground(bgBrush)
        dc.Clear()
        dc.SetBackground(wx.NullBrush)
        self.PrepareDC(dc)
        # nothing to paint in an empty tree
        if not self._anchor:
            return
        dc.SetFont(self._normalFont)
        dc.SetPen(self._dottedPen)
        y = 2
        dc.BeginDrawing()
        self.PaintLevel(self._anchor, dc, 0, y)
        dc.EndDrawing()
    def OnEraseBackground(self, event):
        """Handles the wx.EVT_ERASE_BACKGROUND event: tiles the background
        image if one is set, otherwise suppresses the erase entirely
        (the buffered paint handler clears the background itself)."""
        # Can we actually do something here (or in OnPaint()) To Handle
        # background images that are stretchable or always centered?
        # I tried but I get enormous flickering...
        if not self._backgroundImage:
            # deliberately NOT skipping: suppressing the erase avoids flicker
            # event.Skip()
            return
        if self._imageStretchStyle == _StyleTile:
            dc = event.GetDC()
            if not dc:
                # no DC supplied by the event: draw via a clipped client DC
                dc = wx.ClientDC(self)
                rect = self.GetUpdateRegion().GetBox()
                dc.SetClippingRect(rect)
            self.TileBackground(dc)
    def OnSysColourChanged(self, evt):
        """Handles the wx.EVT_SYS_COLOUR_CHANGED event: simply repaints
        so system-derived colours are re-read on the next paint."""
        # self._backgroundColour = wx.SystemSettings.GetColour(
        #         wx.SYS_COLOUR_WINDOW)
        self.Refresh()
def TileBackground(self, dc):
"""Tiles the background image to fill all the available area."""
sz = self.GetClientSize()
w = self._backgroundImage.GetWidth()
h = self._backgroundImage.GetHeight()
x = 0
while x < sz.width:
y = 0
while y < sz.height:
dc.DrawBitmap(self._backgroundImage, x, y, True)
y = y + h
x = x + w
def OnSetFocus(self, event):
"""Handles the wx.EVT_SET_FOCUS event."""
self._hasFocus = True
self.RefreshSelected()
event.Skip()
def OnKillFocus(self, event):
"""Handles the wx.EVT_KILL_FOCUS event."""
self._hasFocus = False
self.RefreshSelected()
event.Skip()
    def OnKeyDown(self, event):
        """Handles the wx.EVT_CHAR event, sending a EVT_TREE_KEY_DOWN event.

        If the EVT_TREE_KEY_DOWN event is not handled by user code, the key
        is dispatched to the built-in navigation/activation bindings below.
        """
        te = TreeEvent(wxEVT_TREE_KEY_DOWN, self.GetId())
        te._evtKey = event
        te.SetEventObject(self)
        if self.GetEventHandler().ProcessEvent(te):
            # intercepted by the user code
            return
        if self._current is None or self._key_current is None:
            if self._key_current is None:
                event.Skip()
                return
            else: # MB: Not really knowing what I'm doing here
                self._current = self._key_current
        # how should the selection work for this event?
        is_multiple, extended_select, unselect_others = EventFlagsToSelType(self.GetTreeStyle(), event.ShiftDown(), event.CmdDown())
        # + : Expand
        # - : Collaspe
        # * : Expand all/Collapse all
        # ' ' | return : activate
        # up : go up (not last children!)
        # down : go down
        # left : go to parent
        # right : open if parent and go next
        # home : go to root
        # end : go to last item without opening parents
        # alnum : start or continue searching for the item with this prefix
        keyCode = event.GetKeyCode()
        if keyCode in [ord("+"), wx.WXK_ADD, wx.WXK_NUMPAD_ADD]: # "+"
            if self._current.HasPlus() and not self.IsExpanded(self._current) and self.IsItemEnabled(self._current):
                self.Expand(self._current)
        elif keyCode in [ord("*"), wx.WXK_MULTIPLY, wx.WXK_NUMPAD_MULTIPLY]: # "*"
            if not self.IsExpanded(self._current) and self.IsItemEnabled(self._current):
                # expand all
                self.ExpandAll(self._current)
        elif keyCode in [ord("-"), wx.WXK_SUBTRACT, wx.WXK_NUMPAD_SUBTRACT]: # "-"
            if self.IsExpanded(self._current):
                self.Collapse(self._current)
        elif keyCode == wx.WXK_MENU:
            # Use the item's bounding rectangle to determine position for the event
            itemRect = self.GetBoundingRect(self._current, True)
            event = TreeEvent(wxEVT_TREE_ITEM_MENU, self.GetId())
            event._item = self._current
            # Use the left edge, vertical middle
            event._pointDrag = wx.Point(itemRect.GetX(), itemRect.GetY() + itemRect.GetHeight()/2)
            event.SetEventObject(self)
            self.GetEventHandler().ProcessEvent(event)
        elif keyCode in [wx.WXK_RETURN, wx.WXK_SPACE]:
            if not self.IsItemEnabled(self._current):
                event.Skip()
                return
            if not event.HasModifiers():
                event = TreeEvent(wxEVT_TREE_ITEM_ACTIVATED, self.GetId())
                event._item = self._current
                event.SetEventObject(self)
                self.GetEventHandler().ProcessEvent(event)
                # space also toggles check/radio items
                if keyCode == wx.WXK_SPACE and self.GetItemType(self._current) > 0:
                    checked = not self.IsItemChecked(self._current)
                    self.CheckItem(self._current, checked)
            # in any case, also generate the normal key event for this key,
            # even if we generated the ACTIVATED event above: this is what
            # wxMSW does and it makes sense because you might not want to
            # process ACTIVATED event at all and handle Space and Return
            # directly (and differently) which would be impossible otherwise
            event.Skip()
        # up goes to the previous sibling or to the last
        # of its children if it's expanded
        elif keyCode in (wx.WXK_UP, wx.WXK_NUMPAD_UP):
            prev = self.GetPrevSibling(self._key_current)
            if not prev:
                prev = self.GetItemParent(self._key_current)
                if prev == self.GetRootItem() and self.HasFlag(TR_HIDE_ROOT):
                    return
                if prev:
                    current = self._key_current
                    # TODO: Huh?  If we get here, we'd better be the first child of our parent.  How else could it be?
                    if current == self.GetFirstChild(prev)[0] and self.IsItemEnabled(prev):
                        # otherwise we return to where we came from
                        self.DoSelectItem(prev, unselect_others, extended_select)
                        self._key_current = prev
            else:
                current = self._key_current
                # We are going to another parent node
                while self.IsExpanded(prev) and self.HasChildren(prev):
                    child = self.GetLastChild(prev)
                    if child:
                        prev = child
                        current = prev
                # Try to get the previous siblings and see if they are active
                while prev and not self.IsItemEnabled(prev):
                    prev = self.GetPrevSibling(prev)
                if not prev:
                    # No previous siblings active: go to the parent and up
                    prev = self.GetItemParent(current)
                    while prev and not self.IsItemEnabled(prev):
                        prev = self.GetItemParent(prev)
                if prev:
                    self.DoSelectItem(prev, unselect_others, extended_select)
                    self._key_current = prev
        # left arrow goes to the parent
        elif keyCode in (wx.WXK_LEFT, wx.WXK_NUMPAD_LEFT):
            prev = self.GetItemParent(self._current)
            if prev == self.GetRootItem() and self.HasFlag(TR_HIDE_ROOT):
                # don't go to root if it is hidden
                prev = self.GetPrevSibling(self._current)
            if self.IsExpanded(self._current):
                self.Collapse(self._current)
            else:
                if prev and self.IsItemEnabled(prev):
                    self.DoSelectItem(prev, unselect_others, extended_select)
        elif keyCode in (wx.WXK_RIGHT, wx.WXK_NUMPAD_RIGHT):
            # this works the same as the down arrow except that we
            # also expand the item if it wasn't expanded yet
            if self.IsExpanded(self._current) and self.HasChildren(self._current):
                child, cookie = self.GetFirstChild(self._key_current)
                if self.IsItemEnabled(child):
                    self.DoSelectItem(child, unselect_others, extended_select)
                    self._key_current = child
            else:
                self.Expand(self._current)
            # fall through
        elif keyCode in (wx.WXK_DOWN, wx.WXK_NUMPAD_DOWN):
            if self.IsExpanded(self._key_current) and self.HasChildren(self._key_current):
                # descend into the first enabled child
                child = self.GetNextActiveItem(self._key_current)
                if child:
                    self.DoSelectItem(child, unselect_others, extended_select)
                    self._key_current = child
            else:
                next = self.GetNextSibling(self._key_current)
                if not next:
                    # no next sibling: climb up until a parent has one
                    current = self._key_current
                    while current and not next:
                        current = self.GetItemParent(current)
                        if current:
                            next = self.GetNextSibling(current)
                            if not next or not self.IsItemEnabled(next):
                                next = None
                else:
                    # skip over disabled items
                    while next and not self.IsItemEnabled(next):
                        next = self.GetNext(next)
                if next:
                    self.DoSelectItem(next, unselect_others, extended_select)
                    self._key_current = next
        # <End> selects the last visible tree item
        elif keyCode in (wx.WXK_END, wx.WXK_NUMPAD_END):
            last = self.GetRootItem()
            while last and self.IsExpanded(last):
                lastChild = self.GetLastChild(last)
                # it may happen if the item was expanded but then all of
                # its children have been deleted - so IsExpanded() returned
                # true, but GetLastChild() returned invalid item
                if not lastChild:
                    break
                last = lastChild
            if last and self.IsItemEnabled(last):
                self.DoSelectItem(last, unselect_others, extended_select)
        # <Home> selects the root item
        elif keyCode in (wx.WXK_HOME, wx.WXK_NUMPAD_HOME):
            prev = self.GetRootItem()
            if not prev:
                return
            if self.HasFlag(TR_HIDE_ROOT):
                prev, cookie = self.GetFirstChild(prev)
                if not prev:
                    return
            if self.IsItemEnabled(prev):
                self.DoSelectItem(prev, unselect_others, extended_select)
        else:
            if not event.HasModifiers() and ((keyCode >= ord('0') and keyCode <= ord('9')) or \
                                             (keyCode >= ord('a') and keyCode <= ord('z')) or \
                                             (keyCode >= ord('A') and keyCode <= ord('Z'))):
                # find the next item starting with the given prefix
                ch = chr(keyCode)
                id = self.FindItem(self._current, self._findPrefix + ch)
                if not id:
                    # no such item
                    return
                if self.IsItemEnabled(id):
                    self.SelectItem(id)
                self._findPrefix += ch
                # also start the timer to reset the current prefix if the user
                # doesn't press any more alnum keys soon -- we wouldn't want
                # to use this prefix for a new item search
                if not self._findTimer:
                    self._findTimer = TreeFindTimer(self)
                self._findTimer.Start(_DELAY, wx.TIMER_ONE_SHOT)
            else:
                event.Skip()
def GetNextActiveItem(self, item, down=True):
"""Returns the next active item. Used Internally at present. """
if down:
sibling = self.GetNextSibling
else:
sibling = self.GetPrevSibling
if self.GetItemType(item) == 2 and not self.IsItemChecked(item):
# Is an unchecked radiobutton... all its children are inactive
# try to get the next/previous sibling
found = 0
while 1:
child = sibling(item)
if (child and self.IsItemEnabled(child)) or not child:
break
item = child
else:
# Tha's not a radiobutton... but some of its children can be
# inactive
child, cookie = self.GetFirstChild(item)
while child and not self.IsItemEnabled(child):
child, cookie = self.GetNextChild(item, cookie)
if child and self.IsItemEnabled(child):
return child
return None
def HitTest(self, point, flags=0):
"""
Calculates which (if any) item is under the given point, returning the tree item
at this point plus extra information flags. Flags is a bitlist of the following:
TREE_HITTEST_ABOVE above the client area
TREE_HITTEST_BELOW below the client area
TREE_HITTEST_NOWHERE no item has been hit
TREE_HITTEST_ONITEMBUTTON on the button associated to an item
TREE_HITTEST_ONITEMICON on the icon associated to an item
TREE_HITTEST_ONITEMCHECKICON on the check/radio icon, if present
TREE_HITTEST_ONITEMINDENT on the indent associated to an item
TREE_HITTEST_ONITEMLABEL on the label (string) associated to an item
TREE_HITTEST_ONITEMRIGHT on the right of the label associated to an item
TREE_HITTEST_TOLEFT on the left of the client area
TREE_HITTEST_TORIGHT on the right of the client area
TREE_HITTEST_ONITEMUPPERPART on the upper part (first half) of the item
TREE_HITTEST_ONITEMLOWERPART on the lower part (second half) of the item
TREE_HITTEST_ONITEM anywhere on the item
Note: both the item (if any, None otherwise) and the flag are always returned as a tuple.
"""
w, h = self.GetSize()
flags = 0
if point.x < 0:
flags |= TREE_HITTEST_TOLEFT
if point.x > w:
flags |= TREE_HITTEST_TORIGHT
if point.y < 0:
flags |= TREE_HITTEST_ABOVE
if point.y > h:
flags |= TREE_HITTEST_BELOW
if flags:
return None, flags
if self._anchor == None:
flags = TREE_HITTEST_NOWHERE
return None, flags
hit, flags = self._anchor.HitTest(self.CalcUnscrolledPosition(point), self, flags, 0)
if hit == None:
flags = TREE_HITTEST_NOWHERE
return None, flags
if not self.IsItemEnabled(hit):
return None, flags
return hit, flags
def GetBoundingRect(self, item, textOnly=False):
"""Gets the bounding rectangle of the item."""
if not item:
raise Exception("\nERROR: Invalid Tree Item. ")
i = item
startX, startY = self.GetViewStart()
rect = wx.Rect()
rect.x = i.GetX() - startX*_PIXELS_PER_UNIT
rect.y = i.GetY() - startY*_PIXELS_PER_UNIT
rect.width = i.GetWidth()
rect.height = self.GetLineHeight(i)
return rect
    def Edit(self, item):
        """
        Internal function. Starts the editing of an item label, sending a
        EVT_TREE_BEGIN_LABEL_EDIT event.

        User code may veto the event; in that case no editor is created.
        Any editor already open on a different item is stopped first.
        """
        te = TreeEvent(wxEVT_TREE_BEGIN_LABEL_EDIT, self.GetId())
        te._item = item
        te.SetEventObject(self)
        if self.GetEventHandler().ProcessEvent(te) and not te.IsAllowed():
            # vetoed by user
            return
        # We have to call this here because the label in
        # question might just have been added and no screen
        # update taken place.
        if self._dirty:
            if wx.Platform in ["__WXMSW__", "__WXMAC__"]:
                self.Update()
            else:
                wx.YieldIfNeeded()
        # close any editor that is open on another item
        if self._textCtrl != None and item != self._textCtrl.item():
            self._textCtrl.StopEditing()
        self._textCtrl = TreeTextCtrl(self, item=item)
        self._textCtrl.SetFocus()
def GetEditControl(self):
"""
Returns a pointer to the edit TextCtrl if the item is being edited or
None otherwise (it is assumed that no more than one item may be edited
simultaneously).
"""
return self._textCtrl
def OnRenameAccept(self, item, value):
"""
Called by TreeTextCtrl, to accept the changes and to send the
EVT_TREE_END_LABEL_EDIT event.
"""
le = TreeEvent(wxEVT_TREE_END_LABEL_EDIT, self.GetId())
le._item = item
le.SetEventObject(self)
le._label = value
le._editCancelled = False
return not self.GetEventHandler().ProcessEvent(le) or le.IsAllowed()
def OnRenameCancelled(self, item):
"""
Called by TreeTextCtrl, to cancel the changes and to send the
EVT_TREE_END_LABEL_EDIT event.
"""
# let owner know that the edit was cancelled
le = TreeEvent(wxEVT_TREE_END_LABEL_EDIT, self.GetId())
le._item = item
le.SetEventObject(self)
le._label = ""
le._editCancelled = True
self.GetEventHandler().ProcessEvent(le)
def OnRenameTimer(self):
"""The timer for renaming has expired. Start editing."""
self.Edit(self._current)
def OnMouse(self, event):
"""Handles a bunch of wx.EVT_MOUSE_EVENTS events."""
if not self._anchor:
return
pt = self.CalcUnscrolledPosition(event.GetPosition())
# Is the mouse over a tree item button?
flags = 0
thisItem, flags = self._anchor.HitTest(pt, self, flags, 0)
underMouse = thisItem
underMouseChanged = underMouse != self._underMouse
if underMouse and (flags & TREE_HITTEST_ONITEM) and not event.LeftIsDown() and \
not self._isDragging and (not self._renameTimer or not self._renameTimer.IsRunning()):
underMouse = underMouse
else:
underMouse = None
if underMouse != self._underMouse:
if self._underMouse:
# unhighlight old item
self._underMouse = None
self._underMouse = underMouse
# Determines what item we are hovering over and need a tooltip for
hoverItem = thisItem
# We do not want a tooltip if we are dragging, or if the rename timer is running
if underMouseChanged and not self._isDragging and (not self._renameTimer or not self._renameTimer.IsRunning()):
if hoverItem is not None:
tooltipString = u""
# Ask the tree control what tooltip (if any) should be shown
hevent = TreeEvent(wxEVT_TREE_ITEM_GETTOOLTIP, self.GetId())
hevent._item = hoverItem
hevent.SetEventObject(self)
# if self.GetEventHandler().ProcessEvent(hevent) and hevent.IsAllowed():
# self.SetToolTip(hevent._label)
if self.GetEventHandler().ProcessEvent(hevent):
if hevent.IsAllowed():
self.SetToolTipString(hevent._label)
else:
if flags & TREE_HITTEST_ONITEMLABEL:
hPt = event.GetPosition()
hPt.x = self.GetSizeTuple()[0] # To right border
hPt = self.CalcUnscrolledPosition(hPt)
# If point at right border is inside label the
# label is probably longer than window width
if hoverItem.HitTest(hPt, self)[1] & \
TREE_HITTEST_ONITEMLABEL:
tooltipString = hoverItem.GetText()
self.SetToolTipString(tooltipString)
else:
self.SetToolTipString(tooltipString)
if hoverItem.IsHyperText() and (flags & TREE_HITTEST_ONITEMLABEL) and hoverItem.IsEnabled():
self.SetCursor(wx.StockCursor(wx.CURSOR_HAND))
self._isonhyperlink = True
else:
if self._isonhyperlink:
self.SetCursor(wx.StockCursor(wx.CURSOR_ARROW))
self._isonhyperlink = False
# we process left mouse up event (enables in-place edit), right down
# (pass to the user code), left dbl click (activate item) and
# dragging/moving events for items drag-and-drop
if not (event.LeftDown() or event.LeftUp() or event.RightDown() or event.LeftDClick() or \
event.Dragging() or ((event.Moving() or event.RightUp()) and self._isDragging)):
event.Skip()
return
flags = 0
item, flags = self._anchor.HitTest(pt, self, flags, 0)
if event.Dragging() and not self._isDragging and \
(self._dragCount != 0 or (flags & TREE_HITTEST_ONITEMICON) or
(flags & TREE_HITTEST_ONITEMLABEL)):
if self._dragCount == 0:
self._dragStart = pt
self._countDrag = 0
self._dragCount = self._dragCount + 1
if self._dragCount != 6: # Orig. value: 3
# wait until user drags a bit further...
return
command = (event.RightIsDown() and [wxEVT_TREE_BEGIN_RDRAG] or [wxEVT_TREE_BEGIN_DRAG])[0]
nevent = TreeEvent(command, self.GetId())
nevent._item = self._selectedNodeWhileMousePressed # self._current
nevent.SetEventObject(self)
newpt = self.CalcScrolledPosition(pt)
nevent.SetPoint(newpt)
# by default the dragging is not supported, the user code must
# explicitly allow the event for it to take place
nevent.Veto()
if self.GetEventHandler().ProcessEvent(nevent) and nevent.IsAllowed():
# we're going to drag this item
self._isDragging = True
# remember the old cursor because we will change it while
# dragging
self._oldCursor = self._cursor
# in a single selection control, hide the selection temporarily
if not (self.GetTreeStyle() & TR_MULTIPLE):
self._oldSelection = self.GetSelection()
if self._oldSelection:
self._oldSelection.SetHilight(False)
self.RefreshLine(self._oldSelection)
else:
selections = self.GetSelections()
if len(selections) == 1:
self._oldSelection = selections[0]
self._oldSelection.SetHilight(False)
self.RefreshLine(self._oldSelection)
if self._dragImage:
del self._dragImage
# Create the custom draw image from the icons and the text of the item
self._dragImage = DragImage(self, self._selectedNodeWhileMousePressed) # self._current)
# print "self._dragImage =", repr(self._selectedNodeWhileMousePressed.GetText())
self._dragImage.BeginDrag(wx.Point(0,0), self)
self._dragImage.Show()
self._dragImage.Move(self.CalcScrolledPosition(pt))
elif event.Dragging() and self._isDragging:
self._dragImage.Move(self.CalcScrolledPosition(pt))
if self._countDrag == 0 and item:
self._oldItem = item
if item != self._dropTarget:
# unhighlight the previous drop target
if self._dropTarget:
self._dropTarget.SetHilight(False)
self.RefreshLine(self._dropTarget)
if item:
item.SetHilight(True)
self.RefreshLine(item)
self._countDrag = self._countDrag + 1
self._dropTarget = item
self.Update()
if self._countDrag >= 3:
# Here I am trying to avoid ugly repainting problems... hope it works
self.RefreshLine(self._oldItem)
self._countDrag = 0
elif (event.LeftUp() or event.RightUp()) and self._isDragging:
if self._dragImage:
self._dragImage.EndDrag()
if self._dropTarget:
self._dropTarget.SetHilight(False)
if not self.HasFlag(TR_MULTIPLE) and \
self._selectedNodeWhileMousePressed:
self.DoSelectItem(self._selectedNodeWhileMousePressed,
unselect_others, extended_select)
elif self._oldSelection:
self._oldSelection.SetHilight(True)
self.RefreshLine(self._oldSelection)
self._oldSelection = None
# generate the drag end event
event = TreeEvent(wxEVT_TREE_END_DRAG, self.GetId())
event._item = self._selectedNodeWhileMousePressed # item
# print "event._item =", repr(self._selectedNodeWhileMousePressed.GetText())
event._pointDrag = self.CalcScrolledPosition(pt)
event.SetEventObject(self)
self.GetEventHandler().ProcessEvent(event)
self._isDragging = False
self._dropTarget = None
self._dragCount = 0 # Added ???
self.SetCursor(self._oldCursor)
if wx.Platform in ["__WXMSW__", "__WXMAC__"]:
self.Refresh()
else:
# Probably this is not enough on GTK. Try a Refresh() if it does not work.
wx.YieldIfNeeded()
else:
# If we got to this point, we are not dragging or moving the mouse.
# Because the code in carbon/toplevel.cpp will only set focus to the tree
# if we skip for EVT_LEFT_DOWN, we MUST skip this event here for focus to work.
# We skip even if we didn't hit an item because we still should
# restore focus to the tree control even if we didn't exactly hit an item.
if event.LeftDown():
self._hasFocus = True
self.SetFocusIgnoringChildren()
event.Skip()
# here we process only the messages which happen on tree items
self._dragCount = 0
if item == None:
if self._textCtrl != None and item != self._textCtrl.item():
self._textCtrl.StopEditing()
return # we hit the blank area
if event.RightDown():
if self._textCtrl != None and item != self._textCtrl.item():
self._textCtrl.StopEditing()
self._hasFocus = True
self.SetFocusIgnoringChildren()
# If the item is already selected, do not update the selection.
# Multi-selections should not be cleared if a selected item is clicked.
if not self.IsSelected(item) and not event.LeftDown():
# print "selectitem"
self.DoSelectItem(item, True, False)
nevent = TreeEvent(wxEVT_TREE_ITEM_RIGHT_CLICK, self.GetId())
nevent._item = item
nevent._pointDrag = self.CalcScrolledPosition(pt)
nevent.SetEventObject(self)
event.Skip(not self.GetEventHandler().ProcessEvent(nevent))
# Consistent with MSW (for now), send the ITEM_MENU *after*
# the RIGHT_CLICK event. TODO: This behaviour may change.
nevent2 = TreeEvent(wxEVT_TREE_ITEM_MENU, self.GetId())
nevent2._item = item
nevent2._pointDrag = self.CalcScrolledPosition(pt)
nevent2.SetEventObject(self)
self.GetEventHandler().ProcessEvent(nevent2)
elif event.LeftUp():
if self._selectedNodeWhileMousePressed is item:
# this facilitates multiple-item drag-and-drop
if self.HasFlag(TR_MULTIPLE):
selections = self.GetSelections()
if len(selections) > 1 and not event.CmdDown() and not event.ShiftDown():
self.DoSelectItem(item, True, False)
else:
is_multiple, extended_select, unselect_others = EventFlagsToSelType(self.GetTreeStyle(),
event.ShiftDown(),
event.CmdDown())
self._selectedNodeWhileMousePressed = None
if flags & TREE_HITTEST_ONITEM:
# how should the selection work for this event?
if item.IsHyperText():
self.SetItemVisited(item, True)
self.DoSelectItem(item, unselect_others, extended_select)
if self._lastOnSame:
if item == self._current and (flags & TREE_HITTEST_ONITEMLABEL) and self.HasFlag(TR_EDIT_LABELS):
if self._renameTimer:
if self._renameTimer.IsRunning():
self._renameTimer.Stop()
else:
self._renameTimer = TreeRenameTimer(self)
self._renameTimer.Start(_DELAY, True)
self._lastOnSame = False
else: # !RightDown() && !LeftUp() ==> LeftDown() || LeftDClick()
if not item or not item.IsEnabled():
if self._textCtrl is not None and item != self._textCtrl.item():
self._textCtrl.StopEditing()
return
if self._textCtrl != None and item != self._textCtrl.item():
self._textCtrl.StopEditing()
self._hasFocus = True
self.SetFocusIgnoringChildren()
if event.LeftDown():
self._lastOnSame = item == self._current
self._selectedNodeWhileMousePressed = item
# print "event.LeftDown()", repr(item.GetText())
if flags & TREE_HITTEST_ONITEMBUTTON:
# only toggle the item for a single click, double click on
# the button doesn't do anything (it toggles the item twice)
if event.LeftDown():
self.Toggle(item)
# don't select the item if the button was clicked
return
if item.GetType() > 0 and (flags & TREE_HITTEST_ONITEMCHECKICON):
if event.LeftDown():
self.CheckItem(item, not self.IsItemChecked(item))
return
# clear the previously selected items, if the
# user clicked outside of the present selection.
# otherwise, perform the deselection on mouse-up.
# this allows multiple drag and drop to work.
# but if Cmd is down, toggle selection of the clicked item
# if not self.IsSelected(item) or event.CmdDown():
# print "not self.IsSelected(item)"
# self._dropTarget.SetHilight(False)
# self.RefreshLine(self._dropTarget)
# if not (self.GetTreeStyle() & TR_MULTIPLE):
# self._oldSelection = self.GetSelection()
#
# if self._oldSelection:
# self._oldSelection.SetHilight(False)
# self.RefreshLine(self._oldSelection)
#
# item.SetHilight(True)
# self.RefreshLine(item)
#
# item.SetHilight(False)
# if self._oldSelection:
# self._oldSelection.SetHilight(True)
if event.CmdDown():
if flags & TREE_HITTEST_ONITEM:
# how should the selection work for this event?
if item.IsHyperText():
self.SetItemVisited(item, True)
is_multiple, extended_select, unselect_others = EventFlagsToSelType(self.GetTreeStyle(),
event.ShiftDown(),
event.CmdDown())
self.DoSelectItem(item, unselect_others, extended_select)
# For some reason, Windows isn't recognizing a left double-click,
# so we need to simulate it here. Allow 200 milliseconds for now.
if event.LeftDClick():
# double clicking should not start editing the item label
if self._renameTimer:
self._renameTimer.Stop()
self._lastOnSame = False
# send activate event first
nevent = TreeEvent(wxEVT_TREE_ITEM_ACTIVATED, self.GetId())
nevent._item = item
nevent._pointDrag = self.CalcScrolledPosition(pt)
nevent.SetEventObject(self)
if not self.GetEventHandler().ProcessEvent(nevent):
# if the user code didn't process the activate event,
# handle it ourselves by toggling the item when it is
# double clicked
## if item.HasPlus():
self.Toggle(item)
def OnInternalIdle(self):
"""Performs operations in idle time (essentially drawing)."""
# # Check if we need to select the root item
# # because nothing else has been selected.
# # Delaying it means that we can invoke event handlers
# # as required, when a first item is selected.
# if not self.HasFlag(TR_MULTIPLE) and not self.GetSelection():
#
# if self._select_me:
# self.SelectItem(self._select_me)
# elif self.GetRootItem():
# self.SelectItem(self.GetRootItem())
# after all changes have been done to the tree control,
# we actually redraw the tree when everything is over
if not self._dirty:
return
if self._freezeCount:
return
self._dirty = False
self.CalculatePositions()
self.Refresh()
self.AdjustMyScrollbars()
def CalculateSize(self, item, dc):
"""Calculates overall position and size of an item."""
attr = item.GetAttributes()
if attr and attr.HasFont():
dc.SetFont(attr.GetFont())
elif item.IsBold():
dc.SetFont(self._boldFont)
else:
dc.SetFont(self._normalFont)
text_w, text_h, dummy = dc.GetMultiLineTextExtent(item.GetText())
text_h+=2
# restore normal font
dc.SetFont(self._normalFont)
image_w, image_h = 0, 0
image = item.GetCurrentImage()
if image != _NO_IMAGE:
if self._imageListNormal:
image_w, image_h = self._imageListNormal.GetSize(image)
image_w += 4
total_h = ((image_h > text_h) and [image_h] or [text_h])[0]
checkimage = item.GetCurrentCheckedImage()
if checkimage is not None:
wcheck, hcheck = self._imageListCheck.GetSize(checkimage)
wcheck += 4
else:
wcheck = 0
# if total_h < 30:
# total_h += 2 # at least 2 pixels
# else:
# total_h += total_h/10 # otherwise 10% extra spacing
if total_h > self._lineHeight:
self._lineHeight = total_h
if not item.GetWindow():
item.SetWidth(image_w+text_w+wcheck+2)
item.SetHeight(total_h)
else:
item.SetWidth(item.GetWindowSize()[0]+image_w+text_w+wcheck+2)
item.SetHeight(max(total_h, item.GetWindowSize()[1]))
def CalculateLevel(self, item, dc, level, y):
"""Calculates the level of an item."""
x = level*self._indent
if not self.HasFlag(TR_HIDE_ROOT):
x += self._indent
elif level == 0:
# a hidden root is not evaluated, but its
# children are always calculated
children = item.GetChildren()
count = len(children)
level = level + 1
for n in xrange(count):
y = self.CalculateLevel(children[n], dc, level, y) # recurse
return y
self.CalculateSize(item, dc)
# set its position
item.SetX(x+self._spacing)
item.SetY(y)
y += self.GetLineHeight(item)
if not item.IsExpanded():
# we don't need to calculate collapsed branches
return y
children = item.GetChildren()
count = len(children)
level = level + 1
for n in xrange(count):
y = self.CalculateLevel(children[n], dc, level, y) # recurse
return y
def CalculatePositions(self):
"""Calculates all the positions of the visible items."""
if not self._anchor:
return
dc = wx.ClientDC(self)
self.PrepareDC(dc)
dc.SetFont(self._normalFont)
dc.SetPen(self._dottedPen)
y = 2
y = self.CalculateLevel(self._anchor, dc, 0, y) # start recursion
def RefreshSubtree(self, item):
"""Refreshes a damaged subtree of an item."""
if self._dirty:
return
if self._freezeCount:
return
client = self.GetClientSize()
rect = wx.Rect()
x, rect.y = self.CalcScrolledPosition(0, item.GetY())
rect.width = client.x
rect.height = client.y
self.Refresh(True, rect)
self.AdjustMyScrollbars()
def RefreshLine(self, item):
"""Refreshes a damaged item line."""
if self._dirty:
return
if self._freezeCount:
return
rect = wx.Rect()
x, rect.y = self.CalcScrolledPosition(0, item.GetY())
rect.width = self.GetClientSize().x
rect.height = self.GetLineHeight(item)
self.Refresh(True, rect)
def RefreshSelected(self):
"""Refreshes a damaged selected item line."""
if self._freezeCount:
return
# TODO: this is awfully inefficient, we should keep the list of all
# selected items internally, should be much faster
if self._anchor:
self.RefreshSelectedUnder(self._anchor)
def RefreshSelectedUnder(self, item):
"""Refreshes the selected items under the given item."""
if self._freezeCount:
return
if item.IsSelected():
self.RefreshLine(item)
children = item.GetChildren()
for child in children:
self.RefreshSelectedUnder(child)
def Freeze(self):
"""Freeze CustomTreeCtrl."""
self._freezeCount = self._freezeCount + 1
def Thaw(self):
"""Thaw CustomTreeCtrl."""
if self._freezeCount == 0:
raise Exception("\nERROR: Thawing Unfrozen Tree Control?")
self._freezeCount = self._freezeCount - 1
if not self._freezeCount:
self.Refresh()
# ----------------------------------------------------------------------------
# changing colours: we need to refresh the tree control
# ----------------------------------------------------------------------------
def SetBackgroundColour(self, colour):
"""Changes the background colour of CustomTreeCtrl."""
self._backgroundColour = colour
if not wx.Window.SetBackgroundColour(self, colour):
return False
if self._freezeCount:
return True
self.Refresh()
return True
def SetForegroundColour(self, colour):
"""Changes the foreground colour of CustomTreeCtrl."""
if not wx.Window.SetForegroundColour(self, colour):
return False
if self._freezeCount:
return True
self.Refresh()
return True
    def OnGetToolTip(self, event):
        """
        Process the tooltip event, to speed up event processing. Does not actually
        get a tooltip.
        """
        # Vetoing stops any further (slower) default tooltip handling.
        event.Veto()
    def DoGetBestSize(self):
        """Return a fixed fallback best size for the control.

        Something is better than nothing: 100x80 is what the MSW version
        would get from the default wxControl::DoGetBestSize.
        """
        # something is better than nothing...
        # 100x80 is what the MSW version will get from the default
        # wxControl::DoGetBestSize
        return wx.Size(100, 80)
    def GetClassDefaultAttributes(self):
        """Return the default visual attributes (colours and font) for this class."""
        attr = wx.VisualAttributes()
        # Standard window text on a listbox background, with the default GUI font.
        attr.colFg = wx.SystemSettings_GetColour(wx.SYS_COLOUR_WINDOWTEXT)
        attr.colBg = wx.SystemSettings_GetColour(wx.SYS_COLOUR_LISTBOX)
        attr.font = wx.SystemSettings_GetFont(wx.SYS_DEFAULT_GUI_FONT)
        return attr

    # Re-bound as a classmethod so it can be queried without an instance
    # (old-style equivalent of the @classmethod decorator).
    GetClassDefaultAttributes = classmethod(GetClassDefaultAttributes)
| 34.305638
| 176
| 0.549462
|
958f3426c0591400e09c20a6c87ddc9089a3eb71
| 618
|
py
|
Python
|
part03/part03-e05_correlation/src/correlation.py
|
davide-butera/data-analysis-with-python
|
78ba3d3d060ddb305bfd84b9a122409c15c47006
|
[
"MIT"
] | null | null | null |
part03/part03-e05_correlation/src/correlation.py
|
davide-butera/data-analysis-with-python
|
78ba3d3d060ddb305bfd84b9a122409c15c47006
|
[
"MIT"
] | null | null | null |
part03/part03-e05_correlation/src/correlation.py
|
davide-butera/data-analysis-with-python
|
78ba3d3d060ddb305bfd84b9a122409c15c47006
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import scipy.stats
import numpy as np
def load2():
    """This loads the data from the internet. Does not work well on the TMC server."""
    import seaborn as sns
    frame = sns.load_dataset('iris')
    return frame.drop('species', axis=1).values
def load():
    """Read the bundled iris CSV and return the four numeric columns."""
    import pandas as pd
    frame = pd.read_csv("src/iris.csv")
    return frame.drop('species', axis=1).values
def lengths():
    """Pearson correlation between sepal length (col 0) and petal length (col 2)."""
    data = load()
    coefficient, _pvalue = scipy.stats.pearsonr(data[:, 0], data[:, 2])
    return coefficient
def correlations():
    """Correlation matrix of all measurement columns (variables in columns)."""
    data = load()
    return np.corrcoef(data, rowvar=False)
def main():
    """Print the single correlation, then the full correlation matrix."""
    for result in (lengths(), correlations()):
        print(result)


if __name__ == "__main__":
    main()
| 21.310345
| 86
| 0.660194
|
6a53c5af61b07b29d543bbd1654cc8429660c496
| 13,059
|
py
|
Python
|
config/settings/base.py
|
bogolla/mfl_api
|
c5dff1857a94e1272d0663804c2339fa88cb7be3
|
[
"MIT"
] | null | null | null |
config/settings/base.py
|
bogolla/mfl_api
|
c5dff1857a94e1272d0663804c2339fa88cb7be3
|
[
"MIT"
] | null | null | null |
config/settings/base.py
|
bogolla/mfl_api
|
c5dff1857a94e1272d0663804c2339fa88cb7be3
|
[
"MIT"
] | 1
|
2019-02-06T19:23:49.000Z
|
2019-02-06T19:23:49.000Z
|
import os
import environ
# Project root: three directory levels up from this settings module.
BASE_DIR = os.path.dirname(
    os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Override in production via env
# Typed environment variables with development-friendly defaults.
env = environ.Env(
    DATABASE_URL=(str, 'postgres://mfl:mfl@localhost:5432/mfl'),
    DEBUG=(bool, True),
    FRONTEND_URL=(str, "http://localhost:8062"),
    REALTIME_INDEX=(bool, False)
)
# Load additional overrides from a .env file at the project root, if present.
env.read_env(os.path.join(BASE_DIR, '.env'))
DEBUG = env('DEBUG')
# NOTE(review): the hard-coded fallback is a development-only key; production
# deployments must supply SECRET_KEY via the environment.
SECRET_KEY = env(
    'SECRET_KEY', default='p!ci1&ni8u98vvd#%18yp)aqh+m_8o565g*@!8@1wb$j#pj4d8')
# Components parsed from DATABASE_URL, re-assembled below so the GIS-enabled
# postgres backend is used regardless of the URL scheme.
ENV_DB = env.db()
DATABASES = {
    'default': {
        'ENGINE': 'django.contrib.gis.db.backends.postgis',
        'HOST': ENV_DB['HOST'],
        'NAME': ENV_DB['NAME'],
        'PASSWORD': ENV_DB['PASSWORD'],
        'PORT': ENV_DB['PORT'],
        'USER': ENV_DB['USER'],
    }
} # Env should have DATABASE_URL
# Middleware stack; in Django the order of these entries is significant
# (e.g. UpdateCacheMiddleware must precede, and FetchFromCacheMiddleware
# follow, CommonMiddleware).
MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.cache.UpdateCacheMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.cache.FetchFromCacheMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'corsheaders.middleware.CorsMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'django.middleware.security.SecurityMiddleware',
    'reversion.middleware.RevisionMiddleware'
)
# Outbound e-mail configuration (TLS on the standard submission port 587).
EMAIL_HOST = env('EMAIL_HOST', default='localhost')
# The default for the user name must be a string; the previous default (the
# bare integer 487) was almost certainly a slip of the port number below and
# would make EMAIL_HOST_USER an int when the variable is unset.
EMAIL_HOST_USER = env('EMAIL_HOST_USER', default='')
EMAIL_HOST_PASSWORD = env('EMAIL_HOST_PASSWORD', default='notarealpassword')
EMAIL_PORT = 587
EMAIL_USE_TLS = True
EMAIL_SUBJECT_PREFIX = '[Master Facility List] '
# Host/domain suffixes this Django site is allowed to serve.
ALLOWED_HOSTS = ['.ehealth.or.ke', '.slade360.co.ke', '.localhost']
# Installed applications.  Some local apps are interleaved with the Django
# contrib apps (e.g. 'users' before 'django.contrib.admin') -- app order
# affects template/static file resolution, so keep it as-is.
INSTALLED_APPS = (
    'django.contrib.sites',
    'users',
    'django.contrib.admin',
    'common',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'oauth2_provider',
    'rest_framework',
    'rest_framework.authtoken',
    'rest_auth',
    'allauth',
    'allauth.account',
    'rest_auth.registration',
    'corsheaders',
    'rest_framework_swagger',
    'django.contrib.gis',
    'reversion',
    'gunicorn',
    # 'debug_toolbar',
    'facilities',
    'data_bootstrap',
    'chul',
    'data',
    'mfl_gis',
    'search',
    'reporting'
)
# LOCAL_APPS is now just a convenience setting for the metadata API
# It is *NOT* appended to INSTALLED_APPS ( **deliberate** DRY violation )
# This was forced by the need to override rest_framework templates in common
# It is a list because order matters
LOCAL_APPS = [
    'users',
    'common',
    'facilities',
    'chul',
    'mfl_gis',
    'data_bootstrap',
    'data',
]
# Cross-origin resource sharing: any origin may call the API, but without
# credentials, and only with the headers listed below.
CORS_ALLOW_CREDENTIALS = False
CORS_ORIGIN_ALLOW_ALL = True
CORS_ALLOW_HEADERS = (
    'x-requested-with',
    'content-type',
    'accept',
    'origin',
    'authorization',
    'x-csrftoken',
    'if-modified-since',
    'if-none-match',
    'cache-control'
)
# Core project wiring: custom user model, URL conf and WSGI entry point.
AUTH_USER_MODEL = 'users.MflUser'
ROOT_URLCONF = 'config.urls'
WSGI_APPLICATION = 'config.wsgi.application'
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC' # This is INTENTIONAL
USE_TZ = True
# Media (uploads) and static file locations.
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATIC_URL = '/static/'
# Security toggles kept off for development.
CSRF_COOKIE_HTTPONLY = False
CSRF_COOKIE_SECURE = False
SECURE_SSL_REDIRECT = False # Turn on in production
# django-rest-framework configuration: throttling, permissions, content
# negotiation (JSON/XML/CSV/Excel), pagination and ISO-8601 date handling.
REST_FRAMEWORK = {
    'DEFAULT_THROTTLE_CLASSES': (
        'common.utilities.throttling.ThrottlingBySession',
    ),
    'DEFAULT_THROTTLE_RATES': {
        'rating': '1/day'
    },
    'DEFAULT_PERMISSION_CLASSES': (
        'users.permissions.MFLModelPermissions',
    ),
    'DEFAULT_FILTER_BACKENDS': (
        'rest_framework.filters.DjangoFilterBackend',
        'rest_framework.filters.OrderingFilter',
    ),
    'DEFAULT_PARSER_CLASSES': (
        'rest_framework.parsers.JSONParser',
        'rest_framework_xml.parsers.XMLParser',
    ),
    'DEFAULT_RENDERER_CLASSES': (
        'rest_framework.renderers.BrowsableAPIRenderer',
        'rest_framework.renderers.JSONRenderer',
        'rest_framework_xml.renderers.XMLRenderer',
        'common.renderers.CSVRenderer',
        'common.renderers.ExcelRenderer',
    ),
    'EXCEPTION_HANDLER': 'exception_handler.handler.custom_exception_handler',
    'DEFAULT_PAGINATION_CLASS': 'common.paginator.MflPaginationSerializer',
    'DEFAULT_AUTHENTICATION_CLASSES': (
        'rest_framework.authentication.SessionAuthentication',
        'oauth2_provider.ext.rest_framework.OAuth2Authentication',
    ),
    'PAGINATE_BY': 30,
    'PAGINATE_BY_PARAM': 'page_size',
    # Should be able to opt in to see all wards at once
    'MAX_PAGINATE_BY': 15000,
    'TEST_REQUEST_DEFAULT_FORMAT': 'json',
    'TEST_REQUEST_RENDERER_CLASSES': (
        'rest_framework.renderers.JSONRenderer',
    ),
    'DATETIME_FORMAT': 'iso-8601',
    'DATE_FORMAT': 'iso-8601',
    'TIME_FORMAT': 'iso-8601'
}
# django-rest-swagger API explorer configuration.
SWAGGER_SETTINGS = {
    'exclude_namespaces': [],
    'api_version': '2.0',
    'api_path': '/',
    'enabled_methods': [
        'get',
        'post',
        'put',
        'patch',
        'delete'
    ],
    'api_key': '',
    # Users must be logged in (but need not be superusers) to browse the docs.
    'is_authenticated': True,
    'is_superuser': False,
    'info': {
        'contact': 'developers@savannahinformatics.com',
        'description': 'Explore the MFL v2 API',
        'license': 'MIT License',
        'licenseUrl': 'http://choosealicense.com/licenses/mit/',
        'title': 'MFL v2 API',
    },
    'doc_expansion': 'full',
}
# drf-extensions: cache generated responses for an hour by default.
REST_FRAMEWORK_EXTENSIONS = {
    'DEFAULT_CACHE_RESPONSE_TIMEOUT': 60 * 60  # One hour
}
# Logging: everything goes to the console; most loggers only emit at ERROR,
# except data_bootstrap which logs progress at INFO.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        'verbose': {
            'format': '%(levelname)s: %(asctime)s [%(module)s] %(message)s'  # NOQA
        }
    },
    'handlers': {
        'console': {
            'level': 'DEBUG',
            'class': 'logging.StreamHandler',
            'formatter': 'verbose'
        }
    },
    'loggers': {
        'django': {
            'handlers': ['console'],
            'propagate': True,
            'level': 'ERROR',
        },
        'django.request': {
            'handlers': ['console'],
            'level': 'ERROR',
            'propagate': False,
        },
        'rest_framework': {
            'handlers': ['console'],
            'level': 'ERROR'
        },
        'common': {
            'handlers': ['console'],
            'level': 'ERROR'
        },
        'facilities': {
            'handlers': ['console'],
            'level': 'ERROR'
        },
        'users': {
            'handlers': ['console'],
            'level': 'ERROR'
        },
        'data_bootstrap': {
            'handlers': ['console'],
            'level': 'INFO'
        },
        'mfl_gis': {
            'handlers': ['console'],
            'level': 'ERROR'
        },
        'exception_handler': {
            'handlers': ['console'],
            'level': 'ERROR'
        }
    }
}
# Django template engine configuration.
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        # DIRS must be a *list* of directories.  The previous value was a bare
        # string built with os.path.join(BASE_DIR, '/common/templates/');
        # because the second component started with '/', os.path.join threw
        # BASE_DIR away entirely, so the intended directory was never used.
        'DIRS': [os.path.join(BASE_DIR, 'common', 'templates')],
        'APP_DIRS': True,
    },
]
# Redis-backed cache; IGNORE_EXCEPTIONS lets the site keep working (uncached)
# when Redis is unavailable.
CACHES = {
    "default": {
        "BACKEND": "django_redis.cache.RedisCache",
        "LOCATION": "redis://127.0.0.1:6379/1",
        "OPTIONS": {
            "CLIENT_CLASS": "django_redis.client.DefaultClient",
            "COMPRESS_MIN_LEN": 10,
            "IGNORE_EXCEPTIONS": True,
        }
    }
}
CACHE_MIDDLEWARE_SECONDS = 15  # Intentionally conservative by default
# cache for the gis views
GIS_BORDERS_CACHE_SECONDS = (60 * 60 * 24 * 366)
# django-allauth related settings
# some of these settings take into account that the target audience
# of this system is not super-savvy
# Standard model-backend auth first, then allauth's backend.
AUTHENTICATION_BACKENDS = (
    'django.contrib.auth.backends.ModelBackend',
    'allauth.account.auth_backends.AuthenticationBackend',
)
LOGIN_REDIRECT_URL = '/api/'
# Elasticsearch integration: index location, models excluded from indexing,
# stop words, full-text fields and per-model autocomplete fields.
SEARCH = {
    "ELASTIC_URL": "http://localhost:9200/",
    "INDEX_NAME": "mfl_index",
    # Real-time (per-save) indexing is off by default; enable via env.
    "REALTIME_INDEX": env('REALTIME_INDEX', False),
    "SEARCH_RESULT_SIZE": 50,
    "NON_INDEXABLE_MODELS": [
        "mfl_gis.FacilityCoordinates",
        "mfl_gis.WorldBorder",
        "mfl_gis.CountyBoundary",
        "mfl_gis.ConstituencyBoundary",
        "mfl_gis.WardBoundary",
        "users.CustomGroup",
        "users.ProxyGroup"
    ],
    "STOP_WORDS": [
        "centre", "center", "health", "hospital", "clinic", "district",
        "sub-district", "dispensary"
    ],
    "FULL_TEXT_SEARCH_FIELDS": {
        "models": [
            {
                "name": "facility",
                # NOTE(review): "facility_services.service_name" and
                # "facility_physical_address.nearest_landmark" each appear
                # twice below -- probably an accidental duplication; confirm
                # before removing.
                "fields": [
                    "name", "county", "constituency", "ward_name",
                    "facility_services.service_name",
                    "facility_services.service_name",
                    "facility_services.category_name",
                    "facility_physical_address.town",
                    "facility_physical_address.nearest_landmark",
                    "facility_physical_address.nearest_landmark"
                ]
            }
        ]
    },
    "AUTOCOMPLETE_MODEL_FIELDS": [
        {
            "app": "facilities",
            "models": [
                {
                    "name": "facility",
                    "fields": ["name", "ward_name"],
                    "boost": ["name"],
                },
                {
                    "name": "owner",
                    "fields": ["name"]
                },
                {
                    "name": "OwnerType",
                    "fields": ["name"]
                },
                {
                    "name": "JobTitle",
                    "fields": ["name"]
                },
                {
                    "name": "Officer",
                    "fields": ["name"]
                },
                {
                    "name": "FacilityStatus",
                    "fields": ["name"]
                },
                {
                    "name": "FacilityType",
                    "fields": ["name"]
                },
                {
                    "name": "RegulationStatus",
                    "fields": ["name"]
                },
                {
                    "name": "Option",
                    "fields": ["name"]
                },
                {
                    "name": "ServiceCategory",
                    "fields": ["name"]
                },
                {
                    "name": "Service",
                    "fields": ["name"]
                }
            ]
        },
        {
            "app": "common",
            "models": [
                {
                    "name": "County",
                    "fields": ["name"]
                },
                # NOTE(review): "Consituency" looks like a typo for
                # "Constituency" -- verify against the common app's model
                # names before changing (an exact model-name match is
                # presumably required here).
                {
                    "name": "Consituency",
                    "fields": ["name"]
                },
                {
                    "name": "Ward",
                    "fields": ["name"]
                },
                {
                    "name": "ContactType",
                    "fields": ["name"]
                },
                {
                    "name": "Contact",
                    "fields": ["contact"]
                },
                {
                    "name": "Town",
                    "fields": ["name"]
                },
            ]
        },
        {
            "app": "mfl_gis",
            "models": [
                {
                    "name": "GeoCodeSource",
                    "fields": ["name"]
                },
                {
                    "name": "GeoCodeMethod",
                    "fields": ["name"]
                }
            ]
        },
        {
            "app": "users",
            "models": [
                {
                    "name": "MflUser",
                    "fields": ["full_name", "email"]
                }
            ]
        }
    ]
}
# OAuth2 provider uses a custom application model.
OAUTH2_PROVIDER_APPLICATION_MODEL = 'users.MFLOAuthApplication'
# django-allauth account behaviour: login with username or e-mail, e-mail
# required but not verified, logout on GET for simplicity.
ACCOUNT_AUTHENTICATION_METHOD = 'username_email'
ACCOUNT_CONFIRM_EMAIL_ON_GET = True
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_EMAIL_VERIFICATION = 'none'
ACCOUNT_EMAIL_SUBJECT_PREFIX = EMAIL_SUBJECT_PREFIX
ACCOUNT_LOGOUT_ON_GET = True
ACCOUNT_LOGOUT_REDIRECT_URL = '/api/'
ACCOUNT_SESSION_REMEMBER = True
# django_rest_auth settings
OLD_PASSWORD_FIELD_ENABLED = True
REST_AUTH_SERIALIZERS = {
    'USER_DETAILS_SERIALIZER': 'users.serializers.MflUserSerializer',
    'PASSWORD_CHANGE_SERIALIZER':
    'users.serializers.MflPasswordChangeSerializer'
}
# django-allauth forces this atrocity on us ( true at the time of writing )
SITE_ID = 1
# Model fields dropped from Excel exports.
EXCEL_EXCEPT_FIELDS = [
    'id', 'updated', 'created', 'created_by', 'updated_by', 'active',
    'deleted', 'search'
]
FRONTEND_URL = env("FRONTEND_URL")
# Password-reset link template pointing back at the frontend SPA.
PASSWORD_RESET_URL = "%s/#/reset_pwd_confirm/{uid}/{token}" % FRONTEND_URL
| 28.701099
| 83
| 0.536182
|
0ea010c743dbf9b5c6e3948920b4ac0cdc19ac0b
| 699
|
py
|
Python
|
pyspeechkit/tts.py
|
pystorage/pyspeechkit
|
6ce5c91dfccfadd0f6ab8e997cf26b3e3c8ac43f
|
[
"MIT"
] | 1
|
2020-04-23T16:41:20.000Z
|
2020-04-23T16:41:20.000Z
|
pyspeechkit/tts.py
|
pystorage/pyspeechkit
|
6ce5c91dfccfadd0f6ab8e997cf26b3e3c8ac43f
|
[
"MIT"
] | null | null | null |
pyspeechkit/tts.py
|
pystorage/pyspeechkit
|
6ce5c91dfccfadd0f6ab8e997cf26b3e3c8ac43f
|
[
"MIT"
] | null | null | null |
from .api import API
from .utils import create_voice_file
class TTS(API):
    """Thin text-to-speech wrapper around the SpeechKit API client.

    All methods are best-effort: failures are printed, not raised,
    matching the original behaviour.
    """

    def __init__(self, oauth_token: str, folder_id: str):
        super().__init__(oauth_token)
        self.folder_id = folder_id
        # Raw audio from the last successful synthesize() call.  Previously
        # this attribute did not exist until synthesize() succeeded, so
        # voice_source()/save_voice() raised AttributeError when called
        # first; initialising it here makes that path well-defined (None).
        self.synthesized = None

    def synthesize(self, **parameters: str):
        """Request synthesis; stores the audio in ``self.synthesized``."""
        try:
            parameters.update({'folderId': self.folder_id})
            self.synthesized = super().tts_request(parameters)
        except Exception as error:
            print(error)

    def voice_source(self):
        """Return the last synthesized audio (None if nothing synthesized)."""
        return self.synthesized

    def save_voice(self, path: str):
        """Write the last synthesized audio to *path* (best-effort)."""
        try:
            create_voice_file(self.synthesized, path)
        except Exception as error:
            print(error)
| 27.96
| 63
| 0.606581
|
7c6376fcc5e6bc6f7678b4c213d616413a8d52b0
| 1,440
|
py
|
Python
|
setup.py
|
OpenVoiceOS/ovos-bus
|
c170fc8acdfa78ceaf8483626dac45b6a791860d
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
OpenVoiceOS/ovos-bus
|
c170fc8acdfa78ceaf8483626dac45b6a791860d
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
OpenVoiceOS/ovos-bus
|
c170fc8acdfa78ceaf8483626dac45b6a791860d
|
[
"Apache-2.0"
] | null | null | null |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import os.path
from setuptools import setup
BASEDIR = os.path.abspath(os.path.dirname(__file__))


def required(requirements_file):
    """ Read requirements file and remove comments and empty lines. """
    path = os.path.join(BASEDIR, requirements_file)
    with open(path, 'r') as handle:
        entries = handle.read().splitlines()
    if 'MYCROFT_LOOSE_REQUIREMENTS' in os.environ:
        # Relax exact pins ('==', '~=') to minimum-version constraints.
        print('USING LOOSE REQUIREMENTS!')
        entries = [entry.replace('==', '>=').replace('~=', '>=')
                   for entry in entries]
    return [entry for entry in entries
            if entry.strip() and not entry.startswith("#")]
# Metapackage definition: the actual functionality comes from the
# dependencies listed in requirements.txt.
setup(
    name='ovos-bus',
    version="0.0.2",
    license='Apache-2.0',
    url='https://github.com/OpenVoiceOS/ovos-core',
    description='ovos-core metapackage for bus daemon',
    include_package_data=True,
    install_requires=required('requirements.txt')
)
| 35.121951
| 92
| 0.697222
|
16e7f024ea6284c3ffcf0c05041191cdf8688c67
| 18,121
|
py
|
Python
|
project_automation/licenses/gnu2.py
|
Guigui14460/project-automation
|
98f9b73be2000b0ecb07b1cca758693c29032947
|
[
"Apache-2.0"
] | null | null | null |
project_automation/licenses/gnu2.py
|
Guigui14460/project-automation
|
98f9b73be2000b0ecb07b1cca758693c29032947
|
[
"Apache-2.0"
] | 2
|
2021-01-17T16:04:03.000Z
|
2021-08-13T13:00:49.000Z
|
project_automation/licenses/gnu2.py
|
Guigui14460/project-automation
|
98f9b73be2000b0ecb07b1cca758693c29032947
|
[
"Apache-2.0"
] | null | null | null |
CONTENT = """ GNU GENERAL PUBLIC LICENSE
Version 2, June 1991
Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The licenses for most software are designed to take away your
freedom to share and change it. By contrast, the GNU General Public
License is intended to guarantee your freedom to share and change free
software--to make sure the software is free for all its users. This
General Public License applies to most of the Free Software
Foundation's software and to any other program whose authors commit to
using it. (Some other Free Software Foundation software is covered by
the GNU Lesser General Public License instead.) You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
this service if you wish), that you receive source code or can get it
if you want it, that you can change the software or use pieces of it
in new free programs; and that you know you can do these things.
To protect your rights, we need to make restrictions that forbid
anyone to deny you these rights or to ask you to surrender the rights.
These restrictions translate to certain responsibilities for you if you
distribute copies of the software, or if you modify it.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must give the recipients all the rights that
you have. You must make sure that they, too, receive or can get the
source code. And you must show them these terms so they know their
rights.
We protect your rights with two steps: (1) copyright the software, and
(2) offer you this license which gives you legal permission to copy,
distribute and/or modify the software.
Also, for each author's protection and ours, we want to make certain
that everyone understands that there is no warranty for this free
software. If the software is modified by someone else and passed on, we
want its recipients to know that what they have is not the original, so
that any problems introduced by others will not reflect on the original
authors' reputations.
Finally, any free program is threatened constantly by software
patents. We wish to avoid the danger that redistributors of a free
program will individually obtain patent licenses, in effect making the
program proprietary. To prevent this, we have made it clear that any
patent must be licensed for everyone's free use or not licensed at all.
The precise terms and conditions for copying, distribution and
modification follow.
GNU GENERAL PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
0. This License applies to any program or other work which contains
a notice placed by the copyright holder saying it may be distributed
under the terms of this General Public License. The "Program", below,
refers to any such program or work, and a "work based on the Program"
means either the Program or any derivative work under copyright law:
that is to say, a work containing the Program or a portion of it,
either verbatim or with modifications and/or translated into another
language. (Hereinafter, translation is included without limitation in
the term "modification".) Each licensee is addressed as "you".
Activities other than copying, distribution and modification are not
covered by this License; they are outside its scope. The act of
running the Program is not restricted, and the output from the Program
is covered only if its contents constitute a work based on the
Program (independent of having been made by running the Program).
Whether that is true depends on what the Program does.
1. You may copy and distribute verbatim copies of the Program's
source code as you receive it, in any medium, provided that you
conspicuously and appropriately publish on each copy an appropriate
copyright notice and disclaimer of warranty; keep intact all the
notices that refer to this License and to the absence of any warranty;
and give any other recipients of the Program a copy of this License
along with the Program.
You may charge a fee for the physical act of transferring a copy, and
you may at your option offer warranty protection in exchange for a fee.
2. You may modify your copy or copies of the Program or any portion
of it, thus forming a work based on the Program, and copy and
distribute such modifications or work under the terms of Section 1
above, provided that you also meet all of these conditions:
a) You must cause the modified files to carry prominent notices
stating that you changed the files and the date of any change.
b) You must cause any work that you distribute or publish, that in
whole or in part contains or is derived from the Program or any
part thereof, to be licensed as a whole at no charge to all third
parties under the terms of this License.
c) If the modified program normally reads commands interactively
when run, you must cause it, when started running for such
interactive use in the most ordinary way, to print or display an
announcement including an appropriate copyright notice and a
notice that there is no warranty (or else, saying that you provide
a warranty) and that users may redistribute the program under
these conditions, and telling the user how to view a copy of this
License. (Exception: if the Program itself is interactive but
does not normally print such an announcement, your work based on
the Program is not required to print an announcement.)
These requirements apply to the modified work as a whole. If
identifiable sections of that work are not derived from the Program,
and can be reasonably considered independent and separate works in
themselves, then this License, and its terms, do not apply to those
sections when you distribute them as separate works. But when you
distribute the same sections as part of a whole which is a work based
on the Program, the distribution of the whole must be on the terms of
this License, whose permissions for other licensees extend to the
entire whole, and thus to each and every part regardless of who wrote it.
Thus, it is not the intent of this section to claim rights or contest
your rights to work written entirely by you; rather, the intent is to
exercise the right to control the distribution of derivative or
collective works based on the Program.
In addition, mere aggregation of another work not based on the Program
with the Program (or with a work based on the Program) on a volume of
a storage or distribution medium does not bring the other work under
the scope of this License.
3. You may copy and distribute the Program (or a work based on it,
under Section 2) in object code or executable form under the terms of
Sections 1 and 2 above provided that you also do one of the following:
a) Accompany it with the complete corresponding machine-readable
source code, which must be distributed under the terms of Sections
1 and 2 above on a medium customarily used for software interchange; or,
b) Accompany it with a written offer, valid for at least three
years, to give any third party, for a charge no more than your
cost of physically performing source distribution, a complete
machine-readable copy of the corresponding source code, to be
distributed under the terms of Sections 1 and 2 above on a medium
customarily used for software interchange; or,
c) Accompany it with the information you received as to the offer
to distribute corresponding source code. (This alternative is
allowed only for noncommercial distribution and only if you
received the program in object code or executable form with such
an offer, in accord with Subsection b above.)
The source code for a work means the preferred form of the work for
making modifications to it. For an executable work, complete source
code means all the source code for all modules it contains, plus any
associated interface definition files, plus the scripts used to
control compilation and installation of the executable. However, as a
special exception, the source code distributed need not include
anything that is normally distributed (in either source or binary
form) with the major components (compiler, kernel, and so on) of the
operating system on which the executable runs, unless that component
itself accompanies the executable.
If distribution of executable or object code is made by offering
access to copy from a designated place, then offering equivalent
access to copy the source code from the same place counts as
distribution of the source code, even though third parties are not
compelled to copy the source along with the object code.
4. You may not copy, modify, sublicense, or distribute the Program
except as expressly provided under this License. Any attempt
otherwise to copy, modify, sublicense or distribute the Program is
void, and will automatically terminate your rights under this License.
However, parties who have received copies, or rights, from you under
this License will not have their licenses terminated so long as such
parties remain in full compliance.
5. You are not required to accept this License, since you have not
signed it. However, nothing else grants you permission to modify or
distribute the Program or its derivative works. These actions are
prohibited by law if you do not accept this License. Therefore, by
modifying or distributing the Program (or any work based on the
Program), you indicate your acceptance of this License to do so, and
all its terms and conditions for copying, distributing or modifying
the Program or works based on it.
6. Each time you redistribute the Program (or any work based on the
Program), the recipient automatically receives a license from the
original licensor to copy, distribute or modify the Program subject to
these terms and conditions. You may not impose any further
restrictions on the recipients' exercise of the rights granted herein.
You are not responsible for enforcing compliance by third parties to
this License.
7. If, as a consequence of a court judgment or allegation of patent
infringement or for any other reason (not limited to patent issues),
conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot
distribute so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you
may not distribute the Program at all. For example, if a patent
license would not permit royalty-free redistribution of the Program by
all those who receive copies directly or indirectly through you, then
the only way you could satisfy both it and this License would be to
refrain entirely from distribution of the Program.
If any portion of this section is held invalid or unenforceable under
any particular circumstance, the balance of the section is intended to
apply and the section as a whole is intended to apply in other
circumstances.
It is not the purpose of this section to induce you to infringe any
patents or other property right claims or to contest validity of any
such claims; this section has the sole purpose of protecting the
integrity of the free software distribution system, which is
implemented by public license practices. Many people have made
generous contributions to the wide range of software distributed
through that system in reliance on consistent application of that
system; it is up to the author/donor to decide if he or she is willing
to distribute software through any other system and a licensee cannot
impose that choice.
This section is intended to make thoroughly clear what is believed to
be a consequence of the rest of this License.
8. If the distribution and/or use of the Program is restricted in
certain countries either by patents or by copyrighted interfaces, the
original copyright holder who places the Program under this License
may add an explicit geographical distribution limitation excluding
those countries, so that distribution is permitted only in or among
countries not thus excluded. In such case, this License incorporates
the limitation as if written in the body of this License.
9. The Free Software Foundation may publish revised and/or new versions
of the General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the Program
specifies a version number of this License which applies to it and "any
later version", you have the option of following the terms and conditions
either of that version or of any later version published by the Free
Software Foundation. If the Program does not specify a version number of
this License, you may choose any version ever published by the Free Software
Foundation.
10. If you wish to incorporate parts of the Program into other free
programs whose distribution conditions are different, write to the author
to ask for permission. For software which is copyrighted by the Free
Software Foundation, write to the Free Software Foundation; we sometimes
make exceptions for this. Our decision will be guided by the two goals
of preserving the free status of all derivatives of our free software and
of promoting the sharing and reuse of software generally.
NO WARRANTY
11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
REPAIR OR CORRECTION.
12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
POSSIBILITY OF SUCH DAMAGES.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
convey the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) year_to_add username_to_add
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
Also add information on how to contact you by electronic and paper mail.
If the program is interactive, make it output a short notice like this
when it starts in an interactive mode:
Gnomovision version 69, Copyright (C) year_to_add username_to_add
Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, the commands you use may
be called something other than `show w' and `show c'; they could even be
mouse-clicks or menu items--whatever suits your program.
You should also get your employer (if you work as a programmer) or your
school, if any, to sign a "copyright disclaimer" for the program, if
necessary. Here is a sample; alter the names:
Yoyodyne, Inc., hereby disclaims all copyright interest in the program
`Gnomovision' (which makes passes at compilers) written by James Hacker.
<signature of Ty Coon>, 1 April 1989
Ty Coon, President of Vice
This General Public License does not permit incorporating your program into
proprietary programs. If your program is a subroutine library, you may
consider it more useful to permit linking proprietary applications with the
library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License.
"""
| 53.140762
| 77
| 0.785773
|
634efbe66a6f4ddc1f51c545036850c6482ca709
| 10,178
|
py
|
Python
|
api/python/SSDB.py
|
qwang2505/ssdb-source-comments
|
0ee33b7305656581190e2971f9185f612c258ea0
|
[
"BSD-3-Clause"
] | null | null | null |
api/python/SSDB.py
|
qwang2505/ssdb-source-comments
|
0ee33b7305656581190e2971f9185f612c258ea0
|
[
"BSD-3-Clause"
] | null | null | null |
api/python/SSDB.py
|
qwang2505/ssdb-source-comments
|
0ee33b7305656581190e2971f9185f612c258ea0
|
[
"BSD-3-Clause"
] | null | null | null |
# encoding=utf-8
# Generated by cpy
# 2015-04-15 20:07:01.541685
import os, sys
from sys import stdin, stdout
import socket
class SSDB_Response(object):
    """Result wrapper for a single SSDB request.

    Exactly one of ``data`` / ``message`` is populated: ``data`` carries the
    payload when the server answered ``ok``; otherwise ``message`` holds the
    first line of the error reply (when the reply is a list), or the raw
    value itself.
    """

    def __init__(self, code='', data_or_message=None):
        self.code = code
        self.data = None
        self.message = None
        if code == 'ok':
            # Success: the whole payload is user data.
            self.data = data_or_message
        elif isinstance(data_or_message, list):
            # Error replies arrive as a list of lines; keep the first line.
            if len(data_or_message) > 0:
                self.message = data_or_message[0]
        else:
            self.message = data_or_message

    def __repr__(self):
        return '%s %s %s' % (self.code, self.message, self.data)

    def ok(self):
        """True when the server reported success."""
        return self.code == 'ok'

    def not_found(self):
        """True when the requested entry does not exist."""
        return self.code == 'not_found'
class SSDB(object):
    """Blocking client for a single SSDB server connection.

    NOTE(review): this file is cpy-generated Python 2 source (``except X , e``
    syntax, ``iterkeys``); the stray ``pass`` statements are artifacts of the
    generator and are preserved verbatim below.
    """
    pass
    def __init__(this, host, port):
        # Connect eagerly; TCP_NODELAY because the protocol exchanges many
        # small request/reply packets.
        pass
        this.recv_buf = ''
        this._closed = False
        this.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        this.sock.connect(tuple([host, port]))
        this.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
    def close(this):
        # Close the socket once; subsequent calls are no-ops.
        pass
        if not (this._closed):
            pass
            this.sock.close()
            this._closed = True
    def closed(this):
        # True after close() was called (recv'ing EOF also closes, see net_read).
        pass
        return this._closed
    def request(this, cmd, params=None):
        # Send `cmd` + `params` and decode the raw line-based reply into an
        # SSDB_Response.  The long dispatch below is a cpy-compiled switch on
        # the command name: each branch coerces the payload into the type that
        # command returns (count, single value, list, str map, int map, ...).
        pass
        if params==None:
            pass
            params = []
        params = ([cmd] + params)
        this.send(params)
        resp = this.recv()
        if resp==None:
            pass
            return SSDB_Response('error', 'Unknown error')
        if len(resp)==0:
            pass
            return SSDB_Response('disconnected', 'Connection closed')
        # {{{ switch: cmd
        _continue_1 = False
        while True:
            # Write-style commands: status plus an optional affected count.
            if False or ((cmd) == 'ping') or ((cmd) == 'qset') or ((cmd) == 'set') or ((cmd) == 'zset') or ((cmd) == 'hset') or ((cmd) == 'qpush') or ((cmd) == 'qpush_front') or ((cmd) == 'qpush_back') or ((cmd) == 'del') or ((cmd) == 'zdel') or ((cmd) == 'hdel') or ((cmd) == 'multi_set') or ((cmd) == 'multi_del') or ((cmd) == 'multi_hset') or ((cmd) == 'multi_hdel') or ((cmd) == 'multi_zset') or ((cmd) == 'multi_zdel'):
                pass
                if resp[0]=='ok':
                    pass
                    if len(resp)>1:
                        pass
                        return SSDB_Response(resp[0], int(resp[1]))
                    else:
                        pass
                        # No count in the reply; report 1 affected entry.
                        return SSDB_Response(resp[0], 1)
                else:
                    pass
                    return SSDB_Response(resp[0], resp[1 : ])
                break
            # Single-value getters: exactly one payload line expected.
            if False or ((cmd) == 'substr') or ((cmd) == 'get') or ((cmd) == 'getset') or ((cmd) == 'hget') or ((cmd) == 'qfront') or ((cmd) == 'qback') or ((cmd) == 'qget'):
                pass
                if resp[0]=='ok':
                    pass
                    if len(resp)==2:
                        pass
                        return SSDB_Response('ok', resp[1])
                    else:
                        pass
                        return SSDB_Response('server_error', 'Invalid response')
                else:
                    pass
                    return SSDB_Response(resp[0], resp[1 : ])
                break
            # Queue pops: a single value when one item was requested,
            # otherwise a list of popped items.
            if False or ((cmd) == 'qpop') or ((cmd) == 'qpop_front') or ((cmd) == 'qpop_back'):
                pass
                if resp[0]=='ok':
                    pass
                    size = 1
                    try:
                        pass
                        # params == [cmd, name, size]; presumably size is the
                        # requested pop count — defaults to 1 when absent.
                        size = int(params[2])
                    except Exception , e:
                        pass
                    if size==1:
                        pass
                        if len(resp)==2:
                            pass
                            return SSDB_Response('ok', resp[1])
                        else:
                            pass
                            return SSDB_Response('server_error', 'Invalid response')
                    else:
                        pass
                        return SSDB_Response('ok', resp[1 : ])
                else:
                    pass
                    return SSDB_Response(resp[0], resp[1 : ])
                break
            # Numeric replies: parsed to int (float for zavg).
            if False or ((cmd) == 'dbsize') or ((cmd) == 'getbit') or ((cmd) == 'setbit') or ((cmd) == 'countbit') or ((cmd) == 'bitcount') or ((cmd) == 'strlen') or ((cmd) == 'ttl') or ((cmd) == 'expire') or ((cmd) == 'setnx') or ((cmd) == 'incr') or ((cmd) == 'decr') or ((cmd) == 'zincr') or ((cmd) == 'zdecr') or ((cmd) == 'hincr') or ((cmd) == 'hdecr') or ((cmd) == 'hsize') or ((cmd) == 'zsize') or ((cmd) == 'qsize') or ((cmd) == 'zget') or ((cmd) == 'zrank') or ((cmd) == 'zrrank') or ((cmd) == 'zsum') or ((cmd) == 'zcount') or ((cmd) == 'zavg') or ((cmd) == 'zremrangebyrank') or ((cmd) == 'zremrangebyscore') or ((cmd) == 'hclear') or ((cmd) == 'zclear') or ((cmd) == 'qclear') or ((cmd) == 'qpush') or ((cmd) == 'qpush_front') or ((cmd) == 'qpush_back') or ((cmd) == 'qtrim_front') or ((cmd) == 'qtrim_back'):
                pass
                if resp[0]=='ok':
                    pass
                    if len(resp)==2:
                        pass
                        try:
                            pass
                            if cmd=='zavg':
                                pass
                                val = float(resp[1])
                            else:
                                pass
                                val = int(resp[1])
                            return SSDB_Response('ok', val)
                        except Exception , e:
                            pass
                            return SSDB_Response('server_error', 'Invalid response')
                    else:
                        pass
                        return SSDB_Response('server_error', 'Invalid response')
                else:
                    pass
                    return SSDB_Response(resp[0], resp[1 : ])
                break
            # Key-listing commands: the payload lines are returned as-is.
            if False or ((cmd) == 'keys') or ((cmd) == 'rkeys') or ((cmd) == 'zkeys') or ((cmd) == 'zrkeys') or ((cmd) == 'hkeys') or ((cmd) == 'hrkeys') or ((cmd) == 'list') or ((cmd) == 'hlist') or ((cmd) == 'hrlist') or ((cmd) == 'zlist') or ((cmd) == 'zrlist'):
                pass
                return SSDB_Response(resp[0], resp[1 : ])
                break
            # Key/value scans: alternating k, v lines become an ordered map
            # ('index' preserves key order, 'items' maps key -> str value).
            if False or ((cmd) == 'scan') or ((cmd) == 'rscan') or ((cmd) == 'hgetall') or ((cmd) == 'hscan') or ((cmd) == 'hrscan'):
                pass
                if resp[0]=='ok':
                    pass
                    if len(resp) % 2==1:
                        pass
                        data = {'index': [],'items': {},}
                        i = 1
                        while i<len(resp):
                            pass
                            k = resp[i]
                            v = resp[(i + 1)]
                            data['index'].append(k)
                            data['items'][k] = v
                            pass
                            i += 2
                        return SSDB_Response('ok', data)
                    else:
                        pass
                        return SSDB_Response('server_error', 'Invalid response')
                else:
                    pass
                    return SSDB_Response(resp[0], resp[1 : ])
                break
            # zset scans: same shape, but scores are parsed to int (-1 on a
            # value that fails to parse).
            if False or ((cmd) == 'zscan') or ((cmd) == 'zrscan') or ((cmd) == 'zrange') or ((cmd) == 'zrrange') or ((cmd) == 'zpop_front') or ((cmd) == 'zpop_back'):
                pass
                if resp[0]=='ok':
                    pass
                    if len(resp) % 2==1:
                        pass
                        data = {'index': [],'items': {},}
                        i = 1
                        while i<len(resp):
                            pass
                            k = resp[i]
                            v = resp[(i + 1)]
                            try:
                                pass
                                v = int(v)
                            except Exception , e:
                                pass
                                v = - (1)
                            data['index'].append(k)
                            data['items'][k] = v
                            pass
                            i += 2
                        return SSDB_Response('ok', data)
                    else:
                        pass
                        return SSDB_Response('server_error', 'Invalid response')
                else:
                    pass
                    return SSDB_Response(resp[0], resp[1 : ])
                break
            # Existence tests: '1' payload means True.
            if False or ((cmd) == 'auth') or ((cmd) == 'exists') or ((cmd) == 'hexists') or ((cmd) == 'zexists'):
                pass
                if resp[0]=='ok':
                    pass
                    data = False
                    if len(resp)>=2:
                        pass
                        if resp[1]=='1':
                            pass
                            data = True
                    return SSDB_Response(resp[0], data)
                else:
                    pass
                    return SSDB_Response(resp[0], resp[1 : ])
                break
            # Bulk existence tests: key -> bool map.
            if False or ((cmd) == 'multi_exists') or ((cmd) == 'multi_hexists') or ((cmd) == 'multi_zexists'):
                pass
                if resp[0]=='ok':
                    pass
                    data = {}
                    if len(resp) % 2==1:
                        pass
                        i = 1
                        while i<len(resp):
                            pass
                            k = resp[i]
                            if resp[(i + 1)]=='1':
                                pass
                                v = True
                            else:
                                pass
                                v = False
                            data[k] = v
                            pass
                            i += 2
                    return SSDB_Response('ok', data)
                else:
                    pass
                    return SSDB_Response(resp[0], resp[1 : ])
                break
            # Bulk getters: key -> str value map.
            if False or ((cmd) == 'multi_get') or ((cmd) == 'multi_hget'):
                pass
                if resp[0]=='ok':
                    pass
                    if len(resp) % 2==1:
                        pass
                        data = {}
                        i = 1
                        while i<len(resp):
                            pass
                            k = resp[i]
                            v = resp[(i + 1)]
                            data[k] = v
                            pass
                            i += 2
                        return SSDB_Response('ok', data)
                    else:
                        pass
                        return SSDB_Response('server_error', 'Invalid response')
                else:
                    pass
                    return SSDB_Response(resp[0], resp[1 : ])
                break
            # Bulk numeric getters: key -> int value map.
            if False or ((cmd) == 'multi_hsize') or ((cmd) == 'multi_zsize') or ((cmd) == 'multi_zget'):
                pass
                if resp[0]=='ok':
                    pass
                    if len(resp) % 2==1:
                        pass
                        data = {}
                        i = 1
                        while i<len(resp):
                            pass
                            k = resp[i]
                            v = int(resp[(i + 1)])
                            data[k] = v
                            pass
                            i += 2
                        return SSDB_Response('ok', data)
                    else:
                        pass
                        return SSDB_Response('server_error', 'Invalid response')
                else:
                    pass
                    return SSDB_Response(resp[0], resp[1 : ])
                break
            ### default
            return SSDB_Response(resp[0], resp[1 : ])
            break
            # Unreachable generator scaffolding, kept verbatim.
            break
            if _continue_1:
                continue
        # }}} switch
        return SSDB_Response('error', 'Unknown error')
    def send(this, data):
        # Wire format: each field is sent as "<len>\n<bytes>\n"; a blank line
        # terminates the packet.  Returns the last send() count, or -1 on error.
        pass
        ps = []
        # cpy-generated for-each that also supports dicts (iterates values).
        _cpy_r_0 = _cpy_l_1 = data
        if type(_cpy_r_0).__name__ == 'dict': _cpy_b_3=True; _cpy_l_1=_cpy_r_0.iterkeys()
        else: _cpy_b_3=False;
        for _cpy_k_2 in _cpy_l_1:
            if _cpy_b_3: p=_cpy_r_0[_cpy_k_2]
            else: p=_cpy_k_2
            pass
            p = str(p)
            ps.append(str(len(p)))
            ps.append(p)
        nl = '\n'
        s = (nl.join(ps) + '\n\n')
        try:
            pass
            # Loop until the whole buffer is written; send() may be partial.
            while True:
                pass
                ret = this.sock.send(s)
                if ret==0:
                    pass
                    return - (1)
                s = s[ret : ]
                if len(s)==0:
                    pass
                    break
        except socket.error , e:
            pass
            return - (1)
        return ret
    def net_read(this):
        # Pull up to 8 KiB off the socket into recv_buf.  Returns the number
        # of bytes read; 0 means EOF/error and closes the connection.
        pass
        try:
            pass
            data = this.sock.recv(1024 * 8)
        except Exception , e:
            pass
            data = ''
        if data=='':
            pass
            this.close()
            return 0
        this.recv_buf += data
        return len(data)
    def recv(this):
        # Block until one complete reply packet is buffered; returns its list
        # of fields, or [] if the connection dropped first.
        pass
        while True:
            pass
            ret = this.parse()
            if ret==None:
                pass
                if this.net_read()==0:
                    pass
                    return []
            else:
                pass
                return ret
    def parse(this):
        # Try to parse one complete packet out of recv_buf.  Returns the list
        # of decoded fields (consuming them from the buffer), [] on a
        # malformed length line, or None when more bytes are needed.
        pass
        ret = []
        spos = 0
        epos = 0
        while True:
            pass
            spos = epos
            epos = this.recv_buf.find('\n', spos)
            if epos==- (1):
                pass
                # Incomplete length line.
                break
            epos += 1
            line = this.recv_buf[spos : epos]
            spos = epos
            if line.strip()=='':
                pass
                if len(ret)==0:
                    pass
                    # Leading blank line: skip.
                    continue
                else:
                    pass
                    # Blank line terminates the packet; consume it.
                    this.recv_buf = this.recv_buf[spos : ]
                    return ret
            try:
                pass
                num = int(line)
            except Exception , e:
                pass
                # Corrupt length line; give up on this buffer.
                return []
            epos = (spos + num)
            if epos>len(this.recv_buf):
                pass
                # Payload not fully buffered yet.
                break
            data = this.recv_buf[spos : epos]
            ret.append(data)
            spos = epos
            epos = this.recv_buf.find('\n', spos)
            if epos==- (1):
                pass
                break
            epos += 1
        return None
| 21.609342
| 812
| 0.524366
|
9228ef62976493d0b7958bdc07021a695d980bb1
| 1,887
|
py
|
Python
|
config/urls.py
|
abought/locuszoom-hosted
|
5cb635b18287d15610df0da6c85b477a3eaaaabb
|
[
"MIT"
] | null | null | null |
config/urls.py
|
abought/locuszoom-hosted
|
5cb635b18287d15610df0da6c85b477a3eaaaabb
|
[
"MIT"
] | null | null | null |
config/urls.py
|
abought/locuszoom-hosted
|
5cb635b18287d15610df0da6c85b477a3eaaaabb
|
[
"MIT"
] | null | null | null |
from django.conf import settings
from django.urls import include, path
from django.conf.urls.static import static
from django.contrib import admin
from django.views import defaults as default_views
from rest_framework.documentation import include_docs_urls
from . import basic_views
# Project URL map; Django matches patterns top to bottom.
urlpatterns = [
    path("", basic_views.HomeView.as_view(), name="home"),
    path("profile/", basic_views.ProfileView.as_view(), name="profile"),
    path(
        "gwas/",
        include("locuszoom_plotting_service.gwas.urls", namespace="gwas")
    ),
    # Django Admin, use {% url 'admin:index' %}
    path(settings.ADMIN_URL, admin.site.urls),
    path(
        "api/v1/",
        include("locuszoom_plotting_service.api.urls", namespace="apiv1"),
    ),
    # Browsable DRF API documentation.
    path(
        "api-docs/",
        include_docs_urls(title='GWAS API Docs')
    ),
    # User management
    path("accounts/", include("allauth.urls")),
    # Your stuff: custom urls includes go here
] + static(
    # Serve user-uploaded media; Django's static() helper only emits
    # patterns when DEBUG is on.
    settings.MEDIA_URL, document_root=settings.MEDIA_ROOT
)

if settings.DEBUG:
    # This allows the error pages to be debugged during development, just visit
    # these url in browser to see how these error pages look like.
    urlpatterns += [
        path(
            "400/",
            default_views.bad_request,
            kwargs={"exception": Exception("Bad Request!")},
        ),
        path(
            "403/",
            default_views.permission_denied,
            kwargs={"exception": Exception("Permission Denied")},
        ),
        path(
            "404/",
            default_views.page_not_found,
            kwargs={"exception": Exception("Page not Found")},
        ),
        path("500/", default_views.server_error),
    ]
    if "debug_toolbar" in settings.INSTALLED_APPS:
        import debug_toolbar

        # Prepended so the toolbar's URLs are matched before the catch-alls.
        urlpatterns = [path("__debug__/", include(debug_toolbar.urls))] + urlpatterns
| 31.45
| 85
| 0.6354
|
0e1c7b9deb8e01c7ee06831d00dde92c8482143b
| 2,229
|
py
|
Python
|
examples/ad_manager/v201902/base_rate_service/create_product_template_base_rates.py
|
nlynch504/googleads-python-lib
|
8f7bd7f987498c4651c969a7dc73e1d5fc965be2
|
[
"Apache-2.0"
] | null | null | null |
examples/ad_manager/v201902/base_rate_service/create_product_template_base_rates.py
|
nlynch504/googleads-python-lib
|
8f7bd7f987498c4651c969a7dc73e1d5fc965be2
|
[
"Apache-2.0"
] | null | null | null |
examples/ad_manager/v201902/base_rate_service/create_product_template_base_rates.py
|
nlynch504/googleads-python-lib
|
8f7bd7f987498c4651c969a7dc73e1d5fc965be2
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example creates a product template base rate.
To determine which base rates exist, run get_all_base_rates.py.
"""
# Import appropriate modules from the client library.
from googleads import ad_manager
# Placeholders: replace with real Ad Manager IDs before running this example.
PRODUCT_TEMPLATE_ID = 'INSERT_PRODUCT_TEMPLATE_ID_HERE'
RATE_CARD_ID = 'INSERT_RATE_CARD_ID_HERE'
def main(client, product_template_id, rate_card_id):
  """Creates a product template base rate and prints the result.

  Args:
    client: an initialized AdManagerClient.
    product_template_id: ID of the product template the rate applies to.
    rate_card_id: ID of the rate card that will own the new base rate.
  """
  service = client.GetService('BaseRateService', version='v201902')

  # $2.00 USD expressed in micros (1 USD == 1,000,000 micros).
  two_dollars = {
      'currencyCode': 'USD',
      'microAmount': 2000000
  }
  new_base_rate = {
      'xsi_type': 'ProductTemplateBaseRate',
      # The rate card that the product template base rate belongs to.
      'rateCardId': rate_card_id,
      # The product template the base rate will be applied to.
      'productTemplateId': product_template_id,
      'rate': two_dollars
  }

  # Create the product template base rate on the server.
  created = service.createBaseRates([new_base_rate])
  for base_rate in (created or []):
    print ('A product template base rate with ID "%s" and rate \'%.2f\' %s'
           ' was created.' % (base_rate['id'],
                              base_rate['rate']['microAmount'],
                              base_rate['rate']['currencyCode']))
if __name__ == '__main__':
  # Initialize client object.
  # NOTE(review): LoadFromStorage presumably reads credentials from the
  # default googleads.yaml location — confirm before deploying.
  ad_manager_client = ad_manager.AdManagerClient.LoadFromStorage()
  main(ad_manager_client, PRODUCT_TEMPLATE_ID, RATE_CARD_ID)
| 33.772727
| 77
| 0.705698
|
38af833578d49c06a32edbf7663ddc6a1bc9ee05
| 20,084
|
py
|
Python
|
exercises/networking_selfpaced/networking-workshop/collections/ansible_collections/community/general/plugins/modules/consul.py
|
tr3ck3r/linklight
|
5060f624c235ecf46cb62cefcc6bddc6bf8ca3e7
|
[
"MIT"
] | null | null | null |
exercises/networking_selfpaced/networking-workshop/collections/ansible_collections/community/general/plugins/modules/consul.py
|
tr3ck3r/linklight
|
5060f624c235ecf46cb62cefcc6bddc6bf8ca3e7
|
[
"MIT"
] | null | null | null |
exercises/networking_selfpaced/networking-workshop/collections/ansible_collections/community/general/plugins/modules/consul.py
|
tr3ck3r/linklight
|
5060f624c235ecf46cb62cefcc6bddc6bf8ca3e7
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
#
# (c) 2015, Steve Gargan <steve.gargan@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Standard Ansible module metadata consumed by ansible-doc and CI tooling.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}
DOCUMENTATION = '''
module: consul
short_description: "Add, modify & delete services within a consul cluster."
description:
- Registers services and checks for an agent with a consul cluster.
A service is some process running on the agent node that should be advertised by
consul's discovery mechanism. It may optionally supply a check definition,
a periodic service test to notify the consul cluster of service's health.
- "Checks may also be registered per node e.g. disk usage, or cpu usage and
notify the health of the entire node to the cluster.
Service level checks do not require a check name or id as these are derived
by Consul from the Service name and id respectively by appending 'service:'
Node level checks require a I(check_name) and optionally a I(check_id)."
- Currently, there is no complete way to retrieve the script, interval or ttl
metadata for a registered check. Without this metadata it is not possible to
tell if the data supplied with ansible represents a change to a check. As a
result this does not attempt to determine changes and will always report a
changed occurred. An API method is planned to supply this metadata so at that
stage change management will be added.
- "See U(http://consul.io) for more details."
requirements:
- python-consul
- requests
author: "Steve Gargan (@sgargan)"
options:
state:
description:
- register or deregister the consul service, defaults to present
default: present
choices: ['present', 'absent']
service_name:
type: str
description:
- Unique name for the service on a node, must be unique per node,
required if registering a service. May be omitted if registering
a node level check
service_id:
type: str
description:
- the ID for the service, must be unique per node. If I(state=absent),
defaults to the service name if supplied.
host:
type: str
description:
- host of the consul agent defaults to localhost
default: localhost
port:
type: int
description:
- the port on which the consul agent is running
default: 8500
scheme:
type: str
description:
- the protocol scheme on which the consul agent is running
default: http
validate_certs:
description:
- whether to verify the TLS certificate of the consul agent
type: bool
default: 'yes'
notes:
type: str
description:
- Notes to attach to check when registering it.
service_port:
type: int
description:
- the port on which the service is listening. Can optionally be supplied for
registration of a service, i.e. if I(service_name) or I(service_id) is set
service_address:
type: str
description:
- the address to advertise that the service will be listening on.
This value will be passed as the I(address) parameter to Consul's
U(/v1/agent/service/register) API method, so refer to the Consul API
documentation for further details.
tags:
type: list
description:
- tags that will be attached to the service registration.
script:
type: str
description:
- the script/command that will be run periodically to check the health
of the service. Scripts require I(interval) and vice versa.
interval:
type: str
description:
- the interval at which the service check will be run. This is a number
with a s or m suffix to signify the units of seconds or minutes e.g
C(15s) or C(1m). If no suffix is supplied, m will be used by default e.g.
C(1) will be C(1m). Required if the I(script) parameter is specified.
check_id:
type: str
description:
- an ID for the service check. If I(state=absent), defaults to
I(check_name). Ignored if part of a service definition.
check_name:
type: str
description:
- a name for the service check. Required if standalone, ignored if
part of service definition.
ttl:
type: str
description:
- checks can be registered with a ttl instead of a I(script) and I(interval)
this means that the service will check in with the agent before the
ttl expires. If it doesn't the check will be considered failed.
Required if registering a check and the script an interval are missing
Similar to the interval this is a number with a s or m suffix to
signify the units of seconds or minutes e.g C(15s) or C(1m). If no suffix
is supplied, C(m) will be used by default e.g. C(1) will be C(1m)
http:
type: str
description:
- checks can be registered with an HTTP endpoint. This means that consul
will check that the http endpoint returns a successful HTTP status.
I(interval) must also be provided with this option.
timeout:
type: str
description:
- A custom HTTP check timeout. The consul default is 10 seconds.
Similar to the interval this is a number with a C(s) or C(m) suffix to
signify the units of seconds or minutes, e.g. C(15s) or C(1m).
token:
type: str
description:
- the token key identifying an ACL rule set. May be required to register services.
'''
EXAMPLES = '''
- name: register nginx service with the local consul agent
consul:
service_name: nginx
service_port: 80
- name: register nginx service with curl check
consul:
service_name: nginx
service_port: 80
script: curl http://localhost
interval: 60s
- name: register nginx with an http check
consul:
service_name: nginx
service_port: 80
interval: 60s
http: http://localhost:80/status
- name: register external service nginx available at 10.1.5.23
consul:
service_name: nginx
service_port: 80
service_address: 10.1.5.23
- name: register nginx with some service tags
consul:
service_name: nginx
service_port: 80
tags:
- prod
- webservers
- name: remove nginx service
consul:
service_name: nginx
state: absent
- name: register celery worker service
consul:
service_name: celery-worker
tags:
- prod
- worker
- name: create a node level check to test disk usage
consul:
check_name: Disk usage
check_id: disk_usage
script: /opt/disk_usage.py
interval: 5m
- name: register an http check against a service that's already registered
consul:
check_name: nginx-check2
check_id: nginx-check2
service_id: nginx
interval: 60s
http: http://localhost:80/morestatus
'''
try:
    import consul
    from requests.exceptions import ConnectionError

    class PatchedConsulAgentService(consul.Consul.Agent.Service):
        # python-consul's stock Service.deregister does not forward an ACL
        # token; this subclass passes it as a query parameter when supplied.
        def deregister(self, service_id, token=None):
            params = {}
            if token:
                params['token'] = token
            return self.agent.http.put(consul.base.CB.bool(),
                                       '/v1/agent/service/deregister/%s' % service_id,
                                       params=params)

    python_consul_installed = True
except ImportError:
    # Flag checked elsewhere so the module can fail gracefully when the
    # python-consul dependency is missing.
    python_consul_installed = False
from ansible.module_utils.basic import AnsibleModule
def register_with_consul(module):
    """Dispatch on the requested state: register or deregister."""
    action = add if module.params.get('state') == 'present' else remove
    action(module)
def add(module):
    """Register a service and/or a standalone check built from module params."""
    check = parse_check(module)
    service = parse_service(module)

    if not (service or check):
        module.fail_json(msg='a name and port are required to register a service')

    if service:
        # A check supplied together with a service rides along on the
        # service registration rather than being registered on its own.
        if check:
            service.add_check(check)
        add_service(module, service)
    elif check:
        add_check(module, check)
def remove(module):
    """Deregister a service (preferred) or a check, located by id or name."""
    params = module.params
    service_id = params.get('service_id') or params.get('service_name')
    check_id = params.get('check_id') or params.get('check_name')

    if not service_id and not check_id:
        module.fail_json(msg='services and checks are removed by id or name. please supply a service id/name or a check id/name')

    if service_id:
        remove_service(module, service_id)
    else:
        remove_check(module, check_id)
def add_check(module, check):
    """Register *check* with the consul agent.

    The consul API currently gives no way to read back a check's
    script/interval/ttl metadata, so idempotence cannot be verified:
    every call reports changed=True.
    """
    if not (check.name or check.service_id):
        module.fail_json(msg='a check name is required for a node level check, one not attached to a service')

    check.register(get_consul_api(module))

    module.exit_json(changed=True,
                     check_id=check.check_id,
                     check_name=check.name,
                     script=check.script,
                     interval=check.interval,
                     ttl=check.ttl,
                     http=check.http,
                     timeout=check.timeout,
                     service_id=check.service_id)
def remove_check(module, check_id):
    """Deregister the check identified by *check_id*, if the agent knows it."""
    agent = get_consul_api(module).agent
    if check_id in agent.checks():
        agent.check.deregister(check_id)
        module.exit_json(changed=True, id=check_id)
    # Unknown check: report no change (exit_json terminates the module).
    module.exit_json(changed=False, id=check_id)
def add_service(module, service):
    """Register *service* with the current agent, re-registering on change.

    Check metadata cannot be read back from consul, so a service carrying
    checks is always re-registered (and reported as changed).
    """
    consul_api = get_consul_api(module)
    result, changed = service, False

    existing = get_service_by_id_or_name(consul_api, service.id)
    # `not existing == service` kept (rather than !=) to preserve the
    # original comparison semantics of the ConsulService type.
    needs_registration = (service.has_checks()
                          or not existing
                          or not existing == service)
    if needs_registration:
        service.register(consul_api)
        # Read the service back to confirm the registration took effect.
        registered = get_service_by_id_or_name(consul_api, service.id)
        if registered:
            result, changed = registered, True

    module.exit_json(changed=changed,
                     service_id=result.id,
                     service_name=result.name,
                     service_port=result.port,
                     checks=[c.to_dict() for c in service.checks],
                     tags=result.tags)
def remove_service(module, service_id):
    ''' deregister a service from the given agent using its service id '''
    consul_api = get_consul_api(module)
    service = get_service_by_id_or_name(consul_api, service_id)
    if service:
        # deregister accepts a token here via the patched agent-service API
        consul_api.agent.service.deregister(service_id, token=module.params.get('token'))
        module.exit_json(changed=True, id=service_id)

    module.exit_json(changed=False, id=service_id)
def get_consul_api(module, token=None):
    ''' Build a python-consul client from the module parameters.

    The optional *token* argument now overrides the module's ``token``
    parameter; previously it was accepted but silently ignored. When
    omitted (the default) the module parameter is used, so existing
    callers see unchanged behavior.
    '''
    consulClient = consul.Consul(host=module.params.get('host'),
                                 port=module.params.get('port'),
                                 scheme=module.params.get('scheme'),
                                 verify=module.params.get('validate_certs'),
                                 token=token if token is not None else module.params.get('token'))
    # patch the agent service API (PatchedConsulAgentService is defined
    # elsewhere in this module) so deregistration can pass a token
    consulClient.agent.service = PatchedConsulAgentService(consulClient)
    return consulClient
def get_service_by_id_or_name(consul_api, service_id_or_name):
    ''' Find a registered service on the agent by its id or its name.

    Returns a ConsulService wrapping the agent's record, or None (implicit)
    when nothing matches. The original iterated ``.items()`` but never used
    the key; iterate the values directly.
    '''
    for service in consul_api.agent.services().values():
        if service_id_or_name in (service['ID'], service['Service']):
            return ConsulService(loaded=service)
def parse_check(module):
    ''' Build a ConsulCheck from the module parameters, or return None
    (implicitly) when no check-related parameter was supplied. '''
    # a check must be driven by exactly one of script / ttl / http
    if len([p for p in (module.params.get('script'), module.params.get('ttl'), module.params.get('http')) if p]) > 1:
        module.fail_json(
            msg='checks are either script, http or ttl driven, supplying more than one does not make sense')

    if module.params.get('check_id') or module.params.get('script') or module.params.get('ttl') or module.params.get('http'):

        return ConsulCheck(
            module.params.get('check_id'),
            module.params.get('check_name'),
            module.params.get('check_node'),
            module.params.get('check_host'),
            module.params.get('script'),
            module.params.get('interval'),
            module.params.get('ttl'),
            module.params.get('notes'),
            module.params.get('http'),
            module.params.get('timeout'),
            module.params.get('service_id'),
        )
def parse_service(module):
    ''' Build a ConsulService from the module parameters.

    Fails the module when no ``service_name`` was supplied — a name is
    mandatory to register a service.
    '''
    if module.params.get('service_name'):
        return ConsulService(
            module.params.get('service_id'),
            module.params.get('service_name'),
            module.params.get('service_address'),
            module.params.get('service_port'),
            module.params.get('tags'),
        )
    # the original `elif not ...service_name` was always true here; plain else
    module.fail_json(msg="service_name is required to configure a service.")
class ConsulService():
    ''' In-memory representation of a Consul service registration, built
    either from module parameters or loaded from an agent record. '''

    def __init__(self, service_id=None, name=None, address=None, port=-1,
                 tags=None, loaded=None):
        # id defaults to the name unless an explicit service_id is given
        self.id = self.name = name
        if service_id:
            self.id = service_id
        self.address = address
        self.port = port
        self.tags = tags
        # NOTE(review): this instance attribute shadows the `checks` method
        # defined below, making that method unreachable on instances.
        self.checks = []
        if loaded:
            # populate from an agent record (shape as returned by agent.services())
            self.id = loaded['ID']
            self.name = loaded['Service']
            self.port = loaded['Port']
            self.tags = loaded['Tags']

    def register(self, consul_api):
        ''' Register this service (and at most its first check) with the agent. '''
        # only pass port/check when meaningful
        optional = {}

        if self.port:
            optional['port'] = self.port

        if len(self.checks) > 0:
            # only the first attached check is sent with the registration
            optional['check'] = self.checks[0].check

        consul_api.agent.service.register(
            self.name,
            service_id=self.id,
            address=self.address,
            tags=self.tags,
            **optional)

    def add_check(self, check):
        self.checks.append(check)

    def checks(self):
        # dead code: shadowed by the `self.checks` list assigned in __init__
        return self.checks

    def has_checks(self):
        return len(self.checks) > 0

    def __eq__(self, other):
        # equality deliberately ignores address and checks (check metadata
        # cannot be read back from the agent — see add_service)
        return (isinstance(other, self.__class__) and
                self.id == other.id and
                self.name == other.name and
                self.port == other.port and
                self.tags == other.tags)

    def __ne__(self, other):
        return not self.__eq__(other)

    def to_dict(self):
        ''' Serialize for the module's exit_json payload. '''
        data = {'id': self.id, "name": self.name}
        if self.port:
            data['port'] = self.port
        if self.tags and len(self.tags) > 0:
            data['tags'] = self.tags
        if len(self.checks) > 0:
            data['check'] = self.checks[0].to_dict()
        return data
class ConsulCheck(object):
    ''' In-memory representation of a Consul health check (script, ttl or
    http driven), built from module parameters. '''

    def __init__(self, check_id, name, node=None, host='localhost',
                 script=None, interval=None, ttl=None, notes=None, http=None, timeout=None, service_id=None):
        # check_id defaults to the name unless explicitly supplied
        self.check_id = self.name = name
        if check_id:
            self.check_id = check_id
        self.service_id = service_id
        self.notes = notes
        self.node = node
        self.host = host

        # durations get a unit suffix appended when missing (seconds)
        self.interval = self.validate_duration('interval', interval)
        self.ttl = self.validate_duration('ttl', ttl)
        self.script = script
        self.http = http
        self.timeout = self.validate_duration('timeout', timeout)

        # build the underlying python-consul check object; the last of
        # script/ttl/http wins (parse_check guarantees only one is set)
        self.check = None

        if script:
            self.check = consul.Check.script(script, self.interval)

        if ttl:
            self.check = consul.Check.ttl(self.ttl)

        if http:
            if interval is None:
                raise Exception('http check must specify interval')

            self.check = consul.Check.http(http, self.interval, self.timeout)

    def validate_duration(self, name, duration):
        ''' Append a seconds suffix when no consul duration unit is present. '''
        if duration:
            duration_units = ['ns', 'us', 'ms', 's', 'm', 'h']
            if not any((duration.endswith(suffix) for suffix in duration_units)):
                duration = "{0}s".format(duration)
        return duration

    def register(self, consul_api):
        consul_api.agent.check.register(self.name, check_id=self.check_id, service_id=self.service_id,
                                        notes=self.notes,
                                        check=self.check)

    def __eq__(self, other):
        return (isinstance(other, self.__class__) and
                self.check_id == other.check_id and
                self.service_id == other.service_id and
                self.name == other.name and
                self.script == other.script and
                self.interval == other.interval)

    def __ne__(self, other):
        return not self.__eq__(other)

    def to_dict(self):
        ''' Serialize for the module's exit_json payload. '''
        data = {}
        self._add(data, 'id', attr='check_id')
        # BUG FIX: the attribute is `name`, not `check_name`; the old
        # attr='check_name' raised AttributeError inside _add, which was
        # swallowed, so the check name was silently missing from the output.
        self._add(data, 'name')
        self._add(data, 'script')
        self._add(data, 'node')
        self._add(data, 'notes')
        self._add(data, 'host')
        self._add(data, 'interval')
        self._add(data, 'ttl')
        self._add(data, 'http')
        self._add(data, 'timeout')
        self._add(data, 'service_id')
        return data

    def _add(self, data, key, attr=None):
        ''' Copy attribute `attr` (default: same as key) into data[key],
        silently skipping attributes that do not exist. '''
        try:
            if attr is None:
                attr = key
            data[key] = getattr(self, attr)
        except Exception:
            pass
def test_dependencies(module):
    ''' Fail the module early when the python-consul client library is not
    importable (python_consul_installed is set at import time). '''
    if not python_consul_installed:
        module.fail_json(msg="python-consul required for this module. see https://python-consul.readthedocs.io/en/latest/#installation")
def main():
    ''' Module entry point: declare the argument spec, verify dependencies,
    then dispatch to register_with_consul (which adds or removes the
    service/check depending on `state`). '''
    module = AnsibleModule(
        argument_spec=dict(
            host=dict(default='localhost'),
            port=dict(default=8500, type='int'),
            scheme=dict(required=False, default='http'),
            validate_certs=dict(required=False, default=True, type='bool'),
            check_id=dict(required=False),
            check_name=dict(required=False),
            check_node=dict(required=False),
            check_host=dict(required=False),
            notes=dict(required=False),
            script=dict(required=False),
            service_id=dict(required=False),
            service_name=dict(required=False),
            service_address=dict(required=False, type='str', default=None),
            service_port=dict(required=False, type='int', default=None),
            state=dict(default='present', choices=['present', 'absent']),
            interval=dict(required=False, type='str'),
            ttl=dict(required=False, type='str'),
            http=dict(required=False, type='str'),
            timeout=dict(required=False, type='str'),
            tags=dict(required=False, type='list'),
            token=dict(required=False, no_log=True)
        ),
        supports_check_mode=False,
    )

    test_dependencies(module)

    try:
        register_with_consul(module)
    except ConnectionError as e:
        # network failures get a dedicated, addressed error message
        module.fail_json(msg='Could not connect to consul agent at %s:%s, error was %s' % (
            module.params.get('host'), module.params.get('port'), str(e)))
    except Exception as e:
        module.fail_json(msg=str(e))


if __name__ == '__main__':
    main()
| 34.868056
| 136
| 0.620345
|
3d0901302eca67449cd5b8e4e8da93f2e0fca26d
| 3,752
|
py
|
Python
|
velruse/providers/identica_.py
|
maparent/velruse
|
07c600afc6408ce7e5de1045b2bc8a682f3b3631
|
[
"MIT"
] | null | null | null |
velruse/providers/identica_.py
|
maparent/velruse
|
07c600afc6408ce7e5de1045b2bc8a682f3b3631
|
[
"MIT"
] | null | null | null |
velruse/providers/identica_.py
|
maparent/velruse
|
07c600afc6408ce7e5de1045b2bc8a682f3b3631
|
[
"MIT"
] | null | null | null |
#XXX This module needs updating to use the patterns in e.g. the 'facebook'
# provider.
import urlparse
from routes import Mapper
import httplib2
import oauth2 as oauth
import webob.exc as exc
import velruse.utils as utils
log = __import__('logging').getLogger(__name__)
REQUEST_URL = 'https://identi.ca/api/oauth/request_token'
ACCESS_URL = 'https://identi.ca/api/oauth/access_token'
AUTHORIZE_URL = 'https://identi.ca/api/oauth/authorize'
class IdenticaResponder(utils.RouteResponder):
    """Handle Identi.ca OAuth login/authentication.

    Python 2 era code (urlparse, oauth2, httplib2). `login` starts the OAuth
    1.0a dance; `process` is the callback that finishes it.
    """
    map = Mapper()
    map.connect('login', '/auth', action='login', requirements=dict(method='POST'))
    map.connect('process', '/process', action='process')

    def __init__(self, storage, consumer_key, consumer_secret):
        self.consumer_key = consumer_key
        self.consumer_secret = consumer_secret
        self.storage = storage
        self._consumer = oauth.Consumer(consumer_key, consumer_secret)
        self._sigmethod = oauth.SignatureMethod_HMAC_SHA1()

    @classmethod
    def parse_config(cls, config):
        """Parse config data from a config file into constructor kwargs."""
        key_map = {'Consumer Key': 'consumer_key', 'Consumer Secret': 'consumer_secret'}
        identica_vals = config['Identica']
        params = {}
        for k, v in key_map.items():
            params[v] = identica_vals[k]
        params['storage'] = config['UserStore']
        return params

    def login(self, req):
        """Step 1: obtain a request token, then redirect the user to
        identi.ca's authorize page. Errors redirect back to end_point."""
        end_point = req.POST['end_point']

        # Create the consumer and client, make the request
        client = oauth.Client(self._consumer)
        params = {'oauth_callback': req.link('process', qualified=True)}

        # We go through some shennanigans here to specify a callback url
        request = oauth.Request.from_consumer_and_token(self._consumer,
            http_url=REQUEST_URL, parameters=params)
        request.sign_request(self._sigmethod, self._consumer, None)
        resp, content = httplib2.Http.request(client, REQUEST_URL, method='GET',
            headers=request.to_header())
        if resp['status'] != '200':
            log.debug("Identi.ca oauth failed: %r %r", resp, content)
            return self._error_redirect(3, end_point)
        request_token = oauth.Token.from_string(content)

        # stash the raw token string and final redirect target for /process
        req.session['token'] = content
        req.session['end_point'] = end_point
        req.session.save()

        # Send the user to identica to authorize us
        request = oauth.Request.from_token_and_callback(token=request_token, http_url=AUTHORIZE_URL)
        return exc.HTTPFound(location=request.to_url())

    def process(self, req):
        """Step 2 (callback): exchange the verified request token for an
        access token and build the normalized profile/credentials result."""
        end_point = req.session['end_point']
        request_token = oauth.Token.from_string(req.session['token'])
        verifier = req.GET.get('oauth_verifier')
        if not verifier:
            return self._error_redirect(1, end_point)

        request_token.set_verifier(verifier)
        client = oauth.Client(self._consumer, request_token)
        resp, content = client.request(ACCESS_URL, "POST")
        if resp['status'] != '200':
            return self._error_redirect(2, end_point)

        access_token = dict(urlparse.parse_qsl(content))

        # Setup the normalized contact info
        profile = {}
        profile['providerName'] = 'Identica'
        profile['displayName'] = access_token['screen_name']
        profile['identifier'] = 'http://identi.ca/%s' % access_token['user_id']
        result_data = {'status': 'ok', 'profile': profile}
        cred = {'oauthAccessToken': access_token['oauth_token'],
                'oauthAccessTokenSecret': access_token['oauth_token_secret']}
        result_data['credentials'] = cred
        return self._success_redirect(result_data, end_point)
| 38.680412
| 100
| 0.665778
|
421277f835132c156213a4565837357ef3e31441
| 113
|
py
|
Python
|
mmdet/version.py
|
FinalFlowers/pedestrian_tracking
|
8eb2c330f26a1cf030e535aa29a38288ff031f11
|
[
"MIT"
] | 6
|
2020-05-21T07:33:48.000Z
|
2022-03-12T14:30:10.000Z
|
mmdet/version.py
|
FinalFlowers/pedestrian_tracking
|
8eb2c330f26a1cf030e535aa29a38288ff031f11
|
[
"MIT"
] | 4
|
2020-10-10T13:23:35.000Z
|
2022-01-13T03:25:31.000Z
|
mmdet/version.py
|
FinalFlowers/pedestrian_tracking
|
8eb2c330f26a1cf030e535aa29a38288ff031f11
|
[
"MIT"
] | 1
|
2020-09-25T04:08:45.000Z
|
2020-09-25T04:08:45.000Z
|
# GENERATED VERSION FILE — do not edit by hand
# TIME: Tue May 5 22:34:46 2020
__version__ = '1.1.0+5cbe3a1'  # release + local build identifier (git sha)
short_version = '1.1.0'  # release version without build metadata
| 18.833333
| 32
| 0.690265
|
0233ca889667f61a88e6f5a1adfc8057a2011f90
| 103
|
py
|
Python
|
others/car/test3.py
|
dongmeng168/raspi_robot_car
|
893698cd8649ec7d51a4716f5acf415fe2f2d2f5
|
[
"BSD-2-Clause"
] | null | null | null |
others/car/test3.py
|
dongmeng168/raspi_robot_car
|
893698cd8649ec7d51a4716f5acf415fe2f2d2f5
|
[
"BSD-2-Clause"
] | null | null | null |
others/car/test3.py
|
dongmeng168/raspi_robot_car
|
893698cd8649ec7d51a4716f5acf415fe2f2d2f5
|
[
"BSD-2-Clause"
] | null | null | null |
# Poll the robot car's distance sensor: print a reading every 0.2 s
# (50 samples, ~10 seconds total). Python 2 syntax (print statement).
import wificar,time
wc=wificar.WifiCar()
for i in range(50):
    print wc.distance
    time.sleep(0.2)
| 10.3
| 20
| 0.699029
|
bfb6f0fd2818bbc2d76ee9d2ade34d12a18ef47f
| 1,603
|
py
|
Python
|
tcex/pleb/env_path.py
|
GShepherdTC/tcex
|
70b1199b8bb9e63f53e2ba792489267108c909cd
|
[
"Apache-2.0"
] | null | null | null |
tcex/pleb/env_path.py
|
GShepherdTC/tcex
|
70b1199b8bb9e63f53e2ba792489267108c909cd
|
[
"Apache-2.0"
] | null | null | null |
tcex/pleb/env_path.py
|
GShepherdTC/tcex
|
70b1199b8bb9e63f53e2ba792489267108c909cd
|
[
"Apache-2.0"
] | null | null | null |
"""ENV Str"""
# standard library
import os
import re
from pathlib import Path
from typing import Any, Dict, Union
class _EnvPath(type(Path()), Path):  # pylint: disable=E0241
    """A stub of Path with additional attribute.

    Subclassing ``type(Path())`` picks up the concrete platform class
    (PosixPath/WindowsPath), so instances are directly constructible.
    """

    # store for the original value passed to EnvPath
    original_value = None
class EnvPath(Path):
    """EnvPath custom pydantic model type.

    Accepts a string that may contain ``${env:KEY}`` placeholders, resolves
    them from the process environment and returns a concrete path object
    that remembers the raw input on ``original_value``.
    """

    @classmethod
    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
        """Advertise the field as a file path in the generated JSON schema."""
        field_schema.update(format='file-path')

    @classmethod
    def __get_validators__(cls) -> 'CallableGenerator':  # noqa: F821
        """Yield the validators pydantic runs for this type."""
        yield cls.validate

    @classmethod
    def validate(cls, value: Union[str, Path]) -> Path:
        """Replace any environment variables in the tcex.json file."""
        if isinstance(value, Path):
            return value

        string = str(value)
        # the pattern also matches envs/local/remote, but only "env" is
        # supported below — the others raise ValueError
        for m in re.finditer(r'\${(env|envs|local|remote):(.*?)}', string):
            try:
                full_match = m.group(0)
                env_type = m.group(1)
                env_key = m.group(2)

                if env_type != 'env':
                    raise ValueError(f'Invalid environment type found ({env_type})')

                env_value = os.getenv(env_key)
                if env_value is not None:
                    string = string.replace(full_match, env_value)
            except IndexError:
                # NOTE(review): this returns a plain str, not a Path —
                # callers relying on Path methods would break; confirm intent
                return string

        # convert value to _EnvPath (expanding ~) and keep the original value
        p = _EnvPath(os.path.expanduser(string))
        p.original_value = value
        return p
| 29.685185
| 84
| 0.583281
|
3f87ad2df121561161142a648b2afc185ade6aa8
| 6,786
|
py
|
Python
|
pybt/position.py
|
izzudinhafiz/trade-ml
|
db5d263a9a9e4bf3376068977df23c4ba3679a0b
|
[
"MIT"
] | 1
|
2021-03-12T13:39:36.000Z
|
2021-03-12T13:39:36.000Z
|
pybt/position.py
|
webclinic017/pybt
|
db5d263a9a9e4bf3376068977df23c4ba3679a0b
|
[
"MIT"
] | 3
|
2021-01-12T17:52:55.000Z
|
2021-01-21T18:16:37.000Z
|
pybt/position.py
|
webclinic017/pybt
|
db5d263a9a9e4bf3376068977df23c4ba3679a0b
|
[
"MIT"
] | 1
|
2021-11-08T02:51:07.000Z
|
2021-11-08T02:51:07.000Z
|
from pybt.commons import Money
from pybt.commons.helper import get_caller
import inspect
import warnings
class Position:
    """A single long or short holding tracked inside a portfolio.

    Keeps open/close prices, commissions and unrealised gain up to date,
    and closes itself when a take-profit / stop-loss level is hit or when
    price data runs out.
    """

    # flat rate applied to the absolute traded value, on both open and close
    commission_rate = 0.01

    def __init__(self, portfolio):
        self.portfolio = portfolio
        self.debug_mode = self.portfolio.debug_mode
        self.symbol = None
        self.size = None                 # signed quantity: > 0 long, < 0 short
        self.position_type = None        # "long" or "short"
        self.active = None
        self.open_price = None
        self.open_value = None
        self.open_time = None
        self.open_commission = Money(0)
        self.current_value = None
        self.current_price = None
        self.close_value = None
        self.close_time = None
        self.close_price = None
        self.close_commission = Money(0)
        self.close_type = None  # Closing reason ie of type [incomplete_price, take_profit, stop_loss, manual_close]
        self.take_profit = None
        self.stop_loss = None
        self.gain = None
        self.total_commission = self.open_commission + self.close_commission

    @classmethod
    def open_by_value(cls, symbol, value, take_profit=None, stop_loss=None):
        """Open a position worth *value* at the current market price.

        Returns the new Position, or None when no price is available or the
        computed size is zero.
        """
        if type(value) != Money:
            value = Money(value)

        position = cls(get_caller(inspect.stack()[1][0]))  # get_caller gets the portfolio object that called this position.
        market = position.portfolio.market
        current_price = market.get_current_price(symbol)
        if current_price is None:
            return None

        size = value / current_price
        is_open = position.open_position(symbol, size, current_price, take_profit, stop_loss)

        if is_open:
            return position
        else:
            return None

    def open_position(self, symbol: str, size: Money, open_price: Money, take_profit=None, stop_loss=None):
        """Initialise all open-side state; returns self, or None for size 0."""
        if size == 0:
            return None

        self.symbol = symbol
        self.size = size
        self.open_price = open_price
        self.current_price = open_price
        self.open_value = self.open_price * self.size
        self.open_time = self.portfolio.market.time_now
        self.active = True
        self.position_type = "long" if size > 0 else "short"
        # commission is charged on the absolute opened value
        self.open_commission = self.open_value.abs() * self.commission_rate
        self.total_commission = self.open_commission
        self.current_value = self.current_price * self.size
        self.gain = self.current_value - self.open_value
        self.process_tp_sl(take_profit, stop_loss)

        if self.debug_mode:
            print(f"[{self.open_time}][{self.symbol}] {self.position_type} position opened. Price: {self.open_price} TP: {self.take_profit} SL: {self.stop_loss}")

        return self

    def close_position(self, close_type=None):
        """Close at the current market price.

        *close_type* is one of incomplete_price / take_profit / stop_loss,
        or None for a manual close.
        """
        self.close_time = self.portfolio.market.time_now
        self.close_price = self.portfolio.market.get_current_price(self.symbol)
        self.current_price = self.close_price if self.close_price else self.current_price  # Handles incomplete_price scenario
        self.current_value = self.current_price * self.size
        self.close_commission = Money(self.commission_rate * self.current_value).abs()
        self.total_commission = self.open_commission + self.close_commission
        self.close_value = self.current_value
        self.gain = self.current_value - self.open_value
        self.active = False
        if close_type is None:
            self.close_type = "manual_close"
        else:
            self.close_type = close_type

        if self.debug_mode >= 1:
            print(f"[{self.close_time}][{self.symbol}][{self.close_type}] position closed. Price {self.close_price} Nett Gain {self.gain - self.total_commission}")

        return self

    def process_tp_sl(self, take_profit, stop_loss):
        """Normalise take_profit / stop_loss into absolute price levels.

        Each argument is either an absolute price (int/float/Money) or a
        ("percent", factor) tuple; percent factors are relative to the open
        price and mirrored for short positions.
        """
        if isinstance(take_profit, (int, float, Money)):
            self.take_profit = take_profit if isinstance(take_profit, Money) else Money(take_profit)
        elif isinstance(take_profit, tuple):
            if take_profit[0] == "percent":
                if take_profit[1] < 1:
                    raise ValueError(f"take_profit percentage mode requires value greater than 1")
                if self.position_type == "long":
                    profit_level = self.open_price * take_profit[1]
                else:
                    # short position: profit is below the open price
                    profit_level = self.open_price - self.open_price * (take_profit[1] - 1)
                self.take_profit = profit_level
            else:
                raise ValueError(f"{take_profit[0]} unsupported. take_profit only supports 'percent' mode")
        else:
            self.take_profit = None

        if isinstance(stop_loss, (int, float, Money)):
            self.stop_loss = stop_loss if isinstance(stop_loss, Money) else Money(stop_loss)
        elif isinstance(stop_loss, tuple):
            if stop_loss[0] == "percent":
                if stop_loss[1] > 1:
                    raise ValueError(f"stop_loss percentage mode requires value less than 1")
                if self.position_type == "long":
                    loss_level = self.open_price * stop_loss[1]
                else:
                    # short position: loss is above the open price
                    loss_level = self.open_price - self.open_price * (stop_loss[1] - 1)
                self.stop_loss = loss_level
            else:
                raise ValueError(f"{stop_loss[0]} unsupported. stop_loss only supports 'percent' mode")
        else:
            self.stop_loss = None

    def update(self):
        """Refresh price/value/gain; close on TP/SL hit or missing price.

        Returns the Money gain, the close_position result when a close is
        triggered, or None when the position is inactive or price data ended.
        """
        if not self.active:
            return

        price = self.portfolio.market.get_current_price(self.symbol)
        if price is None:
            self.terminate_early()
            return

        self.current_price = price
        should_close = self.should_close()
        if should_close:
            return self.close_position(should_close)

        self.current_value = self.current_price * self.size
        self.gain = self.current_value - self.open_value

        return self.gain

    def terminate_early(self):
        """Force-close when the market has no more price data for the symbol."""
        warnings.warn(f"{self.symbol} Opened on: {self.open_time} Terminated on {self.portfolio.market.time_now} due to insufficient price data")
        return self.close_position("incomplete_price")

    def should_close(self):
        """Return "take_profit"/"stop_loss" when the respective level is
        crossed (direction depends on long/short), else False."""
        if self.take_profit is not None:
            if self.position_type == "long":
                if self.current_price >= self.take_profit:
                    return "take_profit"
            else:
                if self.current_price <= self.take_profit:
                    return "take_profit"

        if self.stop_loss is not None:
            if self.position_type == "long":
                if self.current_price <= self.stop_loss:
                    return "stop_loss"
            else:
                if self.current_price >= self.stop_loss:
                    return "stop_loss"

        return False
| 39.453488
| 163
| 0.62349
|
b0c51a6c48c089459a20d5d037d8a6e11e732ed8
| 951
|
py
|
Python
|
data/hhess_full_2nm/prediction_inputr2.py
|
DerThorsten/pc
|
41d7474ceff8de7b95be5d4fbc42a40e89799e34
|
[
"MIT"
] | null | null | null |
data/hhess_full_2nm/prediction_inputr2.py
|
DerThorsten/pc
|
41d7474ceff8de7b95be5d4fbc42a40e89799e34
|
[
"MIT"
] | null | null | null |
data/hhess_full_2nm/prediction_inputr2.py
|
DerThorsten/pc
|
41d7474ceff8de7b95be5d4fbc42a40e89799e34
|
[
"MIT"
] | null | null | null |
# Prediction configuration: input volumes (raw EM data + semantic pmap from
# a previous round) and the output dataset for the round-2 prediction.
# Each "file" entry is an [hdf5-path, internal-dataset-name] pair.
predictionSettingsDict = {
    "predictionInput" : {
        "hhess" : {
            "data":{
                "raw" : {
                    "file" : [
                        "/home/tbeier/raw_sub.h5",
                        "data"
                    ]
                },
                "pmap" : {
                    "file" : [
                        "/media/tbeier/4cf81285-be72-45f5-8c63-fb8e9ff4476c/pc_out/2nm/prediction_semantic_full.h5",
                        "data"
                    ]
                }
            },
            "prediction" :{
                "file" : [
                    "/media/tbeier/4cf81285-be72-45f5-8c63-fb8e9ff4476c/pc_out/2nm/prediction_semantic_r2_full.h5",
                    "data"
                ],
                # must be either 'float32','float64', or 'uint8'
                "dtype" : "uint8"
            }
        }
    }
    ,
    "setup" : {
        # processing block size in voxels — assumed (z, y, x); TODO confirm
        "blockShape" : [60,60,60]
    }
}
| 28.818182
| 116
| 0.345952
|
1bbf400a8b5568ff33e8101db7d96e0b17af8885
| 2,637
|
bzl
|
Python
|
go/private/rules/wrappers.bzl
|
hiro511/rules_go
|
a9a8548ceb9abdd7293df1a77e3917238ddfdccd
|
[
"Apache-2.0"
] | null | null | null |
go/private/rules/wrappers.bzl
|
hiro511/rules_go
|
a9a8548ceb9abdd7293df1a77e3917238ddfdccd
|
[
"Apache-2.0"
] | null | null | null |
go/private/rules/wrappers.bzl
|
hiro511/rules_go
|
a9a8548ceb9abdd7293df1a77e3917238ddfdccd
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2014 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
load("@io_bazel_rules_go//go/private:rules/binary.bzl", "go_binary")
load("@io_bazel_rules_go//go/private:rules/library.bzl", "go_library")
load("@io_bazel_rules_go//go/private:rules/test.bzl", "go_test")
load("@io_bazel_rules_go//go/private:rules/cgo.bzl", "setup_cgo_library")
# Attributes that are stripped from the wrapped rule's kwargs and forwarded
# to the cgo setup when `cgo = True`; the value is the default used when the
# attribute is absent (srcs has no default and must be supplied).
_CGO_ATTRS = {
    "srcs": None,
    "cdeps": [],
    "copts": [],
    "clinkopts": [],
}
def _deprecate(attr, name, ruletype, kwargs, message):
    """Pop `attr` from kwargs and return its value, printing a deprecation
    warning when it was set (root workspace only, to avoid external noise)."""
    value = kwargs.pop(attr, None)
    if value and native.repository_name() == "@":
        print("\nDEPRECATED: //{}:{} : the {} attribute on {} is deprecated. {}".format(native.package_name(), name, attr, ruletype, message))
    return value
#TODO(#1208): Remove library attribute
def _deprecate_library(name, ruletype, kwargs):
    """Migrate a deprecated `library` attribute into `embed`, with a warning."""
    value = _deprecate("library", name, ruletype, kwargs, "Please migrate to embed.")
    if value:
        kwargs["embed"] = kwargs.get("embed", []) + [value]
#TODO(#1207): Remove importpath
def _deprecate_importpath(name, ruletype, kwargs):
    """Warn about (and drop) the deprecated `importpath` attribute."""
    _deprecate("importpath", name, ruletype, kwargs, "")
def _cgo(name, kwargs):
    """When `cgo = True`: strip the cgo attrs from kwargs, set up the cgo
    library, and append the generated library to `embed`."""
    cgo = kwargs.pop("cgo", False)
    if not cgo:
        return
    cgo_attrs = {"name": name}
    for key, default in _CGO_ATTRS.items():
        cgo_attrs[key] = kwargs.pop(key, default)
    cgo_embed = setup_cgo_library(**cgo_attrs)
    kwargs["embed"] = kwargs.get("embed", []) + [cgo_embed]
def go_library_macro(name, **kwargs):
    """See go/core.rst#go_library for full documentation.

    Wrapper macro: migrates the deprecated `library` attribute and expands
    cgo attributes before delegating to the go_library rule."""
    _deprecate_library(name, "go_library", kwargs)
    _cgo(name, kwargs)
    go_library(name = name, **kwargs)
def go_binary_macro(name, **kwargs):
    """See go/core.rst#go_binary for full documentation.

    Wrapper macro: migrates deprecated `library`/`importpath` attributes and
    expands cgo attributes before delegating to the go_binary rule."""
    _deprecate_library(name, "go_binary", kwargs)
    _deprecate_importpath(name, "go_binary", kwargs)
    _cgo(name, kwargs)
    go_binary(name = name, **kwargs)
def go_test_macro(name, **kwargs):
    """See go/core.rst#go_test for full documentation.

    Wrapper macro: migrates deprecated `library`/`importpath` attributes and
    expands cgo attributes before delegating to the go_test rule."""
    _deprecate_library(name, "go_test", kwargs)
    _deprecate_importpath(name, "go_test", kwargs)
    _cgo(name, kwargs)
    go_test(name = name, **kwargs)
| 37.140845
| 138
| 0.712552
|
1b77bacb5b5171916c0a1302185e4c5ac6d6bd90
| 2,532
|
py
|
Python
|
test_image_augmentation.py
|
vonholst/traq-deep-learning2
|
9191363091ce9c304022e00ff168868d31fb42b9
|
[
"MIT"
] | null | null | null |
test_image_augmentation.py
|
vonholst/traq-deep-learning2
|
9191363091ce9c304022e00ff168868d31fb42b9
|
[
"MIT"
] | null | null | null |
test_image_augmentation.py
|
vonholst/traq-deep-learning2
|
9191363091ce9c304022e00ff168868d31fb42b9
|
[
"MIT"
] | null | null | null |
from preprocessing import BatchGenerator, parse_annotation
from backend import MobileNetFeature
import json
import cv2
import numpy as np
import matplotlib.pyplot as plt
# Load the training configuration (paths, anchors, labels).
with open("config.json") as config_buffer:
    config = json.loads(config_buffer.read())

# Parse the annotation folder into per-image records and label counts.
train_imgs, train_labels = parse_annotation(config['train']['train_annot_folder'],
                                            config['train']['train_image_folder'],
                                            config['model']['labels'])

feature_extractor = MobileNetFeature(config['model']['input_size'])

# Generator settings for a 7x7 detection grid with 5 anchor boxes.
generator_config = {
    'IMAGE_H': config['model']['input_size'],
    'IMAGE_W': config['model']['input_size'],
    'GRID_H': 7,
    'GRID_W': 7,
    'BOX': 5,
    'LABELS': ["volvo_logo", "driver_controls", "pilot_assist"],
    'CLASS': 3,
    'ANCHORS': config['model']['anchors'],
    'BATCH_SIZE': 32,
    'TRUE_BOX_BUFFER': 10,
}

train_batch = BatchGenerator(train_imgs,
                             generator_config,
                             norm=None)

# Collect every ground-truth box size, normalised to grid-cell units,
# to eyeball suitable anchor box dimensions.
obj_h_list = []
obj_w_list = []
for img in train_imgs:
    h = float(img['height'])
    w = float(img['width'])
    for obj in img['object']:
        x_min = obj['xmin']
        x_max = obj['xmax']
        y_min = obj['ymin']
        y_max = obj['ymax']
        name = obj['name']
        obj_w = float(x_max - x_min)
        obj_h = float(y_max - y_min)
        norm_w = obj_w/w*7  # unit cell
        norm_h = obj_h/h*7  # unit cell
        obj_w_list.append(norm_w)
        obj_h_list.append(norm_h)

# Scatter plot of box widths vs heights (in grid-cell units) + statistics.
plt.figure(1)
plt.plot(obj_w_list, obj_h_list, 'r*')
mean_w = np.mean(obj_w_list)
mean_h = np.mean(obj_h_list)
max_h = np.max(obj_h_list)
max_w = np.max(obj_w_list)
min_h = np.min(obj_h_list)
min_w = np.min(obj_w_list)
print("Max w/h: {max_w}/{max_h}, Min w/h: {min_w}/{min_h}, Mean w/h: {mean_w}/{mean_h}".format(max_w=max_w, max_h=max_h,
                                                                                               min_w=min_w, min_h=min_h,
                                                                                               mean_w=mean_w,
                                                                                               mean_h=mean_h))

# Visually inspect one augmented batch, one image at a time.
plt.figure(2)
batch = train_batch.__getitem__(0)
[x_batch, b_batch], y_batch = batch
for idx in range(32):
    print("Showing image {}".format(idx))
    image = x_batch[idx, ...]
    plt.imshow(image/255.0)
    plt.show()
    # cv2.imshow('image',image)
    # cv2.waitKey(0)
    # cv2.destroyAllWindows()
| 31.259259
| 120
| 0.545024
|
db87f88648144ccb3b21a98635080936846adbe3
| 945
|
py
|
Python
|
src/app/migrations/0033_auto_20190708_2016.py
|
deadlock-delegate/arkdelegates
|
8a5262f51b519ba3bc10094756c8866fc550df65
|
[
"MIT"
] | 2
|
2018-05-22T13:47:09.000Z
|
2018-05-23T12:45:05.000Z
|
src/app/migrations/0033_auto_20190708_2016.py
|
deadlock-delegate/arkdelegates
|
8a5262f51b519ba3bc10094756c8866fc550df65
|
[
"MIT"
] | 21
|
2018-05-08T12:56:46.000Z
|
2020-06-05T18:59:38.000Z
|
src/app/migrations/0033_auto_20190708_2016.py
|
deadlock-delegate/arkdelegates
|
8a5262f51b519ba3bc10094756c8866fc550df65
|
[
"MIT"
] | 4
|
2018-05-04T15:00:59.000Z
|
2019-02-13T02:39:07.000Z
|
# Generated by Django 2.2.2 on 2019-07-08 20:16
from django.db import migrations, models
class Migration(migrations.Migration):
    """Adds the `payout_change` permission to Delegate and makes
    `history_change_reason` nullable on the historical tables."""

    dependencies = [
        ('app', '0032_auto_20190708_1932'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='delegate',
            options={'permissions': [('payout_change', 'Can change payouts info of all delegates')]},
        ),
        migrations.AlterField(
            model_name='historicalcontribution',
            name='history_change_reason',
            field=models.TextField(null=True),
        ),
        migrations.AlterField(
            model_name='historicaldelegate',
            name='history_change_reason',
            field=models.TextField(null=True),
        ),
        migrations.AlterField(
            model_name='historicalstatusupdate',
            name='history_change_reason',
            field=models.TextField(null=True),
        ),
    ]
| 28.636364
| 101
| 0.596825
|
3ce8ad1c73402d628c21dd86a073c7a635e98ecf
| 486
|
py
|
Python
|
jquant/config/__init__.py
|
thlynn/Jquant
|
44d9f36a8b06d9f7cc8870258360d3fed3e27cae
|
[
"MIT"
] | null | null | null |
jquant/config/__init__.py
|
thlynn/Jquant
|
44d9f36a8b06d9f7cc8870258360d3fed3e27cae
|
[
"MIT"
] | 8
|
2020-03-31T03:27:28.000Z
|
2020-03-31T03:51:28.000Z
|
jquant/config/__init__.py
|
thlynn/Jquant
|
44d9f36a8b06d9f7cc8870258360d3fed3e27cae
|
[
"MIT"
] | null | null | null |
import configparser
import os
class Keys:
    """Reads exchange credentials and endpoints from the bundled key.ini."""

    def __init__(self):
        ini_path = os.path.dirname(os.path.abspath(__file__)) + '/key.ini'
        self.config = configparser.ConfigParser()
        self.config.read(ini_path, encoding='utf-8')

    def get_key(self, name):
        """Return the (ACCESS_KEY, SECRET_KEY) pair for section *name*."""
        section = self.config[name]
        return section['ACCESS_KEY'], section['SECRET_KEY']

    def get_base_url(self, name):
        """Return the URL entry for section *name*."""
        return self.config[name]['URL']
| 25.578947
| 99
| 0.646091
|
42b325bd1446fcc90111131b5b0f449956c2f273
| 26,668
|
py
|
Python
|
django/test/client.py
|
vincepandolfo/django
|
67cf5efa31acb2916034afb15610b700695dfcb0
|
[
"PSF-2.0",
"BSD-3-Clause"
] | 1
|
2017-01-11T06:27:15.000Z
|
2017-01-11T06:27:15.000Z
|
django/test/client.py
|
vincepandolfo/django
|
67cf5efa31acb2916034afb15610b700695dfcb0
|
[
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null |
django/test/client.py
|
vincepandolfo/django
|
67cf5efa31acb2916034afb15610b700695dfcb0
|
[
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null |
from __future__ import unicode_literals
import json
import mimetypes
import os
import re
import sys
from copy import copy
from importlib import import_module
from io import BytesIO
from django.apps import apps
from django.conf import settings
from django.core.handlers.base import BaseHandler
from django.core.handlers.wsgi import ISO_8859_1, UTF_8, WSGIRequest
from django.core.signals import (
got_request_exception, request_finished, request_started,
)
from django.db import close_old_connections
from django.http import HttpRequest, QueryDict, SimpleCookie
from django.template import TemplateDoesNotExist
from django.test import signals
from django.test.utils import ContextList
from django.urls import resolve
from django.utils import six
from django.utils.encoding import force_bytes, force_str, uri_to_iri
from django.utils.functional import SimpleLazyObject, curry
from django.utils.http import urlencode
from django.utils.itercompat import is_iterable
from django.utils.six.moves.urllib.parse import urlparse, urlsplit
__all__ = ('Client', 'RedirectCycleError', 'RequestFactory', 'encode_file', 'encode_multipart')


# Fixed boundary string used when building multipart/form-data test payloads.
BOUNDARY = 'BoUnDaRyStRiNg'
MULTIPART_CONTENT = 'multipart/form-data; boundary=%s' % BOUNDARY
# Extracts the charset parameter from a Content-Type header value.
CONTENT_TYPE_RE = re.compile('.*; charset=([\w\d-]+);?')
class RedirectCycleError(Exception):
    """Raised when the test client is asked to follow a redirect loop."""
    def __init__(self, message, last_response):
        super(RedirectCycleError, self).__init__(message)
        # keep the final response and its chain available for test assertions
        self.redirect_chain = last_response.redirect_chain
        self.last_response = last_response
class FakePayload(object):
    """
    A wrapper around BytesIO that restricts what can be read since data from
    the network can't be seeked and cannot be read outside of its content
    length. This makes sure that views can't do anything under the test client
    that wouldn't work in Real Life.
    """
    def __init__(self, content=None):
        self.__content = BytesIO()
        self.__len = 0
        self.read_started = False
        if content is not None:
            self.write(content)

    def __len__(self):
        return self.__len

    def read(self, num_bytes=None):
        """Read up to *num_bytes* (default: everything remaining)."""
        if not self.read_started:
            # rewind once on first read, then consume forward-only like a socket
            self.__content.seek(0)
            self.read_started = True
        if num_bytes is None:
            num_bytes = self.__len or 0
        assert self.__len >= num_bytes, "Cannot read more than the available bytes from the HTTP incoming data."
        content = self.__content.read(num_bytes)
        self.__len -= num_bytes
        return content

    def write(self, content):
        """Append *content* (coerced to bytes); forbidden once reading started."""
        if self.read_started:
            # fix: grammatical error in the original message ("after he's been read")
            raise ValueError("Unable to write a payload after it has been read")
        content = force_bytes(content)
        self.__content.write(content)
        self.__len += len(content)
def closing_iterator_wrapper(iterable, close):
    """Yield from *iterable*, then invoke *close* exactly once at exhaustion.

    close_old_connections is temporarily disconnected so that firing
    request_finished via close() does not tear down the test's connections.
    """
    try:
        for item in iterable:
            yield item
    finally:
        request_finished.disconnect(close_old_connections)
        close()                                 # will fire request_finished
        request_finished.connect(close_old_connections)
class ClientHandler(BaseHandler):
    """
    A HTTP Handler that can be used for testing purposes. Uses the WSGI
    interface to compose requests, but returns the raw HttpResponse object with
    the originating WSGIRequest attached to its ``wsgi_request`` attribute.
    """
    def __init__(self, enforce_csrf_checks=True, *args, **kwargs):
        # tests normally bypass CSRF; pass False here to skip CsrfViewMiddleware
        self.enforce_csrf_checks = enforce_csrf_checks
        super(ClientHandler, self).__init__(*args, **kwargs)

    def __call__(self, environ):
        # Set up middleware if needed. We couldn't do this earlier, because
        # settings weren't available.
        if self._request_middleware is None:
            self.load_middleware()

        # keep DB connections alive across the simulated request by
        # disconnecting the cleanup handler around the signal send
        request_started.disconnect(close_old_connections)
        request_started.send(sender=self.__class__, environ=environ)
        request_started.connect(close_old_connections)
        request = WSGIRequest(environ)
        # sneaky little hack so that we can easily get round
        # CsrfViewMiddleware. This makes life easier, and is probably
        # required for backwards compatibility with external tests against
        # admin views.
        request._dont_enforce_csrf_checks = not self.enforce_csrf_checks

        # Request goes through middleware.
        response = self.get_response(request)

        # Attach the originating request to the response so that it could be
        # later retrieved.
        response.wsgi_request = request

        # We're emulating a WSGI server; we must call the close method
        # on completion.
        if response.streaming:
            # defer close() until the streaming content is fully consumed
            response.streaming_content = closing_iterator_wrapper(
                response.streaming_content, response.close)
        else:
            request_finished.disconnect(close_old_connections)
            response.close()  # will fire request_finished
            request_finished.connect(close_old_connections)

        return response
def store_rendered_templates(store, signal, sender, template, context, **kwargs):
    """
    Signal receiver that records each rendered template and a snapshot of
    its context into ``store``.

    The context is copied so the stored value reflects the state at render
    time rather than whatever it mutates into afterwards.
    """
    rendered = store.setdefault('templates', [])
    rendered.append(template)
    if 'context' not in store:
        store['context'] = ContextList()
    store['context'].append(copy(context))
def encode_multipart(boundary, data):
    """
    Encodes multipart POST data from a dictionary of form values.
    The key will be used as the form data name; the value will be transmitted
    as content. If the value is a file, the contents of the file will be sent
    as an application/octet-stream; otherwise, str(value) will be sent.
    """
    lines = []
    def to_bytes(s):
        return force_bytes(s, settings.DEFAULT_CHARSET)
    # Not by any means perfect, but good enough for our purposes.
    def is_file(thing):
        return hasattr(thing, "read") and callable(thing.read)
    # Each bit of the multipart form data could be either a form value or a
    # file, or a *list* of form values and/or files. Remember that HTTP field
    # names can be duplicated!
    for (key, value) in data.items():
        if is_file(value):
            lines.extend(encode_file(boundary, key, value))
        elif not isinstance(value, six.string_types) and is_iterable(value):
            # Non-string iterable: emit one part per item, reusing the key.
            for item in value:
                if is_file(item):
                    lines.extend(encode_file(boundary, key, item))
                else:
                    lines.extend(to_bytes(val) for val in [
                        '--%s' % boundary,
                        'Content-Disposition: form-data; name="%s"' % key,
                        '',
                        item
                    ])
        else:
            # Plain scalar (or string) value: a single form-data part.
            lines.extend(to_bytes(val) for val in [
                '--%s' % boundary,
                'Content-Disposition: form-data; name="%s"' % key,
                '',
                value
            ])
    # Closing boundary marker plus trailing CRLF.
    lines.extend([
        to_bytes('--%s--' % boundary),
        b'',
    ])
    return b'\r\n'.join(lines)
def encode_file(boundary, key, file):
    """
    Encode a single uploaded file as a list of multipart/form-data byte
    lines (without the closing boundary, which encode_multipart adds).
    """
    def _bytes(value):
        return force_bytes(value, settings.DEFAULT_CHARSET)

    filename = os.path.basename(file.name) if hasattr(file, 'name') else ''
    # An explicit content_type attribute wins, even if it is None; only
    # guess from the filename when the attribute is absent entirely.
    if hasattr(file, 'content_type'):
        content_type = file.content_type
    elif filename:
        content_type = mimetypes.guess_type(filename)[0]
    else:
        content_type = None
    if content_type is None:
        content_type = 'application/octet-stream'
    if not filename:
        filename = key
    disposition = ('Content-Disposition: form-data; name="%s"; filename="%s"'
                   % (key, filename))
    return [
        _bytes('--%s' % boundary),
        _bytes(disposition),
        _bytes('Content-Type: %s' % content_type),
        b'',
        _bytes(file.read()),
    ]
class RequestFactory(object):
    """
    Class that lets you create mock Request objects for use in testing.
    Usage:
    rf = RequestFactory()
    get_request = rf.get('/hello/')
    post_request = rf.post('/submit/', {'foo': 'bar'})
    Once you have a request object you can pass it to any view function,
    just as if that view had been hooked up using a URLconf.
    """
    def __init__(self, **defaults):
        # ``defaults`` are extra WSGI environ entries applied to every
        # request built by this factory.
        self.defaults = defaults
        self.cookies = SimpleCookie()
        self.errors = BytesIO()
    def _base_environ(self, **request):
        """
        The base environment for a request.
        """
        # This is a minimal valid WSGI environ dictionary, plus:
        # - HTTP_COOKIE: for cookie support,
        # - REMOTE_ADDR: often useful, see #8551.
        # See http://www.python.org/dev/peps/pep-3333/#environ-variables
        environ = {
            'HTTP_COOKIE': self.cookies.output(header='', sep='; '),
            'PATH_INFO': str('/'),
            'REMOTE_ADDR': str('127.0.0.1'),
            'REQUEST_METHOD': str('GET'),
            'SCRIPT_NAME': str(''),
            'SERVER_NAME': str('testserver'),
            'SERVER_PORT': str('80'),
            'SERVER_PROTOCOL': str('HTTP/1.1'),
            'wsgi.version': (1, 0),
            'wsgi.url_scheme': str('http'),
            'wsgi.input': FakePayload(b''),
            'wsgi.errors': self.errors,
            'wsgi.multiprocess': True,
            'wsgi.multithread': False,
            'wsgi.run_once': False,
        }
        # Precedence: base < factory-wide defaults < per-request overrides.
        environ.update(self.defaults)
        environ.update(request)
        return environ
    def request(self, **request):
        "Construct a generic request object."
        return WSGIRequest(self._base_environ(**request))
    def _encode_data(self, data, content_type):
        # Body encoding: multipart gets the full boundary treatment;
        # anything else is byte-encoded using the charset from the
        # Content-Type header (falling back to DEFAULT_CHARSET).
        if content_type is MULTIPART_CONTENT:
            return encode_multipart(BOUNDARY, data)
        else:
            # Encode the content so that the byte representation is correct.
            match = CONTENT_TYPE_RE.match(content_type)
            if match:
                charset = match.group(1)
            else:
                charset = settings.DEFAULT_CHARSET
            return force_bytes(data, encoding=charset)
    def _get_path(self, parsed):
        # Build PATH_INFO from a urlparse() result (path + params).
        path = force_str(parsed[2])
        # If there are parameters, add them
        if parsed[3]:
            path += str(";") + force_str(parsed[3])
        path = uri_to_iri(path).encode(UTF_8)
        # Under Python 3, non-ASCII values in the WSGI environ are arbitrarily
        # decoded with ISO-8859-1. We replicate this behavior here.
        # Refs comment in `get_bytes_from_wsgi()`.
        return path.decode(ISO_8859_1) if six.PY3 else path
    def get(self, path, data=None, secure=False, **extra):
        "Construct a GET request."
        data = {} if data is None else data
        r = {
            'QUERY_STRING': urlencode(data, doseq=True),
        }
        r.update(extra)
        return self.generic('GET', path, secure=secure, **r)
    def post(self, path, data=None, content_type=MULTIPART_CONTENT,
             secure=False, **extra):
        "Construct a POST request."
        data = {} if data is None else data
        post_data = self._encode_data(data, content_type)
        return self.generic('POST', path, post_data, content_type,
                            secure=secure, **extra)
    def head(self, path, data=None, secure=False, **extra):
        "Construct a HEAD request."
        data = {} if data is None else data
        r = {
            'QUERY_STRING': urlencode(data, doseq=True),
        }
        r.update(extra)
        return self.generic('HEAD', path, secure=secure, **r)
    def trace(self, path, secure=False, **extra):
        "Construct a TRACE request."
        return self.generic('TRACE', path, secure=secure, **extra)
    def options(self, path, data='', content_type='application/octet-stream',
                secure=False, **extra):
        "Construct an OPTIONS request."
        return self.generic('OPTIONS', path, data, content_type,
                            secure=secure, **extra)
    def put(self, path, data='', content_type='application/octet-stream',
            secure=False, **extra):
        "Construct a PUT request."
        return self.generic('PUT', path, data, content_type,
                            secure=secure, **extra)
    def patch(self, path, data='', content_type='application/octet-stream',
              secure=False, **extra):
        "Construct a PATCH request."
        return self.generic('PATCH', path, data, content_type,
                            secure=secure, **extra)
    def delete(self, path, data='', content_type='application/octet-stream',
               secure=False, **extra):
        "Construct a DELETE request."
        return self.generic('DELETE', path, data, content_type,
                            secure=secure, **extra)
    def generic(self, method, path, data='',
                content_type='application/octet-stream', secure=False,
                **extra):
        """Constructs an arbitrary HTTP request."""
        parsed = urlparse(force_str(path))
        data = force_bytes(data, settings.DEFAULT_CHARSET)
        r = {
            'PATH_INFO': self._get_path(parsed),
            'REQUEST_METHOD': str(method),
            'SERVER_PORT': str('443') if secure else str('80'),
            'wsgi.url_scheme': str('https') if secure else str('http'),
        }
        if data:
            # Only supply a body (and its headers) when there is one.
            r.update({
                'CONTENT_LENGTH': len(data),
                'CONTENT_TYPE': str(content_type),
                'wsgi.input': FakePayload(data),
            })
        r.update(extra)
        # If QUERY_STRING is absent or empty, we want to extract it from the URL.
        if not r.get('QUERY_STRING'):
            query_string = force_bytes(parsed[4])
            # WSGI requires latin-1 encoded strings. See get_path_info().
            if six.PY3:
                query_string = query_string.decode('iso-8859-1')
            r['QUERY_STRING'] = query_string
        return self.request(**r)
class Client(RequestFactory):
    """
    A class that can act as a client for testing purposes.
    It allows the user to compose GET and POST requests, and
    obtain the response that the server gave to those requests.
    The server Response objects are annotated with the details
    of the contexts and templates that were rendered during the
    process of serving the request.
    Client objects are stateful - they will retain cookie (and
    thus session) details for the lifetime of the Client instance.
    This is not intended as a replacement for Twill/Selenium or
    the like - it is here to allow testing against the
    contexts and templates produced by a view, rather than the
    HTML rendered to the end-user.
    """
    def __init__(self, enforce_csrf_checks=False, **defaults):
        super(Client, self).__init__(**defaults)
        self.handler = ClientHandler(enforce_csrf_checks)
        # Holds sys.exc_info() captured from a view via the
        # got_request_exception signal; consumed (and cleared) in request().
        self.exc_info = None
    def store_exc_info(self, **kwargs):
        """
        Stores exceptions when they are generated by a view.
        """
        self.exc_info = sys.exc_info()
    def _session(self):
        """
        Obtains the current session variables.
        """
        if apps.is_installed('django.contrib.sessions'):
            engine = import_module(settings.SESSION_ENGINE)
            cookie = self.cookies.get(settings.SESSION_COOKIE_NAME)
            if cookie:
                return engine.SessionStore(cookie.value)
            else:
                # No session cookie yet: create and persist a fresh session
                # so repeated accesses share the same store.
                s = engine.SessionStore()
                s.save()
                self.cookies[settings.SESSION_COOKIE_NAME] = s.session_key
                return s
        # Sessions app not installed: behave like an empty session.
        return {}
    session = property(_session)
    def request(self, **request):
        """
        The master request method. Composes the environment dictionary
        and passes to the handler, returning the result of the handler.
        Assumes defaults for the query environment, which can be overridden
        using the arguments to the request.
        """
        environ = self._base_environ(**request)
        # Curry a data dictionary into an instance of the template renderer
        # callback function.
        data = {}
        on_template_render = curry(store_rendered_templates, data)
        signal_uid = "template-render-%s" % id(request)
        signals.template_rendered.connect(on_template_render, dispatch_uid=signal_uid)
        # Capture exceptions created by the handler.
        exception_uid = "request-exception-%s" % id(request)
        got_request_exception.connect(self.store_exc_info, dispatch_uid=exception_uid)
        try:
            try:
                response = self.handler(environ)
            except TemplateDoesNotExist as e:
                # If the view raises an exception, Django will attempt to show
                # the 500.html template. If that template is not available,
                # we should ignore the error in favor of re-raising the
                # underlying exception that caused the 500 error. Any other
                # template found to be missing during view error handling
                # should be reported as-is.
                if e.args != ('500.html',):
                    raise
            # Look for a signalled exception, clear the current context
            # exception data, then re-raise the signalled exception.
            # Also make sure that the signalled exception is cleared from
            # the local cache!
            if self.exc_info:
                exc_info = self.exc_info
                self.exc_info = None
                six.reraise(*exc_info)
            # Save the client and request that stimulated the response.
            response.client = self
            response.request = request
            # Add any rendered template detail to the response.
            response.templates = data.get("templates", [])
            response.context = data.get("context")
            # Lazy JSON accessor: parsed on first call, not eagerly.
            response.json = curry(self._parse_json, response)
            # Attach the ResolverMatch instance to the response
            response.resolver_match = SimpleLazyObject(lambda: resolve(request['PATH_INFO']))
            # Flatten a single context. Not really necessary anymore thanks to
            # the __getattr__ flattening in ContextList, but has some edge-case
            # backwards-compatibility implications.
            if response.context and len(response.context) == 1:
                response.context = response.context[0]
            # Update persistent cookie data.
            if response.cookies:
                self.cookies.update(response.cookies)
            return response
        finally:
            signals.template_rendered.disconnect(dispatch_uid=signal_uid)
            got_request_exception.disconnect(dispatch_uid=exception_uid)
    def get(self, path, data=None, follow=False, secure=False, **extra):
        """
        Requests a response from the server using GET.
        """
        response = super(Client, self).get(path, data=data, secure=secure,
                                           **extra)
        if follow:
            response = self._handle_redirects(response, **extra)
        return response
    def post(self, path, data=None, content_type=MULTIPART_CONTENT,
             follow=False, secure=False, **extra):
        """
        Requests a response from the server using POST.
        """
        response = super(Client, self).post(path, data=data,
                                            content_type=content_type,
                                            secure=secure, **extra)
        if follow:
            response = self._handle_redirects(response, **extra)
        return response
    def head(self, path, data=None, follow=False, secure=False, **extra):
        """
        Request a response from the server using HEAD.
        """
        response = super(Client, self).head(path, data=data, secure=secure,
                                            **extra)
        if follow:
            response = self._handle_redirects(response, **extra)
        return response
    def options(self, path, data='', content_type='application/octet-stream',
                follow=False, secure=False, **extra):
        """
        Request a response from the server using OPTIONS.
        """
        response = super(Client, self).options(path, data=data,
                                               content_type=content_type,
                                               secure=secure, **extra)
        if follow:
            response = self._handle_redirects(response, **extra)
        return response
    def put(self, path, data='', content_type='application/octet-stream',
            follow=False, secure=False, **extra):
        """
        Send a resource to the server using PUT.
        """
        response = super(Client, self).put(path, data=data,
                                           content_type=content_type,
                                           secure=secure, **extra)
        if follow:
            response = self._handle_redirects(response, **extra)
        return response
    def patch(self, path, data='', content_type='application/octet-stream',
              follow=False, secure=False, **extra):
        """
        Send a resource to the server using PATCH.
        """
        response = super(Client, self).patch(path, data=data,
                                             content_type=content_type,
                                             secure=secure, **extra)
        if follow:
            response = self._handle_redirects(response, **extra)
        return response
    def delete(self, path, data='', content_type='application/octet-stream',
               follow=False, secure=False, **extra):
        """
        Send a DELETE request to the server.
        """
        response = super(Client, self).delete(path, data=data,
                                              content_type=content_type,
                                              secure=secure, **extra)
        if follow:
            response = self._handle_redirects(response, **extra)
        return response
    def trace(self, path, data='', follow=False, secure=False, **extra):
        """
        Send a TRACE request to the server.
        """
        response = super(Client, self).trace(path, data=data, secure=secure, **extra)
        if follow:
            response = self._handle_redirects(response, **extra)
        return response
    def login(self, **credentials):
        """
        Sets the Factory to appear as if it has successfully logged into a site.
        Returns True if login is possible; False if the provided credentials
        are incorrect, or the user is inactive, or if the sessions framework is
        not available.
        """
        from django.contrib.auth import authenticate
        user = authenticate(**credentials)
        if user and apps.is_installed('django.contrib.sessions'):
            self._login(user)
            return True
        else:
            return False
    def force_login(self, user, backend=None):
        # Like login(), but skips authentication entirely.
        self._login(user, backend)
    def _login(self, user, backend=None):
        from django.contrib.auth import login
        engine = import_module(settings.SESSION_ENGINE)
        # Create a fake request to store login details.
        request = HttpRequest()
        if self.session:
            request.session = self.session
        else:
            request.session = engine.SessionStore()
        login(request, user, backend)
        # Save the session values.
        request.session.save()
        # Set the cookie to represent the session.
        session_cookie = settings.SESSION_COOKIE_NAME
        self.cookies[session_cookie] = request.session.session_key
        cookie_data = {
            'max-age': None,
            'path': '/',
            'domain': settings.SESSION_COOKIE_DOMAIN,
            'secure': settings.SESSION_COOKIE_SECURE or None,
            'expires': None,
        }
        self.cookies[session_cookie].update(cookie_data)
    def logout(self):
        """
        Removes the authenticated user's cookies and session object.
        Causes the authenticated user to be logged out.
        """
        from django.contrib.auth import get_user, logout
        request = HttpRequest()
        engine = import_module(settings.SESSION_ENGINE)
        if self.session:
            request.session = self.session
            request.user = get_user(request)
        else:
            request.session = engine.SessionStore()
        logout(request)
        # Drop all client-side state, not just the session cookie.
        self.cookies = SimpleCookie()
    def _parse_json(self, response, **extra):
        # Backs the lazy ``response.json`` accessor set up in request().
        if 'application/json' not in response.get('Content-Type'):
            raise ValueError(
                'Content-Type header is "{0}", not "application/json"'
                .format(response.get('Content-Type'))
            )
        return json.loads(response.content.decode(), **extra)
    def _handle_redirects(self, response, **extra):
        "Follows any redirects by requesting responses from the server using GET."
        response.redirect_chain = []
        while response.status_code in (301, 302, 303, 307):
            response_url = response.url
            redirect_chain = response.redirect_chain
            redirect_chain.append((response_url, response.status_code))
            url = urlsplit(response_url)
            # Carry scheme/host/port from the redirect target into the
            # environ overrides for the follow-up request.
            if url.scheme:
                extra['wsgi.url_scheme'] = url.scheme
            if url.hostname:
                extra['SERVER_NAME'] = url.hostname
            if url.port:
                extra['SERVER_PORT'] = str(url.port)
            response = self.get(url.path, QueryDict(url.query), follow=False, **extra)
            response.redirect_chain = redirect_chain
            if redirect_chain[-1] in redirect_chain[:-1]:
                # Check that we're not redirecting to somewhere we've already
                # been to, to prevent loops.
                raise RedirectCycleError("Redirect loop detected.", last_response=response)
            if len(redirect_chain) > 20:
                # Such a lengthy chain likely also means a loop, but one with
                # a growing path, changing view, or changing query argument;
                # 20 is the value of "network.http.redirection-limit" from Firefox.
                raise RedirectCycleError("Too many redirects.", last_response=response)
        return response
| 38.316092
| 112
| 0.606607
|
0cd699a785ea53f97ebd67455b27c3ad2e9d8764
| 3,164
|
py
|
Python
|
audioguide/routebuilder/tests.py
|
daveclifton/audioguide
|
cb23e5f5f022d8bc86bd23e0361a07accf727341
|
[
"MIT"
] | null | null | null |
audioguide/routebuilder/tests.py
|
daveclifton/audioguide
|
cb23e5f5f022d8bc86bd23e0361a07accf727341
|
[
"MIT"
] | null | null | null |
audioguide/routebuilder/tests.py
|
daveclifton/audioguide
|
cb23e5f5f022d8bc86bd23e0361a07accf727341
|
[
"MIT"
] | null | null | null |
import datetime
from django.utils import timezone
from django.test import TestCase
from django.core.urlresolvers import reverse
from .models import *
class ReverseUrlTests(TestCase):
    """Check that every named routebuilder URL reverses to the expected path."""
    def test_reverse_url(self):
        self.assertEqual( reverse('index'), "/routebuilder/" )
    def test_reverse_url_route_1(self):
        self.assertEqual( reverse('route',args=(1,)), "/routebuilder/route/1" )
    def test_reverse_url_route_add(self):
        self.assertEqual( reverse('route_add'), "/routebuilder/route/add" )
    def test_reverse_url_route_edit_1(self):
        self.assertEqual( reverse('route_edit',args=(1,) ), "/routebuilder/route/edit/1" )
    def test_reverse_url_route_delete(self):
        self.assertEqual( reverse('route_delete' ), "/routebuilder/route/delete" )
    def test_reverse_url_waypoint_1_1(self):
        # Waypoint URLs take (route_id, waypoint_id).
        self.assertEqual( reverse('waypoint',args=(1,1) ), "/routebuilder/waypoint/1/1" )
    def test_reverse_url_waypoint_add_1(self):
        self.assertEqual( reverse('waypoint_add', args=(1,) ), "/routebuilder/waypoint/add/1" )
    def test_reverse_url_waypoint_edit_1_1(self):
        self.assertEqual( reverse('waypoint_edit', args=(1,1) ), "/routebuilder/waypoint/edit/1/1" )
    def test_reverse_url_waypoint_delete(self):
        self.assertEqual( reverse('waypoint_delete' ), "/routebuilder/waypoint/delete" )
class RenderTemplateTests(TestCase):
    """Check that each routebuilder view renders the expected template."""
    def test_templates(self):
        response = self.client.get('/routebuilder/')
        self.assertTemplateUsed(response, 'index.html')
    def test_templates_index(self):
        # The remaining tests only assert the shared base template was used.
        response = self.client.get( reverse('index') )
        self.assertTemplateUsed(response, 'base.html')
    def test_templates_route_1(self):
        response = self.client.get( reverse('route',args=(1,)) )
        self.assertTemplateUsed(response, 'base.html')
    def test_templates_route_add(self):
        response = self.client.get( reverse('route_add') )
        self.assertTemplateUsed(response, 'base.html')
    def test_templates_route_edit(self):
        response = self.client.get( reverse('route_edit',args=(1,) ) )
        self.assertTemplateUsed(response, 'base.html')
    def test_templates_waypoint_1_1(self):
        response = self.client.get( reverse('waypoint',args=(1,1) ) )
        self.assertTemplateUsed(response, 'base.html')
    def test_templates_waypoint_add(self):
        response = self.client.get( reverse('waypoint_add', args=(1,) ) )
        self.assertTemplateUsed(response, 'base.html')
    def test_templates_waypoint_edit(self):
        response = self.client.get( reverse('waypoint_edit', args=(1,1) ) )
        self.assertTemplateUsed(response, 'base.html')
class RedirectTests(TestCase):
    """Check that the delete views redirect after a POST."""
    def test_redirect_route_delete(self):
        response = self.client.post( reverse('route_delete' ), {'id':1} )
        self.assertRedirects(response, '/routebuilder/')
    def test_redirect_waypoint_delete(self):
        # Deleting a waypoint should land back on its parent route page.
        response = self.client.post( reverse('waypoint_delete' ), {'id':1} )
        self.assertRedirects(response, '/routebuilder/route/1')
| 37.666667
| 100
| 0.676991
|
c0c50146890830978ab1d51b0e4d2bc5fc60aeca
| 965
|
py
|
Python
|
questionnaire/urls.py
|
affan2/ed-questionnaire
|
d210b5784c30c238dd17a351ace8848025fff936
|
[
"BSD-3-Clause"
] | null | null | null |
questionnaire/urls.py
|
affan2/ed-questionnaire
|
d210b5784c30c238dd17a351ace8848025fff936
|
[
"BSD-3-Clause"
] | null | null | null |
questionnaire/urls.py
|
affan2/ed-questionnaire
|
d210b5784c30c238dd17a351ace8848025fff936
|
[
"BSD-3-Clause"
] | 1
|
2020-01-08T09:08:12.000Z
|
2020-01-08T09:08:12.000Z
|
# vim: set fileencoding=utf-8
from django.conf.urls import patterns, url
from views import questionnaire, export_csv, get_async_progress, use_session, redirect_to_prev_questionnaire
# URL routes for the questionnaire app.
# NOTE(review): ``patterns()`` was removed in Django 1.10 — this module
# presumably targets an older Django; verify before upgrading.
urlpatterns = patterns(
    '',
    url(r'^$',
        questionnaire, name='questionnaire_noargs'),
    url(r'^csv/(?P<qid>\d+)/(?P<only_complete>\d)/$',
        export_csv, name='export_csv'),
    url(r'^(?P<runcode>[^/]+)/progress/$',
        get_async_progress, name='progress'),
    url(r'^(?P<runcode>[^/]+)/(?P<qs>[-]{0,1}\d+)/$',
        questionnaire, name='questionset'),
)
# The 'questionnaire' entry point depends on whether runcodes are carried
# in the URL or in the session (``use_session`` flag from views).
if not use_session:
    urlpatterns += patterns(
        '',
        url(r'^(?P<runcode>[^/]+)/$',
            questionnaire, name='questionnaire'),
    )
else:
    urlpatterns += patterns(
        '',
        url(r'^$',
            questionnaire, name='questionnaire'),
        url(r'^prev/$',
            redirect_to_prev_questionnaire,
            name='redirect_to_prev_questionnaire')
    )
| 28.382353
| 108
| 0.587565
|
bfe65e04c281479a75fbae37521f244718e2a910
| 1,804
|
py
|
Python
|
setup.py
|
jaryP/ContinualAI
|
7d9b7614066d219ebd72049692da23ad6ec132b0
|
[
"MIT"
] | null | null | null |
setup.py
|
jaryP/ContinualAI
|
7d9b7614066d219ebd72049692da23ad6ec132b0
|
[
"MIT"
] | null | null | null |
setup.py
|
jaryP/ContinualAI
|
7d9b7614066d219ebd72049692da23ad6ec132b0
|
[
"MIT"
] | null | null | null |
import os
import setuptools
import codecs
def read(rel_path):
    """Return the text of *rel_path*, resolved relative to this file."""
    here = os.path.abspath(os.path.dirname(__file__))
    with codecs.open(os.path.join(here, rel_path), 'r') as fp:
        return fp.read()


def get_version(rel_path):
    """Extract the ``__version__`` string from the module at *rel_path*.

    Scans the file for a line of the form ``__version__ = '...'`` (single
    or double quotes) and returns the quoted version string.

    Raises:
        RuntimeError: if no ``__version__`` assignment is found.
    """
    # Debug ``print(rel_path)`` removed: setup.py should not write to
    # stdout on import.
    for line in read(rel_path).splitlines():
        if line.startswith('__version__'):
            delim = '"' if '"' in line else "'"
            return line.split(delim)[1]
    raise RuntimeError("Unable to find version string.")
# Fall back to an empty long description instead of crashing: the original
# only defined ``long_description`` inside the try block, so a missing
# README.md caused a NameError in the setup() call below.
long_description = ""
try:
    with open('README.md') as file:
        long_description = file.read()
except IOError:  # file not found
    pass

setuptools.setup(
    name='continual_learning',
    version=get_version("continual_learning/__init__.py"),
    author="Jary Pomponi",
    author_email="jarypomponi@gmail.org",
    description="A base CL framework to speed-up prototyping and testing",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/jaryP/ContinualAI",
    packages=setuptools.find_packages(),
    classifiers=[
        "Intended Audience :: Science/Research",
        "Intended Audience :: Education",
        "Intended Audience :: Other Audience",
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
        "Development Status :: 1 - Planning",
        "Topic :: Scientific/Engineering",
        "Topic :: Scientific/Engineering :: Artificial Intelligence",
        "Environment :: Console",
    ],
    keywords=["continual learning"],
    python_requires='>=3.7',
    install_requires=[
        'torch',
        'torchvision',
        'numpy',
        'quadprog',
        'scipy'
    ]
)
| 29.096774
| 92
| 0.640798
|
461b16ea59f595be8d448442b5da0dff45df3d59
| 106,683
|
py
|
Python
|
FusionIIIT/applications/eis/views.py
|
ssaksham9/Fusion
|
f1e405b457dba399411a2ddb79a9068746c05057
|
[
"bzip2-1.0.6"
] | 2
|
2020-01-24T16:34:54.000Z
|
2020-08-01T05:09:24.000Z
|
FusionIIIT/applications/eis/views.py
|
ssaksham9/Fusion
|
f1e405b457dba399411a2ddb79a9068746c05057
|
[
"bzip2-1.0.6"
] | null | null | null |
FusionIIIT/applications/eis/views.py
|
ssaksham9/Fusion
|
f1e405b457dba399411a2ddb79a9068746c05057
|
[
"bzip2-1.0.6"
] | 5
|
2020-01-21T11:27:06.000Z
|
2020-02-07T13:53:49.000Z
|
import csv
from cgi import escape
from io import BytesIO
from django.contrib.auth.models import User
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import (get_object_or_404, redirect, render,
render_to_response)
from django.template import loader
from django.template.loader import get_template
from django.urls import reverse_lazy
from django.views import generic
from django.views.generic.edit import CreateView, DeleteView, UpdateView
from xhtml2pdf import pisa
from applications.eis import admin
from applications.globals.models import ExtraInfo, HoldsDesignation
from django.http.response import JsonResponse
from applications.globals.models import (DepartmentInfo, ExtraInfo,
HoldsDesignation)
from .forms import *
from .models import *
from django.core.files.storage import FileSystemStorage
# ISO 3166-1 alpha-2 country code -> English display name.  Used to expand
# the two-letter ``country`` field on foreign-visit records into a full
# name before rendering (see ``fvisit.countryfull`` in the profile view).
countries = {
    'AF': 'Afghanistan',
    'AX': 'Aland Islands',
    'AL': 'Albania',
    'DZ': 'Algeria',
    'AS': 'American Samoa',
    'AD': 'Andorra',
    'AO': 'Angola',
    'AI': 'Anguilla',
    'AQ': 'Antarctica',
    'AG': 'Antigua And Barbuda',
    'AR': 'Argentina',
    'AM': 'Armenia',
    'AW': 'Aruba',
    'AU': 'Australia',
    'AT': 'Austria',
    'AZ': 'Azerbaijan',
    'BS': 'Bahamas',
    'BH': 'Bahrain',
    'BD': 'Bangladesh',
    'BB': 'Barbados',
    'BY': 'Belarus',
    'BE': 'Belgium',
    'BZ': 'Belize',
    'BJ': 'Benin',
    'BM': 'Bermuda',
    'BT': 'Bhutan',
    'BO': 'Bolivia',
    'BA': 'Bosnia And Herzegovina',
    'BW': 'Botswana',
    'BV': 'Bouvet Island',
    'BR': 'Brazil',
    'IO': 'British Indian Ocean Territory',
    'BN': 'Brunei Darussalam',
    'BG': 'Bulgaria',
    'BF': 'Burkina Faso',
    'BI': 'Burundi',
    'KH': 'Cambodia',
    'CM': 'Cameroon',
    'CA': 'Canada',
    'CV': 'Cape Verde',
    'KY': 'Cayman Islands',
    'CF': 'Central African Republic',
    'TD': 'Chad',
    'CL': 'Chile',
    'CN': 'China',
    'CX': 'Christmas Island',
    'CC': 'Cocos (Keeling) Islands',
    'CO': 'Colombia',
    'KM': 'Comoros',
    'CG': 'Congo',
    'CD': 'Congo, Democratic Republic',
    'CK': 'Cook Islands',
    'CR': 'Costa Rica',
    'CI': 'Cote D\'Ivoire',
    'HR': 'Croatia',
    'CU': 'Cuba',
    'CY': 'Cyprus',
    'CZ': 'Czech Republic',
    'DK': 'Denmark',
    'DJ': 'Djibouti',
    'DM': 'Dominica',
    'DO': 'Dominican Republic',
    'EC': 'Ecuador',
    'EG': 'Egypt',
    'SV': 'El Salvador',
    'GQ': 'Equatorial Guinea',
    'ER': 'Eritrea',
    'EE': 'Estonia',
    'ET': 'Ethiopia',
    'FK': 'Falkland Islands (Malvinas)',
    'FO': 'Faroe Islands',
    'FJ': 'Fiji',
    'FI': 'Finland',
    'FR': 'France',
    'GF': 'French Guiana',
    'PF': 'French Polynesia',
    'TF': 'French Southern Territories',
    'GA': 'Gabon',
    'GM': 'Gambia',
    'GE': 'Georgia',
    'DE': 'Germany',
    'GH': 'Ghana',
    'GI': 'Gibraltar',
    'GR': 'Greece',
    'GL': 'Greenland',
    'GD': 'Grenada',
    'GP': 'Guadeloupe',
    'GU': 'Guam',
    'GT': 'Guatemala',
    'GG': 'Guernsey',
    'GN': 'Guinea',
    'GW': 'Guinea-Bissau',
    'GY': 'Guyana',
    'HT': 'Haiti',
    'HM': 'Heard Island & Mcdonald Islands',
    'VA': 'Holy See (Vatican City State)',
    'HN': 'Honduras',
    'HK': 'Hong Kong',
    'HU': 'Hungary',
    'IS': 'Iceland',
    'IN': 'India',
    'ID': 'Indonesia',
    'IR': 'Iran, Islamic Republic Of',
    'IQ': 'Iraq',
    'IE': 'Ireland',
    'IM': 'Isle Of Man',
    'IL': 'Israel',
    'IT': 'Italy',
    'JM': 'Jamaica',
    'JP': 'Japan',
    'JE': 'Jersey',
    'JO': 'Jordan',
    'KZ': 'Kazakhstan',
    'KE': 'Kenya',
    'KI': 'Kiribati',
    'KR': 'Korea',
    'KW': 'Kuwait',
    'KG': 'Kyrgyzstan',
    'LA': 'Lao People\'s Democratic Republic',
    'LV': 'Latvia',
    'LB': 'Lebanon',
    'LS': 'Lesotho',
    'LR': 'Liberia',
    'LY': 'Libyan Arab Jamahiriya',
    'LI': 'Liechtenstein',
    'LT': 'Lithuania',
    'LU': 'Luxembourg',
    'MO': 'Macao',
    'MK': 'Macedonia',
    'MG': 'Madagascar',
    'MW': 'Malawi',
    'MY': 'Malaysia',
    'MV': 'Maldives',
    'ML': 'Mali',
    'MT': 'Malta',
    'MH': 'Marshall Islands',
    'MQ': 'Martinique',
    'MR': 'Mauritania',
    'MU': 'Mauritius',
    'YT': 'Mayotte',
    'MX': 'Mexico',
    'FM': 'Micronesia, Federated States Of',
    'MD': 'Moldova',
    'MC': 'Monaco',
    'MN': 'Mongolia',
    'ME': 'Montenegro',
    'MS': 'Montserrat',
    'MA': 'Morocco',
    'MZ': 'Mozambique',
    'MM': 'Myanmar',
    'NA': 'Namibia',
    'NR': 'Nauru',
    'NP': 'Nepal',
    'NL': 'Netherlands',
    'AN': 'Netherlands Antilles',
    'NC': 'New Caledonia',
    'NZ': 'New Zealand',
    'NI': 'Nicaragua',
    'NE': 'Niger',
    'NG': 'Nigeria',
    'NU': 'Niue',
    'NF': 'Norfolk Island',
    'MP': 'Northern Mariana Islands',
    'NO': 'Norway',
    'OM': 'Oman',
    'PK': 'Pakistan',
    'PW': 'Palau',
    'PS': 'Palestinian Territory, Occupied',
    'PA': 'Panama',
    'PG': 'Papua New Guinea',
    'PY': 'Paraguay',
    'PE': 'Peru',
    'PH': 'Philippines',
    'PN': 'Pitcairn',
    'PL': 'Poland',
    'PT': 'Portugal',
    'PR': 'Puerto Rico',
    'QA': 'Qatar',
    'RE': 'Reunion',
    'RO': 'Romania',
    'RU': 'Russian Federation',
    'RW': 'Rwanda',
    'BL': 'Saint Barthelemy',
    'SH': 'Saint Helena',
    'KN': 'Saint Kitts And Nevis',
    'LC': 'Saint Lucia',
    'MF': 'Saint Martin',
    'PM': 'Saint Pierre And Miquelon',
    'VC': 'Saint Vincent And Grenadines',
    'WS': 'Samoa',
    'SM': 'San Marino',
    'ST': 'Sao Tome And Principe',
    'SA': 'Saudi Arabia',
    'SN': 'Senegal',
    'RS': 'Serbia',
    'SC': 'Seychelles',
    'SL': 'Sierra Leone',
    'SG': 'Singapore',
    'SK': 'Slovakia',
    'SI': 'Slovenia',
    'SB': 'Solomon Islands',
    'SO': 'Somalia',
    'ZA': 'South Africa',
    'GS': 'South Georgia And Sandwich Isl.',
    'ES': 'Spain',
    'LK': 'Sri Lanka',
    'SD': 'Sudan',
    'SR': 'Suriname',
    'SJ': 'Svalbard And Jan Mayen',
    'SZ': 'Swaziland',
    'SE': 'Sweden',
    'CH': 'Switzerland',
    'SY': 'Syrian Arab Republic',
    'TW': 'Taiwan',
    'TJ': 'Tajikistan',
    'TZ': 'Tanzania',
    'TH': 'Thailand',
    'TL': 'Timor-Leste',
    'TG': 'Togo',
    'TK': 'Tokelau',
    'TO': 'Tonga',
    'TT': 'Trinidad And Tobago',
    'TN': 'Tunisia',
    'TR': 'Turkey',
    'TM': 'Turkmenistan',
    'TC': 'Turks And Caicos Islands',
    'TV': 'Tuvalu',
    'UG': 'Uganda',
    'UA': 'Ukraine',
    'AE': 'United Arab Emirates',
    'GB': 'United Kingdom',
    'US': 'United States',
    'UM': 'United States Outlying Islands',
    'UY': 'Uruguay',
    'UZ': 'Uzbekistan',
    'VU': 'Vanuatu',
    'VE': 'Venezuela',
    'VN': 'Viet Nam',
    'VG': 'Virgin Islands, British',
    'VI': 'Virgin Islands, U.S.',
    'WF': 'Wallis And Futuna',
    'EH': 'Western Sahara',
    'YE': 'Yemen',
    'ZM': 'Zambia',
    'ZW': 'Zimbabwe',
    'KP': 'Korea (Democratic Peoples Republic of)',
}
# Create your views here
# Main profile landing view
def profile(request, username=None):
    """Faculty EIS profile landing page.

    If *username* is given, show that user's profile; otherwise show the
    logged-in user's own. Non-faculty users are bounced to the site root.
    Gathers every EIS category (papers, books, projects, visits, ...) for
    the faculty member's pf number and renders the profile template.
    """
    user = get_object_or_404(User, username=username) if username else request.user
    extra_info = get_object_or_404(ExtraInfo, user=user)
    # Only faculty have an EIS profile.
    if extra_info.user_type != 'faculty':
        return redirect('/')
    # The ExtraInfo id doubles as the pf number keyed by all EIS tables.
    pf = extra_info.id
    form = ConfrenceForm()
    journal = emp_research_papers.objects.filter(pf_no=pf, rtype='Journal').order_by('-year')
    conference = emp_research_papers.objects.filter(pf_no=pf, rtype='Conference').order_by('-year')
    books = emp_published_books.objects.filter(pf_no=pf).order_by('-pyear')
    projects = emp_research_projects.objects.filter(pf_no=pf).order_by('-start_date')
    consultancy = emp_consultancy_projects.objects.filter(pf_no=pf).order_by('-date_entry')
    patents = emp_patents.objects.filter(pf_no=pf).order_by('-date_entry')
    techtransfers = emp_techtransfer.objects.filter(pf_no=pf).order_by('-date_entry')
    # degree_type 1 = M.Tech theses, 2 = PhD theses.
    mtechs = emp_mtechphd_thesis.objects.filter(pf_no=pf, degree_type=1).order_by('-date_entry')
    phds = emp_mtechphd_thesis.objects.filter(pf_no=pf, degree_type=2).order_by('-date_entry')
    # v_type 2 = foreign visits, 1 = Indian visits.
    fvisits = emp_visits.objects.filter(pf_no=pf, v_type=2).order_by('-entry_date')
    ivisits = emp_visits.objects.filter(pf_no=pf, v_type=1).order_by('-entry_date')
    # Expand the 2-letter country code for display (module-level `countries` map).
    for fvisit in fvisits:
        fvisit.countryfull = countries[fvisit.country]
    consymps = emp_confrence_organised.objects.filter(pf_no=pf).order_by('-date_entry')
    awards = emp_achievement.objects.filter(pf_no=pf).order_by('-date_entry')
    talks = emp_expert_lectures.objects.filter(pf_no=pf).order_by('-date_entry')
    chairs = emp_session_chair.objects.filter(pf_no=pf).order_by('-date_entry')
    keynotes = emp_keynote_address.objects.filter(pf_no=pf).order_by('-date_entry')
    events = emp_event_organized.objects.filter(pf_no=pf).order_by('-start_date')
    # Year choices for the template's dropdowns: 1995 .. current year.
    y=[]
    for r in range(1995, (datetime.datetime.now().year + 1)):
        y.append(r)
    try:
        pers = get_object_or_404(faculty_about, user = user)
    except:
        # No faculty_about row yet; template handles pers == None.
        pers = None
    # Flag whether the viewed user holds the Dean (RSPC) designation.
    a1 = HoldsDesignation.objects.filter(working = user)
    flag_rspc = 0
    for i in a1:
        if(str(i.designation)=='Dean (RSPC)'):
            flag_rspc = 1
    # All designations, stringified for display.
    design = HoldsDesignation.objects.filter(working=user)
    desig=[]
    for i in design:
        desig.append(str(i.designation))
    context = {'user': user,
                'desig':desig,
                'pf':pf,
                'flag_rspc':flag_rspc,
                'journal':journal,
                'conference': conference,
                'books': books,
                'projects': projects,
                'form':form,
                'consultancy':consultancy,
                'patents':patents,
                'techtransfers':techtransfers,
                'mtechs':mtechs,
                'phds':phds,
                'fvisits':fvisits,
                'ivisits': ivisits,
                'consymps':consymps,
                'awards':awards,
                'talks':talks,
                'chairs':chairs,
                'keynotes':keynotes,
                'events':events,
                'year_range':y,
                'pers':pers
               }
    return render(request, 'eisModulenew/profile.html', context)
# Dean RSPC Profile
def rspc_profile(request):
    """Dean (RSPC) profile view: institute-wide EIS listings.

    Unlike `profile`, the querysets here are *not* filtered by pf number —
    the Dean sees every record of every category, newest first.
    """
    user = get_object_or_404(ExtraInfo, user=request.user)
    pf = user.id
    form = ConfrenceForm()
    journal = emp_research_papers.objects.filter(rtype='Journal').order_by('-year', '-a_month')
    conference = emp_research_papers.objects.filter(rtype='Conference').order_by('-year', '-a_month')
    books = emp_published_books.objects.all().order_by('-pyear', '-a_month')
    projects = emp_research_projects.objects.all().order_by('-start_date')
    consultancy = emp_consultancy_projects.objects.all().order_by('-start_date')
    patents = emp_patents.objects.all().order_by('-p_year', '-a_month')
    techtransfers = emp_techtransfer.objects.all().order_by('-date_entry')
    # degree_type 1 = M.Tech theses, 2 = PhD theses.
    mtechs = emp_mtechphd_thesis.objects.filter(degree_type=1).order_by('-s_year', '-a_month')
    phds = emp_mtechphd_thesis.objects.filter(degree_type=2).order_by('-s_year', '-a_month')
    # v_type 2 = foreign visits, 1 = Indian visits.
    fvisits = emp_visits.objects.filter(v_type=2).order_by('-start_date')
    ivisits = emp_visits.objects.filter(v_type=1).order_by('-start_date')
    # Expand the 2-letter country code for display (module-level `countries` map).
    for fvisit in fvisits:
        fvisit.countryfull = countries[fvisit.country]
    consymps = emp_confrence_organised.objects.all().order_by('-start_date')
    awards = emp_achievement.objects.all().order_by('-a_year', '-a_month')
    talks = emp_expert_lectures.objects.all().order_by('-l_year', '-a_month')
    chairs = emp_session_chair.objects.all().order_by('-start_date')
    keynotes = emp_keynote_address.objects.all().order_by('-start_date')
    events = emp_event_organized.objects.all().order_by('-start_date')
    # Year choices for the template's dropdowns: 1995 .. current year.
    y=[]
    for r in range(1995, (datetime.datetime.now().year + 1)):
        y.append(r)
    # NOTE(review): unlike `profile`, a missing faculty_about row here
    # raises Http404 — confirm the Dean always has one.
    pers = get_object_or_404(faculty_about, user = request.user)
    design = HoldsDesignation.objects.filter(working=request.user)
    desig=[]
    for i in design:
        desig.append(str(i.designation))
    context = {'user': user,
                'desig':desig,
                'pf':pf,
                'journal':journal,
                'conference': conference,
                'books': books,
                'projects': projects,
                'form':form,
                'consultancy':consultancy,
                'patents':patents,
                'techtransfers':techtransfers,
                'mtechs':mtechs,
                'phds':phds,
                'fvisits':fvisits,
                'ivisits': ivisits,
                'consymps':consymps,
                'awards':awards,
                'talks':talks,
                'chairs':chairs,
                'keynotes':keynotes,
                'events':events,
                'year_range':y,
                'pers':pers
               }
    return render(request, 'eisModulenew/rspc_profile.html', context)
# View for editing personal information
def persinfo(request):
    """AJAX endpoint updating the logged-in faculty member's personal info.

    Expects POST fields 'contact', 'about', 'interest', 'education',
    'linkedin' and 'github'. Always answers with a small JSON status
    message; the original view returned None (a 500) on non-POST requests.
    """
    if request.method != 'POST':
        return JsonResponse({'x' : 'You are not authorized to update '})
    try:
        faculty = get_object_or_404(faculty_about, user = request.user)
        contact = request.POST['contact']
        # Keep original behaviour: drop the first 6 characters of the
        # submitted value (presumably a dial-code prefix — TODO confirm).
        contact = contact[6:]
        faculty.contact = contact
        faculty.about = request.POST['about']
        faculty.interest = request.POST['interest']
        faculty.education = request.POST['education']
        faculty.linkedin = request.POST['linkedin']
        faculty.github = request.POST['github']
        faculty.save()
        return JsonResponse({'x' : 'Your data is updated '})
    except Exception:
        # Missing faculty_about row or absent POST key: report failure as
        # JSON rather than crashing (was a bare `except:`).
        return JsonResponse({'x' : 'You are not authorized to update '})
# Views for deleting the EIS fields
def achievementDelete(request, pk):
    """Delete the achievement with primary key *pk*, then reopen page14."""
    emp_achievement.objects.get(pk=pk).delete()
    return redirect('/profile/?page14=1')
def emp_confrence_organisedDelete(request, pk):
    """Delete one conference-organised record and return to the profile."""
    emp_confrence_organised.objects.get(pk=pk).delete()
    return redirect('globals:profile')
def emp_consymDelete(request, pk, sr, mark):
    """Delete a conference/symposium-organised entry and go back to the
    page of the profile tab (page13) that listed it.

    *sr* is the entry's serial number in the listing (10 per page);
    *mark* identifies the tab and is expected to be '13'.
    """
    instance = emp_confrence_organised.objects.get(pk=pk)
    page = int(sr)//10 + 1
    if mark == '13':
        url = '/profile/?page13='+str(page)
    else:
        # Defensive fallback: the original redirected to an empty URL for
        # any other marker value.
        url = '/profile/?page13=1'
    instance.delete()
    return redirect(url)
def emp_consultancy_projectsDelete(request, pk, sr, mark):
    """Delete a consultancy-project entry and return to its page (10/page)."""
    target_page = int(sr) // 10 + 1
    emp_consultancy_projects.objects.get(pk=pk).delete()
    return redirect('/profile/?page5=' + str(target_page))
def emp_event_organizedDelete(request, pk, sr, mark):
    """Delete an organised-event entry and go back to the page of the
    events tab (page12) that listed it. *mark* is expected to be '12'."""
    instance = emp_event_organized.objects.get(pk=pk)
    page = int(sr)//10 + 1
    if mark == '12':
        url = '/profile/?page12='+str(page)
    else:
        # Defensive fallback: the original redirected to an empty URL for
        # any other marker value.
        url = '/profile/?page12=1'
    instance.delete()
    return redirect(url)
def emp_expert_lecturesDelete(request, pk):
    """Delete one expert-lecture record, then reopen the talks tab (page15)."""
    emp_expert_lectures.objects.get(pk=pk).delete()
    return redirect('/profile/?page15=1')
def emp_keynote_addressDelete(request, pk):
    """Delete a keynote-address record and return to the EIS profile."""
    emp_keynote_address.objects.get(pk=pk).delete()
    return redirect('eis:profile')
def emp_mtechphd_thesisDelete(request, pk, sr, mark):
    """Delete an M.Tech/PhD thesis record and return to the tab that held
    it: page8 for M.Tech (mark '1'), page9 for PhD (anything else)."""
    instance = emp_mtechphd_thesis.objects.get(pk=pk)
    page = int(sr)//10 + 1
    # BUG FIX: `mark` arrives from the URL as a string, so the original
    # `mark == 1` was always False and every deletion redirected to the
    # PhD tab. Compare as a string, like the sibling delete views do.
    if str(mark) == '1':
        url = '/profile/?page8='+str(page)
    else :
        url = '/profile/?page9='+str(page)
    instance.delete()
    return redirect(url)
def emp_patentsDelete(request, pk, sr, mark):
    """Delete a patent record and return to its page of the patents tab."""
    target_page = int(sr) // 10 + 1
    emp_patents.objects.get(pk=pk).delete()
    return redirect('/profile/?page6=' + str(target_page))
def emp_published_booksDelete(request, pk, sr, mark):
    """Delete a published-book record and return to its page of the books tab."""
    record = emp_published_books.objects.get(pk=pk)
    target = '/profile/?page2=' + str(int(sr) // 10 + 1)
    print("-------------" + target)
    record.delete()
    return redirect(target)
def emp_research_papersDelete(request, pk, sr, mark):
    """Delete a research paper and return to the tab that listed it:
    journals (mark '1' -> page) or conferences (mark '2' -> page3)."""
    instance = emp_research_papers.objects.get(pk=pk)
    page = int(sr)//10 + 1
    if mark == '1':
        url = '/profile/?page='+str(page)
    elif mark == '2':
        url = '/profile/?page3='+str(page)
    else:
        # Defensive fallback: the original redirected to an empty URL for
        # any other marker value.
        url = '/profile/'
    instance.delete()
    return redirect(url)
def emp_research_projectsDelete(request, pk, sr, mark):
    """Delete a research-project record and return to its page (page4 tab)."""
    target_page = int(sr) // 10 + 1
    emp_research_projects.objects.get(pk=pk).delete()
    return redirect('/profile/?page4=' + str(target_page))
def emp_session_chairDelete(request, pk):
    """Delete a session-chair record and return to the EIS profile."""
    emp_session_chair.objects.get(pk=pk).delete()
    return redirect('eis:profile')
def emp_techtransferDelete(request, pk, sr, mark):
    """Delete a technology-transfer record and return to its page (page7 tab)."""
    target_page = int(sr) // 10 + 1
    emp_techtransfer.objects.get(pk=pk).delete()
    return redirect('/profile/?page7=' + str(target_page))
def emp_visitsDelete(request, pk, sr, mark):
    """Delete a visit record and return to the tab that listed it:
    foreign visits (mark '10' -> page10) or Indian visits ('11' -> page11)."""
    instance = emp_visits.objects.get(pk=pk)
    page = int(sr)//10 + 1
    if mark == '10':
        url = '/profile/?page10='+str(page)
    elif mark == '11':
        url = '/profile/?page11='+str(page)
    else:
        # Defensive fallback: the original redirected to an empty URL for
        # any other marker value.
        url = '/profile/'
    instance.delete()
    return redirect(url)
# Views for inserting fields in EIS
def pg_insert(request):
    """Create or update an M.Tech thesis-supervision record for the
    logged-in faculty member, then reopen the M.Tech tab (page8).

    An empty/missing 'pg_id' POST field means "create new"; otherwise the
    existing record is edited in place.
    """
    user = get_object_or_404(ExtraInfo, user=request.user)
    pf = user.id
    if (request.POST.get('pg_id')==None or request.POST.get('pg_id')==""):
        eis = emp_mtechphd_thesis()
    else:
        eis = get_object_or_404(emp_mtechphd_thesis, id=request.POST.get('pg_id'))
    eis.pf_no = pf
    # BUG FIX: set the degree type explicitly. `phd_insert` sets
    # degree_type=2 and the profile view lists M.Tech rows with
    # degree_type=1, so relying on the model default here was fragile.
    eis.degree_type = 1
    eis.title = request.POST.get('title')
    eis.s_year = request.POST.get('s_year')
    eis.a_month = request.POST.get('month')
    eis.supervisors = request.POST.get('sup')
    eis.rollno = request.POST.get('roll')
    eis.s_name = request.POST.get('name')
    eis.save()
    return redirect('/profile/?page8=1')
def phd_insert(request):
    """Create or update a PhD thesis-supervision record (degree_type=2)
    for the logged-in faculty member, then reopen the PhD tab (page9)."""
    extra = get_object_or_404(ExtraInfo, user=request.user)
    record_id = request.POST.get('phd_id')
    # An empty/missing 'phd_id' means "create new"; otherwise edit in place.
    if record_id:
        record = get_object_or_404(emp_mtechphd_thesis, id=record_id)
    else:
        record = emp_mtechphd_thesis()
    record.pf_no = extra.id
    record.degree_type = 2
    record.title = request.POST.get('title')
    record.s_year = request.POST.get('s_year')
    record.a_month = request.POST.get('month')
    record.supervisors = request.POST.get('sup')
    record.rollno = request.POST.get('roll')
    record.s_name = request.POST.get('name')
    record.save()
    return redirect('/profile/?page9=1')
def fvisit_insert(request):
    """Create or update a foreign-visit record (v_type=2) for the
    logged-in faculty member, then reopen the foreign-visits tab (page10).

    Dates arrive either as "January 2, 2019" or Django's short form
    "Jan. 2, 2019"; the long format is tried first.
    """
    user = get_object_or_404(ExtraInfo, user=request.user)
    pf = user.id
    if (request.POST.get('fvisit_id')==None or request.POST.get('fvisit_id')==""):
        eis = emp_visits()
    else:
        eis = get_object_or_404(emp_visits, id=request.POST.get('fvisit_id'))
    eis.pf_no = pf
    eis.v_type = 2
    # Country codes are stored upper-case to match the `countries` map;
    # guard against a missing field (original crashed on None.upper()).
    eis.country = (request.POST.get('country') or '').upper()
    eis.place = request.POST.get('place')
    eis.purpose = request.POST.get('purpose')
    # Narrowed from bare `except:` to the strptime failure modes.
    try:
        eis.start_date = datetime.datetime.strptime(request.POST.get('start_date'), "%B %d, %Y")
    except (TypeError, ValueError):
        eis.start_date = datetime.datetime.strptime(request.POST.get('start_date'), "%b. %d, %Y")
    try:
        eis.end_date = datetime.datetime.strptime(request.POST.get('end_date'), "%B %d, %Y")
    except (TypeError, ValueError):
        eis.end_date = datetime.datetime.strptime(request.POST.get('end_date'), "%b. %d, %Y")
    eis.save()
    return redirect('/profile/?page10=1')
def ivisit_insert(request):
    """Create or update an Indian-visit record (v_type=1) for the
    logged-in faculty member, then reopen the Indian-visits tab (page11).

    Dates arrive either as "January 2, 2019" or Django's short form
    "Jan. 2, 2019"; the long format is tried first.
    """
    user = get_object_or_404(ExtraInfo, user=request.user)
    pf = user.id
    if (request.POST.get('ivisit_id')==None or request.POST.get('ivisit_id')==""):
        eis = emp_visits()
    else:
        eis = get_object_or_404(emp_visits, id=request.POST.get('ivisit_id'))
    eis.pf_no = pf
    eis.v_type = 1
    eis.country = request.POST.get('country2')
    eis.place = request.POST.get('place2')
    eis.purpose = request.POST.get('purpose2')
    # Narrowed from bare `except:`; the debug print was removed.
    try:
        eis.start_date = datetime.datetime.strptime(request.POST.get('start_date2'), "%B %d, %Y")
    except (TypeError, ValueError):
        eis.start_date = datetime.datetime.strptime(request.POST.get('start_date2'), "%b. %d, %Y")
    try:
        eis.end_date = datetime.datetime.strptime(request.POST.get('end_date2'), "%B %d, %Y")
    except (TypeError, ValueError):
        eis.end_date = datetime.datetime.strptime(request.POST.get('end_date2'), "%b. %d, %Y")
    eis.save()
    return redirect('/profile/?page11=1')
#Function to save journal of employee
def journal_insert(request):
    """Create a new Journal research-paper record from the profile form.

    Reads the paper metadata from POST (plus an optional uploaded file
    under FILES['journal']) and redirects back to the journals tab.
    Date fields may be absent, empty, or the literal string 'None';
    accepted formats are "January 2, 2019" and "Jan. 2, 2019".
    """
    user = get_object_or_404(ExtraInfo, user=request.user)
    # BUG FIX: build the instance in memory and save once at the end.
    # The original used objects.create() followed by save(), writing the
    # row twice and leaving an empty row behind if a later line failed.
    eis = emp_research_papers(pf_no=user.id)
    eis.rtype = 'Journal'
    eis.authors = request.POST.get('authors')
    eis.title_paper = request.POST.get('title')
    try:
        myfile = request.FILES['journal']
        fs = FileSystemStorage()
        filename = fs.save(myfile.name, myfile)
        eis.paper = fs.url(filename)
    except KeyError:
        # No file uploaded.
        eis.paper = None
    eis.co_authors = request.POST.get('co_author')
    eis.name = request.POST.get('name')
    eis.doc_id = request.POST.get('doc_id')
    eis.doc_description = request.POST.get('doc_description')
    eis.status = request.POST.get('status')
    eis.reference_number = request.POST.get('ref')
    eis.is_sci = request.POST.get('sci')
    volume_no = request.POST.get('volume')
    page_no = request.POST.get('page')
    year = request.POST.get('year')
    # Empty strings would fail model validation; only set when provided.
    if volume_no != '':
        eis.volume_no = volume_no
    if page_no != '':
        eis.page_no = page_no
    if year != '':
        eis.year = year
    if (request.POST.get('doi') != None and request.POST.get('doi') != '' and request.POST.get('doi') != 'None'):
        try:
            eis.doi = datetime.datetime.strptime(request.POST.get('doi'), "%B %d, %Y")
        except ValueError:
            try:
                eis.doi = datetime.datetime.strptime(request.POST.get('doi'), "%b. %d, %Y")
            except ValueError:
                # Not a date at all — keep the raw string (e.g. a real DOI).
                eis.doi = request.POST.get('doi')
    if (request.POST.get('doa') != None and request.POST.get('doa') != '' and request.POST.get('doa') != 'None'):
        try:
            eis.date_acceptance = datetime.datetime.strptime(request.POST.get('doa'), "%B %d, %Y")
        except ValueError:
            eis.date_acceptance = datetime.datetime.strptime(request.POST.get('doa'), "%b. %d, %Y")
    if (request.POST.get('dop') != None and request.POST.get('dop') != '' and request.POST.get('dop') != 'None'):
        try:
            eis.date_publication = datetime.datetime.strptime(request.POST.get('dop'), "%B %d, %Y")
        except ValueError:
            eis.date_publication = datetime.datetime.strptime(request.POST.get('dop'), "%b. %d, %Y")
    if (request.POST.get('dos') != None and request.POST.get('dos') != '' and request.POST.get('dos') != 'None'):
        try:
            eis.date_submission = datetime.datetime.strptime(request.POST.get('dos'), "%B %d, %Y")
        except ValueError:
            eis.date_submission = datetime.datetime.strptime(request.POST.get('dos'), "%b. %d, %Y")
    eis.save()
    return redirect('/profile/?page=1')
def editjournal(request):
    """Update an existing Journal research-paper record from the edit form
    and redirect back to the page of the journals tab that listed it.

    Date fields may carry a ", midnight" suffix and Django's "Sept."
    month abbreviation; both are normalised before parsing.
    """
    eis = emp_research_papers.objects.get(pk=request.POST.get('journalpk'))
    eis.authors = request.POST.get('authors')
    eis.title_paper = request.POST.get('title')
    try:
        myfile = request.FILES['journal']
        fs = FileSystemStorage()
        filename = fs.save(myfile.name, myfile)
        eis.paper = fs.url(filename)
    except KeyError:
        # No replacement file uploaded; keep the existing one.
        pass
    eis.co_authors = request.POST.get('co_author')
    eis.name = request.POST.get('name')
    eis.doc_id = request.POST.get('doc_id')
    eis.doc_description = request.POST.get('doc_description')
    eis.status = request.POST.get('status')
    eis.reference_number = request.POST.get('ref')
    eis.is_sci = request.POST.get('sci')
    volume_no = request.POST.get('volume')
    eis.page_no = request.POST.get('page')
    eis.year = request.POST.get('year')
    if(request.POST.get('doi') != None and request.POST.get('doi') != '' and request.POST.get('doi') != 'None'):
        x = request.POST.get('doi')
        # strptime's %b does not accept "Sept."; normalise to "Sep.".
        if x[:5] == "Sept." :
            x = "Sep." + x[5:]
        try:
            eis.doi = datetime.datetime.strptime(x, "%B %d, %Y")
        except ValueError:
            try:
                eis.doi = datetime.datetime.strptime(x, "%b. %d, %Y")
            except ValueError:
                # Not a date — keep the raw string (e.g. a real DOI).
                eis.doi = x
    if (request.POST.get('doa') != None and request.POST.get('doa') != '' and request.POST.get('doa') != 'None'):
        x = request.POST.get('doa')
        # BUG FIX: the original tested `x[:-10]`, so the ", midnight"
        # suffix was never stripped; test the *last* 10 characters, as the
        # 'dos' branch below already did.
        if x[-10:] == ', midnight':
            x = x[0:-10]
        if x[:5] == "Sept." :
            x = "Sep." + x[5:]
        try:
            eis.date_acceptance = datetime.datetime.strptime(x, "%B %d, %Y")
        except ValueError:
            eis.date_acceptance = datetime.datetime.strptime(x, "%b. %d, %Y")
    if (request.POST.get('dop') != None and request.POST.get('dop') != '' and request.POST.get('dop') != 'None'):
        x = request.POST.get('dop')
        # BUG FIX: same wrong slice as 'doa' above.
        if x[-10:] == ', midnight':
            x = x[0:-10]
        if x[:5] == "Sept." :
            x = "Sep." + x[5:]
        try:
            eis.date_publication = datetime.datetime.strptime(x, "%B %d, %Y")
        except ValueError:
            eis.date_publication = datetime.datetime.strptime(x, "%b. %d, %Y")
    if (request.POST.get('dos') != None and request.POST.get('dos') != '' and request.POST.get('dos') != 'None'):
        x = request.POST.get('dos')
        if x[-10:] == ', midnight':
            x = x[0:-10]
        if x[:5] == "Sept." :
            x = "Sep." + x[5:]
        try:
            eis.date_submission = datetime.datetime.strptime(x, "%B %d, %Y")
        except ValueError:
            eis.date_submission = datetime.datetime.strptime(x, "%b. %d, %Y")
    eis.save()
    # Return to the listing page that contained this entry (10 rows/page).
    page = int(request.POST.get('index'))//10 + 1
    url = "/profile/?page="+str(page)
    return redirect(url)
def editforeignvisit(request):
    """Update an existing foreign-visit record from the edit form, then
    return to the page of the foreign-visits tab (page10) that listed it.

    Dates are accepted as "January 2, 2019" or "Jan. 2, 2019"; the
    "Sept." abbreviation is normalised because strptime's %b expects "Sep.".
    """
    print("its coming here")
    eis = emp_visits.objects.get(pk=request.POST.get('foreignvisitpk'))
    eis.country = request.POST.get('country')
    eis.place = request.POST.get('place')
    eis.purpose = request.POST.get('purpose')
    x = request.POST.get('start_date')
    if x[:5] == "Sept." :
        x = "Sep." + x[5:]
    print(x,"/////////////")
    # Long month-name format first, abbreviated fallback second.
    try:
        eis.start_date = datetime.datetime.strptime(x, "%B %d, %Y")
    except:
        eis.start_date = datetime.datetime.strptime(x, "%b. %d, %Y")
    x = request.POST.get('end_date')
    if x[:5] == "Sept." :
        x = "Sep." + x[5:]
    try:
        eis.end_date = datetime.datetime.strptime(x, "%B %d, %Y")
    except:
        eis.end_date = datetime.datetime.strptime(x, "%b. %d, %Y")
    eis.save()
    # Return to the listing page that contained this entry (10 rows/page).
    page = int(request.POST.get('index10'))//10
    page = page+1
    url = "/profile/?page10="+str(page)
    print(url)
    return redirect(url)
def editindianvisit(request):
    """Update an existing Indian-visit record from the edit form, then
    return to the page of the Indian-visits tab (page11) that listed it.

    Dates are accepted as "January 2, 2019" or "Jan. 2, 2019"; the
    "Sept." abbreviation is normalised because strptime's %b expects "Sep.".
    """
    print("its coming here")
    eis = emp_visits.objects.get(pk=request.POST.get('indianvisitpk'))
    eis.country = request.POST.get('country2')
    eis.place = request.POST.get('place2')
    eis.purpose = request.POST.get('purpose2')
    x = request.POST.get('start_date2')
    if x[:5] == "Sept." :
        x = "Sep." + x[5:]
    # Long month-name format first, abbreviated fallback second.
    try:
        eis.start_date = datetime.datetime.strptime(x, "%B %d, %Y")
    except:
        eis.start_date = datetime.datetime.strptime(x, "%b. %d, %Y")
    x = request.POST.get('end_date2')
    if x[:5] == "Sept." :
        x = "Sep." + x[5:]
    try:
        eis.end_date = datetime.datetime.strptime(x, "%B %d, %Y")
    except:
        eis.end_date = datetime.datetime.strptime(x, "%b. %d, %Y")
    eis.save()
    # Return to the listing page that contained this entry (10 rows/page).
    page = int(request.POST.get('index11'))//10
    page = page+1
    url = "/profile/?page11="+str(page)
    print(url)
    return redirect(url)
def conference_insert(request):
    """Create a new Conference research-paper record from the profile form
    and redirect back to the conferences tab (page3).

    Date fields may be absent/empty/'None', may carry a ", midnight"
    suffix and may use Django's "Sept." month abbreviation.
    """
    user = get_object_or_404(ExtraInfo, user=request.user)
    pf = user.id
    eis = emp_research_papers()
    eis.pf_no = pf
    eis.rtype = 'Conference'
    eis.authors = request.POST.get('authors3')
    eis.co_authors = request.POST.get('co_author3')
    eis.title_paper = request.POST.get('title3')
    try:
        myfile = request.FILES['journal3']
        fs = FileSystemStorage()
        filename = fs.save(myfile.name, myfile)
        eis.paper = fs.url(filename)
    except KeyError:
        # No file uploaded.
        print("nothing")
    eis.name = request.POST.get('name3')
    eis.venue = request.POST.get('venue3')
    # Empty strings would fail model validation; only set when provided.
    if request.POST.get('page_no3') != '':
        eis.page_no = request.POST.get('page_no3')
    if request.POST.get('isbn_no3') != '':
        eis.isbn_no = request.POST.get('isbn_no3')
    if request.POST.get('year3') != '':
        eis.year = request.POST.get('year3')
    eis.status = request.POST.get('status3')
    if(request.POST.get('doi3') != None and request.POST.get('doi3') != '' and request.POST.get('doi3') != 'None'):
        x = request.POST.get('doi3')
        # BUG FIX: the original tested `x[:-10]`, so the ", midnight"
        # suffix was never stripped; test the *last* 10 characters.
        if x[-10:] == ', midnight':
            x = x[0:-10]
        if x[:5] == "Sept." :
            x = "Sep." + x[5:]
        try:
            eis.doi = datetime.datetime.strptime(x, "%B %d, %Y")
        except ValueError:
            try:
                eis.doi = datetime.datetime.strptime(x, "%b. %d, %Y")
            except ValueError:
                # Not a date — keep the raw string (e.g. a real DOI).
                eis.doi = x
    if (request.POST.get('doa3') != None and request.POST.get('doa3') != '' and request.POST.get('doa3') != 'None'):
        x = request.POST.get('doa3')
        # BUG FIX: same wrong slice as 'doi3' above.
        if x[-10:] == ', midnight':
            x = x[0:-10]
        if x[:5] == "Sept." :
            x = "Sep." + x[5:]
        try:
            eis.date_acceptance = datetime.datetime.strptime(x, "%B %d, %Y")
        except ValueError:
            eis.date_acceptance = datetime.datetime.strptime(x, "%b. %d, %Y")
    if (request.POST.get('dop3') != None and request.POST.get('dop3') != '' and request.POST.get('dop3') != 'None'):
        x = request.POST.get('dop3')
        # BUG FIX: same wrong slice as 'doi3' above.
        if x[-10:] == ', midnight':
            x = x[0:-10]
        if x[:5] == "Sept." :
            x = "Sep." + x[5:]
        try:
            eis.date_publication = datetime.datetime.strptime(x, "%B %d, %Y")
        except ValueError:
            eis.date_publication = datetime.datetime.strptime(x, "%b. %d, %Y")
    if (request.POST.get('dos3') != None and request.POST.get('dos3') != '' and request.POST.get('dos3') != 'None'):
        x = request.POST.get('dos3')
        if x[:5] == "Sept." :
            x = "Sep." + x[5:]
        try:
            eis.date_submission = datetime.datetime.strptime(x, "%B %d, %Y")
        except ValueError:
            eis.date_submission = datetime.datetime.strptime(x, "%b. %d, %Y")
    eis.save()
    return redirect('/profile/?page3=1')
def editconference(request):
    """Update an existing Conference research-paper record from the edit
    form, then return to the page of the conferences tab that listed it."""
    eis = emp_research_papers.objects.get(pk=request.POST.get('conferencepk'))
    eis.authors = request.POST.get('authors3')
    eis.co_authors = request.POST.get('co_author3')
    eis.title_paper = request.POST.get('title3')
    try:
        myfile = request.FILES['journal3']
        fs = FileSystemStorage()
        filename = fs.save(myfile.name, myfile)
        eis.paper = fs.url(filename)
    except KeyError:
        # No replacement file uploaded; keep the existing one.
        print("nothing")
    eis.name = request.POST.get('name3')
    eis.venue = request.POST.get('venue3')
    # BUG FIX: the ISBN was read into a local and never written to the
    # record, so editing a conference entry silently dropped the change.
    eis.isbn_no = request.POST.get('isbn_no3')
    eis.page_no = request.POST.get('page_no3')
    eis.year = request.POST.get('year3')
    eis.status = request.POST.get('status3')
    if(request.POST.get('doi3') != None and request.POST.get('doi3') != '' and request.POST.get('doi3') != 'None'):
        x = request.POST.get('doi3')
        # strptime's %b does not accept "Sept."; normalise to "Sep.".
        if x[:5] == "Sept." :
            x = "Sep." + x[5:]
        try:
            eis.doi = datetime.datetime.strptime(x, "%B %d, %Y")
        except ValueError:
            try:
                eis.doi = datetime.datetime.strptime(x, "%b. %d, %Y")
            except ValueError:
                # Not a date — keep the raw string (e.g. a real DOI).
                eis.doi = x
    if (request.POST.get('doa3') != None and request.POST.get('doa3') != '' and request.POST.get('doa3') != 'None'):
        x = request.POST.get('doa3')
        if x[:5] == "Sept." :
            x = "Sep." + x[5:]
        try:
            eis.date_acceptance = datetime.datetime.strptime(x, "%B %d, %Y")
        except ValueError:
            eis.date_acceptance = datetime.datetime.strptime(x, "%b. %d, %Y")
    if (request.POST.get('dop3') != None and request.POST.get('dop3') != '' and request.POST.get('dop3') != 'None'):
        x = request.POST.get('dop3')
        if x[:5] == "Sept." :
            x = "Sep." + x[5:]
        try:
            eis.date_publication = datetime.datetime.strptime(x, "%B %d, %Y")
        except ValueError:
            eis.date_publication = datetime.datetime.strptime(x, "%b. %d, %Y")
    if (request.POST.get('dos3') != None and request.POST.get('dos3') != '' and request.POST.get('dos3') != 'None'):
        x = request.POST.get('dos3')
        # The submission date may carry a ", midnight" time suffix.
        if x[-10:] == ', midnight':
            x = x[0:-10]
        if x[:5] == "Sept." :
            x = "Sep." + x[5:]
        try:
            eis.date_submission = datetime.datetime.strptime(x, "%B %d, %Y")
        except ValueError:
            eis.date_submission = datetime.datetime.strptime(x, "%b. %d, %Y")
    eis.save()
    # Return to the listing page that contained this entry (10 rows/page).
    page = int(request.POST.get('index3'))//10 + 1
    url = "/profile/?page3="+str(page)
    return redirect(url)
def book_insert(request):
    """Record a newly published book for the logged-in faculty member and
    reopen the books tab (page2)."""
    extra = get_object_or_404(ExtraInfo, user=request.user)
    record = emp_published_books()
    record.pf_no = extra.id
    record.p_type = request.POST.get('book_p_type')
    record.title = request.POST.get('book_title')
    record.publisher = request.POST.get('book_publisher')
    record.pyear = request.POST.get('book_year')
    record.authors = request.POST.get('book_author')
    record.save()
    return redirect('/profile/?page2=1')
def editbooks(request):
    """Update an existing published-book record from the edit form, then
    return to the page of the books tab (page2) that listed it."""
    eis = emp_published_books.objects.get(pk=request.POST.get('bookspk2'))
    eis.p_type = request.POST.get('book_p_type')
    eis.title = request.POST.get('book_title')
    eis.publisher = request.POST.get('book_publisher')
    # BUG FIX: the year was read from the non-existent POST key 'book_',
    # so editing always blanked the publication year. The insert form
    # (`book_insert`) sends it as 'book_year'.
    eis.pyear = request.POST.get('book_year')
    eis.authors = request.POST.get('book_author')
    eis.save()
    # Return to the listing page that contained this entry (10 rows/page).
    page = int(request.POST.get('index15'))//10 + 1
    url = "/profile/?page2="+str(page)
    return redirect(url)
def consym_insert(request):
    """Create a conference/symposium-organised record for the logged-in
    faculty member, then reopen the page13 tab.

    role1 is the main role; when it is 'Any Other' or 'Organised' a
    secondary role (role2) is taken from further form fields.
    """
    user = get_object_or_404(ExtraInfo, user=request.user)
    pf = user.id
    eis = emp_confrence_organised()
    eis.pf_no = pf
    eis.name = request.POST.get('conference_name')
    eis.venue = request.POST.get('conference_venue')
    eis.role1 = request.POST.get('conference_role')
    # 'Any Other' keeps the free-text secondary role as-is.
    if(eis.role1 == 'Any Other'):
        eis.role2 = request.POST.get('conference_organised')
    # 'Organised' may itself carry an 'Any Other' sub-option whose free
    # text lives in the 'myDIV1' field.
    if(eis.role1 == 'Organised'):
        if(request.POST.get('conference_organised') == 'Any Other'):
            eis.role2 = request.POST.get('myDIV1')
        else:
            eis.role2 = request.POST.get('conference_organised')
    # Default both roles when the form sent nothing.
    if (eis.role1 == "" or eis.role1==None):
        eis.role1 = "Any Other"
        eis.role2 = "Any Other"
    x = request.POST.get('conference_start_date')
    # strptime's %b does not accept "Sept."; normalise to "Sep.".
    if x[:5] == "Sept." :
        x = "Sep." + x[5:]
    try:
        eis.start_date = datetime.datetime.strptime(x, "%B %d, %Y")
    except:
        eis.start_date = datetime.datetime.strptime(x, "%b. %d, %Y")
    x = request.POST.get('conference_end_date')
    if x[:5] == "Sept." :
        x = "Sep." + x[5:]
    try:
        eis.end_date = datetime.datetime.strptime(x, "%B %d, %Y")
    except:
        eis.end_date = datetime.datetime.strptime(x, "%b. %d, %Y")
    eis.save()
    return redirect('/profile/?page13=1')
def editconsym(request):
    """Update a conference/symposium-organised record, then return to the
    page of the page13 tab that listed it."""
    eis = emp_confrence_organised.objects.get(pk=request.POST.get('conferencepk2'))
    eis.name = request.POST.get('conference_name')
    eis.venue = request.POST.get('conference_venue')
    eis.role1 = request.POST.get('conference_role')
    # 'Any Other' keeps the free-text secondary role; 'Organised' may
    # itself carry an 'Any Other' sub-option whose text lives in 'myDIV1'.
    if(eis.role1 == 'Any Other'):
        eis.role2 = request.POST.get('conference_organised')
    if(eis.role1 == 'Organised'):
        if(request.POST.get('conference_organised') == 'Any Other'):
            eis.role2 = request.POST.get('myDIV1')
        else:
            eis.role2 = request.POST.get('conference_organised')
    if (eis.role1 == "" or eis.role1==None):
        eis.role1 = "Any Other"
        eis.role2 = "Any Other"
    x = request.POST.get('conference_start_date')
    # strptime's %b does not accept "Sept."; normalise to "Sep.".
    if x[:5] == "Sept." :
        x = "Sep." + x[5:]
    try:
        eis.start_date = datetime.datetime.strptime(x, "%B %d, %Y")
    except (TypeError, ValueError):
        eis.start_date = datetime.datetime.strptime(x, "%b. %d, %Y")
    x = request.POST.get('conference_end_date')
    if x[:5] == "Sept." :
        x = "Sep." + x[5:]
    try:
        eis.end_date = datetime.datetime.strptime(x, "%B %d, %Y")
    except (TypeError, ValueError):
        eis.end_date = datetime.datetime.strptime(x, "%b. %d, %Y")
    eis.save()
    page = int(request.POST.get('index13'))//10 + 1
    # BUG FIX: the page-aware URL was computed but the original always
    # redirected to page 1; honour the computed page like the other
    # edit views do.
    url = "/profile/?page13="+str(page)
    return redirect(url)
def event_insert(request):
    """Create or update an organised-event record for the logged-in
    faculty member, then reopen the events tab (page12)."""
    user = get_object_or_404(ExtraInfo, user=request.user)
    pf = user.id
    if (request.POST.get('event_id')==None or request.POST.get('event_id')==""):
        eis = emp_event_organized()
    else:
        eis = get_object_or_404(emp_event_organized, id=request.POST.get('event_id'))
    eis.pf_no = pf
    eis.type = request.POST.get('event_type')
    if(eis.type == 'Any Other'):
        # BUG FIX: the original tested `myDIV != None or myDIV != ""`,
        # which is always true, so a missing free-text value overwrote the
        # type with None. Only take it when it is actually provided.
        if request.POST.get('myDIV'):
            eis.type = request.POST.get('myDIV')
    eis.sponsoring_agency = request.POST.get('sponsoring_agency')
    eis.name = request.POST.get('event_name')
    eis.venue = request.POST.get('event_venue')
    eis.role = request.POST.get('event_role')
    # Narrowed from bare `except:` to the strptime failure modes.
    try:
        eis.start_date = datetime.datetime.strptime(request.POST.get('event_start_date'), "%B %d, %Y")
    except (TypeError, ValueError):
        eis.start_date = datetime.datetime.strptime(request.POST.get('event_start_date'), "%b. %d, %Y")
    try:
        eis.end_date = datetime.datetime.strptime(request.POST.get('event_end_date'), "%B %d, %Y")
    except (TypeError, ValueError):
        eis.end_date = datetime.datetime.strptime(request.POST.get('event_end_date'), "%b. %d, %Y")
    eis.save()
    return redirect('/profile/?page12=1')
def editevent(request):
    """Update an existing organised-event record, then return to the page
    of the events tab (page12) that listed it."""
    eis = emp_event_organized.objects.get(pk=request.POST.get('eventpk'))
    eis.type = request.POST.get('event_type')
    if(eis.type == 'Any Other'):
        # BUG FIX: the original tested `myDIV != None or myDIV != ""`,
        # which is always true, so a missing free-text value overwrote the
        # type with None. Only take it when it is actually provided.
        if request.POST.get('myDIV'):
            eis.type = request.POST.get('myDIV')
    eis.sponsoring_agency = request.POST.get('sponsoring_agency')
    eis.name = request.POST.get('event_name')
    eis.venue = request.POST.get('event_venue')
    eis.role = request.POST.get('event_role')
    x = request.POST.get('event_start_date')
    # strptime's %b does not accept "Sept."; normalise to "Sep.".
    if x[:5] == "Sept." :
        x = "Sep." + x[5:]
    try:
        eis.start_date = datetime.datetime.strptime(x, "%B %d, %Y")
    except (TypeError, ValueError):
        eis.start_date = datetime.datetime.strptime(x, "%b. %d, %Y")
    x = request.POST.get('event_end_date')
    if x[:5] == "Sept." :
        x = "Sep." + x[5:]
    try:
        eis.end_date = datetime.datetime.strptime(x, "%B %d, %Y")
    except (TypeError, ValueError):
        eis.end_date = datetime.datetime.strptime(x, "%b. %d, %Y")
    eis.save()
    # Return to the listing page that contained this entry (10 rows/page).
    page = int(request.POST.get('index12'))//10 + 1
    url = "/profile/?page12="+str(page)
    return redirect(url)
def award_insert(request):
    """Create or update an achievement/award record for the logged-in
    faculty member, then reopen the awards tab (page14)."""
    extra = get_object_or_404(ExtraInfo, user=request.user)
    ach_id = request.POST.get('ach_id')
    # An empty/missing 'ach_id' means "create new"; otherwise edit in place.
    if ach_id:
        record = get_object_or_404(emp_achievement, id=ach_id)
    else:
        record = emp_achievement()
    record.pf_no = extra.id
    record.a_type = request.POST.get('type')
    # Day/month/year are optional; leave the model defaults when blank.
    for field in ('a_day', 'a_month', 'a_year'):
        value = request.POST.get(field)
        if value:
            setattr(record, field, value)
    record.details = request.POST.get('details')
    record.save()
    return redirect('/profile/?page14=1')
def talk_insert(request):
    """Create or update an expert-lecture record for the logged-in faculty
    member, then reopen the talks tab (page15)."""
    user = get_object_or_404(ExtraInfo, user=request.user)
    pf = user.id
    if (request.POST.get('lec_id')==None or request.POST.get('lec_id')==""):
        eis = emp_expert_lectures()
    else:
        eis = get_object_or_404(emp_expert_lectures, id=request.POST.get('lec_id'))
    eis.pf_no = pf
    eis.l_type = request.POST.get('type')
    eis.place = request.POST.get('place')
    eis.title = request.POST.get('title')
    x = request.POST.get('l_date')
    # strptime's %b does not accept "Sept."; normalise to "Sep.".
    if x[:5] == "Sept." :
        x = "Sep." + x[5:]
    # Narrowed from bare `except:` to the strptime failure modes.
    try:
        eis.l_date = datetime.datetime.strptime(x, "%B %d, %Y")
    except (TypeError, ValueError):
        eis.l_date = datetime.datetime.strptime(x, "%b. %d, %Y")
    eis.save()
    return redirect('/profile/?page15=1')
def chaired_insert(request):
    """Create or update a session-chair record for the logged-in faculty
    member, then return to the EIS profile."""
    user = get_object_or_404(ExtraInfo, user=request.user)
    pf = user.id
    if (request.POST.get('ses_id')==None or request.POST.get('ses_id')==""):
        eis = emp_session_chair()
    else:
        eis = get_object_or_404(emp_session_chair, id=request.POST.get('ses_id'))
    eis.pf_no = pf
    eis.event = request.POST.get('event')
    eis.name = request.POST.get('name')
    eis.s_year = request.POST.get('s_year')
    # Dates come as "January 2, 2019" or "Jan. 2, 2019"; narrowed from
    # bare `except:` to the strptime failure modes.
    try:
        eis.start_date = datetime.datetime.strptime(request.POST.get('start'), "%B %d, %Y")
    except (TypeError, ValueError):
        eis.start_date = datetime.datetime.strptime(request.POST.get('start'), "%b. %d, %Y")
    try:
        eis.end_date = datetime.datetime.strptime(request.POST.get('end'), "%B %d, %Y")
    except (TypeError, ValueError):
        eis.end_date = datetime.datetime.strptime(request.POST.get('end'), "%b. %d, %Y")
    eis.save()
    return redirect('eis:profile')
def keynote_insert(request):
    """Create or update a keynote-address record for the logged-in faculty
    member, then return to the EIS profile."""
    user = get_object_or_404(ExtraInfo, user=request.user)
    pf = user.id
    if (request.POST.get('keyid')==None or request.POST.get('keyid')==""):
        eis = emp_keynote_address()
    else:
        eis = get_object_or_404(emp_keynote_address, id=request.POST.get('keyid'))
    eis.pf_no = pf
    eis.type = request.POST.get('type')
    eis.name = request.POST.get('name')
    eis.title = request.POST.get('title')
    eis.venue = request.POST.get('venue')
    eis.page_no = request.POST.get('page_no')
    eis.isbn_no = request.POST.get('isbn_no')
    eis.k_year = request.POST.get('k_year')
    # Dates come as "January 2, 2019" or "Jan. 2, 2019"; narrowed from
    # bare `except:` to the strptime failure modes.
    try:
        eis.start_date = datetime.datetime.strptime(request.POST.get('start'), "%B %d, %Y")
    except (TypeError, ValueError):
        eis.start_date = datetime.datetime.strptime(request.POST.get('start'), "%b. %d, %Y")
    eis.save()
    return redirect('eis:profile')
def project_insert(request):
user = get_object_or_404(ExtraInfo, user=request.user)
pf = user.id
if (request.POST.get('project_id')==None or request.POST.get('project_id')==""):
eis = emp_research_projects()
else:
eis = get_object_or_404(emp_research_projects, id=request.POST.get('project_id'))
eis.pf_no = pf
eis.pi = request.POST.get('pi')
eis.co_pi = request.POST.get('co_pi')
eis.title = request.POST.get('title')
eis.financial_outlay = request.POST.get('financial_outlay')
eis.funding_agency = request.POST.get('funding_agency')
eis.status = request.POST.get('status')
x = request.POST.get('start')
if x[:5] == "Sept." :
x = "Sep." + x[5:]
if (request.POST.get('start') != None and request.POST.get('start') != '' and request.POST.get('start') != 'None'):
try:
eis.start_date = datetime.datetime.strptime(x, "%B %d, %Y")
except:
eis.start_date = datetime.datetime.strptime(x, "%b. %d, %Y")
x = request.POST.get('end')
if x[:5] == "Sept." :
x = "Sep." + x[5:]
if (request.POST.get('end') != None and request.POST.get('end') != '' and request.POST.get('end') != 'None'):
try:
eis.finish_date = datetime.datetime.strptime(x, "%B %d, %Y")
except:
eis.finish_date = datetime.datetime.strptime(x, "%b. %d, %Y")
x = request.POST.get('sub')
if x[:5] == "Sept." :
x = "Sep." + x[5:]
if (request.POST.get('sub') != None and request.POST.get('sub') != '' and request.POST.get('sub') != 'None'):
try:
eis.date_submission = datetime.datetime.strptime(x, "%B %d, %Y")
except:
eis.date_submission = datetime.datetime.strptime(x, "%b. %d, %Y")
eis.save()
return redirect('/profile/?page4=1')
def consult_insert(request):
    """Create or update an emp_consultancy_projects record from POSTed data.

    An empty/missing ``consultancy_id`` creates a new record; otherwise the
    existing one is fetched (404 if absent).  Dates accept "%B %d, %Y" or
    "%b. %d, %Y", with Django's "Sept." normalised to "Sep." for %b parsing.

    Bug fixed: the original sliced ``x[:5]`` before the ``None`` check, so a
    request without 'start'/'end' raised TypeError instead of skipping the
    field.
    """
    user = get_object_or_404(ExtraInfo, user=request.user)
    pf = user.id
    consultancy_id = request.POST.get('consultancy_id')
    if consultancy_id is None or consultancy_id == "":
        eis = emp_consultancy_projects()
    else:
        eis = get_object_or_404(emp_consultancy_projects, id=consultancy_id)
    eis.pf_no = pf
    eis.consultants = request.POST.get('consultants')
    eis.client = request.POST.get('client')
    eis.title = request.POST.get('title')
    eis.financial_outlay = request.POST.get('financial_outlay')

    def _parse_flex_date(raw):
        # Returns a datetime for "%B %d, %Y" / "%b. %d, %Y" input, or None for
        # missing/blank/'None' values; malformed non-empty input still raises.
        if raw is None or raw == '' or raw == 'None':
            return None
        if raw[:5] == "Sept.":
            raw = "Sep." + raw[5:]
        try:
            return datetime.datetime.strptime(raw, "%B %d, %Y")
        except ValueError:
            return datetime.datetime.strptime(raw, "%b. %d, %Y")

    parsed = _parse_flex_date(request.POST.get('start'))
    if parsed is not None:
        eis.start_date = parsed
    parsed = _parse_flex_date(request.POST.get('end'))
    if parsed is not None:
        eis.end_date = parsed
    eis.save()
    return redirect('/profile/?page5=1')
def patent_insert(request):
    """Create or update an emp_patents record from POSTed form data.

    An empty/missing ``patent_id`` creates a new record; otherwise the
    existing record is fetched (404 if absent) and its fields overwritten.
    """
    user = get_object_or_404(ExtraInfo, user=request.user)
    pf = user.id
    patent_id = request.POST.get('patent_id')
    if patent_id:
        eis = get_object_or_404(emp_patents, id=patent_id)
    else:
        eis = emp_patents()
    eis.pf_no = pf
    # Model attribute <- POST parameter (names differ for year/month).
    field_map = (
        ('p_no', 'p_no'),
        ('earnings', 'earnings'),
        ('title', 'title'),
        ('p_year', 'year'),
        ('status', 'status'),
        ('a_month', 'month'),
    )
    for attr, param in field_map:
        setattr(eis, attr, request.POST.get(param))
    eis.save()
    return redirect('/profile/?page6=1')
def transfer_insert(request):
    """Create or update an emp_techtransfer record from POSTed form data.

    An empty/missing ``tech_id`` creates a new record; otherwise the existing
    record is fetched (404 if absent) and its details replaced.
    """
    user = get_object_or_404(ExtraInfo, user=request.user)
    pf = user.id
    tech_id = request.POST.get('tech_id')
    if tech_id:
        eis = get_object_or_404(emp_techtransfer, id=tech_id)
    else:
        eis = emp_techtransfer()
    eis.pf_no = pf
    eis.details = request.POST.get('details')
    eis.save()
    return redirect('/profile/?page7=1')
def achievements(request):
    """Bulk-import achievement rows from an uploaded CSV file.

    Expects a CSV with columns pf_no, details, a_type (numeric code),
    a_day/a_month/a_year and date_entry.  Import is best-effort: unparsable
    dates are silently skipped (bare except) and every row is saved.
    """
    if request.method == 'POST':
        form = UploadFileForm(request.POST, request.FILES)
        if form.is_valid():
            file = request.FILES['fileUpload']
            decoded_file = file.read().decode('utf-8').splitlines()
            reader = csv.DictReader(decoded_file)
            c=1
            for row in reader:
                e = emp_achievement()
                e.pf_no = row['pf_no']
                e.details = row['details']
                # Numeric a_type codes in the CSV map onto the model's labels.
                if (row['a_type'] == '1'):
                    e.a_type = 'Award'
                elif (row['a_type'] == '2'):
                    e.a_type = 'Honour'
                elif (row['a_type'] == '3'):
                    e.a_type = 'Prize'
                elif (row['a_type'] == '4'):
                    e.a_type = 'Other'
                # Day/month/year are optional; '0' or blank means "not recorded".
                if (row['a_day'] != '0' and row['a_day'] != None and row['a_day'] != ''):
                    e.a_day = int(row['a_day'])
                if (row['a_month'] != '0' and row['a_month'] != None and row['a_month'] != ''):
                    e.a_month = int(row['a_month'])
                if (row['a_year'] != '0' and row['a_year'] != None and row['a_year'] != ''):
                    e.a_year = int(row['a_year'])
                try:
                    # Blank entry dates are skipped; any parse failure falls
                    # through to the bare except (a=1 is a deliberate no-op).
                    if (row['date_entry'] == ' ' or row['date_entry'] == ''):
                        a = 1
                    else:
                        e.date_entry = row['date_entry']
                        e.date_entry=e.date_entry[:10]
                        e.date_entry = datetime.datetime.strptime(e.date_entry, "%Y-%m-%d").date()
                except:
                    a=1
                e.save()
            return HttpResponseRedirect('DONE')
    else:
        form = UploadFileForm()
    return render(request, 'eisModulenew/upload.html', {'form': form})
def confrence(request):
    """Bulk-import conference-organised records from an uploaded CSV file.

    Dates arrive as "YYYY-MM-DD[ HH:MM:SS]"; blanks and MySQL zero-dates
    ('0000-00-00 ...') are skipped, and any parse failure is silently ignored
    by the bare excepts (best-effort import).
    """
    if request.method == 'POST':
        form = UploadFileForm(request.POST, request.FILES)
        if form.is_valid():
            file = request.FILES['fileUpload']
            decoded_file = file.read().decode('utf-8').splitlines()
            reader = csv.DictReader(decoded_file)
            c=1
            for row in reader:
                e = emp_confrence_organised()
                e.pf_no = row['pf_no']
                e.venue = row['venue']
                e.name = row['name']
                e.k_year = int(row['k_year'])
                e.role1 = row['role1']
                e.role2 = row['role2']
                try:
                    # Truncate a possible "HH:MM:SS" suffix before parsing.
                    if (row['start_date'] == ' ' or row['start_date'] == ''):
                        a=1
                    elif (row['start_date'] != '0000-00-00 00:00:00' and row['start_date'] != '0000-00-00'):
                        e.start_date = row['start_date']
                        e.start_date = e.start_date[:10]
                        if (row['start_date'] != '0000-00-00'):
                            e.start_date = datetime.datetime.strptime(e.start_date, "%Y-%m-%d").date()
                except:
                    a=1
                try:
                    if (row['end_date'] == ' ' or row['end_date'] == ''):
                        a = 1
                    elif (row['end_date']!='0000-00-00 00:00:00' and row['end_date'] != '0000-00-00'):
                        e.end_date = row['end_date']
                        e.end_date = e.end_date[:10]
                        if (row['end_date'] != '0000-00-00'):
                            e.end_date = datetime.datetime.strptime(e.end_date, "%Y-%m-%d").date()
                except:
                    a=1
                try:
                    if (row['date_entry'] == ' ' or row['date_entry'] == ''):
                        a = 1
                    else:
                        e.date_entry = row['date_entry']
                        e.date_entry=e.date_entry[:10]
                        e.date_entry = datetime.datetime.strptime(e.date_entry, "%Y-%m-%d").date()
                except:
                    a=1
                e.save()
            return HttpResponseRedirect('DONE')
    else:
        form = UploadFileForm()
    return render(request, 'eisModulenew/upload.html', {'form': form})
def consultancy(request):
    """Bulk-import consultancy-project records from an uploaded CSV file.

    Dates arrive as "YYYY-MM-DD[ HH:MM:SS]"; blanks and MySQL zero-dates are
    skipped, and any parse failure is silently ignored (best-effort import).
    """
    if request.method == 'POST':
        form = UploadFileForm(request.POST, request.FILES)
        if form.is_valid():
            file = request.FILES['fileUpload']
            decoded_file = file.read().decode('utf-8').splitlines()
            reader = csv.DictReader(decoded_file)
            c=1
            for row in reader:
                e = emp_consultancy_projects()
                e.pf_no = row['pf_no']
                e.consultants = row['consultants']
                e.title = row['title']
                e.client = row['client']
                e.financial_outlay = row['financial_outlay']
                e.duration = row['duration']
                try:
                    if (row['start_date'] == ' ' or row['start_date'] == ''):
                        a=1
                    elif (row['start_date'] != '0000-00-00 00:00:00' and row['start_date'] != '0000-00-00'):
                        e.start_date = row['start_date']
                        e.start_date = e.start_date[:10]
                        if (row['start_date'] != '0000-00-00'):
                            e.start_date = datetime.datetime.strptime(e.start_date, "%Y-%m-%d").date()
                except:
                    a=1
                try:
                    if (row['end_date'] == ' ' or row['end_date'] == ''):
                        a = 1
                    elif (row['end_date']!='0000-00-00 00:00:00' and row['end_date'] != '0000-00-00'):
                        e.end_date = row['end_date']
                        e.end_date = e.end_date[:10]
                        if (row['end_date'] != '0000-00-00'):
                            e.end_date = datetime.datetime.strptime(e.end_date, "%Y-%m-%d").date()
                except:
                    a=1
                try:
                    if (row['date_entry'] == ' ' or row['date_entry'] == ''):
                        a = 1
                    else:
                        e.date_entry = row['date_entry']
                        e.date_entry=e.date_entry[:10]
                        e.date_entry = datetime.datetime.strptime(e.date_entry, "%Y-%m-%d").date()
                except:
                    a=1
                e.save()
            return HttpResponseRedirect('DONE')
    else:
        form = UploadFileForm()
    return render(request, 'eisModulenew/upload.html', {'form': form})
def event(request):
    """Bulk-import organised-event records from an uploaded CSV file.

    Dates arrive as "YYYY-MM-DD[ HH:MM:SS]"; MySQL zero-dates are skipped and
    any parse failure is silently ignored by the bare excepts.
    """
    if request.method == 'POST':
        form = UploadFileForm(request.POST, request.FILES)
        if form.is_valid():
            file = request.FILES['fileUpload']
            decoded_file = file.read().decode('utf-8').splitlines()
            reader = csv.DictReader(decoded_file)
            c=1
            for row in reader:
                e = emp_event_organized()
                e.pf_no = row['pf_no']
                e.type = row['type']
                e.name = row['name']
                e.sponsoring_agency = row['sponsoring_agency']
                e.venue = row['venue']
                e.role = row['role']
                try:
                    # Truncate a possible "HH:MM:SS" suffix before parsing.
                    if (row['start_date'] != '0000-00-00 00:00:00' and row['start_date'] != '0000-00-00'):
                        e.start_date = row['start_date']
                        e.start_date = e.start_date[:10]
                        if (row['start_date'] != '0000-00-00'):
                            e.start_date = datetime.datetime.strptime(e.start_date, "%Y-%m-%d").date()
                except:
                    a=1
                try:
                    if (row['end_date']!='0000-00-00 00:00:00' and row['end_date'] != '0000-00-00'):
                        e.end_date = row['end_date']
                        e.end_date = e.end_date[:10]
                        if (row['end_date'] != '0000-00-00'):
                            e.end_date = datetime.datetime.strptime(e.end_date, "%Y-%m-%d").date()
                except:
                    a=1
                try:
                    if (row['date_entry'] == ' ' or row['date_entry'] == ''):
                        a = 1
                    else:
                        e.date_entry = row['date_entry']
                        e.date_entry=e.date_entry[:10]
                        e.date_entry = datetime.datetime.strptime(e.date_entry, "%Y-%m-%d").date()
                except:
                    a=1
                e.save()
            return HttpResponseRedirect('DONE')
    else:
        form = UploadFileForm()
    return render(request, 'eisModulenew/upload.html', {'form': form})
def lectures(request):
    """Bulk-import expert-lecture records from an uploaded CSV file.

    Dates arrive as "YYYY-MM-DD[ HH:MM:SS]"; zero-dates are skipped and parse
    failures are silently ignored (best-effort import).
    """
    if request.method == 'POST':
        form = UploadFileForm(request.POST, request.FILES)
        if form.is_valid():
            file = request.FILES['fileUpload']
            decoded_file = file.read().decode('utf-8').splitlines()
            reader = csv.DictReader(decoded_file)
            c=1
            for row in reader:
                e = emp_expert_lectures()
                e.pf_no = row['pf_no']
                e.l_type = row['l_type']
                e.title = row['title']
                e.place = row['place']
                e.l_year = row['l_year']
                try:
                    if (row['l_date'] != '0000-00-00 00:00:00' and row['l_date'] != '0000-00-00'):
                        e.l_date = row['l_date']
                        e.l_date = e.l_date[:10]
                        if (row['l_date'] != '0000-00-00'):
                            e.l_date = datetime.datetime.strptime(e.l_date, "%Y-%m-%d").date()
                except:
                    a=1
                try:
                    if (row['date_entry'] == ' ' or row['date_entry'] == ''):
                        a = 1
                    else:
                        e.date_entry = row['date_entry']
                        e.date_entry=e.date_entry[:10]
                        e.date_entry = datetime.datetime.strptime(e.date_entry, "%Y-%m-%d").date()
                except:
                    a=1
                e.save()
            return HttpResponseRedirect('DONE')
    else:
        form = UploadFileForm()
    return render(request, 'eisModulenew/upload.html', {'form': form})
def keynote(request):
    """Bulk-import keynote-address records from an uploaded CSV file.

    Dates arrive as "YYYY-MM-DD[ HH:MM:SS]"; zero-dates are skipped and parse
    failures are silently ignored by the bare excepts (best-effort import).
    """
    if request.method == 'POST':
        form = UploadFileForm(request.POST, request.FILES)
        if form.is_valid():
            file = request.FILES['fileUpload']
            decoded_file = file.read().decode('utf-8').splitlines()
            reader = csv.DictReader(decoded_file)
            c=1
            for row in reader:
                e = emp_keynote_address()
                e.pf_no = row['pf_no']
                e.type = row['type']
                e.title = row['title']
                e.name = row['name']
                e.venue = row['venue']
                e.page_no = row['page_no']
                e.isbn_no = row['isbn_no']
                e.k_year = int(row['k_year'])
                try:
                    if (row['start_date'] != '0000-00-00 00:00:00' and row['start_date'] != '0000-00-00'):
                        e.start_date = row['start_date']
                        e.start_date = e.start_date[:10]
                        if (row['start_date'] != '0000-00-00'):
                            e.start_date = datetime.datetime.strptime(e.start_date, "%Y-%m-%d").date()
                except:
                    a=1
                try:
                    if (row['end_date']!='0000-00-00 00:00:00' and row['end_date'] != '0000-00-00'):
                        e.end_date = row['end_date']
                        e.end_date = e.end_date[:10]
                        if (row['end_date'] != '0000-00-00'):
                            e.end_date = datetime.datetime.strptime(e.end_date, "%Y-%m-%d").date()
                except:
                    a=1
                try:
                    if (row['date_entry'] == ' ' or row['date_entry'] == ''):
                        a = 1
                    else:
                        e.date_entry = row['date_entry']
                        e.date_entry=e.date_entry[:10]
                        e.date_entry = datetime.datetime.strptime(e.date_entry, "%Y-%m-%d").date()
                except:
                    a=1
                e.save()
            return HttpResponseRedirect('DONE')
    else:
        form = UploadFileForm()
    return render(request, 'eisModulenew/upload.html', {'form': form})
def thesis(request):
    """Bulk-import M.Tech/PhD thesis-supervision records from an uploaded CSV.

    ``degree_type`` is a numeric code (1 = M.Tech, 2 = PhD elsewhere in this
    module — TODO confirm against the model).  s_year '0' means "not recorded";
    date_entry parse failures are silently ignored.
    """
    if request.method == 'POST':
        form = UploadFileForm(request.POST, request.FILES)
        if form.is_valid():
            file = request.FILES['fileUpload']
            decoded_file = file.read().decode('utf-8').splitlines()
            reader = csv.DictReader(decoded_file)
            c=1
            for row in reader:
                e = emp_mtechphd_thesis()
                e.pf_no = row['pf_no']
                e.degree_type = int(row['degree_type'])
                e.title = row['title']
                e.supervisors = row['supervisors']
                e.co_supervisors = row['co_supervisors']
                e.rollno = row['rollno']
                e.s_name = row['s_name']
                if(row['s_year'] != '0'):
                    e.s_year = int(row['s_year'])
                try:
                    if (row['date_entry'] == ' ' or row['date_entry'] == ''):
                        a = 1
                    else:
                        e.date_entry = row['date_entry']
                        e.date_entry=e.date_entry[:10]
                        e.date_entry = datetime.datetime.strptime(e.date_entry, "%Y-%m-%d").date()
                except:
                    a=1
                e.save()
            return HttpResponseRedirect('DONE')
    else:
        form = UploadFileForm()
    return render(request, 'eisModulenew/upload.html', {'form': form})
def patents(request):
    """Bulk-import patent records from an uploaded CSV file.

    ``status`` is a numeric code mapped to a label; unknown codes leave the
    field unset.  date_entry parse failures are silently ignored.
    """
    if request.method == 'POST':
        form = UploadFileForm(request.POST, request.FILES)
        if form.is_valid():
            file = request.FILES['fileUpload']
            decoded_file = file.read().decode('utf-8').splitlines()
            reader = csv.DictReader(decoded_file)
            c=1
            for row in reader:
                e = emp_patents()
                e.pf_no = row['pf_no']
                e.p_no = row['p_no']
                e.title = row['title']
                e.earnings = row['earnings']
                e.p_year = int(row['p_year'])
                # Numeric status codes in the CSV map onto the model's labels.
                if(row['status'] == '1'):
                    e.status = 'Filed'
                elif(row['status'] == '2'):
                    e.status = 'Granted'
                elif(row['status'] == '3'):
                    e.status = 'Published'
                elif(row['status'] == '4'):
                    e.status = 'Owned'
                try:
                    if (row['date_entry'] == ' ' or row['date_entry'] == ''):
                        a = 1
                    else:
                        e.date_entry = row['date_entry']
                        e.date_entry=e.date_entry[:10]
                        e.date_entry = datetime.datetime.strptime(e.date_entry, "%Y-%m-%d").date()
                except:
                    a=1
                e.save()
            return HttpResponseRedirect('DONE')
    else:
        form = UploadFileForm()
    return render(request, 'eisModulenew/upload.html', {'form': form})
def published_books(request):
    """Bulk-import published-book records from an uploaded CSV file.

    ``p_type`` is a numeric code mapped to a label; unknown codes leave the
    field unset.  date_entry parse failures are silently ignored.
    """
    if request.method == 'POST':
        form = UploadFileForm(request.POST, request.FILES)
        if form.is_valid():
            file = request.FILES['fileUpload']
            decoded_file = file.read().decode('utf-8').splitlines()
            reader = csv.DictReader(decoded_file)
            c=1
            for row in reader:
                e = emp_published_books()
                e.pf_no = row['pf_no']
                e.title = row['title']
                e.publisher = row['publisher']
                e.co_authors = row['co_authors']
                e.pyear = int(row['pyear'])
                # Numeric p_type codes in the CSV map onto the model's labels.
                if(row['p_type'] == '1'):
                    e.p_type = 'Book'
                elif(row['p_type'] == '2'):
                    e.p_type = 'Monograph'
                elif(row['p_type'] == '3'):
                    e.p_type = 'Book Chapter'
                elif(row['p_type'] == '4'):
                    e.p_type = 'Handbook'
                elif(row['p_type'] == '5'):
                    e.p_type = 'Technical Report'
                try:
                    if (row['date_entry'] == ' ' or row['date_entry'] == ''):
                        a = 1
                    else:
                        e.date_entry = row['date_entry']
                        e.date_entry=e.date_entry[:10]
                        e.date_entry = datetime.datetime.strptime(e.date_entry, "%Y-%m-%d").date()
                except:
                    a=1
                e.save()
            return HttpResponseRedirect('DONE')
    else:
        form = UploadFileForm()
    return render(request, 'eisModulenew/upload.html', {'form': form})
def papers(request):
    """Bulk-import research-paper records from an uploaded CSV file.

    Each row becomes an emp_research_papers record.  Date columns arrive as
    "YYYY-MM-DD[ HH:MM:SS]"; blanks and MySQL zero-dates are treated as
    "unknown" and skipped, matching the original best-effort behaviour.

    Bugs fixed:
    - date_publication was truncated from ``e.end_date`` instead of itself,
      so its value came from the wrong column before being re-read raw.
    - datetime-stamped values for date_submission failed strptime and were
      silently dropped; they are now truncated to the date part first.
    - removed the premature mid-loop save() (the record was saved again a few
      lines later) and the dead ``a``/``b`` locals.
    """
    if request.method == 'POST':
        form = UploadFileForm(request.POST, request.FILES)
        if form.is_valid():
            file = request.FILES['fileUpload']
            decoded_file = file.read().decode('utf-8').splitlines()
            reader = csv.DictReader(decoded_file)

            def _parse_date(raw):
                # "YYYY-MM-DD[ HH:MM:SS]" -> date; blanks / None / MySQL
                # zero-dates / unparsable values -> None (field left unset).
                if raw in (None, '', ' ', '0000-00-00', '0000-00-00 00:00:00'):
                    return None
                try:
                    return datetime.datetime.strptime(raw[:10], "%Y-%m-%d").date()
                except Exception:
                    return None

            for row in reader:
                e = emp_research_papers()
                e.pf_no = row['pf_no']
                e.authors = row['authors']
                e.rtype = row['rtype']
                e.title_paper = row['title_paper']
                e.name_journal = row['name_journal']
                e.volume_no = row['volume_no']
                e.venue = row['venue']
                e.page_no = row['page_no']
                e.issn_no = row['issn_no']
                e.doi = row['doi']
                e.doc_id = row['doc_id']
                e.doc_description = row['doc_description']
                e.reference_number = row['reference_number']
                e.year = int(row['year'])
                # Only whitelisted values are accepted for these two fields.
                if row['is_sci'] in ('Yes', 'No'):
                    e.is_sci = row['is_sci']
                if row['status'] in ('Published', 'Accepted', 'Communicated'):
                    e.status = row['status']
                # All date columns share the same parse-or-skip handling.
                for attr in ('date_submission', 'start_date', 'end_date',
                             'date_acceptance', 'date_publication', 'date_entry'):
                    parsed = _parse_date(row.get(attr))
                    if parsed is not None:
                        setattr(e, attr, parsed)
                e.save()
            return HttpResponseRedirect('DONE')
    else:
        form = UploadFileForm()
    return render(request, 'eisModulenew/upload.html', {'form': form})
def projects(request):
    """Bulk-import research-project records from an uploaded CSV file.

    Date columns arrive as "YYYY-MM-DD[ HH:MM:SS]"; blanks and MySQL
    zero-dates are skipped, matching the original best-effort behaviour.

    Bug fixed: finish_date was re-assigned from the raw row value *after*
    being truncated to 10 characters, so datetime-stamped values failed
    strptime and were silently dropped.  The same truncate-then-parse path is
    now applied uniformly to every date column.
    """
    if request.method == 'POST':
        form = UploadFileForm(request.POST, request.FILES)
        if form.is_valid():
            file = request.FILES['fileUpload']
            decoded_file = file.read().decode('utf-8').splitlines()
            reader = csv.DictReader(decoded_file)

            def _parse_date(raw):
                # "YYYY-MM-DD[ HH:MM:SS]" -> date; blanks / None / MySQL
                # zero-dates / unparsable values -> None (field left unset).
                if raw in (None, '', ' ', '0000-00-00', '0000-00-00 00:00:00'):
                    return None
                try:
                    return datetime.datetime.strptime(raw[:10], "%Y-%m-%d").date()
                except Exception:
                    return None

            for row in reader:
                e = emp_research_projects()
                e.pf_no = row['pf_no']
                e.pi = row['pi']
                e.co_pi = row['co_pi']
                e.title = row['title']
                e.funding_agency = row['funding_agency']
                e.financial_outlay = row['financial_outlay']
                e.status = row['status']
                for attr in ('date_submission', 'start_date',
                             'finish_date', 'date_entry'):
                    parsed = _parse_date(row.get(attr))
                    if parsed is not None:
                        setattr(e, attr, parsed)
                e.save()
            return HttpResponseRedirect('DONE')
    else:
        form = UploadFileForm()
    return render(request, 'eisModulenew/upload.html', {'form': form})
def visits(request):
    """Bulk-import visit records (Indian/foreign) from an uploaded CSV file.

    NOTE(review): unlike the sibling importers this one stores the entry date
    on ``e.entry_date`` (not ``date_entry``), and start_date has no blank /
    zero-date guard — blank values simply fail strptime and are swallowed by
    the bare except.
    """
    if request.method == 'POST':
        form = UploadFileForm(request.POST, request.FILES)
        if form.is_valid():
            file = request.FILES['fileUpload']
            decoded_file = file.read().decode('utf-8').splitlines()
            reader = csv.DictReader(decoded_file)
            for row in reader:
                e = emp_visits()
                e.pf_no = row['pf_no']
                e.v_type = row['v_type']
                e.country = row['country']
                e.place = row['place']
                e.purpose = row['purpose']
                try:
                    if(str(row['v_date']) == ' ' or str(row['v_date']) == '' or row['v_date'] == None):
                        a=1
                    else:
                        e.v_date = row['v_date']
                        e.v_date = datetime.datetime.strptime(e.v_date, "%Y-%m-%d").date()
                except:
                    a=1
                try:
                    e.start_date = row['start_date']
                    e.start_date = datetime.datetime.strptime(e.start_date, "%Y-%m-%d").date()
                except:
                    a=1
                try:
                    if(row['end_date']!='0000-00-00 00:00:00'):
                        e.end_date = row['end_date']
                        e.end_date = datetime.datetime.strptime(e.end_date, "%Y-%m-%d").date()
                except:
                    a=1
                try:
                    if (row['date_entry'] == ' ' or row['date_entry'] == ''):
                        a = 1
                    else:
                        e.entry_date = row['date_entry']
                        e.entry_date=e.entry_date[:10]
                        e.entry_date = datetime.datetime.strptime(e.entry_date, "%Y-%m-%d").date()
                except:
                    a=1
                e.save()
            return HttpResponseRedirect('DONE')
    else:
        form = UploadFileForm()
    return render(request, 'eisModulenew/upload.html', {'form': form})
def upload_file(request):
    """Bulk-import session-chair records from an uploaded CSV file."""
    if request.method == 'POST':
        form = UploadFileForm(request.POST, request.FILES)
        if form.is_valid():
            file = request.FILES['fileUpload']
            decoded_file = file.read().decode('utf-8').splitlines()
            reader = csv.DictReader(decoded_file)
            for row in reader:
                e = emp_session_chair()
                e.pf_no = row['pf_no']
                e.name = row['name']
                e.event = row['event']
                # NOTE(review): place is filled from the s_year column —
                # looks like a copy-paste bug; confirm the CSV has a 'place'
                # column before changing.
                e.place = row['s_year']
                e.s_year=int(row['s_year'])
                try:
                    if (row['start_date'] != '0000-00-00 00:00:00'):
                        e.start_date = row['start_date']
                        e.start_date = datetime.datetime.strptime(e.start_date, "%Y-%m-%d").date()
                except:
                    a=1
                try:
                    if(row['end_date']!='0000-00-00 00:00:00'):
                        e.end_date = row['end_date']
                        e.end_date = datetime.datetime.strptime(e.end_date, "%Y-%m-%d").date()
                except:
                    a=1
                try:
                    if (row['date_entry'] == ' ' or row['date_entry'] == ''):
                        a = 1
                    else:
                        # NOTE(review): this guard checks start_date, not
                        # date_entry — probable copy-paste; verify intent.
                        if (row['start_date'] != '0000-00-00 00:00:00'):
                            e.date_entry = row['date_entry']
                            e.date_entry=e.date_entry[:10]
                            e.date_entry = datetime.datetime.strptime(e.date_entry, "%Y-%m-%d").date()
                except:
                    a=1
                e.save()
            return HttpResponseRedirect('DONE')
    else:
        form = UploadFileForm()
    return render(request, 'eisModulenew/upload.html', {'form': form})
def render_to_pdf(template_src, context_dict):
    """Render a Django template with *context_dict* and return it as a PDF.

    Uses xhtml2pdf (pisa) to convert the rendered HTML; on conversion errors
    the escaped HTML is returned instead so the failure is visible.
    """
    html = get_template(template_src).render(context_dict)
    buffer = BytesIO()
    pdf = pisa.pisaDocument(BytesIO(html.encode("UTF-8")), buffer)
    if pdf.err:
        return HttpResponse('We had some errors<pre>%s</pre>' % escape(html))
    return HttpResponse(buffer.getvalue(), content_type='application/pdf')
def generate_report(request):
    """Render the logged-in faculty member's achievements report as a PDF.

    For each report category the form posts a "<name>_select" checkbox; when
    ticked the matching queryset is fetched and the paired "<name>_req" flag
    is "1" so the template renders that section, otherwise the value is ""
    and the flag "0".  The date window comes from syear/lyear (years) and
    smonth/lmonth (months).

    Cleanups: removed the dead ``star_date`` computed twice and never used;
    collapsed seventeen copy-pasted select/flag branches into one helper;
    guarded the foreign-visit country expansion on its own flag.
    """
    user = get_object_or_404(ExtraInfo, user=request.user)
    pf = user.id
    start = request.POST.get('syear')
    end = request.POST.get('lyear')
    star = request.POST.get('smonth')
    en = request.POST.get('lmonth')

    def _pick(key, query):
        # (queryset, "1") when the category checkbox was submitted, else
        # ("", "0").  `query` is a zero-arg callable so unselected categories
        # never build their queryset.
        if request.POST.get(key + '_select') == key:
            return query(), "1"
        return "", "0"

    journal, journal_req = _pick('journal', lambda: emp_research_papers.objects.filter(
        pf_no=pf, rtype='Journal').filter(year__range=[start, end]).order_by('-date_entry'))
    conference, conference_req = _pick('conference', lambda: emp_research_papers.objects.filter(
        pf_no=pf, rtype='Conference').filter(year__range=[start, end]).order_by('-date_entry'))
    # NOTE(review): books are not filtered by pf or date in the original
    # logic; preserved as-is — confirm whether that is intended.
    books, books_req = _pick('books', lambda: emp_published_books.objects.order_by('-date_entry'))
    projects, projects_req = _pick('projects', lambda: emp_research_projects.objects.filter(
        pf_no=pf).filter(start_date__year__range=[start, end]).filter(
        start_date__month__range=[star, en]).order_by('-start_date'))
    consultancy, consultancy_req = _pick('consultancy', lambda: emp_consultancy_projects.objects.filter(
        pf_no=pf).filter(start_date__year__range=[start, end]).filter(
        start_date__month__range=[star, en]).order_by('-date_entry'))
    patents, patents_req = _pick('patents', lambda: emp_patents.objects.filter(
        pf_no=pf).filter(p_year__range=[start, end]).filter(
        a_month__range=[star, en]).order_by('-date_entry'))
    techtransfers, techtransfers_req = _pick('techtransfers', lambda: emp_techtransfer.objects.filter(
        pf_no=pf).filter(date_entry__year__range=[start, end]).filter(
        date_entry__month__range=[star, en]).order_by('-date_entry'))
    mtechs, mtechs_req = _pick('mtechs', lambda: emp_mtechphd_thesis.objects.filter(
        pf_no=pf, degree_type=1).filter(s_year__range=[start, end]).filter(
        a_month__range=[star, en]).order_by('-date_entry'))
    phds, phds_req = _pick('phds', lambda: emp_mtechphd_thesis.objects.filter(
        pf_no=pf, degree_type=2).filter(s_year__range=[start, end]).filter(
        a_month__range=[star, en]).order_by('-date_entry'))
    # v_type=2 -> foreign visits, v_type=1 -> Indian visits (as used below).
    fvisits, fvisits_req = _pick('fvisits', lambda: emp_visits.objects.filter(
        pf_no=pf, v_type=2).filter(start_date__year__range=[start, end]).filter(
        start_date__month__range=[star, en]).order_by('-entry_date'))
    ivisits, ivisits_req = _pick('ivisits', lambda: emp_visits.objects.filter(
        pf_no=pf, v_type=1).filter(start_date__year__range=[start, end]).filter(
        start_date__month__range=[star, en]).order_by('-entry_date'))
    consymps, consymps_req = _pick('consymps', lambda: emp_confrence_organised.objects.filter(
        pf_no=pf).filter(start_date__year__range=[start, end]).filter(
        start_date__month__range=[star, en]).order_by('-date_entry'))
    awards, awards_req = _pick('awards', lambda: emp_achievement.objects.filter(
        pf_no=pf).filter(a_year__range=[start, end]).order_by('-date_entry'))
    talks, talks_req = _pick('talks', lambda: emp_expert_lectures.objects.filter(
        pf_no=pf).filter(l_date__year__range=[start, end]).filter(
        l_date__month__range=[star, en]).order_by('-date_entry'))
    chairs, chairs_req = _pick('chairs', lambda: emp_session_chair.objects.filter(
        pf_no=pf).filter(start_date__year__range=[start, end]).filter(
        start_date__month__range=[star, en]).order_by('-date_entry'))
    keynotes, keynotes_req = _pick('keynotes', lambda: emp_keynote_address.objects.filter(
        pf_no=pf).filter(start_date__year__range=[start, end]).filter(
        start_date__month__range=[star, en]).order_by('-date_entry'))
    events, events_req = _pick('events', lambda: emp_event_organized.objects.filter(
        pf_no=pf).filter(start_date__year__range=[start, end]).filter(
        start_date__month__range=[star, en]).order_by('-start_date'))

    # ISO-3166 alpha-2 code -> country name, used to expand the two-letter
    # code stored on each foreign visit for display in the PDF.
    countries = {
        'AF': 'Afghanistan',
        'AX': 'Aland Islands',
        'AL': 'Albania',
        'DZ': 'Algeria',
        'AS': 'American Samoa',
        'AD': 'Andorra',
        'AO': 'Angola',
        'AI': 'Anguilla',
        'AQ': 'Antarctica',
        'AG': 'Antigua And Barbuda',
        'AR': 'Argentina',
        'AM': 'Armenia',
        'AW': 'Aruba',
        'AU': 'Australia',
        'AT': 'Austria',
        'AZ': 'Azerbaijan',
        'BS': 'Bahamas',
        'BH': 'Bahrain',
        'BD': 'Bangladesh',
        'BB': 'Barbados',
        'BY': 'Belarus',
        'BE': 'Belgium',
        'BZ': 'Belize',
        'BJ': 'Benin',
        'BM': 'Bermuda',
        'BT': 'Bhutan',
        'BO': 'Bolivia',
        'BA': 'Bosnia And Herzegovina',
        'BW': 'Botswana',
        'BV': 'Bouvet Island',
        'BR': 'Brazil',
        'IO': 'British Indian Ocean Territory',
        'BN': 'Brunei Darussalam',
        'BG': 'Bulgaria',
        'BF': 'Burkina Faso',
        'BI': 'Burundi',
        'KH': 'Cambodia',
        'CM': 'Cameroon',
        'CA': 'Canada',
        'CV': 'Cape Verde',
        'KY': 'Cayman Islands',
        'CF': 'Central African Republic',
        'TD': 'Chad',
        'CL': 'Chile',
        'CN': 'China',
        'CX': 'Christmas Island',
        'CC': 'Cocos (Keeling) Islands',
        'CO': 'Colombia',
        'KM': 'Comoros',
        'CG': 'Congo',
        'CD': 'Congo, Democratic Republic',
        'CK': 'Cook Islands',
        'CR': 'Costa Rica',
        'CI': 'Cote D\'Ivoire',
        'HR': 'Croatia',
        'CU': 'Cuba',
        'CY': 'Cyprus',
        'CZ': 'Czech Republic',
        'DK': 'Denmark',
        'DJ': 'Djibouti',
        'DM': 'Dominica',
        'DO': 'Dominican Republic',
        'EC': 'Ecuador',
        'EG': 'Egypt',
        'SV': 'El Salvador',
        'GQ': 'Equatorial Guinea',
        'ER': 'Eritrea',
        'EE': 'Estonia',
        'ET': 'Ethiopia',
        'FK': 'Falkland Islands (Malvinas)',
        'FO': 'Faroe Islands',
        'FJ': 'Fiji',
        'FI': 'Finland',
        'FR': 'France',
        'GF': 'French Guiana',
        'PF': 'French Polynesia',
        'TF': 'French Southern Territories',
        'GA': 'Gabon',
        'GM': 'Gambia',
        'GE': 'Georgia',
        'DE': 'Germany',
        'GH': 'Ghana',
        'GI': 'Gibraltar',
        'GR': 'Greece',
        'GL': 'Greenland',
        'GD': 'Grenada',
        'GP': 'Guadeloupe',
        'GU': 'Guam',
        'GT': 'Guatemala',
        'GG': 'Guernsey',
        'GN': 'Guinea',
        'GW': 'Guinea-Bissau',
        'GY': 'Guyana',
        'HT': 'Haiti',
        'HM': 'Heard Island & Mcdonald Islands',
        'VA': 'Holy See (Vatican City State)',
        'HN': 'Honduras',
        'HK': 'Hong Kong',
        'HU': 'Hungary',
        'IS': 'Iceland',
        'IN': 'India',
        'ID': 'Indonesia',
        'IR': 'Iran, Islamic Republic Of',
        'IQ': 'Iraq',
        'IE': 'Ireland',
        'IM': 'Isle Of Man',
        'IL': 'Israel',
        'IT': 'Italy',
        'JM': 'Jamaica',
        'JP': 'Japan',
        'JE': 'Jersey',
        'JO': 'Jordan',
        'KZ': 'Kazakhstan',
        'KE': 'Kenya',
        'KI': 'Kiribati',
        'KR': 'Korea',
        'KW': 'Kuwait',
        'KG': 'Kyrgyzstan',
        'LA': 'Lao People\'s Democratic Republic',
        'LV': 'Latvia',
        'LB': 'Lebanon',
        'LS': 'Lesotho',
        'LR': 'Liberia',
        'LY': 'Libyan Arab Jamahiriya',
        'LI': 'Liechtenstein',
        'LT': 'Lithuania',
        'LU': 'Luxembourg',
        'MO': 'Macao',
        'MK': 'Macedonia',
        'MG': 'Madagascar',
        'MW': 'Malawi',
        'MY': 'Malaysia',
        'MV': 'Maldives',
        'ML': 'Mali',
        'MT': 'Malta',
        'MH': 'Marshall Islands',
        'MQ': 'Martinique',
        'MR': 'Mauritania',
        'MU': 'Mauritius',
        'YT': 'Mayotte',
        'MX': 'Mexico',
        'FM': 'Micronesia, Federated States Of',
        'MD': 'Moldova',
        'MC': 'Monaco',
        'MN': 'Mongolia',
        'ME': 'Montenegro',
        'MS': 'Montserrat',
        'MA': 'Morocco',
        'MZ': 'Mozambique',
        'MM': 'Myanmar',
        'NA': 'Namibia',
        'NR': 'Nauru',
        'NP': 'Nepal',
        'NL': 'Netherlands',
        'AN': 'Netherlands Antilles',
        'NC': 'New Caledonia',
        'NZ': 'New Zealand',
        'NI': 'Nicaragua',
        'NE': 'Niger',
        'NG': 'Nigeria',
        'NU': 'Niue',
        'NF': 'Norfolk Island',
        'MP': 'Northern Mariana Islands',
        'NO': 'Norway',
        'OM': 'Oman',
        'PK': 'Pakistan',
        'PW': 'Palau',
        'PS': 'Palestinian Territory, Occupied',
        'PA': 'Panama',
        'PG': 'Papua New Guinea',
        'PY': 'Paraguay',
        'PE': 'Peru',
        'PH': 'Philippines',
        'PN': 'Pitcairn',
        'PL': 'Poland',
        'PT': 'Portugal',
        'PR': 'Puerto Rico',
        'QA': 'Qatar',
        'RE': 'Reunion',
        'RO': 'Romania',
        'RU': 'Russian Federation',
        'RW': 'Rwanda',
        'BL': 'Saint Barthelemy',
        'SH': 'Saint Helena',
        'KN': 'Saint Kitts And Nevis',
        'LC': 'Saint Lucia',
        'MF': 'Saint Martin',
        'PM': 'Saint Pierre And Miquelon',
        'VC': 'Saint Vincent And Grenadines',
        'WS': 'Samoa',
        'SM': 'San Marino',
        'ST': 'Sao Tome And Principe',
        'SA': 'Saudi Arabia',
        'SN': 'Senegal',
        'RS': 'Serbia',
        'SC': 'Seychelles',
        'SL': 'Sierra Leone',
        'SG': 'Singapore',
        'SK': 'Slovakia',
        'SI': 'Slovenia',
        'SB': 'Solomon Islands',
        'SO': 'Somalia',
        'ZA': 'South Africa',
        'GS': 'South Georgia And Sandwich Isl.',
        'ES': 'Spain',
        'LK': 'Sri Lanka',
        'SD': 'Sudan',
        'SR': 'Suriname',
        'SJ': 'Svalbard And Jan Mayen',
        'SZ': 'Swaziland',
        'SE': 'Sweden',
        'CH': 'Switzerland',
        'SY': 'Syrian Arab Republic',
        'TW': 'Taiwan',
        'TJ': 'Tajikistan',
        'TZ': 'Tanzania',
        'TH': 'Thailand',
        'TL': 'Timor-Leste',
        'TG': 'Togo',
        'TK': 'Tokelau',
        'TO': 'Tonga',
        'TT': 'Trinidad And Tobago',
        'TN': 'Tunisia',
        'TR': 'Turkey',
        'TM': 'Turkmenistan',
        'TC': 'Turks And Caicos Islands',
        'TV': 'Tuvalu',
        'UG': 'Uganda',
        'UA': 'Ukraine',
        'AE': 'United Arab Emirates',
        'GB': 'United Kingdom',
        'US': 'United States',
        'UM': 'United States Outlying Islands',
        'UY': 'Uruguay',
        'UZ': 'Uzbekistan',
        'VU': 'Vanuatu',
        'VE': 'Venezuela',
        'VN': 'Viet Nam',
        'VG': 'Virgin Islands, British',
        'VI': 'Virgin Islands, U.S.',
        'WF': 'Wallis And Futuna',
        'EH': 'Western Sahara',
        'YE': 'Yemen',
        'ZM': 'Zambia',
        'ZW': 'Zimbabwe',
        'KP': 'Korea (Democratic Peoples Republic of)',
    }
    if fvisits_req == "1":
        # Attach the human-readable country name for the template.
        for fvisit in fvisits:
            fvisit.countryfull = countries[fvisit.country]

    # Kept for its 404 side effect: the report requires a faculty_about row.
    pers = get_object_or_404(faculty_about, user=request.user)
    design = HoldsDesignation.objects.filter(working=request.user)
    desig = [str(i.designation) for i in design]
    context = {'user': user,
               'desig': desig,
               'pf': pf,
               'journal': journal,
               'journal_req': journal_req,
               'conference': conference,
               'conference_req': conference_req,
               'books': books,
               'books_req': books_req,
               'projects': projects,
               'projects_req': projects_req,
               'consultancy': consultancy,
               'consultancy_req': consultancy_req,
               'patents': patents,
               'patents_req': patents_req,
               'techtransfers': techtransfers,
               'techtransfers_req': techtransfers_req,
               'mtechs': mtechs,
               'mtechs_req': mtechs_req,
               'phds': phds,
               'phds_req': phds_req,
               'fvisits': fvisits,
               'fvisits_req': fvisits_req,
               'ivisits': ivisits,
               'ivisits_req': ivisits_req,
               'consymps': consymps,
               'consymps_req': consymps_req,
               'awards': awards,
               'awards_req': awards_req,
               'talks': talks,
               'talks_req': talks_req,
               'chairs': chairs,
               'chairs_req': chairs_req,
               'keynotes': keynotes,
               'keynotes_req': keynotes_req,
               'events': events,
               'events_req': events_req,
               'first_name': request.user.first_name,
               'last_name': request.user.last_name,
               }
    return render_to_pdf('eisModulenew/generatereportshow.html', context)
# Report generation for Dean RSPC (queries are not filtered to the logged-in user).
def rspc_generate_report(request):
    """Generate the institute-wide activity report PDF for the Dean (RSPC).

    Reads the reporting window (``syear``/``smonth`` .. ``lyear``/``lmonth``)
    and the per-section checkboxes from ``request.POST``.  For every selected
    section the matching records are fetched institute-wide (not restricted to
    the requesting user, unlike the per-faculty report); unselected sections
    are passed to the template as empty strings with their ``*_req`` flag set
    to "0".  Renders ``eisModulenew/rspc_generatereportshow.html`` to a PDF.
    """
    user = get_object_or_404(ExtraInfo, user=request.user)
    pf = user.id
    start = request.POST.get('syear')
    end = request.POST.get('lyear')
    star = request.POST.get('smonth')
    en = request.POST.get('lmonth')
    # NOTE(review): star_date was assigned twice with identical values in the
    # original and is never read below; the duplicate assignment was removed.
    star_date = start + '-01-01'
    # NOTE(review): the journal/conference/books sections ignore the selected
    # date window — they return all records; confirm whether this is intended.
    if (request.POST.get('journal_select') == "journal"):
        journal = emp_research_papers.objects.filter(rtype='Journal').order_by('-date_entry')
        journal_req = "1"
    else:
        journal = ""
        journal_req = "0"
    if (request.POST.get('conference_select') == "conference"):
        conference = emp_research_papers.objects.filter(rtype='Conference').order_by('-date_entry')
        conference_req = "1"
    else:
        conference = ""
        conference_req = "0"
    if (request.POST.get('books_select') == "books"):
        books = emp_published_books.objects.all().order_by('-date_entry')
        books_req = "1"
    else:
        books = ""
        books_req = "0"
    if (request.POST.get('projects_select') == "projects"):
        projects = emp_research_projects.objects.all().filter(start_date__year__range=[start, end]).filter(start_date__month__range=[star, en]).order_by('-start_date')
        projects_req = "1"
    else:
        projects = ""
        projects_req = "0"
    if (request.POST.get('consultancy_select') == "consultancy"):
        consultancy = emp_consultancy_projects.objects.all().filter(start_date__year__range=[start, end]).filter(start_date__month__range=[star, en]).order_by('-date_entry')
        consultancy_req = "1"
    else:
        consultancy = ""
        consultancy_req = "0"
    if (request.POST.get('patents_select') == "patents"):
        patents = emp_patents.objects.all().filter(p_year__range=[start, end]).filter(a_month__range=[star, en]).order_by('-date_entry')
        patents_req = "1"
    else:
        patents = ""
        patents_req = "0"
    if (request.POST.get('techtransfers_select') == "techtransfers"):
        techtransfers = emp_techtransfer.objects.all().filter(date_entry__year__range=[start, end]).filter(date_entry__month__range=[star, en]).order_by('-date_entry')
        techtransfers_req = "1"
    else:
        techtransfers = ""
        techtransfers_req = "0"
    # degree_type=1 -> M.Tech theses, degree_type=2 -> PhD theses.
    if (request.POST.get('mtechs_select') == "mtechs"):
        mtechs = emp_mtechphd_thesis.objects.filter(degree_type=1).filter(s_year__range=[start, end]).filter(a_month__range=[star, en]).order_by('-date_entry')
        mtechs_req = "1"
    else:
        mtechs = ""
        mtechs_req = "0"
    if (request.POST.get('phds_select') == "phds"):
        phds = emp_mtechphd_thesis.objects.filter(degree_type=2).filter(s_year__range=[start, end]).filter(a_month__range=[star, en]).order_by('-date_entry')
        phds_req = "1"
    else:
        phds = ""
        phds_req = "0"
    # v_type=2 -> foreign visits, v_type=1 -> Indian visits.
    if (request.POST.get('fvisits_select') == "fvisits"):
        fvisits = emp_visits.objects.filter(v_type=2).filter(start_date__year__range=[start, end]).filter(start_date__month__range=[star, en]).order_by('-entry_date')
        fvisits_req = "1"
    else:
        fvisits = ""
        fvisits_req = "0"
    # ISO 3166-1 alpha-2 code -> display name, used to expand the country
    # codes stored on foreign-visit records for the PDF template.
    countries = {
        'AF': 'Afghanistan',
        'AX': 'Aland Islands',
        'AL': 'Albania',
        'DZ': 'Algeria',
        'AS': 'American Samoa',
        'AD': 'Andorra',
        'AO': 'Angola',
        'AI': 'Anguilla',
        'AQ': 'Antarctica',
        'AG': 'Antigua And Barbuda',
        'AR': 'Argentina',
        'AM': 'Armenia',
        'AW': 'Aruba',
        'AU': 'Australia',
        'AT': 'Austria',
        'AZ': 'Azerbaijan',
        'BS': 'Bahamas',
        'BH': 'Bahrain',
        'BD': 'Bangladesh',
        'BB': 'Barbados',
        'BY': 'Belarus',
        'BE': 'Belgium',
        'BZ': 'Belize',
        'BJ': 'Benin',
        'BM': 'Bermuda',
        'BT': 'Bhutan',
        'BO': 'Bolivia',
        'BA': 'Bosnia And Herzegovina',
        'BW': 'Botswana',
        'BV': 'Bouvet Island',
        'BR': 'Brazil',
        'IO': 'British Indian Ocean Territory',
        'BN': 'Brunei Darussalam',
        'BG': 'Bulgaria',
        'BF': 'Burkina Faso',
        'BI': 'Burundi',
        'KH': 'Cambodia',
        'CM': 'Cameroon',
        'CA': 'Canada',
        'CV': 'Cape Verde',
        'KY': 'Cayman Islands',
        'CF': 'Central African Republic',
        'TD': 'Chad',
        'CL': 'Chile',
        'CN': 'China',
        'CX': 'Christmas Island',
        'CC': 'Cocos (Keeling) Islands',
        'CO': 'Colombia',
        'KM': 'Comoros',
        'CG': 'Congo',
        'CD': 'Congo, Democratic Republic',
        'CK': 'Cook Islands',
        'CR': 'Costa Rica',
        'CI': 'Cote D\'Ivoire',
        'HR': 'Croatia',
        'CU': 'Cuba',
        'CY': 'Cyprus',
        'CZ': 'Czech Republic',
        'DK': 'Denmark',
        'DJ': 'Djibouti',
        'DM': 'Dominica',
        'DO': 'Dominican Republic',
        'EC': 'Ecuador',
        'EG': 'Egypt',
        'SV': 'El Salvador',
        'GQ': 'Equatorial Guinea',
        'ER': 'Eritrea',
        'EE': 'Estonia',
        'ET': 'Ethiopia',
        'FK': 'Falkland Islands (Malvinas)',
        'FO': 'Faroe Islands',
        'FJ': 'Fiji',
        'FI': 'Finland',
        'FR': 'France',
        'GF': 'French Guiana',
        'PF': 'French Polynesia',
        'TF': 'French Southern Territories',
        'GA': 'Gabon',
        'GM': 'Gambia',
        'GE': 'Georgia',
        'DE': 'Germany',
        'GH': 'Ghana',
        'GI': 'Gibraltar',
        'GR': 'Greece',
        'GL': 'Greenland',
        'GD': 'Grenada',
        'GP': 'Guadeloupe',
        'GU': 'Guam',
        'GT': 'Guatemala',
        'GG': 'Guernsey',
        'GN': 'Guinea',
        'GW': 'Guinea-Bissau',
        'GY': 'Guyana',
        'HT': 'Haiti',
        'HM': 'Heard Island & Mcdonald Islands',
        'VA': 'Holy See (Vatican City State)',
        'HN': 'Honduras',
        'HK': 'Hong Kong',
        'HU': 'Hungary',
        'IS': 'Iceland',
        'IN': 'India',
        'ID': 'Indonesia',
        'IR': 'Iran, Islamic Republic Of',
        'IQ': 'Iraq',
        'IE': 'Ireland',
        'IM': 'Isle Of Man',
        'IL': 'Israel',
        'IT': 'Italy',
        'JM': 'Jamaica',
        'JP': 'Japan',
        'JE': 'Jersey',
        'JO': 'Jordan',
        'KZ': 'Kazakhstan',
        'KE': 'Kenya',
        'KI': 'Kiribati',
        'KR': 'Korea',
        'KW': 'Kuwait',
        'KG': 'Kyrgyzstan',
        'LA': 'Lao People\'s Democratic Republic',
        'LV': 'Latvia',
        'LB': 'Lebanon',
        'LS': 'Lesotho',
        'LR': 'Liberia',
        'LY': 'Libyan Arab Jamahiriya',
        'LI': 'Liechtenstein',
        'LT': 'Lithuania',
        'LU': 'Luxembourg',
        'MO': 'Macao',
        'MK': 'Macedonia',
        'MG': 'Madagascar',
        'MW': 'Malawi',
        'MY': 'Malaysia',
        'MV': 'Maldives',
        'ML': 'Mali',
        'MT': 'Malta',
        'MH': 'Marshall Islands',
        'MQ': 'Martinique',
        'MR': 'Mauritania',
        'MU': 'Mauritius',
        'YT': 'Mayotte',
        'MX': 'Mexico',
        'FM': 'Micronesia, Federated States Of',
        'MD': 'Moldova',
        'MC': 'Monaco',
        'MN': 'Mongolia',
        'ME': 'Montenegro',
        'MS': 'Montserrat',
        'MA': 'Morocco',
        'MZ': 'Mozambique',
        'MM': 'Myanmar',
        'NA': 'Namibia',
        'NR': 'Nauru',
        'NP': 'Nepal',
        'NL': 'Netherlands',
        'AN': 'Netherlands Antilles',
        'NC': 'New Caledonia',
        'NZ': 'New Zealand',
        'NI': 'Nicaragua',
        'NE': 'Niger',
        'NG': 'Nigeria',
        'NU': 'Niue',
        'NF': 'Norfolk Island',
        'MP': 'Northern Mariana Islands',
        'NO': 'Norway',
        'OM': 'Oman',
        'PK': 'Pakistan',
        'PW': 'Palau',
        'PS': 'Palestinian Territory, Occupied',
        'PA': 'Panama',
        'PG': 'Papua New Guinea',
        'PY': 'Paraguay',
        'PE': 'Peru',
        'PH': 'Philippines',
        'PN': 'Pitcairn',
        'PL': 'Poland',
        'PT': 'Portugal',
        'PR': 'Puerto Rico',
        'QA': 'Qatar',
        'RE': 'Reunion',
        'RO': 'Romania',
        'RU': 'Russian Federation',
        'RW': 'Rwanda',
        'BL': 'Saint Barthelemy',
        'SH': 'Saint Helena',
        'KN': 'Saint Kitts And Nevis',
        'LC': 'Saint Lucia',
        'MF': 'Saint Martin',
        'PM': 'Saint Pierre And Miquelon',
        'VC': 'Saint Vincent And Grenadines',
        'WS': 'Samoa',
        'SM': 'San Marino',
        'ST': 'Sao Tome And Principe',
        'SA': 'Saudi Arabia',
        'SN': 'Senegal',
        'RS': 'Serbia',
        'SC': 'Seychelles',
        'SL': 'Sierra Leone',
        'SG': 'Singapore',
        'SK': 'Slovakia',
        'SI': 'Slovenia',
        'SB': 'Solomon Islands',
        'SO': 'Somalia',
        'ZA': 'South Africa',
        'GS': 'South Georgia And Sandwich Isl.',
        'ES': 'Spain',
        'LK': 'Sri Lanka',
        'SD': 'Sudan',
        'SR': 'Suriname',
        'SJ': 'Svalbard And Jan Mayen',
        'SZ': 'Swaziland',
        'SE': 'Sweden',
        'CH': 'Switzerland',
        'SY': 'Syrian Arab Republic',
        'TW': 'Taiwan',
        'TJ': 'Tajikistan',
        'TZ': 'Tanzania',
        'TH': 'Thailand',
        'TL': 'Timor-Leste',
        'TG': 'Togo',
        'TK': 'Tokelau',
        'TO': 'Tonga',
        'TT': 'Trinidad And Tobago',
        'TN': 'Tunisia',
        'TR': 'Turkey',
        'TM': 'Turkmenistan',
        'TC': 'Turks And Caicos Islands',
        'TV': 'Tuvalu',
        'UG': 'Uganda',
        'UA': 'Ukraine',
        'AE': 'United Arab Emirates',
        'GB': 'United Kingdom',
        'US': 'United States',
        'UM': 'United States Outlying Islands',
        'UY': 'Uruguay',
        'UZ': 'Uzbekistan',
        'VU': 'Vanuatu',
        'VE': 'Venezuela',
        'VN': 'Viet Nam',
        'VG': 'Virgin Islands, British',
        'VI': 'Virgin Islands, U.S.',
        'WF': 'Wallis And Futuna',
        'EH': 'Western Sahara',
        'YE': 'Yemen',
        'ZM': 'Zambia',
        'ZW': 'Zimbabwe',
        'KP': 'Korea (Democratic Peoples Republic of)',
    }
    if (request.POST.get('ivisits_select') == "ivisits"):
        ivisits = emp_visits.objects.filter(v_type=1).filter(start_date__year__range=[start, end]).filter(start_date__month__range=[star, en]).order_by('-entry_date')
        ivisits_req = "1"
    else:
        ivisits = ""
        ivisits_req = "0"
    # Expand the stored country code on each foreign visit for display.
    # FIX: use .get() so an unknown/legacy code no longer raises KeyError and
    # aborts the whole report; the raw code is shown as a fallback.
    for fvisit in fvisits:
        fvisit.countryfull = countries.get(fvisit.country, fvisit.country)
    if (request.POST.get('consymps_select') == "consymps"):
        consymps = emp_confrence_organised.objects.all().filter(start_date__year__range=[start, end]).filter(start_date__month__range=[star, en]).order_by('-date_entry')
        consymps_req = "1"
    else:
        consymps = ""
        consymps_req = "0"
    if (request.POST.get('awards_select') == "awards"):
        awards = emp_achievement.objects.all().filter(a_year__range=[start, end]).order_by('-date_entry')
        awards_req = "1"
    else:
        awards = ""
        awards_req = "0"
    if (request.POST.get('talks_select') == "talks"):
        talks = emp_expert_lectures.objects.all().filter(l_date__year__range=[start, end]).filter(l_date__month__range=[star, en]).order_by('-date_entry')
        talks_req = "1"
    else:
        talks = ""
        talks_req = "0"
    if (request.POST.get('chairs_select') == "chairs"):
        chairs = emp_session_chair.objects.all().filter(start_date__year__range=[start, end]).filter(start_date__month__range=[star, en]).order_by('-date_entry')
        chairs_req = "1"
    else:
        chairs = ""
        chairs_req = "0"
    if (request.POST.get('keynotes_select') == "keynotes"):
        keynotes = emp_keynote_address.objects.all().filter(start_date__year__range=[start, end]).filter(start_date__month__range=[star, en]).order_by('-date_entry')
        keynotes_req = "1"
    else:
        keynotes = ""
        keynotes_req = "0"
    if (request.POST.get('events_select') == "events"):
        events = emp_event_organized.objects.all().filter(start_date__year__range=[start, end]).filter(start_date__month__range=[star, en]).order_by('-start_date')
        events_req = "1"
    else:
        events = ""
        events_req = "0"
    # Raises 404 when the requesting user has no faculty profile; the value
    # itself is unused (kept as an access check).
    pers = get_object_or_404(faculty_about, user=request.user)
    design = HoldsDesignation.objects.filter(working=request.user)
    desig = []
    for i in design:
        desig.append(str(i.designation))
    context = {'user': user,
               'pf': pf,
               'desig': desig,
               'journal': journal,
               'journal_req': journal_req,
               'conference': conference,
               'conference_req': conference_req,
               'books': books,
               'books_req': books_req,
               'projects': projects,
               'projects_req': projects_req,
               'consultancy': consultancy,
               'consultancy_req': consultancy_req,
               'patents': patents,
               'patents_req': patents_req,
               'techtransfers': techtransfers,
               'techtransfers_req': techtransfers_req,
               'mtechs': mtechs,
               'mtechs_req': mtechs_req,
               'phds': phds,
               'phds_req': phds_req,
               'fvisits': fvisits,
               'fvisits_req': fvisits_req,
               'ivisits': ivisits,
               'ivisits_req': ivisits_req,
               'consymps': consymps,
               'consymps_req': consymps_req,
               'awards': awards,
               'awards_req': awards_req,
               'talks': talks,
               'talks_req': talks_req,
               'chairs': chairs,
               'chairs_req': chairs_req,
               'keynotes': keynotes,
               'keynotes_req': keynotes_req,
               'events': events,
               'events_req': events_req,
               'first_name': request.user.first_name,
               'last_name': request.user.last_name,
               }
    return render_to_pdf('eisModulenew/rspc_generatereportshow.html', context)
| 35.87189
| 182
| 0.520645
|
a32f4da1f9b1689cca2d18bb438fe08e642adc1c
| 14,560
|
py
|
Python
|
acq4/devices/MockCamera/mock_camera.py
|
aleonlein/acq4
|
4b1fcb9ad2c5e8d4595a2b9cf99d50ece0c0f555
|
[
"MIT"
] | 1
|
2020-06-04T17:04:53.000Z
|
2020-06-04T17:04:53.000Z
|
acq4/devices/MockCamera/mock_camera.py
|
aleonlein/acq4
|
4b1fcb9ad2c5e8d4595a2b9cf99d50ece0c0f555
|
[
"MIT"
] | 24
|
2016-09-27T17:25:24.000Z
|
2017-03-02T21:00:11.000Z
|
acq4/devices/MockCamera/mock_camera.py
|
sensapex/acq4
|
9561ba73caff42c609bd02270527858433862ad8
|
[
"MIT"
] | 4
|
2016-10-19T06:39:36.000Z
|
2019-09-30T21:06:45.000Z
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import with_statement
from acq4.devices.Camera import Camera, CameraTask
from acq4.util import Qt
import six
from six.moves import range
import time, sys, traceback
import acq4.util.ptime as ptime
from acq4.util.Mutex import Mutex
from acq4.util.debug import *
import acq4.util.functions as fn
import numpy as np
import scipy
from collections import OrderedDict
import acq4.pyqtgraph as pg
class MockCamera(Camera):
    """Simulated acq4 camera device.

    Produces synthetic frames by slicing a pre-rendered background (either
    image stacks from ``config['images']`` or a Mandelbrot fractal) through
    the device's global transform, adding pre-generated Gaussian noise and a
    set of randomly placed "cells" that spike and decay over time.  Exposure,
    binning, region and gain parameters mimic a real camera driver.
    """

    def __init__(self, manager, config, name):
        # Initialize state before Camera.__init__, which may start acquisition.
        self.camLock = Mutex(Mutex.Recursive)  ## Lock to protect access to camera
        self.ringSize = 100
        self.frameId = 0
        self.noise = np.random.normal(size=10000000, loc=100, scale=10)  ## pre-generate noise for use in images
        if 'images' in config:
            # Background image stacks keyed by objective name; each file is a
            # MetaArray whose first axis holds focal depths.
            self.bgData = {}
            self.bgInfo = {}
            for obj, filename in config['images'].items():
                file = manager.fileHandle(filename)
                ma = file.read()
                self.bgData[obj] = ma.asarray()
                self.bgInfo[obj] = file.info().deepcopy()
                self.bgInfo[obj]['depths'] = ma.xvals(0)
        else:
            # No images configured: fall back to a large pre-rendered fractal.
            self.bgData = mandelbrot(w=4000, maxIter=60).astype(np.float32)
            self.bgInfo = None
        self.background = None  # cached rendered background; invalidated on transform change
        # Current camera parameter values.
        self.params = OrderedDict([
            ('triggerMode', 'Normal'),
            ('exposure', 0.001),
            #('binning', (1,1)),
            #('region', (0, 0, 512, 512)),
            ('binningX', 1),
            ('binningY', 1),
            ('regionX', 0),
            ('regionY', 0),
            ('regionW', 512),
            ('regionH', 512),
            ('gain', 1.0),
            ('sensorSize', (512, 512)),
            ('bitDepth', 16),
        ])
        # Parameter metadata: (allowed values/range, writable, readable, dependencies).
        self.paramRanges = OrderedDict([
            ('triggerMode', (['Normal', 'TriggerStart'], True, True, [])),
            ('exposure', ((0.001, 10.), True, True, [])),
            #('binning', ([range(1,10), range(1,10)], True, True, [])),
            #('region', ([(0, 511), (0, 511), (1, 512), (1, 512)], True, True, [])),
            ('binningX', (list(range(1,10)), True, True, [])),
            ('binningY', (list(range(1,10)), True, True, [])),
            ('regionX', ((0, 511), True, True, ['regionW'])),
            ('regionY', ((0, 511), True, True, ['regionH'])),
            ('regionW', ((1, 512), True, True, ['regionX'])),
            ('regionH', ((1, 512), True, True, ['regionY'])),
            ('gain', ((0.1, 10.0), True, True, [])),
            ('sensorSize', (None, False, True, [])),
            ('bitDepth', (None, False, True, [])),
        ])
        # Composite parameters exposed to callers, mapped to their scalar parts.
        self.groupParams = {
            'binning': ('binningX', 'binningY'),
            'region': ('regionX', 'regionY', 'regionW', 'regionH')
        }

        # Static "signal" pattern (smoothed noise with a bright square).
        sig = np.random.normal(size=(512, 512), loc=1.0, scale=0.3)
        sig = scipy.ndimage.gaussian_filter(sig, (3, 3))
        sig[20:40, 20:40] += 1
        sig[sig<0] = 0
        self.signal = sig

        Camera.__init__(self, manager, config, name)  ## superclass will call setupCamera when it is ready.
        self.acqBuffer = None
        self.frameId = 0
        self.lastIndex = None
        self.lastFrameTime = None
        self.stopOk = False

        self.sigGlobalTransformChanged.connect(self.globalTransformChanged)

        ## generate list of mock cells
        cells = np.zeros(20, dtype=[('x', float), ('y', float), ('size', float), ('value', float), ('rate', float), ('intensity', float), ('decayTau', float)])
        cells['x'] = np.random.normal(size=cells.shape, scale=100e-6, loc=-1.5e-3)
        cells['y'] = np.random.normal(size=cells.shape, scale=100e-6, loc=4.4e-3)
        cells['size'] = np.random.normal(size=cells.shape, scale=2e-6, loc=10e-6)
        cells['rate'] = np.random.lognormal(size=cells.shape, mean=0, sigma=1) * 1.0
        cells['intensity'] = np.random.uniform(size=cells.shape, low=1000, high=10000)
        cells['decayTau'] = np.random.uniform(size=cells.shape, low=15e-3, high=500e-3)
        self.cells = cells

    def setupCamera(self):
        """No hardware to initialize for the mock device."""
        pass

    def globalTransformChanged(self):
        # Invalidate the cached background so it is re-rendered for the new transform.
        self.background = None

    def startCamera(self):
        """Mark acquisition as started and reset the frame clock."""
        self.cameraStarted = True
        self.lastFrameTime = ptime.time()

    def stopCamera(self):
        """Mark acquisition as stopped."""
        self.cameraStopped = True

    def getNoise(self, shape):
        """Return a (shape[0], shape[1]) noise image cut from the pre-generated pool."""
        n = shape[0] * shape[1]
        s = np.random.randint(len(self.noise)-n)
        d = self.noise[s:s+n]
        d.shape = shape
        return np.abs(d)

    def getBackground(self):
        """Return (and cache) the background image for the current global transform.

        When image stacks are configured, the stack for the current objective is
        sliced at the transform's focal depth (with linear interpolation between
        the two nearest depth planes); otherwise a region of the pre-rendered
        fractal is extracted with an affine slice.
        """
        if self.background is None:
            w,h = self.params['sensorSize']
            tr = self.globalTransform()

            if isinstance(self.bgData, dict):
                # select data based on objective
                obj = self.getObjective()
                data = self.bgData[obj]
                info = self.bgInfo[obj]

                px = info['pixelSize']
                pz = info['depths'][1] - info['depths'][0]
                m = Qt.QMatrix4x4()
                pos = info['transform']['pos']
                m.scale(1/px[0], 1/px[1], 1/pz)
                m.translate(-pos[0], -pos[1], -info['depths'][0])
                tr2 = m * tr

                origin = tr2.map(pg.Vector(0, 0, 0))
                #print(origin)
                origin = [int(origin.x()), int(origin.y()), origin.z()]

                ## slice data — clip camera rect to the available image area
                camRect = Qt.QRect(origin[0], origin[1], w, h)
                dataRect = Qt.QRect(0, 0, data.shape[1], data.shape[2])
                overlap = camRect.intersected(dataRect)
                tl = overlap.topLeft() - camRect.topLeft()

                # Interpolate between the two depth planes bracketing z.
                z = origin[2]
                z1 = np.floor(z)
                z2 = np.ceil(z)
                s = (z-z1) / (z2-z1)
                z1 = int(np.clip(z1, 0, data.shape[0]-1))
                z2 = int(np.clip(z2, 0, data.shape[0]-1))
                src1 = data[z1, overlap.left():overlap.left()+overlap.width(), overlap.top():overlap.top()+overlap.height()]
                src2 = data[z2, overlap.left():overlap.left()+overlap.width(), overlap.top():overlap.top()+overlap.height()]
                src = src1 * (1-s) + src2 * s

                bg = np.empty((w, h), dtype=data.dtype)
                bg[:] = 100  # fill value for regions outside the image stack
                bg[tl.x():tl.x()+overlap.width(), tl.y():tl.y()+overlap.height()] = src
                self.background = bg

                #vectors = ([1, 0, 0], [0, 1, 0])
                #self.background = pg.affineSlice(data, (w,h), origin, vectors, (1, 2, 0), order=1)
            else:
                # Fractal background: map sensor pixels into the fractal image.
                tr = pg.SRTTransform(tr)
                m = Qt.QTransform()

                m.scale(3e6, 3e6)
                m.translate(0.0005, 0.0005)
                tr = tr * m

                origin = tr.map(pg.Point(0,0))
                x = (tr.map(pg.Point(1,0)) - origin)
                y = (tr.map(pg.Point(0,1)) - origin)
                origin = np.array([origin.x(), origin.y()])
                x = np.array([x.x(), x.y()])
                y = np.array([y.x(), y.y()])

                ## slice fractal from pre-rendered data
                vectors = (x,y)
                self.background = pg.affineSlice(self.bgData, (w,h), origin, vectors, (0,1), order=1)

        return self.background

    def pixelVectors(self):
        """Return the global-coordinate vectors spanned by one pixel step in x and y."""
        tr = self.globalTransform()
        origin = tr.map(pg.Point(0,0))
        x = (tr.map(pg.Point(1,0)) - origin)
        y = (tr.map(pg.Point(0,1)) - origin)
        origin = np.array([origin.x(), origin.y()])
        x = np.array([x.x(), x.y()])
        y = np.array([y.x(), y.y()])
        return x,y

    def newFrames(self):
        """Return a list of all frames acquired since the last call to newFrames."""
        prof = pg.debug.Profiler(disabled=True)

        now = ptime.time()
        dt = now - self.lastFrameTime
        exp = self.getParam('exposure')
        bin = self.getParam('binning')
        # Effective frame rate: exposure plus a binning-dependent readout time.
        fps = 1.0 / (exp+(40e-3/(bin[0]*bin[1])))
        nf = int(dt * fps)
        if nf == 0:
            return []

        self.lastFrameTime = now + exp
        prof()
        region = self.getParam('region')
        prof()
        bg = self.getBackground()[region[0]:region[0]+region[2], region[1]:region[1]+region[3]]
        prof()

        # Start with noise
        shape = region[2:]
        data = self.getNoise(shape)
        #data = np.zeros(shape, dtype=float)
        prof()

        # Add specimen
        data += bg * (exp * 10)
        prof()

        ## update cells: Poisson spiking with exponential decay of activity
        spikes = np.random.poisson(min(dt, 0.4) * self.cells['rate'])
        self.cells['value'] *= np.exp(-dt / self.cells['decayTau'])
        self.cells['value'] = np.clip(self.cells['value'] + spikes * 0.2, 0, 1)
        data[data<0] = 0

        # draw cells
        px = (self.pixelVectors()[0]**2).sum() ** 0.5

        # Generate transform that maps grom global coordinates to image coordinates
        cameraTr = pg.SRTTransform3D(self.inverseGlobalTransform())
        # note we use binning=(1,1) here because the image is downsampled later.
        frameTr = self.makeFrameTransform(region, [1, 1]).inverted()[0]
        tr = pg.SRTTransform(frameTr * cameraTr)

        for cell in self.cells:
            w = cell['size'] / px
            pos = pg.Point(cell['x'], cell['y'])
            imgPos = tr.map(pos)
            start = (int(imgPos.x()), int(imgPos.y()))
            stop = (int(start[0]+w), int(start[1]+w))
            val = cell['intensity'] * cell['value'] * self.getParam('exposure')
            data[max(0,start[0]):max(0,stop[0]), max(0,start[1]):max(0,stop[1])] += val

        # Binning
        if bin[0] > 1:
            data = fn.downsample(data, bin[0], axis=0)
        if bin[1] > 1:
            data = fn.downsample(data, bin[1], axis=1)
        data = data.astype(np.uint16)
        prof()

        self.frameId += 1
        frames = []
        # All nf frames share the same image data but get distinct timestamps.
        for i in range(nf):
            frames.append({'data': data, 'time': now + (i / fps), 'id': self.frameId})
        prof()
        return frames

    def quit(self):
        """Nothing to shut down for the mock device."""
        pass

    def listParams(self, params=None):
        """List properties of specified parameters, or of all parameters if None"""
        if params is None:
            return self.paramRanges
        else:
            if isinstance(params, six.string_types):
                return self.paramRanges[params]
            out = OrderedDict()
            for k in params:
                out[k] = self.paramRanges[k]
            return out

    def setParams(self, params, autoRestart=True, autoCorrect=True):
        """Set multiple parameters; expands group parameters into their scalar parts.

        Returns (newVals, restart).  NOTE(review): restart is always True here,
        so every change restarts acquisition regardless of the parameter.
        """
        dp = []
        ap = {}
        for k in params:
            if k in self.groupParams:
                ap.update(dict(zip(self.groupParams[k], params[k])))
                dp.append(k)
        params.update(ap)
        for k in dp:
            del params[k]

        self.params.update(params)
        newVals = params
        restart = True
        if autoRestart and restart:
            self.restart()
        self.sigParamsChanged.emit(newVals)
        return (newVals, restart)

    def getParams(self, params=None):
        """Return current values for the given parameter names (all if None),
        reassembling group parameters from their scalar parts."""
        if params is None:
            params = list(self.listParams().keys())
        vals = OrderedDict()
        for k in params:
            if k in self.groupParams:
                vals[k] = list(self.getParams(self.groupParams[k]).values())
            else:
                vals[k] = self.params[k]
        return vals

    def setParam(self, param, value, autoRestart=True, autoCorrect=True):
        """Convenience wrapper around setParams for a single parameter."""
        return self.setParams({param: value}, autoRestart=autoRestart, autoCorrect=autoCorrect)

    def getParam(self, param):
        """Convenience wrapper around getParams for a single parameter."""
        return self.getParams([param])[param]

    def createTask(self, cmd, parentTask):
        """Create the camera task object used by the task runner."""
        with self.lock:
            return MockCameraTask(self, cmd, parentTask)
class MockCameraTask(CameraTask):
    """Generate exposure waveform when recording with mockcamera.
    """
    def __init__(self, dev, cmd, parentTask):
        CameraTask.__init__(self, dev, cmd, parentTask)
        # Register our waveform generator with the (mock) DAQ exposure channel.
        self._DAQCmd['exposure']['lowLevelConf'] = {'mockFunc': self.makeExpWave}
        self.frameTimes = []

    def makeExpWave(self):
        ## Called by DAQGeneric to simulate a read-from-DAQ
        # first look up the DAQ configuration so we know the sample rate / number
        daq = self.dev.listChannels()['exposure']['device']
        cmd = self.parentTask().tasks[daq].cmd
        start = self.parentTask().startTime
        sampleRate = cmd['rate']
        # Build a 0/1 waveform that is high while each frame was exposing
        # (shortened by 0.1 ms so consecutive exposures stay distinguishable).
        data = np.zeros(cmd['numPts'], dtype=np.uint8)
        for f in self.frames:
            t = f.info()['time']
            exp = f.info()['exposure']
            i0 = int((t - start) * sampleRate)
            i1 = i0 + int((exp-0.1e-3) * sampleRate)
            data[i0:i1] = 1
        return data
def mandelbrot(w=500, h=None, maxIter=20, xRange=(-2.0, 1.0), yRange=(-1.2, 1.2)):
    """Render a Mandelbrot-set escape-time image of shape (w, h).

    Each pixel holds the (wrapped) iteration index at which its point was
    still inside the |z| < 2 bound; if h is None it is derived from w so
    that pixels are square over the given x/y ranges.
    """
    xmin, xmax = xRange
    ymin, ymax = yRange
    if h is None:
        h = int(w * (ymax - ymin) / (xmax - xmin))

    re = np.linspace(xmin, xmax, w).reshape(w, 1)
    im = np.linspace(ymin, ymax, h).reshape(1, h)

    # Cheap pre-screen: exclude the main cardioid, the period-2 bulb and
    # everything outside a bounding box before iterating.
    re_off = re - 0.25
    im_sq = im ** 2
    q = re_off ** 2 + im_sq
    alive = q * (q + re_off) > 0.25 * im_sq
    alive &= (re + 1) ** 2 + im_sq > 1 / 16.
    alive &= (re > -2) & (re < 0.7)
    alive &= (im > -1.2) & (im < 1.2)

    img = np.zeros((w, h), dtype=int)
    rows, cols = np.mgrid[0:w, 0:h]
    c = np.empty((w, h), dtype=np.complex64)
    c.real = re.reshape(w)[rows]
    c.imag = im.reshape(h)[cols]
    z = c.copy()

    # Iterate only the still-alive points, shrinking the working arrays as
    # points escape; img records the last iteration each point survived.
    for step in range(maxIter):
        z = z[alive]
        c = c[alive]
        rows = rows[alive]
        cols = cols[alive]
        z *= z
        z += c
        alive = np.abs(z) < 2.
        img[rows[alive], cols[alive]] = step % (maxIter - 1)

    return img
| 36.860759
| 159
| 0.495536
|
eb4d1b093256384ce66a5b5a202ee0f8bfc64c8a
| 3,486
|
py
|
Python
|
bindings/python/ensmallen/datasets/string/helicobactermuridarum.py
|
AnacletoLAB/ensmallen_graph
|
b2c1b18fb1e5801712852bcc239f239e03076f09
|
[
"MIT"
] | 5
|
2021-02-17T00:44:45.000Z
|
2021-08-09T16:41:47.000Z
|
bindings/python/ensmallen/datasets/string/helicobactermuridarum.py
|
AnacletoLAB/ensmallen_graph
|
b2c1b18fb1e5801712852bcc239f239e03076f09
|
[
"MIT"
] | 18
|
2021-01-07T16:47:39.000Z
|
2021-08-12T21:51:32.000Z
|
bindings/python/ensmallen/datasets/string/helicobactermuridarum.py
|
AnacletoLAB/ensmallen
|
b2c1b18fb1e5801712852bcc239f239e03076f09
|
[
"MIT"
] | 3
|
2021-01-14T02:20:59.000Z
|
2021-08-04T19:09:52.000Z
|
"""
This file offers the methods to automatically retrieve the graph Helicobacter muridarum.
The graph is automatically retrieved from the STRING repository.
References
---------------------
Please cite the following if you use the data:
```bib
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
```
"""
from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen import Graph # pylint: disable=import-error
def HelicobacterMuridarum(
    directed: bool = False,
    preprocess: bool = True,
    load_nodes: bool = True,
    verbose: int = 2,
    cache: bool = True,
    cache_path: str = "graphs/string",
    version: str = "links.v11.5",
    **additional_graph_kwargs: Dict
) -> Graph:
    """Retrieve and return the Helicobacter muridarum graph from STRING.

    Parameters
    -------------------
    directed: bool = False
        Whether to load the graph as directed or undirected.
    preprocess: bool = True
        Whether to preprocess the graph for optimal loading time and memory.
    load_nodes: bool = True
        Whether to load the node vocabulary or treat nodes as a numeric range.
    verbose: int = 2
        Whether to show loading bars during retrieval and building.
    cache: bool = True
        Whether to download and preprocess only once.
    cache_path: str = "graphs/string"
        Where to store the downloaded graphs.
    version: str = "links.v11.5"
        Graph version to retrieve; one of "homology.v11.5",
        "physical.links.v11.5" or "links.v11.5".
    additional_graph_kwargs: Dict
        Additional graph kwargs forwarded to the loader.

    Returns
    -----------------------
    Instance of the Helicobacter muridarum graph.

    References
    ---------------------
    Please cite: Szklarczyk et al., "STRING v11", Nucleic Acids Research,
    47(D1):D607-D613, 2019, Oxford University Press.
    """
    retriever = AutomaticallyRetrievedGraph(
        graph_name="HelicobacterMuridarum",
        repository="string",
        version=version,
        directed=directed,
        preprocess=preprocess,
        load_nodes=load_nodes,
        verbose=verbose,
        cache=cache,
        cache_path=cache_path,
        additional_graph_kwargs=additional_graph_kwargs
    )
    return retriever()
| 33.2
| 223
| 0.679002
|
504bbd7c3e47a9eadb3d8fcd7e14476065c11d37
| 21,454
|
py
|
Python
|
ece2cmor3/postproc.py
|
EC-Earth/ece2cmor3
|
8df8584563d16827785d0f31e58316ba3aebba96
|
[
"Apache-2.0"
] | 9
|
2018-11-09T11:28:50.000Z
|
2022-01-17T21:28:33.000Z
|
ece2cmor3/postproc.py
|
EC-Earth/ece2cmor3
|
8df8584563d16827785d0f31e58316ba3aebba96
|
[
"Apache-2.0"
] | 502
|
2018-05-11T11:14:15.000Z
|
2022-03-30T10:04:37.000Z
|
ece2cmor3/postproc.py
|
EC-Earth/ece2cmor3
|
8df8584563d16827785d0f31e58316ba3aebba96
|
[
"Apache-2.0"
] | 4
|
2019-01-21T13:11:19.000Z
|
2022-03-31T13:22:32.000Z
|
import logging
import threading
import re
import Queue
import os
from ece2cmor3 import cmor_task
import grib_file
import cdoapi
import cmor_source
import cmor_target
# Log object
log = logging.getLogger(__name__)

# Threading parameters: number of threads passed to cdo via -P.
cdo_threads = 4

# Flags to control whether to execute cdo.
skip = 1      # never run cdo; only reuse existing output
append = 2    # run cdo only when the output file does not exist yet
recreate = 3  # always run cdo, overwriting previous output
modes = [skip, append, recreate]

# Mode for post-processing (default: recreate).
mode = 3
# Post-processes a task
def post_process(task, path, do_postprocess):
command = create_command(task)
output_path = get_output_path(task, path)
if do_postprocess:
if task.status != cmor_task.status_failed:
filepath = apply_command(command, task, output_path)
else:
filepath = None
else:
filepath = 1
if filepath is not None and task.status != cmor_task.status_failed:
setattr(task, cmor_task.output_path_key, output_path)
def get_output_path(task, tmp_path):
    """Return the destination path <tmp_path>/<variable>_<table>.nc for the
    task, or None when no temporary path was given."""
    if not tmp_path:
        return None
    filename = "%s_%s.nc" % (task.target.variable, task.target.table)
    return os.path.join(tmp_path, filename)
# Checks whether the task grouping makes sense: only tasks for the same variable and frequency can be safely grouped.
def validate_task_list(tasks):
global log
freqset = set(map(lambda t: cmor_target.get_freq(t.target), tasks))
if len(freqset) != 1:
log.error("Multiple target variables joined to single cdo command: %s" % str(freqset))
return False
return True
# Creates a cdo postprocessing command for the given IFS task.
def create_command(task):
if not isinstance(task.source, cmor_source.ifs_source):
raise Exception("This function can only be used to create cdo commands for IFS tasks")
if hasattr(task, "paths") and len(getattr(task, "paths")) > 1:
raise Exception("Multiple merged cdo commands are not supported yet")
result = cdoapi.cdo_command() if hasattr(task.source, cmor_source.expression_key) else cdoapi.cdo_command(
code=task.source.get_grib_code().var_id)
add_grid_operators(result, task)
add_expr_operators(result, task)
add_time_operators(result, task)
add_level_operators(result, task)
return result
# Executes the command and replaces the path attribute for all tasks in the tasklist
# to the output of cdo. This path is constructed from the basepath and the first task.
def apply_command(command, task, output_path=None):
    """Execute the cdo command for *task* on its filtered model output.

    Returns the output file path on success, or None when the task was
    skipped or failed.  Behavior depends on the module-level `mode`:
    recreate always runs cdo, append runs it only when the output file is
    missing, skip reuses an existing output file without running cdo.
    """
    global log, cdo_threads, skip, append, recreate, mode
    if output_path is None and mode in [skip, append]:
        log.warning(
            "Executing post-processing in skip/append mode without path given: this will skip the entire task.")
    input_files = getattr(task, cmor_task.filter_output_key, [])
    if not any(input_files):
        log.error("Cannot execute cdo command %s for given task because it has no model "
                  "output attribute" % command.create_command())
        return None
    # Multiple filtered files are not merged here; only the first is used.
    if len(input_files) > 1:
        log.warning("Task %s in table %s appears to have multiple filtered output files, taking first file %s" %
                    (task.target.variable, task.target.table, input_files[0]))
    input_file = input_files[0]
    comm_string = command.create_command()
    log.info("Post-processing target %s in table %s from file %s with cdo command %s" % (
        task.target.variable, task.target.table, input_file, comm_string))
    setattr(task, "cdo_command", comm_string)
    task.next_state()
    result = None
    if mode != skip:
        if mode == recreate or (mode == append and not os.path.exists(output_path)):
            # Expressions that set a new grib code need grib-format input first.
            merge_expr = (cdoapi.cdo_command.set_code_operator in command.operators)
            result = command.apply(input_file, output_path, cdo_threads, grib_first=merge_expr)
            if not result:
                task.set_failed()
    else:
        # Skip mode: reuse the existing output file if present.
        if os.path.exists(output_path):
            result = output_path
    if result is not None:
        task.next_state()
    return result
def mask_rhs(rhs, mask):
    """Return the cdo expression *rhs* divided by *mask*, or *rhs* unchanged
    when no mask expression is given."""
    if mask is None:
        return rhs
    return "({})/({})".format(rhs, mask)
# Checks whether the string expression denotes height level merging
def add_expr_operators(cdo, task):
missval = getattr(task, "missval", None)
if missval is not None:
cdo.add_operator(cdoapi.cdo_command.set_misstoc_operator, missval)
fillval = getattr(task, "fillval", None)
if fillval is not None:
cdo.add_operator(cdoapi.cdo_command.set_missval_operator, fillval)
input_expr = getattr(task.source, cmor_source.expression_key, None)
mask = getattr(task.source, cmor_source.mask_expression_key, None)
if input_expr is None:
if mask is None:
return
expr = '='.join([cmor_source.grib_code.to_cdo_str(task.source.get_grib_code())] * 2)
else:
expr = input_expr
groups = re.search("^var([0-9]{1,3})\=", expr.replace(" ", ""))
if groups is None:
lhs = cmor_source.grib_code.to_cdo_str(task.source.get_grib_code())
rhs = expr.replace(" ", "")
else:
lhs = groups.group(0)[:-1]
rhs = expr.replace(" ", "")[len(lhs) + 1:]
new_code = int(lhs[3:])
order = getattr(task.source, cmor_source.expression_order_key, 0)
expr_operator = cdoapi.cdo_command.post_expr_operator if order == 1 else cdoapi.cdo_command.expression_operator
if rhs.startswith("merge(") and rhs.endswith(")"):
arg = rhs[6:-1]
sub_expr_list = arg.split(',')
if not any(getattr(task.target, "z_dims", [])):
log.warning("Encountered 3d expression for variable with no z-axis: taking first field")
sub_expr = mask_rhs(sub_expr_list[0].strip(), mask)
if not re.match("var[0-9]{1,3}", sub_expr):
cdo.add_operator(expr_operator, "var" + str(new_code) + "=" + sub_expr)
else:
task.source = cmor_source.ifs_source.read(sub_expr, mask_expr=mask)
root_codes = [int(s.strip()[3:]) for s in re.findall("var[0-9]{1,3}", sub_expr)]
cdo.add_operator(cdoapi.cdo_command.select_code_operator, *root_codes)
return
else:
i = 0
for sub_expr in sub_expr_list:
i += 1
cdo.add_operator(expr_operator, "var" + str(i) + "=" + mask_rhs(sub_expr, mask))
cdo.add_operator(cdoapi.cdo_command.set_code_operator, new_code)
else:
mask_interp_expr = '='.join([lhs, mask_rhs(rhs, mask)])
cdo.add_operator(expr_operator, mask_interp_expr)
cdo.add_operator(cdoapi.cdo_command.select_code_operator, *[c.var_id for c in task.source.get_root_codes()])
# Maps cmor reduction-operator names onto their cdo statistic suffixes,
# used by add_grid_operators for zonal/meridional reductions.
operator_mapping = {"mean": cdoapi.cdo_command.mean, "maximum": cdoapi.cdo_command.max,
                    "minimum": cdoapi.cdo_command.min, "sum": cdoapi.cdo_command.sum}
# Adds grid remapping operators to the cdo commands for the given task
def add_grid_operators(cdo, task):
    """Append horizontal grid remapping/reduction cdo operators for *task* to *cdo*."""
    # Cell-area targets are produced directly by the cdo area operator.
    if task.target.variable.startswith("areacell"):
        cdo.add_operator(cdoapi.cdo_command.area_operator)
    # Spectral fields get a spectral transform; gridded fields are remapped to a
    # regular lat/lon grid (nearest-neighbour when the task requests it).
    if task.source.grid_id() == cmor_source.ifs_grid.spec:
        cdo.add_operator(cdoapi.cdo_command.spectral_operator)
    else:
        if getattr(task, "interpolate", "linear") == "nn":
            chosen_grid = cdoapi.cdo_command.regular_grid_type_nn
        else:
            chosen_grid = cdoapi.cdo_command.regular_grid_type
        cdo.add_operator(cdoapi.cdo_command.gridtype_operator, chosen_grid)
    target_dims = getattr(task.target, cmor_target.dims_key, "").split()
    # A target lacking a longitude axis needs a zonal reduction; one lacking a
    # latitude axis needs a meridional reduction.
    if "longitude" not in target_dims:
        lon_ops = [str(o) for o in getattr(task.target, "longitude_operator", [])]
        if len(lon_ops) == 1 and lon_ops[0] in operator_mapping:
            cdo.add_operator(cdoapi.cdo_command.zonal + operator_mapping[lon_ops[0]])
        else:
            log.error("Longitude reduction operator for task %s in table %s is not supported" % (task.target.variable,
                                                                                                 task.target.table))
            task.set_failed()
    if "latitude" not in target_dims:
        lat_ops = [str(o) for o in getattr(task.target, "latitude_operator", [])]
        if len(lat_ops) == 1 and lat_ops[0] in operator_mapping:
            cdo.add_operator(cdoapi.cdo_command.meridional + operator_mapping[lat_ops[0]])
        else:
            log.error("Latitude reduction operator for task %s in table %s is not supported" % (task.target.variable,
                                                                                                task.target.table))
            task.set_failed()
def _report_unsupported_time_operators(freq, operators, task):
    # Shared failure path: log the unsupported frequency/operator combination
    # and mark the task as failed.
    log.error(
        "Unsupported combination of frequency %s with time operators %s encountered for variable %s in table %s"
        % (freq, str(operators), task.target.variable, task.target.table))
    task.set_failed()


# Adds time averaging operators to the cdo command for the given task
def add_time_operators(cdo, task):
    """Translate the cmor target frequency and time operators of *task* into cdo
    time selection/aggregation operators appended to *cdo*.

    Unsupported combinations are logged and the task is marked failed.
    """
    freq = str(getattr(task.target, cmor_target.freq_key, None))
    operators = [str(o) for o in getattr(task.target, "time_operator", ["point"])]
    # Strip masking clauses such as "mean where sea_ice": only the leading
    # operator word matters for the cdo command.
    for i in range(len(operators)):
        operator_words = operators[i].split(" ")
        if len(operator_words) > 2 and operator_words[1] == "where":
            operators[i] = operator_words[0]
    if freq == "yr":
        if operators == ["mean"]:
            cdo.add_operator(cdoapi.cdo_command.year + cdoapi.cdo_command.mean)
        elif operators == ["maximum"]:
            cdo.add_operator(cdoapi.cdo_command.year + cdoapi.cdo_command.max)
        elif operators == ["minimum"]:
            cdo.add_operator(cdoapi.cdo_command.year + cdoapi.cdo_command.min)
        elif operators == ["sum"]:
            cdo.add_operator(cdoapi.cdo_command.year + cdoapi.cdo_command.sum)
        elif operators == ["maximum within months", "mean over months"]:
            cdo.add_operator(cdoapi.cdo_command.month + cdoapi.cdo_command.max)
            cdo.add_operator(cdoapi.cdo_command.year + cdoapi.cdo_command.mean)
        elif operators == ["minimum within months", "mean over months"]:
            cdo.add_operator(cdoapi.cdo_command.month + cdoapi.cdo_command.min)
            cdo.add_operator(cdoapi.cdo_command.year + cdoapi.cdo_command.mean)
        elif operators == ["maximum within days", "mean over days"]:
            cdo.add_operator(cdoapi.cdo_command.day + cdoapi.cdo_command.max)
            cdo.add_operator(cdoapi.cdo_command.year + cdoapi.cdo_command.mean)
        elif operators == ["minimum within days", "mean over days"]:
            cdo.add_operator(cdoapi.cdo_command.day + cdoapi.cdo_command.min)
            cdo.add_operator(cdoapi.cdo_command.year + cdoapi.cdo_command.mean)
        else:
            _report_unsupported_time_operators(freq, operators, task)
    elif freq == "yrPt":
        # End-of-year values:
        if operators == ["point"]:
            cdo.add_operator(cdoapi.cdo_command.select + cdoapi.cdo_command.month, 12)
            cdo.add_operator(cdoapi.cdo_command.select + cdoapi.cdo_command.day, 31)
            cdo.add_operator(cdoapi.cdo_command.select + cdoapi.cdo_command.hour, 21)
        else:
            _report_unsupported_time_operators(freq, operators, task)
    elif freq == "mon":
        if operators == ["point"]:
            # Mid-month sample: the 15th at 12:00.
            cdo.add_operator(cdoapi.cdo_command.select + cdoapi.cdo_command.day, 15)
            cdo.add_operator(cdoapi.cdo_command.select + cdoapi.cdo_command.hour, 12)
        elif operators == ["mean"]:
            cdo.add_operator(cdoapi.cdo_command.month + cdoapi.cdo_command.mean)
        elif operators == ["maximum"]:
            cdo.add_operator(cdoapi.cdo_command.month + cdoapi.cdo_command.max)
        elif operators == ["minimum"]:
            cdo.add_operator(cdoapi.cdo_command.month + cdoapi.cdo_command.min)
        elif operators == ["sum"]:
            cdo.add_operator(cdoapi.cdo_command.month + cdoapi.cdo_command.sum)
        elif operators == ["maximum within days", "mean over days"]:
            cdo.add_operator(cdoapi.cdo_command.day + cdoapi.cdo_command.max)
            cdo.add_operator(cdoapi.cdo_command.month + cdoapi.cdo_command.mean)
        elif operators == ["minimum within days", "mean over days"]:
            cdo.add_operator(cdoapi.cdo_command.day + cdoapi.cdo_command.min)
            cdo.add_operator(cdoapi.cdo_command.month + cdoapi.cdo_command.mean)
        else:
            _report_unsupported_time_operators(freq, operators, task)
    elif freq == "monPt":
        if operators == ["point"]:
            cdo.add_operator(cdoapi.cdo_command.select + cdoapi.cdo_command.day, 15)
            cdo.add_operator(cdoapi.cdo_command.select + cdoapi.cdo_command.hour, 12)
        else:
            _report_unsupported_time_operators(freq, operators, task)
    elif freq == "day":
        if operators == ["point"]:
            cdo.add_operator(cdoapi.cdo_command.select + cdoapi.cdo_command.hour, 12)
        elif operators == ["mean"]:
            cdo.add_operator(cdoapi.cdo_command.day + cdoapi.cdo_command.mean)
        elif operators == ["maximum"]:
            cdo.add_operator(cdoapi.cdo_command.day + cdoapi.cdo_command.max)
        elif operators == ["minimum"]:
            cdo.add_operator(cdoapi.cdo_command.day + cdoapi.cdo_command.min)
        elif operators == ["sum"]:
            cdo.add_operator(cdoapi.cdo_command.day + cdoapi.cdo_command.sum)
        else:
            _report_unsupported_time_operators(freq, operators, task)
    elif freq in ["6hr", "6hrPt"] and len(operators) == 1:
        add_high_freq_operator(cdo, 6, operators[0], task)
    elif freq in ["3hr", "3hrPt"] and len(operators) == 1:
        add_high_freq_operator(cdo, 3, operators[0], task)
    elif freq == "fx" and (operators == ["point"] or operators == ["mean"]):
        # Bug fix: "and" binds tighter than "or", so the original condition
        # matched *any* frequency with operators == ["mean"], silently selecting
        # the first time step instead of reporting an unsupported combination.
        cdo.add_operator(cdoapi.cdo_command.select_step_operator, 1)
    else:
        _report_unsupported_time_operators(freq, operators, task)
def add_high_freq_operator(cdo_command, target_freq, operator, task):
    """Add sub-daily (3- or 6-hourly) time selection/aggregation operators.

    :param cdo_command: the cdo command object to extend
    :param target_freq: target output frequency in hours (3 or 6)
    :param operator:    cmor time operator ("point", "mean", "minimum", "maximum")
    :param task:        the cmorization task (marked failed on error)
    :return: the (modified) cdo_command
    """
    # Hours of the day sampled at the target frequency: 0, target_freq, 2*target_freq, ...
    # Bug fix: use integer division — on Python 3, 24 / target_freq is a float
    # and range() raises TypeError.
    timestamps = [i * target_freq for i in range(24 // target_freq)]
    aggregators = {"mean": (cmor_source.ifs_source.grib_codes_accum, cdoapi.cdo_command.timselmean_operator),
                   "minimum": (cmor_source.ifs_source.grib_codes_min, cdoapi.cdo_command.timselmin_operator),
                   "maximum": (cmor_source.ifs_source.grib_codes_max, cdoapi.cdo_command.timselmax_operator)}
    if operator == "point":
        # Sampling accumulated/extremum fields gives values that are not true
        # instantaneous values; warn but proceed.
        if any([c for c in task.source.get_root_codes() if c in cmor_source.ifs_source.grib_codes_accum]):
            log.warning("Sampling values of accumulated model output for variable %s in "
                        "table %s" % (task.target.variable, task.target.table))
        if any([c for c in task.source.get_root_codes() if c in cmor_source.ifs_source.grib_codes_min]):
            log.warning("Sampling values of minimum model output for variable %s in "
                        "table %s" % (task.target.variable, task.target.table))
        if any([c for c in task.source.get_root_codes() if c in cmor_source.ifs_source.grib_codes_max]):
            log.warning("Sampling values of maximum model output for variable %s in "
                        "table %s" % (task.target.variable, task.target.table))
        cdo_command.add_operator(cdoapi.cdo_command.select + cdoapi.cdo_command.hour, *timestamps)
    elif operator in aggregators:
        # NOTE(review): this filters root codes and calls all() on the result,
        # which is only False when one of the kept codes is falsy; the intent was
        # probably all(c in aggregators[operator][0] for c in ...). Behavior kept
        # as-is — TODO confirm.
        if not all([c for c in task.source.get_root_codes() if c in aggregators[operator][0]]):
            source_freq = getattr(task, cmor_task.output_frequency_key)
            # Bug fix: floor division — the zero/one checks below assume the
            # Python 2 integer-division semantics of the original code.
            steps = target_freq // source_freq
            if steps == 0:
                log.error("Requested %s at %d-hourly frequency cannot be computed for variable %s in table %s "
                          "because its output frequency is only %d" % (operator, target_freq, task.target.variable,
                                                                       task.target.table, source_freq))
                task.set_failed()
            else:
                log.warning("Computing inaccurate mean value over %d time steps for variable "
                            "%s in table %s" % (steps, task.target.variable, task.target.table))
            if steps == 1:
                cdo_command.add_operator(cdoapi.cdo_command.select + cdoapi.cdo_command.hour, *timestamps)
            else:
                cdo_command.add_operator(aggregators[operator][1], steps)
        else:
            cdo_command.add_operator(cdoapi.cdo_command.select + cdoapi.cdo_command.hour, *timestamps)
    else:
        log.error("The operator %s is not supported by this post-processing software" % operator)
        task.set_failed()
    return cdo_command
# Translates the cmor vertical level post-processing operation to a cdo command-line option
def add_level_operators(cdo, task):
    """Append vertical axis/level selection cdo operators for *task* to *cdo*."""
    # 2D sources have no vertical axis to handle.
    if task.source.spatial_dims == 2:
        return
    z_dims = getattr(task.target, "z_dims", [])
    if not z_dims:
        return
    if len(z_dims) > 1:
        log.error("Multiple level dimensions in table %s are not supported by this post-processing software",
                  task.target.table)
        task.set_failed()
        return
    axis = z_dims[0]
    if axis == "alevel":
        cdo.add_operator(cdoapi.cdo_command.select_z_operator, cdoapi.cdo_command.model_level)
    if axis == "alevhalf":
        log.error("Vertical half-levels in table %s are not supported by this post-processing software",
                  task.target.table)
        task.set_failed()
        return
    axisinfo = cmor_target.get_axis_info(task.target.table).get(axis, None)
    if not axisinfo:
        log.error("Could not retrieve information for axis %s in table %s" % (axis, task.target.table))
        task.set_failed()
        return
    # Requested levels: either an explicit list or a single scalar "value".
    levels = axisinfo.get("requested", [])
    if len(levels) == 0:
        single_value = axisinfo.get("value", None)
        if single_value:
            levels = [single_value]
    # Default to all known axis types; refine from the first input file when available.
    level_types = [grib_file.hybrid_level_code, grib_file.pressure_level_hPa_code, grib_file.height_level_code]
    input_files = getattr(task, cmor_task.filter_output_key, [])
    if any(input_files):
        level_types = cdo.get_z_axes(input_files[0], task.source.get_root_codes()[0].var_id)
    name = axisinfo.get("standard_name", None)
    if name == "air_pressure":
        add_zaxis_operators(cdo, task, level_types, levels, cdoapi.cdo_command.pressure,
                            grib_file.pressure_level_hPa_code)
    elif name in ["height", "altitude"]:
        add_zaxis_operators(cdo, task, level_types, levels, cdoapi.cdo_command.height, grib_file.height_level_code)
    elif axis not in ["alevel", "alevhalf"]:
        log.error("Could not convert vertical axis type %s to CDO axis selection operator" % name)
        task.set_failed()
# Helper function for setting the vertical axis and levels selection
def add_zaxis_operators(cdo, task, lev_types, req_levs, axis_type, axis_code):
    """Select the requested vertical levels on axis *axis_code*, interpolating
    from model levels when the target axis is absent from the input."""
    have_requested = isinstance(req_levs, list) and any(req_levs)
    if axis_code not in lev_types and grib_file.hybrid_level_code in lev_types:
        # Target axis missing but hybrid model levels present: interpolate.
        log.warning(
            "Could not find %s levels for %s, will interpolate from model levels" % (axis_type, task.target.variable))
        # Code 134 is needed by cdo for the model-level interpolation.
        cdo.add_operator(cdoapi.cdo_command.select_code_operator, 134)
        cdo.add_operator(cdoapi.cdo_command.select_z_operator,
                         cdoapi.cdo_command.model_level, cdoapi.cdo_command.surf_level)
        if have_requested:
            cdo.add_operator(cdoapi.cdo_command.ml2pl_operator, *req_levs)
    elif axis_code in lev_types:
        if have_requested:
            available = [float(s) for s in req_levs]
            input_files = getattr(task, cmor_task.filter_output_key, [])
            if any(input_files):
                # Prefer the levels actually present in the first input file.
                available = cdo.get_levels(input_files[0], task.source.get_root_codes()[0].var_id, axis_type)
            if set([float(s) for s in req_levs]) <= set(available):
                cdo.add_operator(cdoapi.cdo_command.select_z_operator, axis_type)
                cdo.add_operator(cdoapi.cdo_command.select_lev_operator, *req_levs)
            else:
                log.error("Could not retrieve %s levels %s from levels %s in file for variable %s"
                          % (axis_type, req_levs, available, task.target.variable))
                task.set_failed()
    else:
        log.error(
            "Could not retrieve %s levels for %s with axes %s" % (axis_type, task.target.variable, str(lev_types)))
        task.set_failed()
| 51.080952
| 120
| 0.649856
|
d3cfd1faedfc4fe51ceadd6272e31a25fd66d037
| 23,786
|
py
|
Python
|
tests/wallet/cc_wallet/test_cc_wallet.py
|
CallMeBrado/cunt-blockchain
|
9b140b7e5541f3baffabe02a55b75d9aeb889999
|
[
"Apache-2.0"
] | 7
|
2021-08-09T19:01:51.000Z
|
2021-12-09T04:32:09.000Z
|
tests/wallet/cc_wallet/test_cc_wallet.py
|
CallMeBrado/cunt-blockchain
|
9b140b7e5541f3baffabe02a55b75d9aeb889999
|
[
"Apache-2.0"
] | 22
|
2021-08-17T04:12:11.000Z
|
2022-03-29T04:10:38.000Z
|
tests/wallet/cc_wallet/test_cc_wallet.py
|
CallMeBrado/cunt-blockchain
|
9b140b7e5541f3baffabe02a55b75d9aeb889999
|
[
"Apache-2.0"
] | 4
|
2021-09-05T12:04:51.000Z
|
2022-03-15T08:44:32.000Z
|
import asyncio
from typing import List
import pytest
from cunt.consensus.block_rewards import calculate_base_farmer_reward, calculate_pool_reward
from cunt.full_node.mempool_manager import MempoolManager
from cunt.simulator.simulator_protocol import FarmNewBlockProtocol
from cunt.types.blockchain_format.coin import Coin
from cunt.types.blockchain_format.sized_bytes import bytes32
from cunt.types.peer_info import PeerInfo
from cunt.util.ints import uint16, uint32, uint64
from cunt.wallet.cc_wallet.cc_utils import cc_puzzle_hash_for_inner_puzzle_hash
from cunt.wallet.cc_wallet.cc_wallet import CCWallet
from cunt.wallet.puzzles.cc_loader import CC_MOD
from cunt.wallet.transaction_record import TransactionRecord
from cunt.wallet.wallet_coin_record import WalletCoinRecord
from tests.setup_nodes import setup_simulators_and_wallets
from tests.time_out_assert import time_out_assert
@pytest.fixture(scope="module")
def event_loop():
    """Module-scoped asyncio event loop fixture for the async tests."""
    yield asyncio.get_event_loop()
async def tx_in_pool(mempool: "MempoolManager", tx_id: "bytes32"):
    """Return True iff the spend bundle with id *tx_id* is present in *mempool*."""
    # get_spendbundle returns None for unknown ids; collapse the if/else into
    # a single boolean expression.
    return mempool.get_spendbundle(tx_id) is not None
class TestCCWallet:
    """Integration tests for the coloured-coin (CC) wallet against a simulated full node."""

    @pytest.fixture(scope="function")
    async def wallet_node(self):
        # One simulated full node, one wallet node.
        async for _ in setup_simulators_and_wallets(1, 1, {}):
            yield _

    @pytest.fixture(scope="function")
    async def two_wallet_nodes(self):
        # One simulated full node, two wallet nodes.
        async for _ in setup_simulators_and_wallets(1, 2, {}):
            yield _

    @pytest.fixture(scope="function")
    async def three_wallet_nodes(self):
        # One simulated full node, three wallet nodes.
        async for _ in setup_simulators_and_wallets(1, 3, {}):
            yield _

    @pytest.mark.asyncio
    async def test_colour_creation(self, two_wallet_nodes):
        """Creating a new CC wallet mints 100 coloured coins once blocks are farmed."""
        num_blocks = 3
        full_nodes, wallets = two_wallet_nodes
        full_node_api = full_nodes[0]
        full_node_server = full_node_api.server
        wallet_node, server_2 = wallets[0]
        wallet = wallet_node.wallet_state_manager.main_wallet
        ph = await wallet.get_new_puzzlehash()
        await server_2.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
        # Farm blocks to fund the standard wallet.
        for i in range(1, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
        # Spendable rewards from the farmed blocks (the newest rewards are excluded).
        funds = sum(
            [
                calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
                for i in range(1, num_blocks - 1)
            ]
        )
        await time_out_assert(15, wallet.get_confirmed_balance, funds)
        cc_wallet: CCWallet = await CCWallet.create_new_cc(wallet_node.wallet_state_manager, wallet, uint64(100))
        tx_queue: List[TransactionRecord] = await wallet_node.wallet_state_manager.tx_store.get_not_sent()
        tx_record = tx_queue[0]
        await time_out_assert(
            15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx_record.spend_bundle.name()
        )
        # Farm to an unrelated puzzle hash so the genesis transaction confirms.
        for i in range(1, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(32 * b"0"))
        await time_out_assert(15, cc_wallet.get_confirmed_balance, 100)
        await time_out_assert(15, cc_wallet.get_unconfirmed_balance, 100)

    @pytest.mark.asyncio
    async def test_cc_spend(self, two_wallet_nodes):
        """Coloured coins can be sent between two CC wallets of the same colour."""
        num_blocks = 3
        full_nodes, wallets = two_wallet_nodes
        full_node_api = full_nodes[0]
        full_node_server = full_node_api.server
        wallet_node, server_2 = wallets[0]
        wallet_node_2, server_3 = wallets[1]
        wallet = wallet_node.wallet_state_manager.main_wallet
        wallet2 = wallet_node_2.wallet_state_manager.main_wallet
        ph = await wallet.get_new_puzzlehash()
        await server_2.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
        await server_3.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
        for i in range(1, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
        funds = sum(
            [
                calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
                for i in range(1, num_blocks - 1)
            ]
        )
        await time_out_assert(15, wallet.get_confirmed_balance, funds)
        # Mint 100 coloured coins in wallet 1.
        cc_wallet: CCWallet = await CCWallet.create_new_cc(wallet_node.wallet_state_manager, wallet, uint64(100))
        tx_queue: List[TransactionRecord] = await wallet_node.wallet_state_manager.tx_store.get_not_sent()
        tx_record = tx_queue[0]
        await time_out_assert(
            15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx_record.spend_bundle.name()
        )
        for i in range(1, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(32 * b"0"))
        await time_out_assert(15, cc_wallet.get_confirmed_balance, 100)
        await time_out_assert(15, cc_wallet.get_unconfirmed_balance, 100)
        assert cc_wallet.cc_info.my_genesis_checker is not None
        colour = cc_wallet.get_colour()
        # Create a second wallet for the same colour and send 60 coins to it.
        cc_wallet_2: CCWallet = await CCWallet.create_wallet_for_cc(wallet_node_2.wallet_state_manager, wallet2, colour)
        assert cc_wallet.cc_info.my_genesis_checker == cc_wallet_2.cc_info.my_genesis_checker
        cc_2_hash = await cc_wallet_2.get_new_inner_hash()
        tx_record = await cc_wallet.generate_signed_transaction([uint64(60)], [cc_2_hash])
        await wallet.wallet_state_manager.add_pending_transaction(tx_record)
        await time_out_assert(
            15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx_record.spend_bundle.name()
        )
        for i in range(1, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
        await time_out_assert(15, cc_wallet.get_confirmed_balance, 40)
        await time_out_assert(15, cc_wallet.get_unconfirmed_balance, 40)
        await time_out_assert(30, cc_wallet_2.get_confirmed_balance, 60)
        await time_out_assert(30, cc_wallet_2.get_unconfirmed_balance, 60)
        # Send 15 coins back from wallet 2 to wallet 1.
        cc_hash = await cc_wallet.get_new_inner_hash()
        tx_record = await cc_wallet_2.generate_signed_transaction([uint64(15)], [cc_hash])
        # NOTE(review): the send-back transaction is pushed through wallet 1's
        # state manager even though wallet 2 created it — confirm intended.
        await wallet.wallet_state_manager.add_pending_transaction(tx_record)
        await time_out_assert(
            15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx_record.spend_bundle.name()
        )
        for i in range(1, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
        await time_out_assert(15, cc_wallet.get_confirmed_balance, 55)
        await time_out_assert(15, cc_wallet.get_unconfirmed_balance, 55)

    @pytest.mark.asyncio
    async def test_get_wallet_for_colour(self, two_wallet_nodes):
        """The wallet state manager resolves a colour string back to its CC wallet."""
        num_blocks = 3
        full_nodes, wallets = two_wallet_nodes
        full_node_api = full_nodes[0]
        full_node_server = full_node_api.server
        wallet_node, server_2 = wallets[0]
        wallet = wallet_node.wallet_state_manager.main_wallet
        ph = await wallet.get_new_puzzlehash()
        await server_2.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
        for i in range(1, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
        funds = sum(
            [
                calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
                for i in range(1, num_blocks - 1)
            ]
        )
        await time_out_assert(15, wallet.get_confirmed_balance, funds)
        cc_wallet: CCWallet = await CCWallet.create_new_cc(wallet_node.wallet_state_manager, wallet, uint64(100))
        for i in range(1, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(32 * b"0"))
        colour = cc_wallet.get_colour()
        assert await wallet_node.wallet_state_manager.get_wallet_for_colour(colour) == cc_wallet

    @pytest.mark.asyncio
    async def test_generate_zero_val(self, two_wallet_nodes):
        """A second wallet can create a zero-valued coloured coin for an existing colour."""
        num_blocks = 4
        full_nodes, wallets = two_wallet_nodes
        full_node_api = full_nodes[0]
        full_node_server = full_node_api.server
        wallet_node, server_2 = wallets[0]
        wallet_node_2, server_3 = wallets[1]
        wallet = wallet_node.wallet_state_manager.main_wallet
        wallet2 = wallet_node_2.wallet_state_manager.main_wallet
        ph = await wallet.get_new_puzzlehash()
        await server_2.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
        await server_3.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
        for i in range(1, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
        funds = sum(
            [
                calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
                for i in range(1, num_blocks - 1)
            ]
        )
        await time_out_assert(15, wallet.get_confirmed_balance, funds)
        cc_wallet: CCWallet = await CCWallet.create_new_cc(wallet_node.wallet_state_manager, wallet, uint64(100))
        await asyncio.sleep(1)
        # Farm rewards to wallet 2 so it can fund the zero-value coin spend.
        ph = await wallet2.get_new_puzzlehash()
        for i in range(1, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
        await time_out_assert(15, cc_wallet.get_confirmed_balance, 100)
        await time_out_assert(15, cc_wallet.get_unconfirmed_balance, 100)
        assert cc_wallet.cc_info.my_genesis_checker is not None
        colour = cc_wallet.get_colour()
        cc_wallet_2: CCWallet = await CCWallet.create_wallet_for_cc(wallet_node_2.wallet_state_manager, wallet2, colour)
        await asyncio.sleep(1)
        assert cc_wallet.cc_info.my_genesis_checker == cc_wallet_2.cc_info.my_genesis_checker
        spend_bundle = await cc_wallet_2.generate_zero_val_coin()
        await asyncio.sleep(1)
        await time_out_assert(15, tx_in_pool, True, full_node_api.full_node.mempool_manager, spend_bundle.name())
        for i in range(1, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))

        async def unspent_count():
            # Number of spendable coin records held by the second CC wallet.
            unspent: List[WalletCoinRecord] = list(
                await cc_wallet_2.wallet_state_manager.get_spendable_coins_for_wallet(cc_wallet_2.id())
            )
            return len(unspent)

        await time_out_assert(15, unspent_count, 1)
        unspent: List[WalletCoinRecord] = list(
            await cc_wallet_2.wallet_state_manager.get_spendable_coins_for_wallet(cc_wallet_2.id())
        )
        # The only spendable coin is the freshly created zero-valued one.
        assert unspent.pop().coin.amount == 0

    @pytest.mark.asyncio
    async def test_cc_spend_uncoloured(self, two_wallet_nodes):
        """Plain (uncoloured) value sent to a CC puzzle hash raises the wallet balance
        but not the coloured-coin balance."""
        num_blocks = 3
        full_nodes, wallets = two_wallet_nodes
        full_node_api = full_nodes[0]
        full_node_server = full_node_api.server
        wallet_node, server_2 = wallets[0]
        wallet_node_2, server_3 = wallets[1]
        wallet = wallet_node.wallet_state_manager.main_wallet
        wallet2 = wallet_node_2.wallet_state_manager.main_wallet
        ph = await wallet.get_new_puzzlehash()
        await server_2.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
        await server_3.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
        for i in range(1, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
        funds = sum(
            [
                calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
                for i in range(1, num_blocks - 1)
            ]
        )
        await time_out_assert(15, wallet.get_confirmed_balance, funds)
        cc_wallet: CCWallet = await CCWallet.create_new_cc(wallet_node.wallet_state_manager, wallet, uint64(100))
        tx_queue: List[TransactionRecord] = await wallet_node.wallet_state_manager.tx_store.get_not_sent()
        tx_record = tx_queue[0]
        await time_out_assert(
            15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx_record.spend_bundle.name()
        )
        for i in range(1, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(32 * b"0"))
        await time_out_assert(15, cc_wallet.get_confirmed_balance, 100)
        await time_out_assert(15, cc_wallet.get_unconfirmed_balance, 100)
        assert cc_wallet.cc_info.my_genesis_checker is not None
        colour = cc_wallet.get_colour()
        cc_wallet_2: CCWallet = await CCWallet.create_wallet_for_cc(wallet_node_2.wallet_state_manager, wallet2, colour)
        assert cc_wallet.cc_info.my_genesis_checker == cc_wallet_2.cc_info.my_genesis_checker
        # Move 60 coloured coins to wallet 2.
        cc_2_hash = await cc_wallet_2.get_new_inner_hash()
        tx_record = await cc_wallet.generate_signed_transaction([uint64(60)], [cc_2_hash])
        await wallet.wallet_state_manager.add_pending_transaction(tx_record)
        await time_out_assert(
            15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx_record.spend_bundle.name()
        )
        for i in range(1, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(32 * b"0"))
        await time_out_assert(15, cc_wallet.get_confirmed_balance, 40)
        await time_out_assert(15, cc_wallet.get_unconfirmed_balance, 40)
        await time_out_assert(15, cc_wallet_2.get_confirmed_balance, 60)
        await time_out_assert(15, cc_wallet_2.get_unconfirmed_balance, 60)
        # Send 10 *uncoloured* units to a CC puzzle hash of wallet 2.
        cc2_ph = await cc_wallet_2.get_new_cc_puzzle_hash()
        tx_record = await wallet.wallet_state_manager.main_wallet.generate_signed_transaction(10, cc2_ph, 0)
        await wallet.wallet_state_manager.add_pending_transaction(tx_record)
        await time_out_assert(
            15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx_record.spend_bundle.name()
        )
        for i in range(0, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(32 * b"0"))
        id = cc_wallet_2.id()
        wsm = cc_wallet_2.wallet_state_manager
        # Wallet-level balance counts the uncoloured 10; the CC balance stays 60.
        await time_out_assert(15, wsm.get_confirmed_balance_for_wallet, 70, id)
        await time_out_assert(15, cc_wallet_2.get_confirmed_balance, 60)
        await time_out_assert(15, cc_wallet_2.get_unconfirmed_balance, 60)

    @pytest.mark.asyncio
    async def test_cc_spend_multiple(self, three_wallet_nodes):
        """Pay two CC wallets in one transaction, then spend back from both."""
        num_blocks = 3
        full_nodes, wallets = three_wallet_nodes
        full_node_api = full_nodes[0]
        full_node_server = full_node_api.server
        wallet_node_0, wallet_server_0 = wallets[0]
        wallet_node_1, wallet_server_1 = wallets[1]
        wallet_node_2, wallet_server_2 = wallets[2]
        wallet_0 = wallet_node_0.wallet_state_manager.main_wallet
        wallet_1 = wallet_node_1.wallet_state_manager.main_wallet
        wallet_2 = wallet_node_2.wallet_state_manager.main_wallet
        ph = await wallet_0.get_new_puzzlehash()
        await wallet_server_0.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
        await wallet_server_1.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
        await wallet_server_2.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
        for i in range(1, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
        funds = sum(
            [
                calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
                for i in range(1, num_blocks - 1)
            ]
        )
        await time_out_assert(15, wallet_0.get_confirmed_balance, funds)
        cc_wallet_0: CCWallet = await CCWallet.create_new_cc(wallet_node_0.wallet_state_manager, wallet_0, uint64(100))
        tx_queue: List[TransactionRecord] = await wallet_node_0.wallet_state_manager.tx_store.get_not_sent()
        tx_record = tx_queue[0]
        await time_out_assert(
            15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx_record.spend_bundle.name()
        )
        for i in range(1, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(32 * b"0"))
        await time_out_assert(15, cc_wallet_0.get_confirmed_balance, 100)
        await time_out_assert(15, cc_wallet_0.get_unconfirmed_balance, 100)
        assert cc_wallet_0.cc_info.my_genesis_checker is not None
        colour = cc_wallet_0.get_colour()
        cc_wallet_1: CCWallet = await CCWallet.create_wallet_for_cc(
            wallet_node_1.wallet_state_manager, wallet_1, colour
        )
        cc_wallet_2: CCWallet = await CCWallet.create_wallet_for_cc(
            wallet_node_2.wallet_state_manager, wallet_2, colour
        )
        assert cc_wallet_0.cc_info.my_genesis_checker == cc_wallet_1.cc_info.my_genesis_checker
        assert cc_wallet_0.cc_info.my_genesis_checker == cc_wallet_2.cc_info.my_genesis_checker
        # One transaction paying 60 to wallet 1 and 20 to wallet 2.
        cc_1_hash = await cc_wallet_1.get_new_inner_hash()
        cc_2_hash = await cc_wallet_2.get_new_inner_hash()
        tx_record = await cc_wallet_0.generate_signed_transaction([uint64(60), uint64(20)], [cc_1_hash, cc_2_hash])
        await wallet_0.wallet_state_manager.add_pending_transaction(tx_record)
        await time_out_assert(
            15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx_record.spend_bundle.name()
        )
        for i in range(1, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(32 * b"0"))
        await time_out_assert(15, cc_wallet_0.get_confirmed_balance, 20)
        await time_out_assert(15, cc_wallet_0.get_unconfirmed_balance, 20)
        await time_out_assert(30, cc_wallet_1.get_confirmed_balance, 60)
        await time_out_assert(30, cc_wallet_1.get_unconfirmed_balance, 60)
        await time_out_assert(30, cc_wallet_2.get_confirmed_balance, 20)
        await time_out_assert(30, cc_wallet_2.get_unconfirmed_balance, 20)
        # Both receiving wallets send coins back to wallet 0 concurrently.
        cc_hash = await cc_wallet_0.get_new_inner_hash()
        tx_record = await cc_wallet_1.generate_signed_transaction([uint64(15)], [cc_hash])
        await wallet_1.wallet_state_manager.add_pending_transaction(tx_record)
        tx_record_2 = await cc_wallet_2.generate_signed_transaction([uint64(20)], [cc_hash])
        await wallet_2.wallet_state_manager.add_pending_transaction(tx_record_2)
        await time_out_assert(
            15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx_record.spend_bundle.name()
        )
        await time_out_assert(
            15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx_record_2.spend_bundle.name()
        )
        for i in range(1, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(32 * b"0"))
        await time_out_assert(15, cc_wallet_0.get_confirmed_balance, 55)
        await time_out_assert(15, cc_wallet_0.get_unconfirmed_balance, 55)
        await time_out_assert(30, cc_wallet_1.get_confirmed_balance, 45)
        await time_out_assert(30, cc_wallet_1.get_unconfirmed_balance, 45)
        await time_out_assert(30, cc_wallet_2.get_confirmed_balance, 0)
        await time_out_assert(30, cc_wallet_2.get_unconfirmed_balance, 0)

    @pytest.mark.asyncio
    async def test_cc_max_amount_send(self, two_wallet_nodes):
        """get_max_send_amount bounds the value a single CC transaction can carry."""
        num_blocks = 3
        full_nodes, wallets = two_wallet_nodes
        full_node_api = full_nodes[0]
        full_node_server = full_node_api.server
        wallet_node, server_2 = wallets[0]
        wallet_node_2, server_3 = wallets[1]
        wallet = wallet_node.wallet_state_manager.main_wallet
        ph = await wallet.get_new_puzzlehash()
        await server_2.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
        await server_3.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
        for i in range(1, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
        funds = sum(
            [
                calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
                for i in range(1, num_blocks - 1)
            ]
        )
        await time_out_assert(15, wallet.get_confirmed_balance, funds)
        cc_wallet: CCWallet = await CCWallet.create_new_cc(wallet_node.wallet_state_manager, wallet, uint64(100000))
        tx_queue: List[TransactionRecord] = await wallet_node.wallet_state_manager.tx_store.get_not_sent()
        tx_record = tx_queue[0]
        await time_out_assert(
            15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx_record.spend_bundle.name()
        )
        for i in range(1, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(32 * b"0"))
        await time_out_assert(15, cc_wallet.get_confirmed_balance, 100000)
        await time_out_assert(15, cc_wallet.get_unconfirmed_balance, 100000)
        assert cc_wallet.cc_info.my_genesis_checker is not None
        # Split the big coin into 49 coins of amounts 1..49 at the same puzzle hash.
        cc_2_hash = await cc_wallet.get_new_inner_hash()
        amounts = []
        puzzle_hashes = []
        for i in range(1, 50):
            amounts.append(uint64(i))
            puzzle_hashes.append(cc_2_hash)
        spent_coint = (await cc_wallet.get_cc_spendable_coins())[0].coin  # NOTE(review): "coint" typo kept as-is
        tx_record = await cc_wallet.generate_signed_transaction(amounts, puzzle_hashes, coins={spent_coint})
        await wallet.wallet_state_manager.add_pending_transaction(tx_record)
        await time_out_assert(
            15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx_record.spend_bundle.name()
        )
        for i in range(1, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
        await asyncio.sleep(2)

        async def check_all_there():
            # Verify all 49 freshly created coins are spendable by the wallet.
            spendable = await cc_wallet.get_cc_spendable_coins()
            spendable_name_set = set()
            for record in spendable:
                spendable_name_set.add(record.coin.name())
            puzzle_hash = cc_puzzle_hash_for_inner_puzzle_hash(CC_MOD, cc_wallet.cc_info.my_genesis_checker, cc_2_hash)
            for i in range(1, 50):
                coin = Coin(spent_coint.name(), puzzle_hash, i)
                if coin.name() not in spendable_name_set:
                    return False
            return True

        await time_out_assert(15, check_all_there, True)
        await asyncio.sleep(5)
        max_sent_amount = await cc_wallet.get_max_send_amount()
        # 1) Generate transaction that is under the limit
        under_limit_tx = None
        try:
            under_limit_tx = await cc_wallet.generate_signed_transaction(
                [max_sent_amount - 1],
                [ph],
            )
        except ValueError:
            # NOTE(review): `assert ValueError` is always truthy, so a raise here
            # passes silently; the `is not None` assert below still catches it.
            assert ValueError
        assert under_limit_tx is not None
        # 2) Generate transaction that is equal to limit
        at_limit_tx = None
        try:
            at_limit_tx = await cc_wallet.generate_signed_transaction(
                [max_sent_amount],
                [ph],
            )
        except ValueError:
            assert ValueError
        assert at_limit_tx is not None
        # 3) Generate transaction that is greater than limit
        above_limit_tx = None
        try:
            above_limit_tx = await cc_wallet.generate_signed_transaction(
                [max_sent_amount + 1],
                [ph],
            )
        except ValueError:
            # Exceeding the limit is expected to raise.
            pass
        assert above_limit_tx is None
| 43.247273
| 120
| 0.701715
|
031f580fa9b928ecaa058cf3731b1f0f7a510ce7
| 18,004
|
py
|
Python
|
Software/STNeuroNet/application_driver.py
|
looooongChen/STNeuroNet
|
6537096d8b2c05858320703418e2a3e2897be928
|
[
"Apache-2.0"
] | 1
|
2019-09-10T21:28:25.000Z
|
2019-09-10T21:28:25.000Z
|
Software/STNeuroNet/application_driver.py
|
looooongChen/STNeuroNet
|
6537096d8b2c05858320703418e2a3e2897be928
|
[
"Apache-2.0"
] | null | null | null |
Software/STNeuroNet/application_driver.py
|
looooongChen/STNeuroNet
|
6537096d8b2c05858320703418e2a3e2897be928
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
This module defines a general procedure for running applications
Example usage:
app_driver = ApplicationDriver()
app_driver.initialise_application(system_param, input_data_param)
app_driver.run_application()
system_param and input_data_param should be generated using:
niftynet.utilities.user_parameters_parser.run()
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import time
import tensorflow as tf
from niftynet.engine.application_factory import ApplicationFactory
from niftynet.engine.application_variables import CONSOLE
from niftynet.engine.application_variables import GradientsCollector
from niftynet.engine.application_variables import NETORK_OUTPUT
from niftynet.engine.application_variables import OutputsCollector
from niftynet.engine.application_variables import TF_SUMMARIES
from niftynet.engine.application_variables import \
global_vars_init_or_restore
from niftynet.io.misc_io import get_latest_subfolder
from niftynet.io.misc_io import touch_folder
from niftynet.layer.bn import BN_COLLECTION
from niftynet.utilities.util_common import set_cuda_device
FILE_PREFIX = 'model.ckpt'
class ApplicationDriver(object):
    """
    This class initialises an application by building a TF graph,
    and maintaining a session and coordinator. It controls the
    starting/stopping of an application. Applications should be
    implemented by inheriting niftynet.application.base_application
    to be compatible with this driver.

    Typical lifecycle (see module docstring):
    ``initialise_application(...)`` then ``run_application()``.
    """

    # pylint: disable=too-many-instance-attributes
    def __init__(self):
        # Application instance, TF graph and saver are created lazily:
        # ``app`` in initialise_application(), ``graph``/``saver`` in
        # _create_graph().
        self.app = None
        self.graph = None
        self.saver = None

        # Mode flag; also gates how many threads/GPUs are used below.
        self.is_training = True
        self.num_threads = 0
        self.num_gpus = 0

        # Output locations: ``model_dir`` holds checkpoints,
        # ``summary_dir`` holds tensorboard logs, ``session_prefix`` is
        # the checkpoint filename prefix (model_dir + FILE_PREFIX).
        self.model_dir = None
        self.summary_dir = None
        self.session_prefix = None

        # Training schedule parameters (overwritten from user config).
        self.max_checkpoints = 20
        self.save_every_n = 10
        self.tensorboard_every_n = 20
        self.initial_iter = 0
        self.final_iter = 0

        # TF coordinator for sampler threads and the variable-init op.
        self._coord = None
        self._init_op = None

        # Collect network outputs / per-device gradients across GPUs.
        self.outputs_collector = None
        self.gradients_collector = None

    def initialise_application(self, workflow_param, data_param):
        """
        This function receives all parameters from user config file,
        create an instance of application.

        :param workflow_param: a dictionary of user parameters,
            keys correspond to sections in the config file
        :param data_param: a dictionary of input image parameters,
            keys correspond to data properties to be used by image_reader
        :return:
        """
        try:
            system_param = workflow_param.get('SYSTEM', None)
            net_param = workflow_param.get('NETWORK', None)
            train_param = workflow_param.get('TRAINING', None)
            infer_param = workflow_param.get('INFERENCE', None)
            app_param = workflow_param.get('CUSTOM', None)
        except AttributeError:
            tf.logging.fatal('parameters should be dictionaries')
            raise

        self.is_training = (system_param.action == "train")
        # hardware-related parameters; inference is forced to a single
        # thread and at most one GPU.
        self.num_threads = max(system_param.num_threads, 1) \
            if self.is_training else 1
        self.num_gpus = system_param.num_gpus \
            if self.is_training else min(system_param.num_gpus, 1)
        set_cuda_device(system_param.cuda_devices)

        # set output folders (touch_folder creates them if missing)
        self.model_dir = touch_folder(
            os.path.join(system_param.model_dir, 'models'))
        self.session_prefix = os.path.join(self.model_dir, FILE_PREFIX)

        if self.is_training:
            assert train_param, 'training parameters not specified'
            summary_root = os.path.join(self.model_dir, 'logs')
            # reuse the latest log subfolder unless starting from iter 0
            self.summary_dir = get_latest_subfolder(
                summary_root, train_param.starting_iter == 0)

            # training iterations-related parameters
            self.initial_iter = train_param.starting_iter
            self.final_iter = train_param.max_iter
            self.save_every_n = train_param.save_every_n
            self.tensorboard_every_n = train_param.tensorboard_every_n
            self.max_checkpoints = train_param.max_checkpoints
            self.gradients_collector = GradientsCollector(
                n_devices=max(self.num_gpus, 1))
            action_param = train_param
        else:
            assert infer_param, 'inference parameters not specified'
            self.initial_iter = infer_param.inference_iter
            action_param = infer_param

        self.outputs_collector = OutputsCollector(
            n_devices=max(self.num_gpus, 1))

        # create an application instance from the registered name
        assert app_param, 'application specific param. not specified'
        app_module = ApplicationDriver._create_app(app_param.name)
        self.app = app_module(net_param, action_param, self.is_training)

        # initialise data input
        self.app.initialise_dataset_loader(data_param, app_param)

    def run_application(self):
        """
        Initialise a TF graph, connect data sampler and network within
        the graph context, run training loops or inference loops.
        The training loop terminates when self.final_iter reached.
        The inference loop terminates when there is no more
        image sample to be processed from image reader.

        :return:
        """
        self.graph = self._create_graph()
        self.app.check_initialisations()
        config = ApplicationDriver._tf_config()
        with tf.Session(config=config, graph=self.graph) as session:
            # initialise network (random init or checkpoint restore)
            tf.logging.info('starting from iter %d', self.initial_iter)
            self._rand_init_or_restore_vars(session)

            # start samplers' threads
            tf.logging.info('Filling queues (this can take a few minutes)')
            self._coord = tf.train.Coordinator()
            for sampler in self.app.get_sampler():
                sampler.run_threads(session, self._coord, self.num_threads)

            start_time = time.time()
            # loop_status carries progress info into the ``finally`` block
            loop_status = {}
            try:
                # iteratively run the graph
                if self.is_training:
                    loop_status['current_iter'] = self.initial_iter
                    self._training_loop(session, loop_status)
                else:
                    loop_status['all_saved_flag'] = False
                    self._inference_loop(session, loop_status)
            except KeyboardInterrupt:
                tf.logging.warning('User cancelled application')
            except tf.errors.OutOfRangeError:
                # raised when sampler queues are exhausted -- normal stop
                pass
            except RuntimeError:
                import sys
                import traceback
                exc_type, exc_value, exc_traceback = sys.exc_info()
                traceback.print_exception(
                    exc_type, exc_value, exc_traceback, file=sys.stdout)
            finally:
                tf.logging.info('Cleaning up...')
                # NOTE(review): truthiness test means a current_iter of 0
                # skips the final save -- presumably intended (nothing
                # trained yet), but worth confirming.
                if self.is_training and loop_status.get('current_iter', None):
                    self._save_model(session, loop_status['current_iter'])
                elif loop_status.get('all_saved_flag', None):
                    if not loop_status['all_saved_flag']:
                        tf.logging.warning('stopped early, incomplete loops')

                tf.logging.info('stopping sampling threads')
                self.app.stop()
                tf.logging.info(
                    "%s stopped (time in second %.2f).",
                    type(self.app).__name__, (time.time() - start_time))

    # pylint: disable=not-context-manager
    def _create_graph(self):
        """
        tensorflow graph is only created within this function
        """
        graph = tf.Graph()
        main_device = self._device_string(0, is_worker=False)
        # start constructing the graph, handling training and inference cases
        with graph.as_default(), tf.device(main_device):

            # initialise sampler and network, these are connected in
            # the context of multiple gpus
            with tf.name_scope('Sampler'):
                self.app.initialise_sampler()
            self.app.initialise_network()

            # for data parallelism --
            # defining and collecting variables from multiple devices
            bn_ops = None
            for gpu_id in range(0, max(self.num_gpus, 1)):
                worker_device = self._device_string(gpu_id, is_worker=True)
                scope_string = 'worker_{}'.format(gpu_id)
                with tf.name_scope(scope_string) as scope:
                    with tf.device(worker_device):
                        # setup network for each of the multiple devices
                        self.app.connect_data_and_network(
                            self.outputs_collector,
                            self.gradients_collector)
                        if self.is_training:
                            # batch norm statistics from the last device
                            bn_ops = tf.get_collection(BN_COLLECTION, scope)

            # assemble all training operations
            if self.is_training and self.gradients_collector:
                updates_op = []
                # batch normalisation moving averages operation
                if bn_ops:
                    updates_op.extend(bn_ops)
                # combine them with model parameter updating operation
                with tf.name_scope('ApplyGradients'):
                    with graph.control_dependencies(updates_op):
                        self.app.set_network_update_op(
                            self.gradients_collector.gradients)

            # initialisation operation
            with tf.name_scope('Initialization'):
                self._init_op = global_vars_init_or_restore()

            with tf.name_scope('MergedOutputs'):
                self.outputs_collector.finalise_output_op()
            # saving operation
            self.saver = tf.train.Saver(max_to_keep=self.max_checkpoints)

        # no more operation definitions after this point
        tf.Graph.finalize(graph)
        return graph

    def _rand_init_or_restore_vars(self, sess):
        """
        Randomly initialising all trainable variables defined in session,
        or loading checkpoint files as variable initialisations
        """
        if self.is_training and self.initial_iter == 0:
            sess.run(self._init_op)
            tf.logging.info('Parameters from random initialisations ...')
            return

        # check model's folder
        assert os.path.exists(self.model_dir), \
            "Model folder not found {}, please check" \
            "config parameter: model_dir".format(self.model_dir)

        # check model's file
        ckpt_state = tf.train.get_checkpoint_state(self.model_dir)
        if ckpt_state is None:
            # NOTE(review): tf.logging.fatal only logs, it does not raise;
            # execution falls through and fails later -- confirm intended.
            tf.logging.fatal(
                "%s/checkpoint not found, please check"
                "config parameter: model_dir", self.model_dir)
        if self.initial_iter > 0:
            # explicit iteration requested: build the checkpoint name
            checkpoint = '{}-{}'.format(self.session_prefix, self.initial_iter)
        else:
            try:
                checkpoint = ckpt_state.model_checkpoint_path
                # ADDED to fix the absolute path problem
                INDX = checkpoint.find('model.')
                checkpoint = os.path.join(self.model_dir,checkpoint[INDX:])
                ####################
                assert checkpoint, 'checkpoint path not found ' \
                                   'in {}/checkpoints'.format(self.model_dir)
                # derive the iteration number from the "<prefix>-<iter>" name
                self.initial_iter = int(checkpoint.rsplit('-')[-1])
                tf.logging.info('set initial_iter to %d based '
                                'on checkpoints', self.initial_iter)
            except (ValueError, AttributeError):
                tf.logging.fatal('failed to get iteration number'
                                 'from checkpoint path')
                raise

        # restore session
        tf.logging.info('Accessing %s ...', checkpoint)
        try:
            self.saver.restore(sess, checkpoint)
        except tf.errors.NotFoundError:
            tf.logging.fatal(
                'checkpoint %s not found or variables to restore do not '
                'match the current application graph', checkpoint)
            raise

    def _training_loop(self, sess, loop_status):
        """
        Training loop is running through the training_ops generator
        defined for each application (the application can specify
        training ops based on the current iteration number, this allows
        for complex optimisation schedules).
        At every iteration it also evaluates all variables returned by
        the output_collector.
        """
        writer = tf.summary.FileWriter(self.summary_dir, sess.graph)

        # running through training_op from application
        for (iter_i, train_op) in \
                self.app.training_ops(self.initial_iter, self.final_iter):

            loop_status['current_iter'] = iter_i
            local_time = time.time()
            if self._coord.should_stop():
                break

            # variables to the graph: always run the train op plus the
            # console/network output collections
            vars_to_run = dict(train_op=train_op)
            vars_to_run[CONSOLE], vars_to_run[NETORK_OUTPUT] = \
                self.outputs_collector.variables(CONSOLE), \
                self.outputs_collector.variables(NETORK_OUTPUT)
            if self.tensorboard_every_n > 0 and \
                    (iter_i % self.tensorboard_every_n == 0):
                # adding tensorboard summary
                vars_to_run[TF_SUMMARIES] = \
                    self.outputs_collector.variables(collection=TF_SUMMARIES)

            # run all variables in one go
            graph_output = sess.run(vars_to_run)

            # process graph outputs
            self.app.interpret_output(graph_output[NETORK_OUTPUT])
            console_str = self._console_vars_to_str(graph_output[CONSOLE])
            summary = graph_output.get(TF_SUMMARIES, {})
            if summary:
                writer.add_summary(summary, iter_i)

            # save current model
            if (self.save_every_n > 0) and (iter_i % self.save_every_n == 0):
                self._save_model(sess, iter_i)

            tf.logging.info('iter %d, %s (%.3fs)',
                            iter_i, console_str, time.time() - local_time)

    def _inference_loop(self, sess, loop_status):
        """
        Runs all variables returned by outputs_collector,
        this loop stops when the return value of
        application.interpret_output is False.
        """
        loop_status['all_saved_flag'] = False
        while True:
            local_time = time.time()
            if self._coord.should_stop():
                break

            # build variables to run
            vars_to_run = dict()
            vars_to_run[NETORK_OUTPUT], vars_to_run[CONSOLE] = \
                self.outputs_collector.variables(NETORK_OUTPUT), \
                self.outputs_collector.variables(CONSOLE)

            # evaluate the graph variables
            graph_output = sess.run(vars_to_run)

            # process the graph outputs; a False return signals completion
            if not self.app.interpret_output(graph_output[NETORK_OUTPUT]):
                tf.logging.info('processed all batches.')
                loop_status['all_saved_flag'] = True
                break
            console_str = self._console_vars_to_str(graph_output[CONSOLE])
            tf.logging.info(
                '%s (%.3fs)', console_str, time.time() - local_time)

    def _save_model(self, session, iter_i):
        """
        save session parameters to the hard drive
        """
        if iter_i <= 0:
            return
        self.saver.save(sess=session,
                        save_path=self.session_prefix,
                        global_step=iter_i)
        tf.logging.info('iter %d saved: %s', iter_i, self.session_prefix)

    def _device_string(self, device_id=0, is_worker=True):
        """
        assigning CPU/GPU based on user specifications
        """
        # pylint: disable=no-name-in-module
        from tensorflow.python.client import device_lib
        devices = device_lib.list_local_devices()
        n_local_gpus = sum([x.device_type == 'GPU' for x in devices])
        if self.num_gpus <= 0:  # user specified no gpu at all
            return '/cpu:{}'.format(device_id)
        if self.is_training:
            # in training: use gpu only for workers whenever n_local_gpus
            device = 'gpu' if (is_worker and n_local_gpus > 0) else 'cpu'
            if device == 'gpu' and device_id >= n_local_gpus:
                tf.logging.fatal(
                    'trying to use gpu id %s, but only has %s GPU(s), '
                    'please set num_gpus to %s at most',
                    device_id, n_local_gpus, n_local_gpus)
                raise ValueError
            return '/{}:{}'.format(device, device_id)
        # in inference: use gpu for everything whenever n_local_gpus
        return '/gpu:0' if n_local_gpus > 0 else '/cpu:0'

    @staticmethod
    def _console_vars_to_str(console_dict):
        """
        Printing values of variable evaluations to command line output
        """
        if not console_dict:
            return ''
        console_str = ', '.join(
            '{}={}'.format(key, val) for (key, val) in console_dict.items())
        return console_str

    @staticmethod
    def _create_app(app_type_string):
        """
        Import the application module
        """
        return ApplicationFactory.create(app_type_string)

    @staticmethod
    def _tf_config():
        """
        tensorflow system configurations
        """
        config = tf.ConfigProto()
        config.log_device_placement = False
        config.allow_soft_placement = True
        return config
| 41.293578
| 79
| 0.612142
|
203cba017e9464fdeee0e4647c552ae7f9d28d68
| 1,477
|
py
|
Python
|
redisext/queue.py
|
mylokin/redisext
|
b49c2699183d795ec462f0288bb3fb7110936a89
|
[
"MIT"
] | null | null | null |
redisext/queue.py
|
mylokin/redisext
|
b49c2699183d795ec462f0288bb3fb7110936a89
|
[
"MIT"
] | 5
|
2015-02-15T11:03:29.000Z
|
2018-03-05T19:11:20.000Z
|
redisext/queue.py
|
mylokin/redisext
|
b49c2699183d795ec462f0288bb3fb7110936a89
|
[
"MIT"
] | 2
|
2018-03-05T19:06:34.000Z
|
2020-07-01T09:18:40.000Z
|
from __future__ import absolute_import
import redisext.models.abc
class Queue(redisext.models.abc.Model):
    """FIFO queue backed by a Redis list.

    Elements are LPUSHed at the head and RPOPed from the tail, so the
    oldest element is always returned first.
    """

    def pop(self):
        '''
        Pop item from queue.

        :returns: item from queue
        '''
        raw = self.connect_to_master().rpop(self.key)
        return self.decode(raw)

    def push(self, item):
        '''
        Push item into queue.

        :param item:
        :type item:

        :returns: number of items in queue
        :rtype: int
        '''
        encoded = self.encode(item)
        master = self.connect_to_master()
        return master.lpush(self.key, encoded)

    def size(self):
        '''
        Get queue size.

        :returns: number of items in queue
        :rtype: int
        '''
        master = self.connect_to_master()
        return master.llen(self.key)

    def items(self):
        '''
        Get all queue items.

        :returns: ordered list of items
        :rtype: list
        '''
        raw_items = self.connect_to_slave().lrange(self.key, 0, -1)
        decoded = []
        for raw in raw_items:
            decoded.append(self.decode(raw))
        return decoded
class PriorityQueue(redisext.models.abc.Model):
    """Priority queue backed by a Redis sorted set.

    ``push`` stores items with an integer score; ``pop`` returns the item
    with the lowest score. Note that pop is not atomic (fetch then remove),
    so concurrent consumers may race -- TODO confirm single-consumer usage.
    """

    def pop(self):
        '''
        Pop the lowest-score item from the queue.

        :returns: decoded item, or decoded ``None`` when the queue is empty
        '''
        redis = self.connect_to_master()
        # redis-py requires ``start`` and ``num`` to be given together;
        # passing only ``num`` raises RedisError.
        found = redis.zrangebyscore(self.key, '-inf', '+inf', start=0, num=1)
        item = found[0] if found else None
        if item is not None:
            # Only remove when something was fetched; the original called
            # zrem with None on an empty queue.
            redis.zrem(self.key, item)
        return self.decode(item)

    def push(self, item, priority):
        '''
        Push an item with the given priority (sorted-set score).

        :param item: value to store (encoded before insertion)
        :param priority: integer score; lower scores pop first
        :returns: number of new elements added to the sorted set
        :rtype: int
        '''
        item = self.encode(item)
        data = {item: int(priority)}
        return self.connect_to_master().zadd(self.key, data)
| 24.213115
| 67
| 0.570752
|
7a4d36333dd35763c1d629a72391c50412b63ebb
| 18,648
|
py
|
Python
|
dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numpy/core/tests/test_half.py
|
jeikabu/lumberyard
|
07228c605ce16cbf5aaa209a94a3cb9d6c1a4115
|
[
"AML"
] | 145
|
2017-01-19T23:33:03.000Z
|
2021-06-05T05:34:55.000Z
|
dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numpy/core/tests/test_half.py
|
jeikabu/lumberyard
|
07228c605ce16cbf5aaa209a94a3cb9d6c1a4115
|
[
"AML"
] | 17
|
2017-02-03T20:51:39.000Z
|
2020-05-21T11:33:52.000Z
|
dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numpy/core/tests/test_half.py
|
jeikabu/lumberyard
|
07228c605ce16cbf5aaa209a94a3cb9d6c1a4115
|
[
"AML"
] | 44
|
2017-02-04T19:40:03.000Z
|
2020-10-01T19:24:19.000Z
|
from __future__ import division, absolute_import, print_function
import platform
import numpy as np
from numpy import uint16, float16, float32, float64
from numpy.testing import TestCase, run_module_suite, assert_, assert_equal, \
dec
def assert_raises_fpe(strmatch, callable, *args, **kwargs):
    """Assert that ``callable(*args, **kwargs)`` raises a FloatingPointError
    whose message contains *strmatch*; fail via ``assert_`` otherwise."""
    raised_matching = False
    message = "Did not raise floating point %s error" % strmatch
    try:
        callable(*args, **kwargs)
    except FloatingPointError as exc:
        raised_matching = str(exc).find(strmatch) >= 0
    assert_(raised_matching, message)
class TestHalf(TestCase):
    """Exhaustive tests of the float16 type: conversions, comparisons,
    ufuncs, coercion rules and floating-point-error behaviour.

    NOTE(review): relies on legacy APIs (``numpy.testing.TestCase``,
    ``np.int``) that were removed in modern numpy -- left unchanged here.
    """

    def setUp(self):
        """Build reference arrays covering the full float16 bit space."""
        # An array of all possible float16 values
        self.all_f16 = np.arange(0x10000, dtype=uint16)
        self.all_f16.dtype = float16
        self.all_f32 = np.array(self.all_f16, dtype=float32)
        self.all_f64 = np.array(self.all_f16, dtype=float64)

        # An array of all non-NaN float16 values, in sorted order
        self.nonan_f16 = np.concatenate(
            (np.arange(0xfc00, 0x7fff, -1, dtype=uint16),
             np.arange(0x0000, 0x7c01, 1, dtype=uint16)))
        self.nonan_f16.dtype = float16
        self.nonan_f32 = np.array(self.nonan_f16, dtype=float32)
        self.nonan_f64 = np.array(self.nonan_f16, dtype=float64)

        # An array of all finite float16 values, in sorted order
        self.finite_f16 = self.nonan_f16[1:-1]
        self.finite_f32 = self.nonan_f32[1:-1]
        self.finite_f64 = self.nonan_f64[1:-1]

    def test_half_conversions(self):
        """Checks that all 16-bit values survive conversion
           to/from 32-bit and 64-bit float"""
        # Because the underlying routines preserve the NaN bits, every
        # value is preserved when converting to/from other floats.

        # Convert from float32 back to float16
        b = np.array(self.all_f32, dtype=float16)
        assert_equal(self.all_f16.view(dtype=uint16),
                     b.view(dtype=uint16))

        # Convert from float64 back to float16
        b = np.array(self.all_f64, dtype=float16)
        assert_equal(self.all_f16.view(dtype=uint16),
                     b.view(dtype=uint16))

        # Convert float16 to longdouble and back
        # This doesn't necessarily preserve the extra NaN bits,
        # so exclude NaNs.
        a_ld = np.array(self.nonan_f16, dtype=np.longdouble)
        b = np.array(a_ld, dtype=float16)
        assert_equal(self.nonan_f16.view(dtype=uint16),
                     b.view(dtype=uint16))

        # Check the range for which all integers can be represented
        # (float16 has an 11-bit significand, hence +/-2048)
        i_int = np.arange(-2048, 2049)
        i_f16 = np.array(i_int, dtype=float16)
        # NOTE(review): np.int was removed in numpy 1.24 (use plain int).
        j = np.array(i_f16, dtype=np.int)
        assert_equal(i_int, j)

    def test_nans_infs(self):
        """NaN/inf classification ufuncs and comparisons against NaN."""
        with np.errstate(all='ignore'):
            # Check some of the ufuncs
            assert_equal(np.isnan(self.all_f16), np.isnan(self.all_f32))
            assert_equal(np.isinf(self.all_f16), np.isinf(self.all_f32))
            assert_equal(np.isfinite(self.all_f16), np.isfinite(self.all_f32))
            assert_equal(np.signbit(self.all_f16), np.signbit(self.all_f32))
            assert_equal(np.spacing(float16(65504)), np.inf)

            # Check comparisons of all values with NaN
            # (every comparison with NaN is False except !=)
            nan = float16(np.nan)

            assert_(not (self.all_f16 == nan).any())
            assert_(not (nan == self.all_f16).any())

            assert_((self.all_f16 != nan).all())
            assert_((nan != self.all_f16).all())

            assert_(not (self.all_f16 < nan).any())
            assert_(not (nan < self.all_f16).any())

            assert_(not (self.all_f16 <= nan).any())
            assert_(not (nan <= self.all_f16).any())

            assert_(not (self.all_f16 > nan).any())
            assert_(not (nan > self.all_f16).any())

            assert_(not (self.all_f16 >= nan).any())
            assert_(not (nan >= self.all_f16).any())

    def test_half_values(self):
        """Confirms a small number of known half values"""
        a = np.array([1.0, -1.0,
                      2.0, -2.0,
                      0.0999755859375, 0.333251953125,  # 1/10, 1/3
                      65504, -65504,           # Maximum magnitude
                      2.0**(-14), -2.0**(-14),  # Minimum normal
                      2.0**(-24), -2.0**(-24),  # Minimum subnormal
                      0, -1/1e1000,            # Signed zeros
                      np.inf, -np.inf])
        # Expected IEEE 754 half-precision bit patterns for ``a``
        b = np.array([0x3c00, 0xbc00,
                      0x4000, 0xc000,
                      0x2e66, 0x3555,
                      0x7bff, 0xfbff,
                      0x0400, 0x8400,
                      0x0001, 0x8001,
                      0x0000, 0x8000,
                      0x7c00, 0xfc00], dtype=uint16)
        b.dtype = float16
        assert_equal(a, b)

    def test_half_rounding(self):
        """Checks that rounding when converting to half is correct"""
        a = np.array([2.0**-25 + 2.0**-35,  # Rounds to minimum subnormal
                      2.0**-25,       # Underflows to zero (nearest even mode)
                      2.0**-26,       # Underflows to zero
                      1.0+2.0**-11 + 2.0**-16,  # rounds to 1.0+2**(-10)
                      1.0+2.0**-11,   # rounds to 1.0 (nearest even mode)
                      1.0+2.0**-12,   # rounds to 1.0
                      65519,          # rounds to 65504
                      65520],         # rounds to inf
                     dtype=float64)
        rounded = [2.0**-24,
                   0.0,
                   0.0,
                   1.0+2.0**(-10),
                   1.0,
                   1.0,
                   65504,
                   np.inf]

        # Check float64->float16 rounding
        b = np.array(a, dtype=float16)
        assert_equal(b, rounded)

        # Check float32->float16 rounding
        a = np.array(a, dtype=float32)
        b = np.array(a, dtype=float16)
        assert_equal(b, rounded)

    def test_half_correctness(self):
        """Take every finite float16, and check the casting functions with
           a manual conversion."""

        # Create an array of all finite float16s
        a_bits = self.finite_f16.view(dtype=uint16)

        # Convert to 64-bit float manually: sign bit, 5-bit exponent
        # (bias 15), 10-bit mantissa.
        a_sgn = (-1.0)**((a_bits & 0x8000) >> 15)
        a_exp = np.array((a_bits & 0x7c00) >> 10, dtype=np.int32) - 15
        a_man = (a_bits & 0x03ff) * 2.0**(-10)
        # Implicit bit of normalized floats
        a_man[a_exp != -15] += 1
        # Denormalized exponent is -14
        a_exp[a_exp == -15] = -14

        a_manual = a_sgn * a_man * 2.0**a_exp

        a32_fail = np.nonzero(self.finite_f32 != a_manual)[0]
        if len(a32_fail) != 0:
            bad_index = a32_fail[0]
            assert_equal(self.finite_f32, a_manual,
                         "First non-equal is half value %x -> %g != %g" %
                         (self.finite_f16[bad_index],
                          self.finite_f32[bad_index],
                          a_manual[bad_index]))

        a64_fail = np.nonzero(self.finite_f64 != a_manual)[0]
        if len(a64_fail) != 0:
            bad_index = a64_fail[0]
            assert_equal(self.finite_f64, a_manual,
                         "First non-equal is half value %x -> %g != %g" %
                         (self.finite_f16[bad_index],
                          self.finite_f64[bad_index],
                          a_manual[bad_index]))

    def test_half_ordering(self):
        """Make sure comparisons are working right"""

        # All non-NaN float16 values in reverse order
        a = self.nonan_f16[::-1].copy()
        # 32-bit float copy
        b = np.array(a, dtype=float32)

        # Should sort the same
        a.sort()
        b.sort()
        assert_equal(a, b)

        # Comparisons should work
        assert_((a[:-1] <= a[1:]).all())
        assert_(not (a[:-1] > a[1:]).any())
        assert_((a[1:] >= a[:-1]).all())
        assert_(not (a[1:] < a[:-1]).any())
        # All != except for +/-0 (which compare equal, hence size-2)
        assert_equal(np.nonzero(a[:-1] < a[1:])[0].size, a.size-2)
        assert_equal(np.nonzero(a[1:] > a[:-1])[0].size, a.size-2)

    def test_half_funcs(self):
        """Test the various ArrFuncs"""

        # fill
        assert_equal(np.arange(10, dtype=float16),
                     np.arange(10, dtype=float32))

        # fillwithscalar
        a = np.zeros((5,), dtype=float16)
        a.fill(1)
        assert_equal(a, np.ones((5,), dtype=float16))

        # nonzero and copyswap
        a = np.array([0, 0, -1, -1/1e20, 0, 2.0**-24, 7.629e-6], dtype=float16)
        assert_equal(a.nonzero()[0],
                     [2, 5, 6])
        a = a.byteswap().newbyteorder()
        assert_equal(a.nonzero()[0],
                     [2, 5, 6])

        # dot
        a = np.arange(0, 10, 0.5, dtype=float16)
        b = np.ones((20,), dtype=float16)
        assert_equal(np.dot(a, b),
                     95)

        # argmax
        a = np.array([0, -np.inf, -2, 0.5, 12.55, 7.3, 2.1, 12.4], dtype=float16)
        assert_equal(a.argmax(),
                     4)
        # NaN propagates through argmax (index 5 is the NaN)
        a = np.array([0, -np.inf, -2, np.inf, 12.55, np.nan, 2.1, 12.4], dtype=float16)
        assert_equal(a.argmax(),
                     5)

        # getitem
        a = np.arange(10, dtype=float16)
        for i in range(10):
            assert_equal(a.item(i), i)

    def test_spacing_nextafter(self):
        """Test np.spacing and np.nextafter"""
        # All non-negative finite #'s
        a = np.arange(0x7c00, dtype=uint16)
        hinf = np.array((np.inf,), dtype=float16)
        a_f16 = a.view(dtype=float16)

        assert_equal(np.spacing(a_f16[:-1]), a_f16[1:]-a_f16[:-1])

        assert_equal(np.nextafter(a_f16[:-1], hinf), a_f16[1:])
        assert_equal(np.nextafter(a_f16[0], -hinf), -a_f16[1])
        assert_equal(np.nextafter(a_f16[1:], -hinf), a_f16[:-1])

        # switch to negatives (flip the sign bit of every value)
        a |= 0x8000

        assert_equal(np.spacing(a_f16[0]), np.spacing(a_f16[1]))
        assert_equal(np.spacing(a_f16[1:]), a_f16[:-1]-a_f16[1:])

        assert_equal(np.nextafter(a_f16[0], hinf), -a_f16[1])
        assert_equal(np.nextafter(a_f16[1:], hinf), a_f16[:-1])
        assert_equal(np.nextafter(a_f16[:-1], -hinf), a_f16[1:])

    def test_half_ufuncs(self):
        """Test the various ufuncs"""

        a = np.array([0, 1, 2, 4, 2], dtype=float16)
        b = np.array([-2, 5, 1, 4, 3], dtype=float16)
        c = np.array([0, -1, -np.inf, np.nan, 6], dtype=float16)

        assert_equal(np.add(a, b), [-2, 6, 3, 8, 5])
        assert_equal(np.subtract(a, b), [2, -4, 1, 0, -1])
        assert_equal(np.multiply(a, b), [0, 5, 2, 16, 6])
        assert_equal(np.divide(a, b), [0, 0.199951171875, 2, 1, 0.66650390625])

        assert_equal(np.equal(a, b), [False, False, False, True, False])
        assert_equal(np.not_equal(a, b), [True, True, True, False, True])
        assert_equal(np.less(a, b), [False, True, False, False, True])
        assert_equal(np.less_equal(a, b), [False, True, False, True, True])
        assert_equal(np.greater(a, b), [True, False, True, False, False])
        assert_equal(np.greater_equal(a, b), [True, False, True, True, False])
        assert_equal(np.logical_and(a, b), [False, True, True, True, True])
        assert_equal(np.logical_or(a, b), [True, True, True, True, True])
        assert_equal(np.logical_xor(a, b), [True, False, False, False, False])
        assert_equal(np.logical_not(a), [True, False, False, False, False])

        assert_equal(np.isnan(c), [False, False, False, True, False])
        assert_equal(np.isinf(c), [False, False, True, False, False])
        assert_equal(np.isfinite(c), [True, True, False, False, True])
        assert_equal(np.signbit(b), [True, False, False, False, False])

        assert_equal(np.copysign(b, a), [2, 5, 1, 4, 3])

        # maximum/minimum propagate NaN; fmax/fmin ignore it
        assert_equal(np.maximum(a, b), [0, 5, 2, 4, 3])
        x = np.maximum(b, c)
        assert_(np.isnan(x[3]))
        x[3] = 0
        assert_equal(x, [0, 5, 1, 0, 6])
        assert_equal(np.minimum(a, b), [-2, 1, 1, 4, 2])
        x = np.minimum(b, c)
        assert_(np.isnan(x[3]))
        x[3] = 0
        assert_equal(x, [-2, -1, -np.inf, 0, 3])
        assert_equal(np.fmax(a, b), [0, 5, 2, 4, 3])
        assert_equal(np.fmax(b, c), [0, 5, 1, 4, 6])
        assert_equal(np.fmin(a, b), [-2, 1, 1, 4, 2])
        assert_equal(np.fmin(b, c), [-2, -1, -np.inf, 4, 3])

        assert_equal(np.floor_divide(a, b), [0, 0, 2, 1, 0])
        assert_equal(np.remainder(a, b), [0, 1, 0, 0, 2])
        assert_equal(np.divmod(a, b), ([0, 0, 2, 1, 0], [0, 1, 0, 0, 2]))
        assert_equal(np.square(b), [4, 25, 1, 16, 9])
        assert_equal(np.reciprocal(b), [-0.5, 0.199951171875, 1, 0.25, 0.333251953125])
        assert_equal(np.ones_like(b), [1, 1, 1, 1, 1])
        assert_equal(np.conjugate(b), b)
        assert_equal(np.absolute(b), [2, 5, 1, 4, 3])
        assert_equal(np.negative(b), [2, -5, -1, -4, -3])
        assert_equal(np.positive(b), b)
        assert_equal(np.sign(b), [-1, 1, 1, 1, 1])
        assert_equal(np.modf(b), ([0, 0, 0, 0, 0], b))
        assert_equal(np.frexp(b), ([-0.5, 0.625, 0.5, 0.5, 0.75], [2, 3, 1, 3, 2]))
        assert_equal(np.ldexp(b, [0, 1, 2, 4, 2]), [-2, 10, 4, 64, 12])

    def test_half_coercion(self):
        """Test that half gets coerced properly with the other types"""
        a16 = np.array((1,), dtype=float16)
        a32 = np.array((1,), dtype=float32)
        b16 = float16(1)
        b32 = float32(1)

        # array float16 with python/array scalars
        assert_equal(np.power(a16, 2).dtype, float16)
        assert_equal(np.power(a16, 2.0).dtype, float16)
        assert_equal(np.power(a16, b16).dtype, float16)
        assert_equal(np.power(a16, b32).dtype, float16)
        assert_equal(np.power(a16, a16).dtype, float16)
        assert_equal(np.power(a16, a32).dtype, float32)

        # scalar float16 with python scalars promotes to float64
        assert_equal(np.power(b16, 2).dtype, float64)
        assert_equal(np.power(b16, 2.0).dtype, float64)
        assert_equal(np.power(b16, b16).dtype, float16)
        assert_equal(np.power(b16, b32).dtype, float32)
        assert_equal(np.power(b16, a16).dtype, float16)
        assert_equal(np.power(b16, a32).dtype, float32)

        assert_equal(np.power(a32, a16).dtype, float32)
        assert_equal(np.power(a32, b16).dtype, float32)
        assert_equal(np.power(b32, a16).dtype, float16)
        assert_equal(np.power(b32, b16).dtype, float32)

    @dec.skipif(platform.machine() == "armv5tel", "See gh-413.")
    def test_half_fpe(self):
        """Overflow/underflow/invalid FPEs for float16 arithmetic."""
        with np.errstate(all='raise'):
            sx16 = np.array((1e-4,), dtype=float16)
            bx16 = np.array((1e4,), dtype=float16)
            sy16 = float16(1e-4)
            by16 = float16(1e4)

            # Underflow errors
            assert_raises_fpe('underflow', lambda a, b:a*b, sx16, sx16)
            assert_raises_fpe('underflow', lambda a, b:a*b, sx16, sy16)
            assert_raises_fpe('underflow', lambda a, b:a*b, sy16, sx16)
            assert_raises_fpe('underflow', lambda a, b:a*b, sy16, sy16)
            assert_raises_fpe('underflow', lambda a, b:a/b, sx16, bx16)
            assert_raises_fpe('underflow', lambda a, b:a/b, sx16, by16)
            assert_raises_fpe('underflow', lambda a, b:a/b, sy16, bx16)
            assert_raises_fpe('underflow', lambda a, b:a/b, sy16, by16)
            assert_raises_fpe('underflow', lambda a, b:a/b,
                              float16(2.**-14), float16(2**11))
            assert_raises_fpe('underflow', lambda a, b:a/b,
                              float16(-2.**-14), float16(2**11))
            assert_raises_fpe('underflow', lambda a, b:a/b,
                              float16(2.**-14+2**-24), float16(2))
            assert_raises_fpe('underflow', lambda a, b:a/b,
                              float16(-2.**-14-2**-24), float16(2))
            assert_raises_fpe('underflow', lambda a, b:a/b,
                              float16(2.**-14+2**-23), float16(4))

            # Overflow errors
            assert_raises_fpe('overflow', lambda a, b:a*b, bx16, bx16)
            assert_raises_fpe('overflow', lambda a, b:a*b, bx16, by16)
            assert_raises_fpe('overflow', lambda a, b:a*b, by16, bx16)
            assert_raises_fpe('overflow', lambda a, b:a*b, by16, by16)
            assert_raises_fpe('overflow', lambda a, b:a/b, bx16, sx16)
            assert_raises_fpe('overflow', lambda a, b:a/b, bx16, sy16)
            assert_raises_fpe('overflow', lambda a, b:a/b, by16, sx16)
            assert_raises_fpe('overflow', lambda a, b:a/b, by16, sy16)
            assert_raises_fpe('overflow', lambda a, b:a+b,
                              float16(65504), float16(17))
            assert_raises_fpe('overflow', lambda a, b:a-b,
                              float16(-65504), float16(17))
            assert_raises_fpe('overflow', np.nextafter, float16(65504), float16(np.inf))
            assert_raises_fpe('overflow', np.nextafter, float16(-65504), float16(-np.inf))
            assert_raises_fpe('overflow', np.spacing, float16(65504))

            # Invalid value errors
            assert_raises_fpe('invalid', np.divide, float16(np.inf), float16(np.inf))
            assert_raises_fpe('invalid', np.spacing, float16(np.inf))
            assert_raises_fpe('invalid', np.spacing, float16(np.nan))
            assert_raises_fpe('invalid', np.nextafter, float16(np.inf), float16(0))
            assert_raises_fpe('invalid', np.nextafter, float16(-np.inf), float16(0))
            assert_raises_fpe('invalid', np.nextafter, float16(0), float16(np.nan))

            # These should not raise
            float16(65472)+float16(32)
            float16(2**-13)/float16(2)
            float16(2**-14)/float16(2**10)
            np.spacing(float16(-65504))
            np.nextafter(float16(65504), float16(-np.inf))
            np.nextafter(float16(-65504), float16(np.inf))
            float16(2**-14)/float16(2**10)
            float16(-2**-14)/float16(2**10)
            float16(2**-14+2**-23)/float16(2)
            float16(-2**-14-2**-23)/float16(2)

    def test_half_array_interface(self):
        """Test that half is compatible with __array_interface__"""
        class Dummy:
            pass

        a = np.ones((1,), dtype=float16)
        b = Dummy()
        b.__array_interface__ = a.__array_interface__
        c = np.array(b)
        assert_(c.dtype == float16)
        assert_equal(a, c)
if __name__ == "__main__":
    # Allow running this test module directly via the legacy numpy test runner.
    run_module_suite()
| 42.47836
| 90
| 0.5429
|
824032d999f4f646f18c1336ba8d22774a338d9c
| 2,654
|
py
|
Python
|
pycharm2020.1.3/script/server_entity/LoadReporter.py
|
LaudateCorpus1/realtime-server
|
25cb1d92dff3eb820e7a38d427538c0397199675
|
[
"MIT"
] | 465
|
2018-06-21T02:50:56.000Z
|
2022-03-27T11:51:46.000Z
|
pycharm2020.1.3/script/server_entity/LoadReporter.py
|
Dango1992/realtime-server
|
25cb1d92dff3eb820e7a38d427538c0397199675
|
[
"MIT"
] | 8
|
2018-08-09T09:25:00.000Z
|
2022-03-10T14:54:51.000Z
|
pycharm2020.1.3/script/server_entity/LoadReporter.py
|
Dango1992/realtime-server
|
25cb1d92dff3eb820e7a38d427538c0397199675
|
[
"MIT"
] | 113
|
2018-06-25T01:42:20.000Z
|
2022-03-23T11:27:56.000Z
|
import random
from RpcHandler import rpc_func
from common import service_const, gv
# from common.service_const import ETCD_TAG_DISPATCHER_SERVICE
from core.util import UtilApi
from core.util.UtilApi import Singleton
from core.util.performance.cpu_load_handler import AvgCpuLoad
from server_entity.ServerEntity import ServerEntity
# LOAD_REPORT_INTERVAL = 0.01 # todo modify to 8
# LOAD_REPORT_INTERVAL = 0.04 # todo modify to 8
LOAD_REPORT_INTERVAL = 6  # seconds between periodic load reports (see LoadReporter.__init__)
class LoadReporter(ServerEntity):
    """Server entity that periodically reports this process's average CPU load
    to the lowest-loaded "LoadCollector" service found via etcd."""

    def __init__(self, load_collector_etcd_tag):
        super().__init__()
        # etcd tag used to look up the load-collector service address.
        self._load_collector_etcd_tag = load_collector_etcd_tag
        self._avg_load = AvgCpuLoad()
        # Fire immediately, then repeat forever every LOAD_REPORT_INTERVAL seconds.
        self.timer_hub.call_later(
            0, self.report_load, repeat_count=-1, repeat_interval_sec=LOAD_REPORT_INTERVAL)
        # self.timer_hub.call_later(LOAD_REPORT_INTERVAL, self.report_load, repeat_count=8)  # TODO: del

    def report_load(self):
        """Send this server's identity and recent average CPU load to a collector.

        Any failure is swallowed and logged so the repeating timer keeps firing.
        """
        try:
            if gv.etcd_service_node is None:
                # etcd not ready yet; skip this tick.
                return
            dispatcher_service_addr = UtilApi.get_lowest_load_service_info(self._load_collector_etcd_tag)
            # if self._rpc_handler._conn:
            #     self.logger.info(f"{self._rpc_handler._conn.get_addr()=}")
            if dispatcher_service_addr:  # todo: each lookup can return a new ip; decide whether to keep using self.rpc_handler or the old conn
                # self.logger.debug(f"_etcd_tag: {gv.etcd_tag} server_name: {gv.server_name}")
                self.call_remote_method(
                    "report_load",
                    [gv.etcd_tag, gv.server_name, gv.local_ip, gv.local_port,
                     self._avg_load.get_avg_cpu_by_period(10)],
                    rpc_remote_entity_type="LoadCollector", ip_port_tuple=dispatcher_service_addr)
                # self.logger.info(f"report_server_load: {self._avg_load.get_avg_cpu_by_period(10)}")
                # print(f"report_server_load: {self._avg_load.get_avg_cpu_by_period(10)}")  # TODO: DEL
            else:
                self.logger.error("can not find dispatcher_service_addr")
        except:
            self.logger.log_last_except()

    # todo: del
    @rpc_func
    def report_load_pingpong_test(self):
        """Debug RPC: asks the collector to pick the lowest-load service address,
        prints the callback result, then triggers a regular load report."""
        self.call_remote_method(
            "pick_lowest_load_service_addr",
            [gv.etcd_tag],
            # rpc_remote_entity_type="LoadCollector", ip_port_tuple=dispatcher_service_addr
            # rpc_callback=lambda err, res: self.logger.info(f"pick_lowest_load_service_addr: {err=} {res=}"),
            rpc_callback=lambda err, res: print(f"pick_lowest_load_service_addr: {err=} {res=}"),
            rpc_remote_entity_type="LoadCollector")
        self.report_load()
| 43.508197
| 110
| 0.680482
|
8b1fc7beb00f3030d5763cd013104bd93ef5fc4d
| 716
|
py
|
Python
|
ex099.py
|
nascimentobrenda24/PythonExercises
|
2055f42a0454ae25cba6a6457c85822eaad2df01
|
[
"MIT"
] | 1
|
2021-11-23T21:41:25.000Z
|
2021-11-23T21:41:25.000Z
|
ex099.py
|
nascimentobrenda24/PythonExercises
|
2055f42a0454ae25cba6a6457c85822eaad2df01
|
[
"MIT"
] | null | null | null |
ex099.py
|
nascimentobrenda24/PythonExercises
|
2055f42a0454ae25cba6a6457c85822eaad2df01
|
[
"MIT"
] | null | null | null |
# Faça um programa que tenha uma função chamada maior(), que receba vários parâmetros com valores inteiros.
# Seu programa tem que analisar todos os valores e dizer qual deles é o maior.
from time import sleep
def maior(*num):
    """Print every value passed, then report how many there were and the largest.

    Bug fix: the original never incremented ``cont``, so the reported count was
    always 0 and ``bigger`` ended up as the *last* value instead of the maximum.

    Args:
        *num: Any number of comparable values (typically ints).

    Returns:
        The largest value passed, or 0 when called with no arguments
        (new, backward-compatible: callers previously received None).
    """
    cont = bigger = 0
    print('=-' * 30)
    print('Analisando os valores passados...')
    for value in num:
        print(value, end=' ')
        sleep(0.3)
        if cont == 0:
            bigger = value  # first value seen initializes the maximum
        else:
            if value > bigger:
                bigger = value
        cont += 1  # fix: count each value so the summary below is correct
    print(f'\nForam informados {cont} valores no total')
    print(f'E o maior deles digitado foi {bigger}')
    return bigger
# Principal Program
# Demonstration calls with argument lists of decreasing length (including none).
maior(2, 3, 6, 4, 5)
maior(10, 1, 0)
maior(1, 5)
maior(8)
maior()
| 22.375
| 107
| 0.606145
|
103c310c77dd7142644cfbf0ad38e18eb52a189f
| 22,448
|
py
|
Python
|
phi/math/backend/_profile.py
|
eliasdjo/PhiFlow
|
dc88dca696d25a5ea5793aa48fae390469f0d829
|
[
"MIT"
] | 556
|
2019-12-04T16:48:54.000Z
|
2022-03-31T16:31:59.000Z
|
phi/math/backend/_profile.py
|
eliasdjo/PhiFlow
|
dc88dca696d25a5ea5793aa48fae390469f0d829
|
[
"MIT"
] | 26
|
2019-12-12T16:54:06.000Z
|
2022-03-14T19:44:36.000Z
|
phi/math/backend/_profile.py
|
eliasdjo/PhiFlow
|
dc88dca696d25a5ea5793aa48fae390469f0d829
|
[
"MIT"
] | 93
|
2019-12-08T14:38:27.000Z
|
2022-03-29T16:38:37.000Z
|
import inspect
import json
from contextlib import contextmanager
from time import perf_counter
from typing import Optional, Callable
from ._backend import Backend, BACKENDS, _DEFAULT
class BackendCall:
    """One timed invocation of a backend method, recorded by the profiler."""

    def __init__(self, start: float, stop: float, backend: 'ProfilingBackend', function_name):
        self._function_name = function_name
        self._backend = backend
        self._start = start
        self._stop = stop
        self._args = dict(Backend=backend.name)

    def __repr__(self):
        return f"{1000 * self._duration:.2f} ms {self._function_name}"

    def print(self, include_parents, depth, min_duration, code_col, code_len):
        # Entries shorter than `min_duration` (seconds) are hidden.
        if self._duration < min_duration:
            return
        print(f"{' ' * depth}{1000 * self._duration:.2f} ms {self._backend}.{self._function_name}")

    @property
    def _name(self):
        return repr(self)

    @property
    def _duration(self):
        return self._stop - self._start

    def trace_json_events(self, include_parents) -> list:
        # One complete ('X') event in the Chrome trace-event format; times in microseconds.
        event = {
            'name': self._function_name,
            'ph': 'X',
            'pid': 1,
            'tid': self._backend._index + 1,
            'ts': int(round(self._start * 1000000)),
            'dur': int(round((self._stop - self._start) * 1000000)),
            'args': self._args,
        }
        return [event]

    def call_count(self) -> int:
        return 1

    def add_arg(self, key, value):
        assert key not in self._args
        self._args[key] = value
class ExtCall:
    """ Function invocation that is not a Backend method but internally calls Backend methods. """

    def __init__(self,
                 parent: 'ExtCall' or None,
                 name: str,
                 level: int,
                 function: str,
                 code_context: list or None,
                 file_name: str,
                 line_number: int):
        """
        Args:
            parent: Parent call.
            name: Name of this call, see `ExtCall.determine_name()`.
            level: Number of parent stack items including this one.
            function: Function name as it appears in the stack frame.
            code_context: Source line(s) of the call site, as reported by `inspect`.
            file_name: File the call site lives in.
            line_number: Line number of the call site.
        """
        self._parent = parent
        if parent is None:
            self._parents = ()
        else:
            # Ancestor chain, outermost first, ending with the direct parent.
            self._parents = parent._parents + (parent,)
        self._children = []  # BackendCalls and ExtCalls
        self._converted = False  # set True by children_to_properties()
        self._name = name
        self._level = level
        self._function = function
        self._code_context = code_context
        self._file_name = file_name
        self._line_number = line_number

    def common_call(self, stack: list):
        """ Returns the deepest ExtCall in the hierarchy of this call that contains `stack`. """
        if self._parent is None:
            return self
        if len(stack) < self._level:
            # Stack is shallower than this call; the common ancestor is higher up.
            return self._parent.common_call(stack)
        for i in range(self._level - 1):
            # Compare ancestors against the stack from the outermost frame inwards.
            if self._parents[i+1]._function != stack[-1-i].function:
                return self._parents[i]
        return self

    def add(self, child):
        # Append a BackendCall or nested ExtCall to this call.
        self._children.append(child)

    @staticmethod
    def determine_name(info):
        """Derive a display name from a stack frame info object: `Class()` for
        constructors, `Class.method` for bound methods, `math.fun` for
        phi.math-internal functions, else the bare function name."""
        fun = info.function
        if 'self' in info.frame.f_locals:
            if fun == '__init__':
                return f"{type(info.frame.f_locals['self']).__name__}()"
            return f"{type(info.frame.f_locals['self']).__name__}.{fun}"
        if 'phi/math' in info.filename or 'phi\\math' in info.filename:
            return f"math.{fun}"
        else:
            return fun

    @property
    def _start(self):
        # Start time of the first recorded child.
        return self._children[0]._start

    @property
    def _stop(self):
        # Stop time of the last recorded child.
        return self._children[-1]._stop

    @property
    def _duration(self):
        # Active time: total time spent in backend calls, not the wall-clock span.
        return sum(c._duration for c in self._children)

    def call_count(self) -> int:
        # Total number of BackendCalls in this subtree.
        return sum(child.call_count() for child in self._children)

    def __repr__(self):
        if not self._converted:
            if self._parent is None:
                return "/"
            return f"{self._name} ({self._level})"
        else:
            context = self._code_context
            return f"sum {1000 * self._duration:.2f} ms {context}"

    def __len__(self):
        return len(self._children)

    def _empty_parent_count(self):
        # Number of consecutive single-child ancestors directly above this call.
        for i, parent in enumerate(reversed(self._parents)):
            if len(parent._children) > 1:
                return i
        return len(self._parents)

    def _eff_parent_count(self):
        # Number of ancestors that branch (have more than one child).
        return len([p for p in self._parents if len(p._children) > 1])

    def _closest_non_trivial_parent(self):
        # Nearest ancestor with more than one child, or the root if none branch.
        parent = self._parent
        while parent._parent is not None:
            if len(parent._children) > 1:
                return parent
            parent = parent._parent
        return parent

    def _calling_code(self, backtrack=0):
        # Returns (source snippet, file name, function, line number) of the call
        # site `backtrack` levels up, or empty placeholders when out of range.
        if self._level > backtrack + 1:
            call: ExtCall = self._parents[-backtrack-1]
            return call._code_context[0].strip(), call._file_name, call._function, call._line_number
        else:
            return "", "", "", -1

    def print(self, include_parents=(), depth=0, min_duration=0., code_col=80, code_len=50):
        """Print this call subtree to the console.

        Args:
            include_parents: Chain of collapsed single-child ancestors to prefix the name with.
            depth: Current indentation depth.
            min_duration: Entries with less active time (seconds) are hidden.
            code_col: Column at which the calling-code snippet is aligned.
            code_len: Maximum printed length of the calling-code snippet.
        """
        if self._duration < min_duration:
            return
        if len(self._children) == 1 and isinstance(self._children[0], ExtCall):
            # Collapse single-child chains into one printed line.
            self._children[0].print(include_parents + ((self,) if self._parent is not None else ()), depth, min_duration, code_col, code_len)
        else:
            funcs = [par._name for par in include_parents] + [self._name]
            text = f"{'. ' * depth}-> {' -> '.join(funcs)} ({1000 * self._duration:.2f} ms)"
            if self._level > len(include_parents)+1:
                code = self._calling_code(backtrack=len(include_parents))[0]
                if len(code) > code_len:
                    code = code[:code_len-3] + "..."
                text += " " + "." * max(0, (code_col - len(text))) + " > " + code
            print(text)
            for child in self._children:
                child.print((), depth + 1, min_duration, code_col, code_len)

    def children_to_properties(self) -> dict:
        """Expose children as attributes named "<index> <name>" for interactive
        inspection, collapsing single-child chains into the attribute name.
        Marks this call as converted and returns the attribute mapping."""
        result = {}
        for child in self._children:
            name = f"{len(result)} {child._name}" if len(self._children) <= 10 else f"{len(result):02d} {child._name}"
            while isinstance(child, ExtCall) and len(child) == 1:
                child = child._children[0]
                name += " -> " + child._name
            result[name] = child
            if isinstance(child, ExtCall):
                child.children_to_properties()
        # finalize
        for name, child in result.items():
            setattr(self, name, child)
        self._converted = True
        return result

    def trace_json_events(self, include_parents=()) -> list:
        """Return Chrome trace-event dicts ('X' complete events, microsecond
        timestamps) for this subtree; single-child chains are merged into one event."""
        if len(self._children) == 1:
            return self._children[0].trace_json_events(include_parents + (self,))
        else:
            name = ' -> '.join([par._name for par in include_parents] + [self._name])
            eff_parent_count = self._eff_parent_count()
            calling_code, calling_filename, calling_function, lineno = self._calling_code(backtrack=self._empty_parent_count())
            result = [
                {
                    'name': name,
                    'ph': "X",  # complete event
                    'pid': 0,
                    'tid': eff_parent_count,
                    'ts': int(self._start * 1000000),
                    'dur': int((self._stop - self._start) * 1000000),
                    'args': {
                        "Calling code snippet": calling_code,
                        "Called by": f"{calling_function}() in {calling_filename}, line {lineno}",
                        "Active time (backend calls)": f"{self._duration * 1000:.2f} ms ({round(100 * self._duration / self._closest_non_trivial_parent()._duration):.0f}% of parent, {100 * self._duration / (self._stop - self._start):.1f}% efficiency)",
                        "Backend calls": f"{self.call_count()} ({round(100 * self.call_count() / self._closest_non_trivial_parent().call_count()):.0f}% of parent)"
                    }
                }
            ]
            for child in self._children:
                result.extend(child.trace_json_events(()))
            return result
class Profile:
    """
    Stores information about calls to backends and their timing.
    Profile may be created through `profile()` or `profile_function()`.
    Profiles can be printed or saved to disc.
    """

    def __init__(self, trace: bool, backends: tuple or list, subtract_trace_time: bool):
        self._start = perf_counter()
        self._stop = None  # set by _finish()
        self._root = ExtCall(None, "", 0, "", "", "", -1)  # root of the call tree (trace mode)
        self._last_ext_call = self._root
        self._messages = []  # external messages, see add_external_message()
        self._trace = trace
        self._backend_calls = []  # flat list of BackendCalls in recording order
        self._retime_index = -1  # >= 0 while re-timing; index into _backend_calls
        self._accumulating = False  # True while averaging in _accumulate_average()
        self._backends = backends
        self._subtract_trace_time = subtract_trace_time
        self._total_trace_time = 0  # cumulative tracing overhead subtracted from timings

    def _add_call(self, backend_call: BackendCall, args: tuple, kwargs: dict, result):
        """Record one backend call.

        In retime mode the timing of the corresponding original call is
        overwritten (or accumulated onto); otherwise the call is appended,
        annotated with formatted inputs/outputs and, in trace mode, attached
        to the Python call tree.
        """
        if self._retime_index >= 0:
            prev_call = self._backend_calls[self._retime_index]
            # Retimed calls must occur in the exact same order as the original run.
            assert prev_call._function_name == backend_call._function_name
            if self._accumulating:
                prev_call._start += backend_call._start
                prev_call._stop += backend_call._stop
            else:
                prev_call._start = backend_call._start
                prev_call._stop = backend_call._stop
            self._retime_index = (self._retime_index + 1) % len(self._backend_calls)
        else:
            self._backend_calls.append(backend_call)
            args = {i: arg for i, arg in enumerate(args)}
            args.update(kwargs)
            backend_call.add_arg("Inputs", _format_values(args, backend_call._backend))
            if isinstance(result, (tuple, list)):
                backend_call.add_arg("Outputs", _format_values({i: res for i, res in enumerate(result)}, backend_call._backend))
            else:
                backend_call.add_arg("Outputs", _format_values({0: result}, backend_call._backend))
            if self._trace:
                # Walk the Python stack (skipping this method and the wrapper)
                # to attach the backend call to the call tree.
                stack = inspect.stack()[2:]
                call = self._last_ext_call.common_call(stack)
                for i in range(call._level, len(stack)):
                    stack_frame = stack[len(stack) - i - 1]
                    name = ExtCall.determine_name(stack_frame)  # if len(stack) - i > 1 else ""
                    sub_call = ExtCall(call, name, i + 1, stack_frame.function, stack_frame.code_context, stack_frame.filename, stack_frame.lineno)
                    call.add(sub_call)
                    call = sub_call
                call.add(backend_call)
                self._last_ext_call = call
            if self._subtract_trace_time:
                # Shift this and later events back by the accumulated tracing overhead.
                delta_trace_time = perf_counter() - backend_call._stop
                backend_call._start -= self._total_trace_time
                backend_call._stop -= self._total_trace_time
                self._total_trace_time += delta_trace_time

    def _finish(self):
        # Close the recording window and expose the call tree as attributes.
        self._stop = perf_counter()
        self._children_to_properties()

    @property
    def duration(self) -> float:
        """ Total time passed from creation of the profile to the end of the last operation. """
        return self._stop - self._start if self._stop is not None else None

    def print(self, min_duration=1e-3, code_col=80, code_len=50):
        """
        Prints this profile to the console.

        Args:
            min_duration: Hides elements with less time spent on backend calls than `min_duration` (seconds)
            code_col: Formatting option for where the context code is printed.
            code_len: Formatting option for cropping the context code
        """
        print(f"Profile: {self.duration:.4f} seconds total. Skipping elements shorter than {1000 * min_duration:.2f} ms")
        if self._messages:
            print("External profiling:")
            for message in self._messages:
                print(f"  {message}")
            print()
        self._root.print(min_duration=min_duration, code_col=code_col, code_len=code_len)

    def save(self, json_file: str):
        """
        Saves this profile to disc using the *trace event format* described at
        https://docs.google.com/document/d/1CvAClvFfyA5R-PhYUmn5OOQtYMH4h6I0nSsKchNAySU/edit

        This file can be viewed with external applications such as Google chrome.

        Args:
            json_file: filename
        """
        # Metadata events naming the two "processes" (Python calls / operations)
        # and one thread per profiled backend.
        data = [
            {'name': "process_name", 'ph': 'M', 'pid': 0, 'tid': 0, "args": {"name": "0 Python calls"}},
            {'name': "process_name", 'ph': 'M', 'pid': 1, 'tid': 1, "args": {"name": "1 Operations"}},
        ] + [
            {'name': "thread_name", 'ph': 'M', 'pid': 1, 'tid': i + 1, "args": {"name": backend.name}}
            for i, backend in enumerate(self._backends)
        ]
        if self._trace:
            if len(self._root._children) > 0:
                data.extend(self._root.trace_json_events())
        else:
            data.extend(sum([call.trace_json_events(()) for call in self._backend_calls], []))
        with open(json_file, 'w') as file:
            json.dump(data, file)

    save_trace = save  # backwards-compatible alias

    def _children_to_properties(self):
        # Expose the top-level call-tree children as attributes of this profile.
        children = self._root.children_to_properties()
        for name, child in children.items():
            setattr(self, name, child)

    def add_external_message(self, message: str):
        """ Stores an external message in this profile. External messages are printed in `Profile.print()`. """
        self._messages.append(message)

    @contextmanager
    def retime(self):
        """
        To be used in `with` statements, `with prof.retime(): ...`.

        Updates this profile by running the same operations again but without tracing.
        This gives a much better indication of the true timing.
        The code within the `with` block must perform the same operations as the code that created this profile.

        *Warning:* Internal caching may reduce the number of operations after the first time a function is called.
        To prevent this, run the function before profiling it, see `warmup` in `profile_function()`.
        """
        self._retime_index = 0
        restore_data = _start_profiling(self, self._backends)
        try:
            yield None
        finally:
            _stop_profiling(self, *restore_data)
            # Index wraps to 0 only if exactly the original number of calls was made.
            assert self._retime_index == 0, f"Number of calls during retime did not match original profile, originally {len(self._backend_calls)}, now {self._retime_index}, "
            self._retime_index = -1

    @contextmanager
    def _accumulate_average(self, n):
        # Like retime(), but sums timings over repeated runs and divides by `n`
        # afterwards to obtain per-run averages.
        self._retime_index = 0
        self._accumulating = True
        restore_data = _start_profiling(self, self._backends)
        try:
            yield None
        finally:
            _stop_profiling(self, *restore_data)
            assert self._retime_index == 0, f"Number of calls during retime did not match original profile, originally {len(self._backend_calls)}, now {self._retime_index}, "
            self._retime_index = -1
            for call in self._backend_calls:
                call._start /= n
                call._stop /= n
            self._accumulating = False
def _format_values(values: dict, backend):
def format_val(value):
if isinstance(value, str):
return f'"{value}"'
if isinstance(value, (int, float, complex, bool)):
return value
if isinstance(value, (tuple, list)):
return str([format_val(v) for v in value])
try:
shape = backend.shape(value)
dtype = backend.dtype(value)
try:
shape = (int(dim) if dim is not None else '?' for dim in shape)
except Exception:
pass
return f"{tuple(shape)}, {dtype}"
except BaseException:
return str(value)
lines = [f"{key}: {format_val(val)}" for key, val in values.items()]
return "\n".join(lines)
class ProfilingBackend:
    """Wraps a `Backend`, forwarding its API while recording every call's timing
    into a `Profile` as `BackendCall`s."""

    def __init__(self, prof: Profile, backend: Backend, index: int):
        self._backend = backend
        self._profile = prof
        self._index = index  # position among profiled backends; used as trace thread id
        # non-profiling methods
        self.name = backend.name
        self.combine_types = backend.combine_types
        self.auto_cast = backend.auto_cast
        self.is_tensor = backend.is_tensor
        self.is_available = backend.is_available
        self.shape = backend.shape
        self.staticshape = backend.staticshape
        self.ndims = backend.ndims
        self.dtype = backend.dtype
        self.expand_dims = backend.expand_dims
        self.reshape = backend.reshape
        self.supports = backend.supports
        # TODO strided slice does not go through backend atm
        # profiling methods
        for item_name in dir(backend):
            item = getattr(backend, item_name)
            if callable(item) and not hasattr(self, item_name):
                # Default arguments capture the current loop values, avoiding the
                # late-binding closure pitfall inside this loop.
                def context(item=item, item_name=item_name, profiling_backend=self):
                    def call_fun(*args, **kwargs):
                        start = perf_counter()
                        result = item(*args, **kwargs)
                        stop = perf_counter()
                        prof._add_call(BackendCall(start, stop, profiling_backend, item_name), args, kwargs, result)
                        return result
                    return call_fun
                setattr(self, item_name, context())

    def call(self, f: Callable, *args, name=None):
        """Time a single function call, blocking until the result is ready so the
        measured duration includes asynchronous backend work."""
        start = perf_counter()
        result = f(*args)
        self._backend.block_until_ready(result)
        stop = perf_counter()
        self._profile._add_call(BackendCall(start, stop, self, name), args, {}, result)
        return result

    def __repr__(self):
        return f"profile[{self._backend}]"

    def __enter__(self):
        # Entering makes this profiling backend the current default backend.
        _DEFAULT.append(self)

    def __exit__(self, exc_type, exc_val, exc_tb):
        _DEFAULT.pop(-1)

    def __eq__(self, other):
        # Compares equal to the wrapped backend so registry lookups keep working.
        return other is self or other is self._backend

    def __hash__(self):
        return hash(self._backend)
_PROFILE = []  # stack of active Profile objects, innermost last (see get_current_profile)
@contextmanager
def profile(backends=None, trace=True, subtract_trace_time=True, save: str or None = None) -> Profile:
    """
    To be used in `with` statements, `with math.backend.profile() as prof: ...`.
    Creates a `Profile` for the code executed within the context by tracking calls to the `backends` and optionally tracing the call.

    Args:
        backends: List of backends to profile, `None` to profile all.
        trace: Whether to perform a full stack trace for each backend call. If true, groups backend calls by function.
        subtract_trace_time: If True, subtracts the time it took to trace the call stack from the event times
        save: (Optional) File path to save the profile to. This will call `Profile.save()`.

    Returns:
        Created `Profile`
    """
    selected = BACKENDS if backends is None else backends
    prof = Profile(trace, selected, subtract_trace_time)
    restore_data = _start_profiling(prof, selected)
    try:
        yield prof
    finally:
        _stop_profiling(prof, *restore_data)
        if save is not None:
            prof.save(save)
def profile_function(fun: Callable,
                     args: tuple or list = (),
                     kwargs: dict or None = None,
                     backends=None,
                     trace=True,
                     subtract_trace_time=True,
                     retime=True,
                     warmup=1,
                     call_count=1) -> Profile:
    """
    Creates a `Profile` for the function `fun(*args, **kwargs)`.

    Args:
        fun: Function to be profiled. In case `retime=True`, this function must perform the same operations each time it is called.
            Use `warmup>0` to ensure that internal caching does not interfere with the operations.
        args: Arguments to be passed to `fun`.
        kwargs: Keyword arguments to be passed to `fun`.
        backends: List of backends to profile, `None` to profile all.
        trace: Whether to perform a full stack trace for each backend call. If true, groups backend calls by function.
        subtract_trace_time: If True, subtracts the time it took to trace the call stack from the event times. Has no effect if `retime=True`.
        retime: If true, calls `fun` another time without tracing the calls and updates the profile.
            This gives a much better indication of the true timing.
            See `Profile.retime()`.
        warmup: Number of times to call `fun` before profiling it.
        call_count: How often to call the function (excluding retime and warmup). The times will be averaged over multiple runs if `call_count > 1`.

    Returns:
        Created `Profile` for `fun`.
    """
    call_kwargs = kwargs if isinstance(kwargs, dict) else {}
    # Warm-up runs so internal caching does not distort the measurement.
    for _ in range(warmup):
        fun(*args, **call_kwargs)
    with profile(backends=backends, trace=trace, subtract_trace_time=subtract_trace_time) as prof:
        fun(*args, **call_kwargs)
    if retime:
        with prof.retime():
            fun(*args, **call_kwargs)
    if call_count > 1:
        # Average the timings over the remaining runs.
        with prof._accumulate_average(call_count):
            for _ in range(call_count - 1):
                fun(*args, **call_kwargs)
    return prof
def _start_profiling(prof: Profile, backends: tuple or list):
    """Swap the given backends for profiling wrappers in the global registry.

    Returns the state tuple (original backends, original default backend)
    expected by `_stop_profiling`.
    """
    _PROFILE.append(prof)
    previous_default = _DEFAULT[-1]
    previous_backends = tuple(BACKENDS)
    for position, backend in enumerate(backends):
        wrapper = ProfilingBackend(prof, backend, position)
        BACKENDS[BACKENDS.index(backend)] = wrapper
        if _DEFAULT[-1] == backend:
            _DEFAULT[-1] = wrapper
    return previous_backends, previous_default
def _stop_profiling(prof: Profile, original_backends, original_default):
    """Finalize `prof` and restore the backend registry and default backend."""
    prof._finish()
    _PROFILE.pop(-1)
    BACKENDS[:] = original_backends
    _DEFAULT[-1] = original_default
def get_current_profile() -> Optional[Profile]:
    """ Returns the currently active `Profile` if one is active. Otherwise returns `None`. """
    if _PROFILE:
        return _PROFILE[-1]
    return None
| 39.801418
| 252
| 0.595599
|
ae0574762077630d502f1dce7dff5ac51655520c
| 3,714
|
py
|
Python
|
geninv.py
|
manticode/wow-inventory-offline
|
5d0e1cb4ce8344522188b102be0f1be6491f9813
|
[
"Apache-2.0"
] | 1
|
2021-06-07T15:36:36.000Z
|
2021-06-07T15:36:36.000Z
|
geninv.py
|
manticode/wow-inventory-offline
|
5d0e1cb4ce8344522188b102be0f1be6491f9813
|
[
"Apache-2.0"
] | null | null | null |
geninv.py
|
manticode/wow-inventory-offline
|
5d0e1cb4ce8344522188b102be0f1be6491f9813
|
[
"Apache-2.0"
] | null | null | null |
import argparse
import re
import csv
from slpp import slpp as lua
def prerun():
    """ Parse command-line arguments and return the opened lua datafile together
    with the globally unique toon name. The caller is responsible for closing
    the returned file handle. """
    parser = argparse.ArgumentParser(description="Inventory database file.")
    parser.add_argument("-i", help="the lua datafile", dest="infilename")
    parser.add_argument("-o", help="Output filename in CSV format", dest="outfilename")
    parser.add_argument("-n", "--toon_name", help="Character (toon) nam", dest="toon_name")
    parser.add_argument("-r", "--realm_name", help="Realm (server) name. Defaults to Hydraxian Waterlords if not "
                                                   "specified", dest="realm_name")
    parser.print_help()
    args = parser.parse_args()
    datafile = open(args.infilename, "r")
    unique_name = get_unique_char_name(args.toon_name, args.realm_name)
    return datafile, unique_name
def parse_lua(luadb, gu_toon_name):
    """ Decode the AskMrRobot lua dump and return parallel lists of item ids,
    item names and quantities for the given toon. """
    raw = luadb.read()
    # Wrap in braces so the dump decodes as a single lua table.
    decoded = lua.decode("{ " + raw + " }")
    itemid_list, itemname_list = iter_luadb(decoded, gu_toon_name)
    qty_list = get_item_qty(decoded, gu_toon_name, itemid_list)
    return itemid_list, itemname_list, qty_list
def extract_item_name(item_string):
    """ Return the item name inside the last ``[...]`` of a WoW item-link
    string, or None when no bracketed name is present. """
    match = re.search("^.*\[([a-zA-Z0-9\s\:\',\-]*)\].*$", item_string)
    return match.group(1) if match else None
def get_item_qty(lua_obj, gu_toon_name, item_id_list):
    """ For every id in item_id_list, sum its count across all bank containers
    of the given toon; returns the quantities in the same order (0 when absent). """
    containers = lua_obj["AskMrRobotDbClassic"]["char"][gu_toon_name]["BankItemsAndCounts"]
    quantities = []
    for wanted_id in item_id_list:
        total = 0
        for container in containers.values():
            count = container.get(wanted_id)
            if count:
                total += count
        quantities.append(total)
    return quantities
def get_unique_char_name(toon_name, realm_name):
    """ Return the globally unique toon name in "Name - Realm" format.

    Bug fix: the CLI help in prerun() promises that the realm "Defaults to
    Hydraxian Waterlords if not specified", but passing realm_name=None
    previously raised TypeError on concatenation. The default is now applied
    here, which only changes the former crash path (backward-compatible).
    """
    if not realm_name:
        realm_name = "Hydraxian Waterlords"
    return toon_name + " - " + realm_name
def iter_luadb(lua_obj, gu_char_name):
    """ Collect unique item ids and their display names from every bank container.

    Returns (ids, names) as parallel lists, preserving first-seen order.

    Bug fix: the original ended the loop body with
    ``iter_luadb(bank_inv_lookup[key], toon_name, realm_name)`` — a call that
    always raised NameError (``toon_name``/``realm_name`` are not defined in
    this scope) and passed the wrong arguments for this function's signature.
    The broken recursion is removed; each container is a flat list of slot items.
    """
    bank_inv_lookup = lua_obj["AskMrRobotDbClassic"]["char"][gu_char_name]["BankItems"]
    storage_list_itemid = []
    storage_list_itemname = []
    for key in bank_inv_lookup:
        for slot_item in bank_inv_lookup[key]:
            item_id = slot_item["id"]
            if item_id not in storage_list_itemid:
                storage_list_itemid.append(item_id)
                storage_list_itemname.append(extract_item_name(slot_item["link"]))
    return storage_list_itemid, storage_list_itemname
def create_combined_inv(item_id_list, item_name_list, item_qty_list):
    """ Map item names to their quantities. ``item_id_list`` is accepted for
    API parity with the extraction step but is not used. """
    return dict(zip(item_name_list, item_qty_list))
def write_out_csv(inv_dict, outfile):
    """ Write ``inv_dict`` as name,quantity CSV rows to ``outfile``.

    Bug fix: the file is now opened with ``newline=""`` as the csv module
    requires, so Windows no longer gets blank interleaved rows. The redundant
    explicit ``close()`` inside the ``with`` block is removed — the context
    manager already closes the file.
    """
    with open(outfile, "w", newline="") as file_handle:
        writer = csv.writer(file_handle)
        writer.writerows(inv_dict.items())
if __name__ == "__main__":
    # Entry point: parse CLI args and open the lua DB, extract the bank
    # inventory, and write it out as name,quantity rows.
    databaseobj, gu_name = prerun()
    itemid_list, itemname_list, itemqty_list = parse_lua(databaseobj, gu_name)
    inventory_dict = create_combined_inv(itemid_list, itemname_list, itemqty_list)
    write_out_csv(inventory_dict, "inventory.csv")
| 38.28866
| 117
| 0.687668
|
04cbd8c73c9161c901ae2c415a2a602d9f2c0fa0
| 567
|
py
|
Python
|
2.py
|
flpcan/project_euler
|
2cabb0a51c70b0b6e145328f3e3c55de41ac2854
|
[
"CC0-1.0"
] | null | null | null |
2.py
|
flpcan/project_euler
|
2cabb0a51c70b0b6e145328f3e3c55de41ac2854
|
[
"CC0-1.0"
] | null | null | null |
2.py
|
flpcan/project_euler
|
2cabb0a51c70b0b6e145328f3e3c55de41ac2854
|
[
"CC0-1.0"
] | null | null | null |
#
# Each new term in the Fibonacci sequence is generated by adding the previous two terms. By starting with 1 and 2, the first 10 terms will be:
#
# 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, ...
#
# By considering the terms in the Fibonacci sequence whose values do not exceed four million, find the sum of the even-valued terms.
# Build all Fibonacci terms up to four million, then sum the even-valued ones.
numbers = [1, 2]
pairs = []
new = 0
while True:
    new = numbers[-1] + numbers[-2]
    if new > 4000000:
        break
    numbers.append(new)
pairs = [term for term in numbers if term % 2 == 0]
print(sum(pairs))
| 23.625
| 142
| 0.638448
|
303a078cc9697f943d9468e24217a221244dad88
| 11,022
|
py
|
Python
|
test-src/gen-ovs-info.py
|
call518/virtual-network-visualizer
|
dfa10c249768a82125e901f2c3357e0278b8b4ea
|
[
"Apache-2.0"
] | 3
|
2019-04-02T14:22:26.000Z
|
2021-03-16T06:52:54.000Z
|
test-src/gen-ovs-info.py
|
call518/OpenStack-Network-Visualizer
|
dfa10c249768a82125e901f2c3357e0278b8b4ea
|
[
"Apache-2.0"
] | null | null | null |
test-src/gen-ovs-info.py
|
call518/OpenStack-Network-Visualizer
|
dfa10c249768a82125e901f2c3357e0278b8b4ea
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- mode:python; coding:utf-8 -*-
import paramiko
import time
import sys
import json
import socket
import networkx as nx
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt
import pandas as pd
def exec_ssh(ssh_hostname, ssh_cmd):
    """Run ``ssh_cmd`` on ``ssh_hostname`` as root via key auth; return stdout as bytes.

    Bug fixes:
    - On a failed connection the original only wrote to stderr and then called
      ``ssh_stdout.read()`` on ``None``, raising AttributeError; we now return
      ``b""`` on any failure instead.
    - The SSH client is always closed (the original leaked the connection).
    """
    SSH_USERNAME = "root"
    SSH_PASSWORD = "password"  # unused; kept for parity with the original config
    SSH_KEY_FILE = "/root/.ssh/id_rsa"
    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        # ssh.connect(SSH_ADDRESS, username=SSH_USERNAME, password=SSH_PASSWORD)
        ssh.connect(hostname=ssh_hostname, port=22, username=SSH_USERNAME, key_filename=SSH_KEY_FILE)
        ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(ssh_cmd, timeout=None, bufsize=-1, get_pty=False, environment=None)
        return ssh_stdout.read()
    except Exception as e:
        sys.stderr.write("SSH connection error: {0}".format(e))
        return b""
    finally:
        ssh.close()
def isStrBlank(myString):
    """Return True when *myString* is falsy (None, empty) or whitespace-only."""
    if not myString:
        return True
    return not myString.strip()
if __name__ == '__main__':
result = []
hostnames = (
#"pub-network-001",
#"pub-network-002",
"pub-compute-001",
"pub-compute-002",
# "pub-compute-003",
# "pub-compute-004",
)
for hostname in hostnames:
output_bridge = exec_ssh(hostname, "ovs-vsctl -f json list br")
output_port = exec_ssh(hostname, "ovs-vsctl -f json list port")
output_interface = exec_ssh(hostname, "ovs-vsctl -f json list interface")
json_data_bridge = json.loads(output_bridge)
json_data_interface = json.loads(output_interface)
json_data_port = json.loads(output_port)
for item_interface in json_data_interface['data']:
if_hostname = hostname
if_uuid = item_interface[0][1]
if_admin_state = item_interface[1]
if_name = "I:" + item_interface[26]
if if_name.startswith("I:eth"):
if_name = if_name + "(" + hostname + ")"
if_type = item_interface[33]
if if_type in ["vxlan", "patch", "internal"]:
if_name = if_name + "(" + hostname + ")"
if_external_ids = item_interface[13][1]
if_link_speed = item_interface[19]
if_link_state = item_interface[20]
if type(item_interface[24]) is list:
if_mtu = None
else:
if_mtu = item_interface[24]
if_ofport = item_interface[27]
if_options = item_interface[29][1]
if_other_config = item_interface[30][1]
if_statistics = item_interface[31][1]
if_status = item_interface[32][1]
## OpenStack 메타 정보 검색
if_external_ids_attached_mac = if_external_ids_iface_id = if_external_ids_iface_status = if_external_ids_vm_uuid = None
if len(if_external_ids) > 0:
if_external_ids_attached_mac = if_external_ids[0][1]
if_external_ids_iface_id = if_external_ids[1][1]
if_external_ids_iface_status = if_external_ids[2][1]
if len(if_external_ids) > 3:
if_external_ids_vm_uuid = if_external_ids[3][1]
## Options 속성 검색
if_options_patch_peer = if_options_vxlan_df_default = if_options_vxlan_in_key = if_options_vxlan_local_ip = if_options_vxlan_out_key = if_options_vxlan_remote_ip = None
if if_type == "patch":
if_options_patch_peer = if_options[0][1]
elif if_type == "vxlan":
if_options_vxlan_df_default = if_options[0][1]
if_options_vxlan_in_key = if_options[1][1]
if_options_vxlan_local_ip = if_options[2][1]
if_options_vxlan_out_key = if_options[3][1]
if_options_vxlan_remote_ip = if_options[4][1]
## Interface가 속해 있는 Port 검색
if_port_uuid = if_port_name = None
for item_port in json_data_port['data']:
if if_uuid == item_port[8][1]:
if_port_uuid = item_port[0][1]
if_port_name = "P:" + item_port[11] + "(" + hostname + ")"
break
## Port가 속해 있는 Bridge 검색
if_br_uuid = if_br_name = None
if if_port_uuid:
for item_bridge in json_data_bridge['data']:
tmp_br_uuid = item_bridge[0][1]
tmp_br_name = item_bridge[13]
for port in item_bridge[16][1]:
if if_port_uuid == port[1]:
if_br_uuid = tmp_br_uuid
if_br_name = "B:" + tmp_br_name + "(" + hostname + ")"
break
result.append({
"if_hostname": if_hostname,
"if_uuid": if_uuid,
"if_name": if_name,
"if_admin_state": if_admin_state,
"if_name": if_name,
"if_type": if_type,
"if_external_ids_attached_mac": if_external_ids_attached_mac,
"if_external_ids_iface_id": if_external_ids_iface_id,
"if_external_ids_iface_status": if_external_ids_iface_status,
"if_external_ids_vm_uuid": if_external_ids_vm_uuid,
"if_link_speed": if_link_speed,
"if_link_state": if_link_state,
"if_mtu": if_mtu,
"if_ofport": if_ofport,
"if_options": if_options,
"if_options_patch_peer": if_options_patch_peer,
"if_options_vxlan_df_default": if_options_vxlan_df_default,
"if_options_vxlan_in_key": if_options_vxlan_in_key,
"if_options_vxlan_local_ip": if_options_vxlan_local_ip,
"if_options_vxlan_out_key": if_options_vxlan_out_key,
"if_options_vxlan_remote_ip": if_options_vxlan_remote_ip,
"if_other_config": if_other_config,
"if_statistics": if_statistics,
"if_status": if_status,
"if_port_uuid": if_port_uuid,
"if_port_name": if_port_name,
"if_br_uuid": if_br_uuid,
"if_br_name": if_br_name
})
#print(result)
## 시각화 이미지 생성
G = nx.Graph()
for interface in result:
#print("if_name: %s (%s)" % (interface['if_name'], interface['if_uuid']))
#print(" if_port_name: %s (%s)" % (interface['if_port_name'], interface['if_port_uuid']))
#print(" if_br_name: %s (%s)" % (interface['if_br_name'], interface['if_br_uuid']))
if_name = interface['if_name']
if_type = interface['if_type']
G.add_node(if_name,
if_hostname = interface['if_hostname'],
if_uuid = interface['if_uuid'],
if_admin_state = interface['if_admin_state'],
if_type = if_type,
if_external_ids_attached_mac = interface['if_external_ids_attached_mac'],
if_external_ids_iface_id = interface['if_external_ids_iface_id'],
if_external_ids_iface_status = interface['if_external_ids_iface_status'],
if_external_ids_vm_uuid = interface['if_external_ids_vm_uuid'],
if_link_speed = interface['if_link_speed'],
if_link_state = interface['if_link_state'],
if_mtu = interface['if_mtu'],
if_ofport = interface['if_ofport'],
if_options = interface['if_options'],
if_options_patch_peer = interface['if_options_patch_peer'],
if_options_vxlan_df_default = interface['if_options_vxlan_df_default'],
if_options_vxlan_in_key = interface['if_options_vxlan_in_key'],
if_options_vxlan_local_ip = interface['if_options_vxlan_local_ip'],
if_options_vxlan_out_key = interface['if_options_vxlan_out_key'],
if_options_vxlan_remote_ip = interface['if_options_vxlan_remote_ip'],
if_other_config = interface['if_other_config'],
if_statistics = interface['if_statistics'],
if_status = interface['if_status'],
if_port_uuid = interface['if_port_uuid'],
if_port_name = interface['if_port_name'],
if_br_uuid = interface['if_br_uuid'],
if_br_name = interface['if_br_name']
)
G.add_edge(interface['if_name'], interface['if_port_name'])
G.add_edge(interface['if_port_name'], interface['if_br_name'])
## VxLAN 터널 연결 구성
#if if_type == "if_type" and not isStrBlank(data['if_type']):
if if_type == "vxlan":
vxlan_local_ip = interface['if_options_vxlan_local_ip']
vxlan_remote_ip = interface['if_options_vxlan_remote_ip']
vxlan_local_hostname = interface['if_options_vxlan_local_ip']
vxlan_remote_hostname = interface['if_options_vxlan_remote_ip']
#print(vxlan_local_ip, vxlan_remote_ip)
#G.add_edge(interface['if_name'], interface['if_port_name'])
#print(if_name, interface['if_options'])
#print(G.nodes.data())
#print(G.nodes())
#print(G.edges())
#pos = nx.shell_layout(G) # positions for all nodes
pos = nx.spring_layout(G, k=0.05, iterations=40) # positions for all nodes
#pos = nx.spring_layout(G, iterations=50)
#pos = nx.spectral_layout(G, scale=2) # positions for all nodes
#pos = nx.circular_layout(G) # positions for all nodes
#pos = nx.random_layout(G) # positions for all nodes
## Node 종류(Interface/Port/Bridge)별 목록 생성
nodes_interface = [node for node in G.nodes() if node.startswith("I:")]
nodes_port = [node for node in G.nodes() if node.startswith("P:")]
nodes_bridge = [node for node in G.nodes() if node.startswith("B:")]
## if_type 속성에 따른 Node 및 Edge 목록 생성
nodes_if_type_patch = []
nodes_if_type_vxlan = []
nodes_if_type_internal = []
nodes_if_type_normal = []
edge_if_type_patch = []
for node_data in G.nodes(data=True):
if_name = node_data[0]
if len(node_data[1]) > 0:
if_type = node_data[1]['if_type']
if if_type == "patch":
nodes_if_type_patch.append(if_name)
peer_if_hostname = node_data[1]['if_hostname']
peer_if_name = "I:" + node_data[1]['if_options_patch_peer'] + "(" + peer_if_hostname + ")"
edge_if_type_patch.append((if_name, peer_if_name))
elif if_type == "vxlan":
nodes_if_type_vxlan.append(if_name)
elif if_type == "internal":
nodes_if_type_internal.append(if_name)
else:
nodes_if_type_normal.append(if_name)
## Interface Node 그리기
nx.draw_networkx_nodes(G, pos, nodelist=nodes_interface, with_labels=True, node_size=30, node_shape='o', node_color='#F972FF', alpha=0.5, linewidths=1)
## Port Node 그리기
nx.draw_networkx_nodes(G, pos, nodelist=nodes_port, with_labels=True, node_size=40, node_shape='o', node_color='#72B2FF', alpha=0.5, linewidths=1)
## Bridge Node 그리기
nx.draw_networkx_nodes(G, pos, nodelist=nodes_bridge, with_labels=True, node_size=50, node_shape='o', node_color='#FF5634', alpha=0.5, linewidths=1)
## Patch 타입 노드 다시 그리기 (색상 변경)
nx.draw_networkx_nodes(G, pos, nodelist=nodes_if_type_patch, with_labels=True, node_size=50, node_shape='o', node_color='#279700', alpha=0.5, linewidths=1)
## VxLAN 타입 노드 다시 그리기 (색상 변경)
nx.draw_networkx_nodes(G, pos, nodelist=nodes_if_type_vxlan, with_labels=True, node_size=50, node_shape='o', node_color='#FF990F', alpha=0.5, linewidths=1)
## Internal 타입 노드 다시 그리기 (색상 변경)
nx.draw_networkx_nodes(G, pos, nodelist=nodes_if_type_internal, with_labels=True, node_size=50, node_shape='o', node_color='#382000', alpha=0.5, linewidths=1)
## Node Label 그리기
nx.draw_networkx_labels(G, pos, font_size=1, font_family='sans-serif')
## Edge 목록 생성
edge_I2P = [(u, v) for (u, v) in G.edges() if (u.startswith("I:") and v.startswith("P:")) or (u.startswith("P:") and v.startswith("I:"))]
edge_P2B = [(u, v) for (u, v) in G.edges() if (u.startswith("P:") and v.startswith("B:")) or (u.startswith("B:") and v.startswith("P:"))]
## Edge 그리기
nx.draw_networkx_edges(G, pos, edgelist=edge_I2P, width=0.2, alpha=0.5, edge_color='#E67E22')
nx.draw_networkx_edges(G, pos, edgelist=edge_P2B, width=0.5, alpha=0.5, edge_color='#2ECC71')
nx.draw_networkx_edges(G, pos, edgelist=edge_if_type_patch, width=2, alpha=0.3, edge_color='#00FFE8', style="dashed")
plt.axis('off')
#plt.figure(figsize = (10,9))
plt.title("OpenStack Network Connectitivity")
plt.savefig("/var/www/html/test.png", format = "png", dpi = 600)
| 37.746575
| 171
| 0.721829
|
3784efafee0beb9eca270def82573cfe3464f358
| 6,485
|
py
|
Python
|
timeout_decorator/timeout_decorator.py
|
woipot/timeout-decorator
|
0a707dab52b5711eb8cea18afc9441b38f6e07fd
|
[
"MIT"
] | null | null | null |
timeout_decorator/timeout_decorator.py
|
woipot/timeout-decorator
|
0a707dab52b5711eb8cea18afc9441b38f6e07fd
|
[
"MIT"
] | null | null | null |
timeout_decorator/timeout_decorator.py
|
woipot/timeout-decorator
|
0a707dab52b5711eb8cea18afc9441b38f6e07fd
|
[
"MIT"
] | null | null | null |
"""
Timeout decorator.
:copyright: (c) 2012-2013 by PN.
:license: MIT, see LICENSE for more details.
"""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
import sys
import time
import multiprocessing
import signal
from functools import wraps
############################################################
# Timeout
############################################################
# http://www.saltycrane.com/blog/2010/04/using-python-timeout-decorator-uploading-s3/
# Used work of Stephen "Zero" Chappell <Noctis.Skytower@gmail.com>
# in https://code.google.com/p/verse-quiz/source/browse/trunk/timeout.py
class TimeoutError(AssertionError):
    """Raised when a wrapped callable exceeds its time limit.

    Deliberately subclasses ``AssertionError`` (a historical quirk of this
    library) so callers catching assertion failures also catch timeouts.
    """

    def __init__(self, value="Timed Out"):
        # Keep the human-readable message around for later display.
        self.value = value

    def __str__(self):
        # Same rendering as the original: the repr of the stored message.
        return "{!r}".format(self.value)
def _raise_exception(exception, exception_message):
    """Raise *exception*, forwarding *exception_message* when one is given.

    Without a message the exception type's default construction is kept;
    with one, the message becomes its first positional argument.
    """
    raise exception() if exception_message is None else exception(exception_message)
def timeout(seconds=None, use_signals=True, timeout_exception=TimeoutError, exception_message=None, enabled=True):
    """Add a timeout parameter to a function and return it.

    :param seconds: optional time limit in seconds or fractions of a second.
        If None is passed, no timeout is applied.  This adds some flexibility
        to the usage: you can disable timing out depending on the settings.
        Callers may also override the limit per call with a ``timeout=`` kwarg.
    :type seconds: float
    :param use_signals: flag indicating whether signals should be used for
        timing the function out, or the multiprocessing fallback.  When using
        multiprocessing, timeout granularity is limited to 10ths of a second.
    :type use_signals: bool
    :param timeout_exception: exception type raised when the limit is hit.
    :param exception_message: optional message passed to that exception.
    :param enabled: when False the decorator is a no-op and returns the
        function unchanged.
    :raises: TimeoutError if time limit is reached

    It is illegal to pass anything other than a function as the first
    parameter. The function is wrapped and returned to the caller.
    """
    def decorate(function):
        if not enabled:
            return function

        if use_signals:
            def handler(signum, frame):
                _raise_exception(timeout_exception, exception_message)

            @wraps(function)
            def new_function(*args, **kwargs):
                # Per-call ``timeout=`` kwarg overrides the decorator default.
                new_seconds = kwargs.pop('timeout', seconds)
                if not new_seconds:
                    # No effective limit for this call: plain passthrough.
                    return function(*args, **kwargs)
                # BUG FIX: this early-return used to test ``seconds`` instead
                # of ``new_seconds``.  With a None decorator default and a
                # call-time ``timeout=``, the itimer above was armed and then
                # never cancelled, and the old SIGALRM handler was never
                # restored.  Arming the timer only on the guarded path and
                # always disarming it in ``finally`` closes that hole.
                old = signal.signal(signal.SIGALRM, handler)
                signal.setitimer(signal.ITIMER_REAL, new_seconds)
                try:
                    return function(*args, **kwargs)
                finally:
                    signal.setitimer(signal.ITIMER_REAL, 0)
                    signal.signal(signal.SIGALRM, old)
            return new_function
        else:
            @wraps(function)
            def new_function(*args, **kwargs):
                # Delegate to the multiprocessing-based wrapper; it honours a
                # per-call ``timeout=`` kwarg itself.
                timeout_wrapper = _Timeout(function, timeout_exception, exception_message, seconds)
                return timeout_wrapper(*args, **kwargs)
            return new_function
    return decorate
def _target(queue, function, *args, **kwargs):
    """Run *function* with the given arguments and report through *queue*.

    Helper executed inside the worker process created by ``_Timeout``.  A
    ``(True, result)`` pair signals success; on any failure a
    ``(False, exception)`` pair is queued so the parent can re-raise it via
    the ``value`` property.
    """
    try:
        queue.put((True, function(*args, **kwargs)))
    except BaseException as exc:  # equivalent to the classic bare ``except:``
        queue.put((False, exc))
class _Timeout(object):
    """Wrap a function and add a timeout (limit) attribute to it.

    Instances of this class are automatically generated by the add_timeout
    function defined above. Wrapping a function allows asynchronous calls
    to be made and termination of execution after a timeout has passed.
    """

    def __init__(self, function, timeout_exception, exception_message, limit):
        """Initialize instance in preparation for being called."""
        self.__limit = limit  # seconds allowed per call; may be overridden per call
        self.__function = function
        self.__timeout_exception = timeout_exception  # exception type raised on timeout
        self.__exception_message = exception_message  # optional message for that exception
        self.__name__ = function.__name__  # mimic the wrapped function's identity
        self.__doc__ = function.__doc__
        self.__timeout = time.time()  # wall-clock deadline; recomputed in __call__
        self.__process = multiprocessing.Process()  # placeholder, replaced per call
        self.__queue = multiprocessing.Queue()  # placeholder, replaced per call

    def __call__(self, *args, **kwargs):
        """Execute the embedded function object asynchronously.

        The function given to the constructor is transparently called and
        requires that "ready" be intermittently polled. If and when it is
        True, the "value" property may then be checked for returned data.
        """
        # A per-call ``timeout=`` kwarg overrides the decorator-level limit.
        self.__limit = kwargs.pop('timeout', self.__limit)
        # Single-slot queue: holds exactly one (flag, payload) result.
        self.__queue = multiprocessing.Queue(1)
        args = (self.__queue, self.__function) + args
        self.__process = multiprocessing.Process(target=_target,
                                                 args=args,
                                                 kwargs=kwargs)
        self.__process.daemon = True  # don't let the worker outlive the parent
        self.__process.start()
        if self.__limit is not None:
            self.__timeout = self.__limit + time.time()
        # Poll at ~10 ms granularity until the worker reports or the
        # deadline passes (``ready`` raises via ``cancel`` in that case,
        # which is where the advertised 10th-of-a-second granularity
        # limitation comes from).
        while not self.ready:
            time.sleep(0.01)
        return self.value

    def cancel(self):
        """Terminate any possible execution of the embedded function."""
        if self.__process.is_alive():
            self.__process.terminate()
        # Always raises; callers of cancel() never get control back normally.
        _raise_exception(self.__timeout_exception, self.__exception_message)

    @property
    def ready(self):
        """Read-only property indicating status of "value" property."""
        # Past the deadline: kill the worker and raise the timeout exception.
        if self.__limit and self.__timeout < time.time():
            self.cancel()
        # NOTE(review): ``Queue.full()`` on a size-1 queue doubles as a
        # "result available" flag here; documented as approximate in the
        # multiprocessing docs, so both checks are used together.
        return self.__queue.full() and not self.__queue.empty()

    @property
    def value(self):
        """Read-only property containing data returned from function."""
        if self.ready is True:
            # flag is True for a normal return, False when the worker raised.
            flag, load = self.__queue.get()
            if flag:
                return load
            raise load
| 36.432584
| 118
| 0.638551
|
220cf547f0f0491a2f74b9ce8738039febb777bb
| 2,092
|
py
|
Python
|
monasca_api/v2/reference/versions.py
|
swen-ttc/monasca_api
|
dae51730fc887b862a7a7e1b51b9ac7190584dcf
|
[
"Apache-2.0"
] | null | null | null |
monasca_api/v2/reference/versions.py
|
swen-ttc/monasca_api
|
dae51730fc887b862a7a7e1b51b9ac7190584dcf
|
[
"Apache-2.0"
] | null | null | null |
monasca_api/v2/reference/versions.py
|
swen-ttc/monasca_api
|
dae51730fc887b862a7a7e1b51b9ac7190584dcf
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2014 Hewlett-Packard
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import falcon
from oslo_log import log
from monasca_api.api import versions_api
from monasca_api.v2.common.exceptions import HTTPUnprocessableEntityError
LOG = log.getLogger(__name__)
# Registry of the API versions this endpoint advertises.  Each entry's
# self-link ``href`` starts empty and is filled in per request by
# Versions.on_get, since it depends on the request URI.
VERSIONS = {
    'v2.0': {
        'id': 'v2.0',
        'links': [{
            'rel': 'self',
            'href': ''
        }],
        'status': 'CURRENT',
        'updated': "2013-03-06T00:00:00.000Z"
    }
}
class Versions(versions_api.VersionsAPI):
    """Falcon resource listing the API versions this server exposes."""

    def __init__(self):
        super(Versions, self).__init__()

    def on_get(self, req, res, version_id=None):
        """Return every known version, or one version when *version_id* is given.

        Raises HTTPUnprocessableEntityError for an unknown version id.
        """
        base_uri = req.uri.decode('utf8')
        if version_id is None:
            # Listing: point each version's self-link at <request-uri><id>.
            elements = []
            for vid in VERSIONS:
                VERSIONS[vid]['links'][0]['href'] = base_uri + vid
                elements.append(VERSIONS[vid])
            payload = {
                'links': [{
                    'rel': 'self',
                    'href': base_uri
                }],
                'elements': elements
            }
        elif version_id in VERSIONS:
            # Single version: its self-link is the request URI itself.
            VERSIONS[version_id]['links'][0]['href'] = base_uri
            payload = VERSIONS[version_id]
        else:
            raise HTTPUnprocessableEntityError('Invalid version',
                                               'No versions found matching ' + version_id)
        res.body = json.dumps(payload)
        res.status = falcon.HTTP_200
| 32.184615
| 94
| 0.580784
|
4516e376795f887baedceadbfbf96ae2b80ae773
| 8,372
|
py
|
Python
|
third_party/BraTS2018_tumor/train.py
|
zwxu064/RANP
|
92135583e0ced21fa5634823b289c5aea366de21
|
[
"MIT"
] | 9
|
2020-11-17T08:44:55.000Z
|
2022-02-14T12:02:32.000Z
|
third_party/BraTS2018_tumor/train.py
|
zwxu064/RANP
|
92135583e0ced21fa5634823b289c5aea366de21
|
[
"MIT"
] | null | null | null |
third_party/BraTS2018_tumor/train.py
|
zwxu064/RANP
|
92135583e0ced21fa5634823b289c5aea366de21
|
[
"MIT"
] | 1
|
2021-03-17T02:34:12.000Z
|
2021-03-17T02:34:12.000Z
|
import argparse
import os
import shutil
import time
import logging
import random
import torch
import torch.nn.functional as F
import torch.backends.cudnn as cudnn
import torch.optim
from torch.utils.data import DataLoader
cudnn.benchmark = True
import numpy as np
import models
from models import criterions
from data import datasets
from data.sampler import CycleSampler
from data.data_utils import add_mask, init_fn
from utils import Parser
from predict import validate, AverageMeter
# Command-line interface.  The commented ``-cfg`` variants are kept as a
# record of previous experiment configurations.
parser = argparse.ArgumentParser()
#parser.add_argument('-cfg', '--cfg', default='deepmedic_nr_ce', type=str)
#parser.add_argument('-cfg', '--cfg', default='deepmedic_nr', type=str)
#parser.add_argument('-cfg', '--cfg', default='deepmedic_ce', type=str)
#parser.add_argument('-cfg', '--cfg', default='deepmedic_nr_ce_all', type=str)
#parser.add_argument('-cfg', '--cfg', default='deepmedic_ce_50_10', type=str)
#parser.add_argument('-cfg', '--cfg', default='deepmedic_ce_50_50_all', type=str)
#parser.add_argument('-cfg', '--cfg', default='deepmedic_ce_50_50_check', type=str)
parser.add_argument('-cfg', '--cfg', default='deepmedic_ce_50_50_redo', type=str)
#parser.add_argument('-cfg', '--cfg', default='deepmedic_ce_50_50_c25_redo', type=str)
#parser.add_argument('-cfg', '--cfg', default='deepmedic_ce_50_50_all', type=str)
parser.add_argument('-gpu', '--gpu', default='0', type=str)
parser.add_argument('-out', '--out', default='', type=str)

path = os.path.dirname(__file__)

## parse arguments
# NOTE: module-level side effects -- arguments are parsed (and merged with
# the named experiment config via Parser) at import time.
args = parser.parse_args()
args = Parser(args.cfg, log='train').add_args(args)
args.gpu = str(args.gpu)

ckpts = args.makedir()  # checkpoint directory for this experiment

# Resume automatically from the last checkpoint when one exists and no
# explicit resume path was configured.
resume = os.path.join(ckpts, 'model_last.tar')
if not args.resume and os.path.exists(resume):
    args.resume = resume
def main():
    """Train the configured network, checkpoint periodically, validate at the end.

    All settings come from the module-level ``args`` (argparse merged with the
    experiment config file).  Side effects: writes checkpoints under ``ckpts``
    and emits log records via ``logging``.
    """
    # setup environments and seeds
    # BUG FIX: this was hard-coded to "2,3", silently ignoring the parsed
    # --gpu argument (the intended assignment had been commented out).
    os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu
    torch.manual_seed(args.seed)
    torch.cuda.manual_seed(args.seed)
    random.seed(args.seed)
    np.random.seed(args.seed)

    # setup networks
    Network = getattr(models, args.net)
    model = Network(**args.net_params)
    model = model.cuda()

    optimizer = getattr(torch.optim, args.opt)(
        model.parameters(), **args.opt_params)
    criterion = getattr(criterions, args.criterion)

    msg = ''
    # optionally resume from a checkpoint
    if args.resume:
        if os.path.isfile(args.resume):
            print("=> loading checkpoint '{}'".format(args.resume))
            checkpoint = torch.load(args.resume)
            args.start_iter = checkpoint['iter']
            model.load_state_dict(checkpoint['state_dict'])
            optimizer.load_state_dict(checkpoint['optim_dict'])
            msg = ("=> loaded checkpoint '{}' (iter {})"
                   .format(args.resume, checkpoint['iter']))
        else:
            msg = "=> no checkpoint found at '{}'".format(args.resume)
    else:
        msg = '-------------- New training session ----------------'
    msg += '\n' + str(args)
    logging.info(msg)

    # Data loading code
    Dataset = getattr(datasets, args.dataset)

    # The loader will get 1000 patches from 50 subjects for each sub epoch
    # each subject sample 20 patches
    train_list = os.path.join(args.data_dir, args.train_list)
    train_set = Dataset(train_list, root=args.data_dir,
                        for_train=True, num_patches=args.num_patches,
                        transforms=args.train_transforms,
                        sample_size=args.sample_size, sub_sample_size=args.sub_sample_size,
                        target_size=args.target_size)

    # Total iteration budget, minus whatever a resumed run already completed.
    num_iters = args.num_iters or (len(train_set) * args.num_epochs) // args.batch_size
    num_iters -= args.start_iter
    train_sampler = CycleSampler(len(train_set), num_iters*args.batch_size)
    train_loader = DataLoader(
        train_set,
        batch_size=args.batch_size,
        collate_fn=train_set.collate, sampler=train_sampler,
        num_workers=args.workers, pin_memory=True, worker_init_fn=init_fn)

    if args.valid_list:
        valid_list = os.path.join(args.data_dir, args.valid_list)
        valid_set = Dataset(valid_list, root=args.data_dir,
                            for_train=False, crop=False,
                            transforms=args.test_transforms,
                            sample_size=args.sample_size, sub_sample_size=args.sub_sample_size,
                            target_size=args.target_size)
        valid_loader = DataLoader(
            valid_set, batch_size=1, shuffle=False,
            collate_fn=valid_set.collate,
            num_workers=4, pin_memory=True)

    # Loader over the training subjects in evaluation mode (no crop/augment),
    # kept for the (currently commented-out) training-set validation pass.
    train_valid_set = Dataset(train_list, root=args.data_dir,
                              for_train=False, crop=False,
                              transforms=args.test_transforms,
                              sample_size=args.sample_size, sub_sample_size=args.sub_sample_size,
                              target_size=args.target_size)
    train_valid_loader = DataLoader(
        train_valid_set, batch_size=1, shuffle=False,
        collate_fn=train_valid_set.collate,
        num_workers=4, pin_memory=True)

    start = time.time()

    # Convert epoch-based schedule/frequency settings into iteration counts.
    enum_batches = len(train_set)/float(args.batch_size)
    args.schedule = {int(k*enum_batches): v for k, v in args.schedule.items()}
    args.save_freq = int(enum_batches * args.save_freq)
    args.valid_freq = int(enum_batches * args.valid_freq)

    losses = AverageMeter()
    torch.set_grad_enabled(True)

    for i, (data, label) in enumerate(train_loader, args.start_iter):
        ## validation
        #if args.valid_list and (i % args.valid_freq) == 0:
        #    logging.info('-'*50)
        #    msg = 'Iter {}, Epoch {:.4f}, {}'.format(i, i/enum_batches, 'validation')
        #    logging.info(msg)
        #    with torch.no_grad():
        #        validate(valid_loader, model, batch_size=args.mini_batch_size, names=valid_set.names)

        # actual training
        adjust_learning_rate(optimizer, i)

        # Split the batch into mini-batches that fit on the GPU.
        for data in zip(*[d.split(args.mini_batch_size) for d in data]):
            data = [t.cuda(non_blocking=True) for t in data]
            x1, x2, target = data[:3]
            if len(data) > 3:  # has mask
                m1, m2 = data[3:]
                x1 = add_mask(x1, m1, 1)
                x2 = add_mask(x2, m2, 1)

            # compute output
            output = model((x1, x2))  # output nx5x9x9x9, target nx9x9x9
            loss = criterion(output, target, args.alpha)

            # measure accuracy and record loss
            losses.update(loss.item(), target.numel())

            # compute gradient and do SGD step
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()

        # Periodic checkpoint, named after the (1-based) epoch just finished.
        if (i+1) % args.save_freq == 0:
            epoch = int((i+1) // enum_batches)
            file_name = os.path.join(ckpts, 'model_epoch_{}.tar'.format(epoch))
            torch.save({
                'iter': i+1,
                'state_dict': model.state_dict(),
                'optim_dict': optimizer.state_dict(),
            },
                file_name)

        msg = 'Iter {0:}, Epoch {1:.4f}, Loss {2:.4f}'.format(
            i+1, (i+1)/enum_batches, losses.avg)
        logging.info(msg)

        losses.reset()

    # Final checkpoint, always written as model_last.tar for auto-resume.
    i = num_iters + args.start_iter
    file_name = os.path.join(ckpts, 'model_last.tar')
    torch.save({
        'iter': i,
        'state_dict': model.state_dict(),
        'optim_dict': optimizer.state_dict(),
    },
        file_name)

    if args.valid_list:
        logging.info('-'*50)
        msg = 'Iter {}, Epoch {:.4f}, {}'.format(i, i/enum_batches, 'validate validation data')
        logging.info(msg)

        with torch.no_grad():
            validate(valid_loader, model, batch_size=args.mini_batch_size, names=valid_set.names, out_dir = args.out)

        #logging.info('-'*50)
        #msg = 'Iter {}, Epoch {:.4f}, {}'.format(i, i/enum_batches, 'validate training data')
        #logging.info(msg)
        #with torch.no_grad():
        #    validate(train_valid_loader, model, batch_size=args.mini_batch_size, names=train_valid_set.names, verbose=False)

    msg = 'total time: {:.4f} minutes'.format((time.time() - start)/60)
    logging.info(msg)
def adjust_learning_rate(optimizer, epoch):
    """Decay every parameter group's learning rate by 10x at scheduled steps.

    The module-level ``args.schedule`` holds the decay points (iteration
    counts after conversion in main()); nothing happens unless ``epoch + 1``
    is one of them.
    """
    if epoch + 1 not in args.schedule:
        return
    for group in optimizer.param_groups:
        group['lr'] = group['lr'] * 0.1
# Script entry point.
if __name__ == '__main__':
    main()
| 36.086207
| 125
| 0.634496
|
0330bd09d3a297ed0c7910e21e972c3fe53af434
| 2,525
|
py
|
Python
|
kafka/cruise-control/cruise-control-client/cruisecontrolclient/client/CCParameter/PositiveIntegerParameter.py
|
smthkissinger/docker-images
|
35e868295d04fa780325ada4168381f1e80e8fe4
|
[
"BSD-3-Clause"
] | 63
|
2018-02-04T03:31:22.000Z
|
2022-03-07T08:27:39.000Z
|
kafka/cruise-control/cruise-control-client/cruisecontrolclient/client/CCParameter/PositiveIntegerParameter.py
|
smthkissinger/docker-images
|
35e868295d04fa780325ada4168381f1e80e8fe4
|
[
"BSD-3-Clause"
] | 93
|
2019-07-18T20:04:31.000Z
|
2021-04-03T01:02:01.000Z
|
kafka/cruise-control/cruise-control-client/cruisecontrolclient/client/CCParameter/PositiveIntegerParameter.py
|
smthkissinger/docker-images
|
35e868295d04fa780325ada4168381f1e80e8fe4
|
[
"BSD-3-Clause"
] | 40
|
2018-01-22T16:31:16.000Z
|
2022-03-08T04:40:42.000Z
|
from cruisecontrolclient.client.CCParameter.Parameter import AbstractParameter
class AbstractPositiveIntegerParameter(AbstractParameter):
    """Base class for Cruise Control parameters whose value must be a positive integer.

    Subclasses only declare ``name``/``description``/``argparse_properties``;
    the shared validation lives here.
    """

    def __init__(self, value: int):
        AbstractParameter.__init__(self, value)

    def validate_value(self):
        """Raise ValueError unless ``self.value`` is an integer >= 1.

        Uses isinstance rather than an exact ``type(...) != int`` comparison
        so legitimate ``int`` subclasses pass; ``bool`` is still rejected
        explicitly (it subclasses ``int`` but is not a meaningful count).
        """
        if not isinstance(self.value, int) or isinstance(self.value, bool):
            raise ValueError(f"{self.value} is not an integer value")
        if self.value < 1:
            raise ValueError(f"{self.value} must be a positive integer")
class ConcurrentLeaderMovementsParameter(AbstractPositiveIntegerParameter):
    """concurrent_leader_movements=[POSITIVE-INTEGER]"""
    # Endpoint query-parameter name and human-readable help text.
    name = 'concurrent_leader_movements'
    description = 'The maximum number of concurrent leadership movements across the entire cluster'
    # Flag aliases and argparse keyword arguments used to build the CLI option.
    argparse_properties = {
        'args': ('--leader-concurrency', '--leadership-concurrency', '--concurrent-leader-movements'),
        'kwargs': dict(metavar='K', help=description, type=int)
    }
class ConcurrentPartitionMovementsPerBrokerParameter(AbstractPositiveIntegerParameter):
    """concurrent_partition_movements_per_broker=[POSITIVE-INTEGER]"""
    # Query-parameter name sent to the Cruise Control endpoint.
    name = 'concurrent_partition_movements_per_broker'
    description = 'The maximum number of concurrent partition movements per broker'
    # CLI flag aliases plus the argparse keyword arguments for the option.
    argparse_properties = {
        'args': ('--concurrency', '--concurrent-partition-movements-per-broker'),
        'kwargs': dict(metavar='K', help=description, type=int)
    }
class DataFromParameter(AbstractPositiveIntegerParameter):
    """data_from=[valid_windows/valid_partitions]"""
    # Query-parameter name sent to the Cruise Control endpoint.
    name = 'data_from'
    description = "The number of valid [windows, partitions] from which to use data"
    # CLI flag alias plus the argparse keyword arguments for the option.
    argparse_properties = {
        'args': ('--data-from',),
        'kwargs': dict(metavar='K', help=description, type=int)
    }
class EntriesParameter(AbstractPositiveIntegerParameter):
    """entries=[number-of-entries-to-show]"""
    # Query-parameter name sent to the Cruise Control endpoint.
    name = 'entries'
    description = 'The number of entries to show in the response'
    # CLI flag aliases plus the argparse keyword arguments for the option.
    argparse_properties = {
        'args': ('--number-of-entries-to-show', '--num-entries'),
        'kwargs': dict(metavar='K', help=description, type=int)
    }
class ReplicationFactorParameter(AbstractPositiveIntegerParameter):
    """replication_factor=[target_replication_factor]"""
    # Query-parameter name sent to the Cruise Control endpoint.
    name = 'replication_factor'
    description = 'The target replication factor to which the specified topics should be set'
    # CLI flag alias plus the argparse keyword arguments for the option.
    argparse_properties = {
        'args': ('--replication-factor',),
        'kwargs': dict(metavar='K', help=description, type=int)
    }
| 40.079365
| 102
| 0.709307
|
e94d42fb8f8db7b361a557a5fb488e243fbf6a08
| 5,238
|
py
|
Python
|
dex-net/src/dexnet/visualization/visualizer2d.py
|
peter0749/PointNetGPD
|
5e2be543057657f1faaef87e80074d392823e5df
|
[
"MIT"
] | 193
|
2018-11-02T20:37:18.000Z
|
2022-03-18T05:11:16.000Z
|
dex-net/src/dexnet/visualization/visualizer2d.py
|
peter0749/PointNetGPD
|
5e2be543057657f1faaef87e80074d392823e5df
|
[
"MIT"
] | 48
|
2019-02-14T01:44:10.000Z
|
2022-03-20T08:35:16.000Z
|
dex-net/src/dexnet/visualization/visualizer2d.py
|
peter0749/PointNetGPD
|
5e2be543057657f1faaef87e80074d392823e5df
|
[
"MIT"
] | 65
|
2018-11-04T05:13:46.000Z
|
2022-02-09T13:08:55.000Z
|
# -*- coding: utf-8 -*-
"""
Copyright ©2017. The Regents of the University of California (Regents). All Rights Reserved.
Permission to use, copy, modify, and distribute this software and its documentation for educational,
research, and not-for-profit purposes, without fee and without a signed licensing agreement, is
hereby granted, provided that the above copyright notice, this paragraph and the following two
paragraphs appear in all copies, modifications, and distributions. Contact The Office of Technology
Licensing, UC Berkeley, 2150 Shattuck Avenue, Suite 510, Berkeley, CA 94720-1620, (510) 643-
7201, otl@berkeley.edu, http://ipira.berkeley.edu/industry-info for commercial licensing opportunities.
IN NO EVENT SHALL REGENTS BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT, SPECIAL,
INCIDENTAL, OR CONSEQUENTIAL DAMAGES, INCLUDING LOST PROFITS, ARISING OUT OF
THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN IF REGENTS HAS BEEN
ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED
HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE
MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
"""
"""
Dex-Net 2D visualizer extension
Author: Jeff Mahler
"""
import copy
import json
import IPython
import logging
import numpy as np
import os
import matplotlib.pyplot as plt
import matplotlib.tri as mtri
import scipy.spatial.distance as ssd
from visualization import Visualizer2D
class DexNetVisualizer2D(Visualizer2D):
    """
    Dex-Net extension of the base pyplot 2D visualization tools
    """

    @staticmethod
    def grasp(grasp, color='r', arrow_len=4, arrow_head_len=2, arrow_head_width=3,
              arrow_width=1, jaw_len=3, jaw_width=3.0,
              grasp_center_size=7.5, grasp_center_thickness=2.5,
              grasp_center_style='+', grasp_axis_width=1,
              grasp_axis_style='--', line_width=8.0, show_center=True, show_axis=False, scale=1.0):
        """
        Plots a 2D grasp with arrow and jaw style using matplotlib

        Parameters
        ----------
        grasp : :obj:`Grasp2D`
            2D grasp to plot
        color : :obj:`str`
            color of plotted grasp
        arrow_len : float
            length of arrow body
        arrow_head_len : float
            length of arrow head
        arrow_head_width : float
            width of arrow head
        arrow_width : float
            width of arrow body
        jaw_len : float
            length of jaw line
        jaw_width : float
            line width of jaw line
        grasp_center_size : float
            marker size of the grasp center
        grasp_center_thickness : float
            thickness of grasp center
        grasp_center_style : :obj:`str`
            style of center of grasp
        grasp_axis_width : float
            line width of grasp axis
        grasp_axis_style : :obj:`str
            style of grasp axis line
        line_width : float
            kept for API compatibility; not used by the current arrow-style
            rendering (it belonged to the retired line-style rendering)
        show_center : bool
            whether or not to plot the grasp center
        show_axis : bool
            whether or not to plot the grasp axis
        scale : float
            scale factor applied to all drawn dimensions
        """
        # plot grasp center
        if show_center:
            plt.plot(grasp.center[1], grasp.center[0], c=color, marker=grasp_center_style,
                     mew=scale * grasp_center_thickness, ms=scale * grasp_center_size)

        # compute axis and jaw locations
        axis = np.array([np.sin(grasp.angle), np.cos(grasp.angle)])
        g1 = grasp.center - (grasp.width / 2) * axis
        g2 = grasp.center + (grasp.width / 2) * axis
        g1p = g1 - scale * arrow_len * axis  # start location of grasp jaw 1
        g2p = g2 + scale * arrow_len * axis  # start location of grasp jaw 2

        # plot grasp axis
        if show_axis:
            plt.plot([g1[1], g2[1]], [g1[0], g2[0]], color=color, linewidth=scale * grasp_axis_width,
                     linestyle=grasp_axis_style)

        # direction of jaw line
        jaw_dir = scale * jaw_len * np.array([axis[1], -axis[0]])

        # length of arrow body (total length minus the head)
        alpha = scale * (arrow_len - arrow_head_len)

        # NOTE: the old ``g1_line``/``g2_line`` computations were removed --
        # they only fed plt.plot calls that had already been commented out.

        # plot first jaw: arrow pointing inward plus a perpendicular jaw line
        plt.arrow(g1p[1], g1p[0], alpha * axis[1], alpha * axis[0], width=scale * arrow_width,
                  head_width=scale * arrow_head_width, head_length=scale * arrow_head_len, fc=color, ec=color)
        jaw_line1 = np.c_[g1 + jaw_dir, g1 - jaw_dir].T
        plt.plot(jaw_line1[:, 1], jaw_line1[:, 0], linewidth=scale * jaw_width, c=color)

        # plot second jaw (mirror image of the first)
        plt.arrow(g2p[1], g2p[0], -alpha * axis[1], -alpha * axis[0], width=scale * arrow_width,
                  head_width=scale * arrow_head_width, head_length=scale * arrow_head_len, fc=color, ec=color)
        jaw_line2 = np.c_[g2 + jaw_dir, g2 - jaw_dir].T
        plt.plot(jaw_line2[:, 1], jaw_line2[:, 0], linewidth=scale * jaw_width, c=color)
| 42.934426
| 110
| 0.655212
|
649ee74e6feec0840c517ef1bc181dad34cecedf
| 21,095
|
py
|
Python
|
fhirbug/Fhir/Resources/valueset.py
|
VerdantAI/fhirbug
|
8a8e2555c0edfeee0a7edbc8d67f2fcb2edd3c2d
|
[
"MIT"
] | 8
|
2019-01-06T18:11:20.000Z
|
2022-02-24T02:06:55.000Z
|
fhirbug/Fhir/Resources/valueset.py
|
VerdantAI/fhirbug
|
8a8e2555c0edfeee0a7edbc8d67f2fcb2edd3c2d
|
[
"MIT"
] | 5
|
2019-01-25T14:15:35.000Z
|
2021-06-01T23:22:41.000Z
|
fhirbug/Fhir/Resources/valueset.py
|
VerdantAI/fhirbug
|
8a8e2555c0edfeee0a7edbc8d67f2fcb2edd3c2d
|
[
"MIT"
] | 3
|
2020-10-14T23:09:29.000Z
|
2021-08-09T19:27:31.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 4.0.0-a53ec6ee1b (http://hl7.org/fhir/StructureDefinition/ValueSet) on 2019-01-25.
# 2019, SMART Health IT.
##
from . import domainresource
class ValueSet(domainresource.DomainResource):
    """ A set of codes drawn from one or more code systems.

    A ValueSet resource instance specifies a set of codes drawn from one or
    more code systems, intended for use in a particular context. Value sets
    link between [CodeSystem](codesystem.html) definitions and their use in
    [coded elements](terminologies.html).
    """

    resource_type = "ValueSet"

    def __init__(self, jsondict=None, strict=True, **kwargs):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # Content logical definition of the value set (CLD).
        # Type `ValueSetCompose` (represented as `dict` in JSON).
        self.compose = None

        # Contact details for the publisher.
        # List of `ContactDetail` items (represented as `dict` in JSON).
        self.contact = None

        # Use and/or publishing restrictions. Type `str`.
        self.copyright = None

        # Date last changed. Type `FHIRDate` (represented as `str` in JSON).
        self.date = None

        # Natural language description of the value set. Type `str`.
        self.description = None

        # Used when the value set is "expanded".
        # Type `ValueSetExpansion` (represented as `dict` in JSON).
        self.expansion = None

        # For testing purposes, not real usage. Type `bool`.
        self.experimental = None

        # Additional identifier for the value set (business identifier).
        # List of `Identifier` items (represented as `dict` in JSON).
        self.identifier = None

        # Indicates whether or not any change to the content logical
        # definition may occur. Type `bool`.
        self.immutable = None

        # Intended jurisdiction for value set (if applicable).
        # List of `CodeableConcept` items (represented as `dict` in JSON).
        self.jurisdiction = None

        # Name for this value set (computer friendly). Type `str`.
        self.name = None

        # Name of the publisher (organization or individual). Type `str`.
        self.publisher = None

        # Why this value set is defined. Type `str`.
        self.purpose = None

        # draft | active | retired | unknown. Type `str`.
        self.status = None

        # Name for this value set (human friendly). Type `str`.
        self.title = None

        # Canonical identifier for this value set, represented as a URI
        # (globally unique). Type `str`.
        self.url = None

        # The context that the content is intended to support.
        # List of `UsageContext` items (represented as `dict` in JSON).
        self.useContext = None

        # Business version of the value set. Type `str`.
        self.version = None

        super(ValueSet, self).__init__(jsondict=jsondict, strict=strict, **kwargs)

    def elementProperties(self):
        properties = super(ValueSet, self).elementProperties()
        properties += [
            ("compose", "compose", ValueSetCompose, False, None, False),
            ("contact", "contact", contactdetail.ContactDetail, True, None, False),
            ("copyright", "copyright", str, False, None, False),
            ("date", "date", fhirdate.FHIRDate, False, None, False),
            ("description", "description", str, False, None, False),
            ("expansion", "expansion", ValueSetExpansion, False, None, False),
            ("experimental", "experimental", bool, False, None, False),
            ("identifier", "identifier", identifier.Identifier, True, None, False),
            ("immutable", "immutable", bool, False, None, False),
            ("jurisdiction", "jurisdiction", codeableconcept.CodeableConcept, True, None, False),
            ("name", "name", str, False, None, False),
            ("publisher", "publisher", str, False, None, False),
            ("purpose", "purpose", str, False, None, False),
            ("status", "status", str, False, None, True),
            ("title", "title", str, False, None, False),
            ("url", "url", str, False, None, False),
            ("useContext", "useContext", usagecontext.UsageContext, True, None, False),
            ("version", "version", str, False, None, False),
        ]
        return properties
from . import backboneelement
class ValueSetCompose(backboneelement.BackboneElement):
    """ Content logical definition of the value set (CLD).

    A set of criteria that define the contents of the value set by including or
    excluding codes selected from the specified code system(s) that the value
    set draws from. This is also known as the Content Logical Definition (CLD).
    """

    resource_type = "ValueSetCompose"

    def __init__(self, jsondict=None, strict=True, **kwargs):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # Explicitly exclude codes from a code system or other value sets.
        # List of `ValueSetComposeInclude` items (represented as `dict` in JSON).
        self.exclude = None

        # Whether inactive codes are in the value set. Type `bool`.
        self.inactive = None

        # Include one or more codes from a code system or other value set(s).
        # List of `ValueSetComposeInclude` items (represented as `dict` in JSON).
        self.include = None

        # Fixed date for references with no specified version (transitive).
        # Type `FHIRDate` (represented as `str` in JSON).
        self.lockedDate = None

        super(ValueSetCompose, self).__init__(jsondict=jsondict, strict=strict, **kwargs)

    def elementProperties(self):
        properties = super(ValueSetCompose, self).elementProperties()
        properties += [
            ("exclude", "exclude", ValueSetComposeInclude, True, None, False),
            ("inactive", "inactive", bool, False, None, False),
            ("include", "include", ValueSetComposeInclude, True, None, True),
            ("lockedDate", "lockedDate", fhirdate.FHIRDate, False, None, False),
        ]
        return properties
class ValueSetComposeInclude(backboneelement.BackboneElement):
    """ Include one or more codes from a code system or other value set(s).
    """

    resource_type = "ValueSetComposeInclude"

    def __init__(self, jsondict=None, strict=True, **kwargs):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # A concept defined in the system.
        # List of `ValueSetComposeIncludeConcept` items (represented as `dict` in JSON).
        self.concept = None

        # Select codes/concepts by their properties (including relationships).
        # List of `ValueSetComposeIncludeFilter` items (represented as `dict` in JSON).
        self.filter = None

        # The system the codes come from. Type `str`.
        self.system = None

        # Select the contents included in this value set. List of `str` items.
        self.valueSet = None

        # Specific version of the code system referred to. Type `str`.
        self.version = None

        super(ValueSetComposeInclude, self).__init__(jsondict=jsondict, strict=strict, **kwargs)

    def elementProperties(self):
        properties = super(ValueSetComposeInclude, self).elementProperties()
        properties += [
            ("concept", "concept", ValueSetComposeIncludeConcept, True, None, False),
            ("filter", "filter", ValueSetComposeIncludeFilter, True, None, False),
            ("system", "system", str, False, None, False),
            ("valueSet", "valueSet", str, True, None, False),
            ("version", "version", str, False, None, False),
        ]
        return properties
class ValueSetComposeIncludeConcept(backboneelement.BackboneElement):
    """ A concept defined in the system.

    Specifies a concept to be included or excluded.
    """

    resource_type = "ValueSetComposeIncludeConcept"

    def __init__(self, jsondict=None, strict=True, **kwargs):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # Code or expression from system. Type `str`.
        self.code = None

        # Additional representations for this concept.
        # List of `ValueSetComposeIncludeConceptDesignation` items (represented as `dict` in JSON).
        self.designation = None

        # Text to display for this code for this value set in this valueset.
        # Type `str`.
        self.display = None

        super(ValueSetComposeIncludeConcept, self).__init__(jsondict=jsondict, strict=strict, **kwargs)

    def elementProperties(self):
        properties = super(ValueSetComposeIncludeConcept, self).elementProperties()
        properties += [
            ("code", "code", str, False, None, True),
            ("designation", "designation", ValueSetComposeIncludeConceptDesignation, True, None, False),
            ("display", "display", str, False, None, False),
        ]
        return properties
class ValueSetComposeIncludeConceptDesignation(backboneelement.BackboneElement):
    """ Additional representations for this concept.

    Additional representations for this concept when used in this value set -
    other languages, aliases, specialized purposes, used for particular
    purposes, etc.
    """

    resource_type = "ValueSetComposeIncludeConceptDesignation"

    def __init__(self, jsondict=None, strict=True, **kwargs):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # Human language of the designation. Type `str`.
        self.language = None

        # Types of uses of designations.
        # Type `Coding` (represented as `dict` in JSON).
        self.use = None

        # The text value for this designation. Type `str`.
        self.value = None

        super(ValueSetComposeIncludeConceptDesignation, self).__init__(jsondict=jsondict, strict=strict, **kwargs)

    def elementProperties(self):
        properties = super(ValueSetComposeIncludeConceptDesignation, self).elementProperties()
        properties += [
            ("language", "language", str, False, None, False),
            ("use", "use", coding.Coding, False, None, False),
            ("value", "value", str, False, None, True),
        ]
        return properties
class ValueSetComposeIncludeFilter(backboneelement.BackboneElement):
    """ Select codes/concepts by their properties (including relationships).

    Select concepts by specify a matching criterion based on the properties
    (including relationships) defined by the system, or on filters defined by
    the system. If multiple filters are specified, they SHALL all be true.
    """

    resource_type = "ValueSetComposeIncludeFilter"

    def __init__(self, jsondict=None, strict=True, **kwargs):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # = | is-a | descendent-of | is-not-a | regex | in | not-in |
        # generalizes | exists. Type `str`.
        self.op = None

        # A property/filter defined by the code system. Type `str`.
        self.property = None

        # Code from the system, or regex criteria, or boolean value for
        # exists. Type `str`.
        self.value = None

        super(ValueSetComposeIncludeFilter, self).__init__(jsondict=jsondict, strict=strict, **kwargs)

    def elementProperties(self):
        properties = super(ValueSetComposeIncludeFilter, self).elementProperties()
        properties += [
            ("op", "op", str, False, None, True),
            ("property", "property", str, False, None, True),
            ("value", "value", str, False, None, True),
        ]
        return properties
class ValueSetExpansion(backboneelement.BackboneElement):
    """ Used when the value set is "expanded".

    A value set can also be "expanded", where the value set is turned into a
    simple collection of enumerated codes. This element holds the expansion, if
    it has been performed.
    """

    resource_type = "ValueSetExpansion"

    def __init__(self, jsondict=None, strict=True, **kwargs):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # Codes in the value set.
        # List of `ValueSetExpansionContains` items (represented as `dict` in JSON).
        self.contains = None

        # Identifies the value set expansion (business identifier). Type `str`.
        self.identifier = None

        # Offset at which this resource starts. Type `int`.
        self.offset = None

        # Parameter that controlled the expansion process.
        # List of `ValueSetExpansionParameter` items (represented as `dict` in JSON).
        self.parameter = None

        # Time ValueSet expansion happened.
        # Type `FHIRDate` (represented as `str` in JSON).
        self.timestamp = None

        # Total number of codes in the expansion. Type `int`.
        self.total = None

        super(ValueSetExpansion, self).__init__(jsondict=jsondict, strict=strict, **kwargs)

    def elementProperties(self):
        properties = super(ValueSetExpansion, self).elementProperties()
        properties += [
            ("contains", "contains", ValueSetExpansionContains, True, None, False),
            ("identifier", "identifier", str, False, None, False),
            ("offset", "offset", int, False, None, False),
            ("parameter", "parameter", ValueSetExpansionParameter, True, None, False),
            ("timestamp", "timestamp", fhirdate.FHIRDate, False, None, True),
            ("total", "total", int, False, None, False),
        ]
        return properties
class ValueSetExpansionContains(backboneelement.BackboneElement):
    """ Codes in the value set.

    The codes that are contained in the value set expansion.
    """

    resource_type = "ValueSetExpansionContains"

    def __init__(self, jsondict=None, strict=True, **kwargs):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # If user cannot select this entry. Type `bool`.
        self.abstract = None

        # Code - if blank, this is not a selectable code. Type `str`.
        self.code = None

        # Codes contained under this entry.
        # List of `ValueSetExpansionContains` items (represented as `dict` in JSON).
        self.contains = None

        # Additional representations for this item.
        # List of `ValueSetComposeIncludeConceptDesignation` items (represented as `dict` in JSON).
        self.designation = None

        # User display for the concept. Type `str`.
        self.display = None

        # If concept is inactive in the code system. Type `bool`.
        self.inactive = None

        # System value for the code. Type `str`.
        self.system = None

        # Version in which this code/display is defined. Type `str`.
        self.version = None

        super(ValueSetExpansionContains, self).__init__(jsondict=jsondict, strict=strict, **kwargs)

    def elementProperties(self):
        properties = super(ValueSetExpansionContains, self).elementProperties()
        properties += [
            ("abstract", "abstract", bool, False, None, False),
            ("code", "code", str, False, None, False),
            ("contains", "contains", ValueSetExpansionContains, True, None, False),
            ("designation", "designation", ValueSetComposeIncludeConceptDesignation, True, None, False),
            ("display", "display", str, False, None, False),
            ("inactive", "inactive", bool, False, None, False),
            ("system", "system", str, False, None, False),
            ("version", "version", str, False, None, False),
        ]
        return properties
class ValueSetExpansionParameter(backboneelement.BackboneElement):
    """ Parameter that controlled the expansion process.

    A parameter that controlled the expansion process. These parameters may be
    used by users of expanded value sets to check whether the expansion is
    suitable for a particular purpose, or to pick the correct expansion.
    """

    resource_type = "ValueSetExpansionParameter"

    def __init__(self, jsondict=None, strict=True, **kwargs):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # Name as assigned by the client or server. Type `str`.
        self.name = None

        # Value of the named parameter. Type `bool`.
        self.valueBoolean = None

        # Value of the named parameter. Type `str`.
        self.valueCode = None

        # Value of the named parameter.
        # Type `FHIRDate` (represented as `str` in JSON).
        self.valueDateTime = None

        # Value of the named parameter. Type `float`.
        self.valueDecimal = None

        # Value of the named parameter. Type `int`.
        self.valueInteger = None

        # Value of the named parameter. Type `str`.
        self.valueString = None

        # Value of the named parameter. Type `str`.
        self.valueUri = None

        super(ValueSetExpansionParameter, self).__init__(jsondict=jsondict, strict=strict, **kwargs)

    def elementProperties(self):
        properties = super(ValueSetExpansionParameter, self).elementProperties()
        properties += [
            ("name", "name", str, False, None, True),
            ("valueBoolean", "valueBoolean", bool, False, "value", False),
            ("valueCode", "valueCode", str, False, "value", False),
            ("valueDateTime", "valueDateTime", fhirdate.FHIRDate, False, "value", False),
            ("valueDecimal", "valueDecimal", float, False, "value", False),
            ("valueInteger", "valueInteger", int, False, "value", False),
            ("valueString", "valueString", str, False, "value", False),
            ("valueUri", "valueUri", str, False, "value", False),
        ]
        return properties
import sys
try:
from . import codeableconcept
except ImportError:
codeableconcept = sys.modules[__package__ + '.codeableconcept']
try:
from . import coding
except ImportError:
coding = sys.modules[__package__ + '.coding']
try:
from . import contactdetail
except ImportError:
contactdetail = sys.modules[__package__ + '.contactdetail']
try:
from . import fhirdate
except ImportError:
fhirdate = sys.modules[__package__ + '.fhirdate']
try:
from . import identifier
except ImportError:
identifier = sys.modules[__package__ + '.identifier']
try:
from . import usagecontext
except ImportError:
usagecontext = sys.modules[__package__ + '.usagecontext']
| 37.204586
| 114
| 0.627826
|
adcb6caf86db69c14dc99f4220ce97ea60157a09
| 15,291
|
py
|
Python
|
grr/client/grr_response_client/vfs_handlers/sleuthkit.py
|
JiYonG-Lee/grr
|
57fef67080ac6b8fd3de3ba0adfca064d34b7689
|
[
"Apache-2.0"
] | 1
|
2020-06-25T14:25:51.000Z
|
2020-06-25T14:25:51.000Z
|
grr/client/grr_response_client/vfs_handlers/sleuthkit.py
|
JiYonG-Lee/grr
|
57fef67080ac6b8fd3de3ba0adfca064d34b7689
|
[
"Apache-2.0"
] | 3
|
2020-06-18T15:33:21.000Z
|
2022-03-02T08:51:18.000Z
|
grr/client/grr_response_client/vfs_handlers/sleuthkit.py
|
JiYonG-Lee/grr
|
57fef67080ac6b8fd3de3ba0adfca064d34b7689
|
[
"Apache-2.0"
] | 1
|
2020-06-25T14:25:54.000Z
|
2020-06-25T14:25:54.000Z
|
#!/usr/bin/env python
# Lint as: python3
"""Implement low level disk access using the sleuthkit."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import logging
import stat
from typing import Text
import pytsk3
from grr_response_client import client_utils
from grr_response_client.vfs_handlers import base as vfs_base
from grr_response_core.lib import utils
from grr_response_core.lib.rdfvalues import client_fs as rdf_client_fs
from grr_response_core.lib.rdfvalues import paths as rdf_paths
from grr_response_core.lib.util import compatibility
from grr_response_core.lib.util import precondition
# A central Cache for vfs handlers. This can be used to keep objects alive
# for a limited time. TSKFile stores CachedFilesystem entries here, keyed by
# the serialized pathspec of the raw device, so that repeated opens on the
# same device reuse the already-parsed filesystem instead of re-parsing it.
DEVICE_CACHE = utils.TimeBasedCache()
def _DecodeUTF8WithWarning(string):
try:
return string.decode("utf-8")
except UnicodeDecodeError as e:
result = string.decode("utf-8", "replace")
logging.warning("%s. Decoded %r to %r", e, string, result)
return result
class CachedFilesystem(object):
  """Bundles a parsed filesystem together with its backing image.

  Instances of this container are what gets stored in DEVICE_CACHE, so a
  cache hit hands back both objects at once.
  """

  def __init__(self, fs, img):
    # Keep a reference to the image as well so it stays alive as long as
    # the filesystem parsed from it does.
    self.img = img
    self.fs = fs
class MyImgInfo(pytsk3.Img_Info):
  """A pytsk3 Img_Info implementation backed by a regular Python file object."""

  def __init__(self, fd=None, progress_callback=None):
    pytsk3.Img_Info.__init__(self)
    # Invoked on every read so the caller can signal liveness during long
    # sleuthkit operations.
    self.progress_callback = progress_callback
    self.fd = fd

  def read(self, offset, length):  # pylint: disable=g-bad-name
    """Reads `length` bytes at `offset` from the underlying file object."""
    # Sleuthkit operations might take a long time, so tick the progress
    # indicator callback as long as there are still data reads.
    callback = self.progress_callback
    if callback:
      callback()
    self.fd.seek(offset)
    return self.fd.read(length)

  def get_size(self):  # pylint: disable=g-bad-name
    """Returns a deliberately huge image size."""
    # Windows is unable to report the true size of the raw device and allows
    # arbitrary reading past the end - so we lie here to force tsk to read it
    # anyway.
    return 1e12
class TSKFile(vfs_base.VFSHandler):
  """Read a regular file.

  A VFS handler that parses a raw device/image with the sleuthkit (pytsk3)
  and serves file and directory operations from the parsed filesystem.
  NTFS alternate data streams are addressed through the pathspec's
  ntfs_type/ntfs_id fields.
  """

  supported_pathtype = rdf_paths.PathSpec.PathType.TSK
  auto_register = True

  # A mapping to encode TSK types to a stat.st_mode
  FILE_TYPE_LOOKUP = {
      pytsk3.TSK_FS_NAME_TYPE_UNDEF: 0,
      pytsk3.TSK_FS_NAME_TYPE_FIFO: stat.S_IFIFO,
      pytsk3.TSK_FS_NAME_TYPE_CHR: stat.S_IFCHR,
      pytsk3.TSK_FS_NAME_TYPE_DIR: stat.S_IFDIR,
      pytsk3.TSK_FS_NAME_TYPE_BLK: stat.S_IFBLK,
      pytsk3.TSK_FS_NAME_TYPE_REG: stat.S_IFREG,
      pytsk3.TSK_FS_NAME_TYPE_LNK: stat.S_IFLNK,
      pytsk3.TSK_FS_NAME_TYPE_SOCK: stat.S_IFSOCK,
  }

  # Same idea for TSK metadata types (from the inode rather than the name).
  META_TYPE_LOOKUP = {
      pytsk3.TSK_FS_META_TYPE_BLK: 0,
      pytsk3.TSK_FS_META_TYPE_CHR: stat.S_IFCHR,
      pytsk3.TSK_FS_META_TYPE_DIR: stat.S_IFDIR,
      pytsk3.TSK_FS_META_TYPE_FIFO: stat.S_IFIFO,
      pytsk3.TSK_FS_META_TYPE_LNK: stat.S_IFLNK,
      pytsk3.TSK_FS_META_TYPE_REG: stat.S_IFREG,
      pytsk3.TSK_FS_META_TYPE_SOCK: stat.S_IFSOCK,
  }

  # Files we won't return in directories.
  BLACKLIST_FILES = [
      "$OrphanFiles"  # Special TSK dir that invokes processing.
  ]

  # The file like object we read our image from
  tsk_raw_device = None

  # NTFS files carry an attribute identified by ntfs_type and ntfs_id.
  tsk_attribute = None

  # This is all bits that define the type of the file in the stat mode. Equal to
  # 0b1111000000000000.
  stat_type_mask = (
      stat.S_IFREG | stat.S_IFDIR | stat.S_IFLNK | stat.S_IFBLK
      | stat.S_IFCHR | stat.S_IFIFO | stat.S_IFSOCK)

  def __init__(self, base_fd, handlers, pathspec=None, progress_callback=None):
    """Use TSK to read the pathspec.

    Args:
      base_fd: The file like object we read this component from.
      handlers: A mapping from rdf_paths.PathSpec.PathType to classes
        implementing VFSHandler.
      pathspec: An optional pathspec to open directly.
      progress_callback: A callback to indicate that the open call is still
        working but needs more time.

    Raises:
      IOError: If the file can not be opened.
    """
    super().__init__(
        base_fd,
        handlers=handlers,
        pathspec=pathspec,
        progress_callback=progress_callback)
    if self.base_fd is None:
      raise IOError("TSK driver must have a file base.")

    # If our base is another tsk driver - borrow the reference to the raw
    # device, and replace the last pathspec component with this one after
    # extending its path.
    elif isinstance(base_fd, TSKFile) and self.base_fd.IsDirectory():
      self.tsk_raw_device = self.base_fd.tsk_raw_device
      last_path = utils.JoinPath(self.pathspec.last.path, pathspec.path)

      # Replace the last component with this one.
      self.pathspec.Pop(-1)
      self.pathspec.Append(pathspec)
      self.pathspec.last.path = last_path

    # Use the base fd as a base to parse the filesystem only if its file like.
    elif not self.base_fd.IsDirectory():
      self.tsk_raw_device = self.base_fd
      self.pathspec.Append(pathspec)
    else:
      # If we get here we have a directory from a non sleuthkit driver - dont
      # know what to do with it.
      raise IOError("Unable to parse base using Sleuthkit.")

    # If we are successful in opening this path below the path casing is
    # correct.
    self.pathspec.last.path_options = rdf_paths.PathSpec.Options.CASE_LITERAL

    # Cache key: the serialized pathspec of the raw device this image comes
    # from, so all files on the same device share one parsed filesystem.
    fd_hash = self.tsk_raw_device.pathspec.SerializeToBytes()

    # Cache the filesystem using the path of the raw device
    try:
      self.filesystem = DEVICE_CACHE.Get(fd_hash)
      self.fs = self.filesystem.fs
    except KeyError:
      self.img = MyImgInfo(
          fd=self.tsk_raw_device, progress_callback=progress_callback)

      self.fs = pytsk3.FS_Info(self.img, 0)
      self.filesystem = CachedFilesystem(self.fs, self.img)

      DEVICE_CACHE.Put(fd_hash, self.filesystem)

    # We prefer to open the file based on the inode because that is more
    # efficient.
    if pathspec.HasField("inode"):
      self.fd = self.fs.open_meta(pathspec.inode)
      # NTFS_ID is only required when reading ADSs. If it's not provided, we
      # just get the first attribute with matching type.
      if pathspec.HasField("ntfs_id"):
        self.tsk_attribute = self.GetAttribute(pathspec.ntfs_type,
                                               pathspec.ntfs_id)
      else:
        self.tsk_attribute = self.GetAttribute(pathspec.ntfs_type)

      if self.tsk_attribute:
        self.size = self.tsk_attribute.info.size
      else:
        self.size = self.fd.info.meta.size

    else:
      # TODO: In Python 2 TSK expects bytestring paths whereas in
      # Python 3 it expects unicode paths. Once support for Python 2 is dropped,
      # this branching can be removed.
      if compatibility.PY2:
        path = self.pathspec.last.path.encode("utf-8")
      else:
        path = self.pathspec.last.path

      # Does the filename exist in the image?
      self.fd = self.fs.open(path)
      self.size = self.fd.info.meta.size
      # Record the resolved inode so subsequent opens can take the fast
      # open_meta() path above.
      self.pathspec.last.inode = self.fd.info.meta.addr

  def GetAttribute(self, ntfs_type, ntfs_id=None):
    """Returns the first TSK attribute of the given type (and id, if set).

    Args:
      ntfs_type: The TSK attribute type to look for.
      ntfs_id: If given, the attribute's id must match this too.

    Returns:
      The matching pytsk3 attribute, or None if there is no match.
    """
    for attribute in self.fd:
      if attribute.info.type == ntfs_type:
        # If ntfs_id is specified it has to also match.
        if ntfs_id is not None and attribute.info.id != ntfs_id:
          continue
        return attribute
    return None

  def ListNames(self):
    """Yields the unique, UTF-8 decoded names in this directory."""
    directory_handle = self.fd.as_directory()
    seen_names = set()
    for f in directory_handle:
      # TSK only deals with utf8 strings, but path components are always unicode
      # objects - so we convert to unicode as soon as we receive data from
      # TSK. Prefer to compare unicode objects to guarantee they are normalized.
      name = _DecodeUTF8WithWarning(f.info.name.name)

      # TODO: TSK lists duplicate filenames. Only return unique
      # names from ListNames(), because parts of the system fail otherwise.
      if name not in seen_names:
        seen_names.add(name)
        yield name

  def MakeStatResponse(self, tsk_file, tsk_attribute=None, append_name=None):
    """Given a TSK info object make a StatEntry.

    Note that tsk uses two things to uniquely identify a data stream - the inode
    object given in tsk_file and the attribute object which may correspond to an
    ADS of this file for filesystems which support ADS. We store both of these
    in the stat response.

    Args:
      tsk_file: A TSK File object for the specified inode.
      tsk_attribute: A TSK Attribute object for the ADS. If None we use the main
        stream.
      append_name: If specified we append this name to the last element of the
        pathspec.

    Returns:
      A StatEntry which can be used to re-open this exact VFS node.
    """
    precondition.AssertOptionalType(append_name, Text)

    info = tsk_file.info
    response = rdf_client_fs.StatEntry()
    meta = info.meta
    if meta:
      response.st_ino = meta.addr
      for attribute in [
          "mode", "nlink", "uid", "gid", "size", "atime", "mtime", "ctime",
          "crtime"
      ]:
        try:
          value = int(getattr(meta, attribute))
          # TSK values may come back negative; fold them into the unsigned
          # 32 bit range expected by the StatEntry fields.
          if value < 0:
            value &= 0xFFFFFFFF

          setattr(response, "st_%s" % attribute, value)
        except AttributeError:
          pass

    name = info.name
    child_pathspec = self.pathspec.Copy()

    if append_name is not None:
      # Append the name to the most inner pathspec
      child_pathspec.last.path = utils.JoinPath(child_pathspec.last.path,
                                                append_name)

    child_pathspec.last.inode = meta.addr
    if tsk_attribute is not None:
      child_pathspec.last.ntfs_type = int(tsk_attribute.info.type)
      child_pathspec.last.ntfs_id = int(tsk_attribute.info.id)
      child_pathspec.last.stream_name = tsk_attribute.info.name

      # Update the size with the attribute size.
      response.st_size = tsk_attribute.info.size

      default = rdf_paths.PathSpec.tsk_fs_attr_type.TSK_FS_ATTR_TYPE_DEFAULT
      last = child_pathspec.last
      if last.ntfs_type != default or last.ntfs_id:
        # This is an ads and should be treated as a file.
        # Clear all file type bits.
        response.st_mode &= ~self.stat_type_mask
        response.st_mode |= stat.S_IFREG
    else:
      child_pathspec.last.ntfs_type = None
      child_pathspec.last.ntfs_id = None
      child_pathspec.last.stream_name = None

    if name:
      # Encode the type onto the st_mode response
      response.st_mode |= self.FILE_TYPE_LOOKUP.get(int(name.type), 0)

    if meta:
      # What if the types are different? What to do here?
      response.st_mode |= self.META_TYPE_LOOKUP.get(int(meta.type), 0)

    # Write the pathspec on the response.
    response.pathspec = child_pathspec
    return response

  def Read(self, length):
    """Read from the file.

    Args:
      length: Maximum number of bytes to read from the current offset.

    Returns:
      Up to `length` bytes, or b"" once the offset reaches the file size.

    Raises:
      IOError: If this node is not a file, or if the underlying TSK read
        fails with a RuntimeError.
    """
    if not self.IsFile():
      raise IOError("%s is not a file." % self.pathspec.last.path)

    # Never read past the known size of the stream.
    available = min(self.size - self.offset, length)
    if available > 0:
      # This raises a RuntimeError in some situations.
      try:
        # NTFS_ID is only required when reading ADSs. If it's not provided,
        # we just let pytsk use the default.
        if self.pathspec.last.HasField("ntfs_id"):
          data = self.fd.read_random(self.offset, available,
                                     self.pathspec.last.ntfs_type,
                                     self.pathspec.last.ntfs_id)
        else:
          data = self.fd.read_random(self.offset, available,
                                     self.pathspec.last.ntfs_type)
      except RuntimeError as e:
        raise IOError(e)

      self.offset += len(data)

      return data

    return b""

  def Stat(
      self,
      ext_attrs: bool = False,
      follow_symlink: bool = True,
  ) -> rdf_client_fs.StatEntry:
    """Return a stat of the file."""
    del ext_attrs, follow_symlink  # Unused.

    return self.MakeStatResponse(self.fd, tsk_attribute=self.tsk_attribute)

  def ListFiles(self, ext_attrs=None):
    """List all the files in the directory.

    Yields:
      A StatEntry for each directory entry, plus one additional StatEntry
      per named NTFS alternate data stream.

    Raises:
      IOError: If this node is not a directory.
    """
    del ext_attrs  # Unused.

    if not self.IsDirectory():
      raise IOError("%s is not a directory" % self.pathspec.CollapsePath())

    for f in self.fd.as_directory():
      try:
        name = _DecodeUTF8WithWarning(f.info.name.name)

        # Drop these useless entries.
        if name in [".", ".."] or name in self.BLACKLIST_FILES:
          continue

        # First we yield a standard response using the default attributes.
        yield self.MakeStatResponse(f, tsk_attribute=None, append_name=name)

        # Now send back additional named attributes for the ADS.
        for attribute in f:
          if attribute.info.type in [
              pytsk3.TSK_FS_ATTR_TYPE_NTFS_DATA, pytsk3.TSK_FS_ATTR_TYPE_DEFAULT
          ]:
            if attribute.info.name:
              yield self.MakeStatResponse(
                  f, append_name=name, tsk_attribute=attribute)
      except AttributeError:
        pass

  def IsDirectory(self):
    """Returns True if this node is a directory (and not an ADS)."""
    last = self.pathspec.last
    default = rdf_paths.PathSpec.tsk_fs_attr_type.TSK_FS_ATTR_TYPE_DEFAULT
    if last.ntfs_type != default or last.ntfs_id:
      # This is an ads so treat as a file.
      return False

    return self.fd.info.meta.type == pytsk3.TSK_FS_META_TYPE_DIR

  def IsFile(self):
    """Returns True if this node is a regular file or an ADS."""
    last = self.pathspec.last
    default = rdf_paths.PathSpec.tsk_fs_attr_type.TSK_FS_ATTR_TYPE_DEFAULT
    if last.ntfs_type != default or last.ntfs_id:
      # This is an ads so treat as a file.
      return True

    return self.fd.info.meta.type == pytsk3.TSK_FS_META_TYPE_REG

  @classmethod
  def Open(cls, fd, component, handlers, pathspec=None, progress_callback=None):
    """Opens `component`, resolving the raw device mount point if needed."""
    # A Pathspec which starts with TSK means we need to resolve the mount point
    # at runtime.
    if fd is None and component.pathtype == rdf_paths.PathSpec.PathType.TSK:
      # We are the top level handler. This means we need to check the system
      # mounts to work out the exact mount point and device we need to
      # open. We then modify the pathspec so we get nested in the raw
      # pathspec.
      raw_pathspec, corrected_path = client_utils.GetRawDevice(component.path)

      # Insert the raw device before the component in the pathspec and correct
      # the path
      component.path = corrected_path
      pathspec.Insert(0, component)
      pathspec.Insert(0, raw_pathspec)

      # Allow incoming pathspec to be given in the local system path
      # conventions.
      for component in pathspec:
        if component.path:
          component.path = client_utils.LocalPathToCanonicalPath(component.path)

      # We have not actually opened anything in this iteration, but modified the
      # pathspec. Next time we should be able to open it properly.
      return fd

    # If an inode is specified, just use it directly.
    elif component.HasField("inode"):
      return TSKFile(
          fd, handlers, component, progress_callback=progress_callback)

    # Otherwise do the usual case folding.
    else:
      return super(TSKFile, cls).Open(
          fd=fd,
          component=component,
          handlers=handlers,
          pathspec=pathspec,
          progress_callback=progress_callback)
| 35.314088
| 80
| 0.68197
|
e9befac2eb34a2f0cb8c4d6e4f5c274c4304ee9e
| 42,801
|
py
|
Python
|
src/sage/combinat/root_system/pieri_factors.py
|
hsm207/sage
|
020bd59ec28717bfab9af44d2231c53da1ff99f1
|
[
"BSL-1.0"
] | 1,742
|
2015-01-04T07:06:13.000Z
|
2022-03-30T11:32:52.000Z
|
src/sage/combinat/root_system/pieri_factors.py
|
hsm207/sage
|
020bd59ec28717bfab9af44d2231c53da1ff99f1
|
[
"BSL-1.0"
] | 66
|
2015-03-19T19:17:24.000Z
|
2022-03-16T11:59:30.000Z
|
src/sage/combinat/root_system/pieri_factors.py
|
hsm207/sage
|
020bd59ec28717bfab9af44d2231c53da1ff99f1
|
[
"BSL-1.0"
] | 495
|
2015-01-10T10:23:18.000Z
|
2022-03-24T22:06:11.000Z
|
r"""
Pieri Factors
"""
# ****************************************************************************
# Copyright (C) 2009-2010 Steven Pon <spon at math.ucdavis.edu>
# Anne Schilling < anne at math.ucdavis.edu>
# Nicolas M. Thiery <nthiery at users.sf.net>
#
# Distributed under the terms of the GNU General Public License (GPL)
# http://www.gnu.org/licenses/
#******************************************************************************
from sage.misc.cachefunc import cached_method
from sage.misc.constant_function import ConstantFunction
from sage.misc.all import prod, attrcall
from sage.categories.finite_enumerated_sets import FiniteEnumeratedSets
from sage.structure.parent import Parent
from sage.structure.unique_representation import UniqueRepresentation
from sage.rings.integer import Integer
from sage.rings.rational_field import QQ
from sage.rings.infinity import infinity
from sage.arith.all import binomial
import sage.combinat.ranker
from sage.sets.recursively_enumerated_set import RecursivelyEnumeratedSet
from sage.combinat.root_system.root_system import RootSystem
from sage.combinat.root_system.dynkin_diagram import DynkinDiagram
from sage.combinat.root_system.weyl_group import WeylGroup
from sage.graphs.digraph import DiGraph
class PieriFactors(UniqueRepresentation, Parent):
    r"""
    An abstract class for sets of Pieri factors, used for constructing
    Stanley symmetric functions. The set of Pieri factors for a given
    type can be realized as an order ideal of the Bruhat order poset
    generated by a certain set of maximal elements.
    .. SEEALSO::
        - :meth:`WeylGroups.ParentMethods.pieri_factors`
        - :meth:`WeylGroups.ElementMethods.stanley_symmetric_function`
    EXAMPLES::
        sage: W = WeylGroup(['A',4])
        sage: PF = W.pieri_factors()
        sage: PF.an_element().reduced_word()
        [4, 3, 2, 1]
        sage: Waff = WeylGroup(['A',4,1])
        sage: PFaff = Waff.pieri_factors()
        sage: Waff.from_reduced_word(PF.an_element().reduced_word()) in PFaff
        True
        sage: W = WeylGroup(['B',3,1])
        sage: PF = W.pieri_factors()
        sage: W.from_reduced_word([2,3,2]) in PF.elements()
        True
        sage: PF.cardinality()
        47
        sage: W = WeylGroup(['C',3,1])
        sage: PF = W.pieri_factors()
        sage: PF.generating_series()
        6*z^6 + 14*z^5 + 18*z^4 + 15*z^3 + 9*z^2 + 4*z + 1
        sage: sorted(w.reduced_word() for w in PF if w.length() == 2)
        [[0, 1], [1, 0], [1, 2], [2, 0], [2, 1],
         [2, 3], [3, 0], [3, 1], [3, 2]]
    REFERENCES:
    - [FoSta1994]_
    - [BH1994]_
    - [Lam1996]_
    - [Lam2008]_
    - [LSS2009]_
    - [Pon2010]_
    """
    def _repr_(self):
        r"""
        String representation.
        EXAMPLES::
            sage: WeylGroup(["A", 2, 1]).pieri_factors() # indirect doctest
            Pieri factors for Weyl Group of type ['A', 2, 1] (as a matrix group acting on the root space)
        """
        return "Pieri factors for %s" % self.W
    def __contains__(self, w):
        r"""
        Test for containment.
        EXAMPLES::
            sage: W = WeylGroup(['C',3,1])
            sage: w = W.from_reduced_word([3,2,1,0])
            sage: PF = W.pieri_factors()
            sage: w in PF
            True
            sage: w = W.from_reduced_word([1,0,1])
            sage: w in PF
            True
            sage: w = W.from_reduced_word([1,0,1,0])
            sage: w in PF
            False
            sage: w = W.from_reduced_word([0,1,2,3,2,1,0])
            sage: w in PF
            False
            sage: w = W.from_reduced_word([2,0,3,2,1])
            sage: w in PF
            True
            sage: W = WeylGroup(['B',4,1])
            sage: PF = W.pieri_factors()
            sage: w = W.from_reduced_word([1,2,4,3,1])
            sage: w in PF
            True
            sage: w = W.from_reduced_word([1,2,4,3,1,0])
            sage: w in PF
            False
            sage: w = W.from_reduced_word([2,3,4,3,2,1,0])
            sage: w in PF
            True
            sage: W = WeylGroup(['A',4])
            sage: PF = W.pieri_factors()
            sage: W.from_reduced_word([4,3,1]) in PF
            True
            sage: W.from_reduced_word([1,2]) in PF
            False
        """
        # NOTE(review): this returns False for elements outside self.W,
        # whereas PieriFactors_type_A_affine.__contains__ raises ValueError
        # in that case -- confirm which behavior callers rely on.
        if w not in self.W:
            return False
        # The Pieri factors form an order ideal in Bruhat order: w belongs
        # iff it lies below one of the maximal elements.
        return any(w.bruhat_le(m) for m in self.maximal_elements())
    @cached_method
    def elements(self):
        r"""
        Return the elements of ``self``.
        Those are constructed as the elements below the maximal
        elements of ``self`` in Bruhat order.
        OUTPUT: a :class:`RecursivelyEnumeratedSet_generic` object
        EXAMPLES::
            sage: PF = WeylGroup(['A',3]).pieri_factors()
            sage: sorted(w.reduced_word() for w in PF.elements())
            [[], [1], [2], [2, 1], [3], [3, 1], [3, 2], [3, 2, 1]]
        .. SEEALSO:: :meth:`maximal_elements`
        .. TODO::
            Possibly remove this method and instead have this class
            inherit from :class:`RecursivelyEnumeratedSet_generic`.
        """
        # Walk down from the maximal elements via Bruhat lower covers;
        # 'naive' enumeration suffices since the ideal is finite.
        return RecursivelyEnumeratedSet(self.maximal_elements(),
                attrcall('bruhat_lower_covers'), structure=None,
                enumeration='naive')
    def __iter__(self):
        r"""
        Return an iterator over the elements of ``self``.
        EXAMPLES::
            sage: PF = WeylGroup(['A',3,1]).pieri_factors()
            sage: f = PF.__iter__()
            sage: [next(f).reduced_word() for i in range(5)]
            [[], [0], [1], [2], [3]]
        """
        return iter(self.elements())
    def generating_series(self, weight = None):
        r"""
        Return a length generating series for the elements of ``self``.
        EXAMPLES::
            sage: PF = WeylGroup(['C',3,1]).pieri_factors()
            sage: PF.generating_series()
            6*z^6 + 14*z^5 + 18*z^4 + 15*z^3 + 9*z^2 + 4*z + 1
            sage: PF = WeylGroup(['B',4]).pieri_factors()
            sage: PF.generating_series()
            z^7 + 6*z^6 + 14*z^5 + 18*z^4 + 15*z^3 + 9*z^2 + 4*z + 1
        """
        if weight is None:
            weight = self.default_weight()
        # Sum of weight(length) over all Pieri factors; with the default
        # weight i |-> z^i this is the length generating polynomial.
        return sum(weight(w.length()) for w in self)
    @cached_method
    def default_weight(self):
        r"""
        Return the function `i\mapsto z^i`, where `z` is the
        generator of ``QQ['z']``.
        EXAMPLES::
            sage: W = WeylGroup(["A", 3, 1])
            sage: weight = W.pieri_factors().default_weight()
            sage: weight(1)
            z
            sage: weight(5)
            z^5
        TESTS::
            sage: weight(4) in QQ['z']
            True
            sage: weight(0) in QQ['z']
            True
            sage: weight(0).parent() == QQ['z'] # todo: not implemented
            True
        """
        R = QQ['z']
        z = R.gen()
        return lambda i: z**i
    def _test_maximal_elements(self, **options):
        r"""
        Check that the conjectural type-free definition of Pieri
        factors matches with the proven type-specific definition.
        .. SEEALSO:: :class:`TestSuite`.
        EXAMPLES::
            sage: W = WeylGroup(['A',4,1])
            sage: PF = W.pieri_factors()
            sage: PF._test_maximal_elements()
            sage: WeylGroup(['B',5]).pieri_factors()._test_maximal_elements()
        TESTS::
            sage: W = WeylGroup(['C',4,1])
            sage: PF = W.pieri_factors()
            sage: PF._test_maximal_elements()
            sage: WeylGroup(['D',5,1]).pieri_factors()._test_maximal_elements()
            sage: WeylGroup(['A',5,1]).pieri_factors()._test_maximal_elements()
            sage: WeylGroup(['B',5,1]).pieri_factors()._test_maximal_elements()
        """
        tester = self._tester(**options)
        # Compare as sets: the two implementations may enumerate the
        # maximal elements in different orders.
        tester.assertEqual(set(self.maximal_elements()),
                           set(self.maximal_elements_combinatorial()))
    @cached_method
    def max_length(self):
        r"""
        Return the maximal length of a Pieri factor.
        EXAMPLES:
        In type A and A affine, this is `n`::
            sage: WeylGroup(['A',5]).pieri_factors().max_length()
            5
            sage: WeylGroup(['A',5,1]).pieri_factors().max_length()
            5
        In type B and B affine, this is `2n-1`::
            sage: WeylGroup(['B',5,1]).pieri_factors().max_length()
            9
            sage: WeylGroup(['B',5]).pieri_factors().max_length()
            9
        In type C affine this is `2n`::
            sage: WeylGroup(['C',5,1]).pieri_factors().max_length()
            10
        In type D affine this is `2n-2`::
            sage: WeylGroup(['D',5,1]).pieri_factors().max_length()
            8
        """
        # All maximal elements have the same length (per the doctests
        # above), so inspecting the first one suffices.
        return self.maximal_elements()[0].length()
class PieriFactors_finite_type(PieriFactors):
    r"""
    The Pieri factors of finite type A are the restriction of the
    Pieri factors of affine type A to finite permutations (under the
    canonical embedding of finite type A into the affine Weyl group),
    and the Pieri factors of finite type B are the restriction of the
    Pieri factors of affine type C. The finite type D Pieri factors
    are (weakly) conjectured to be the restriction of the Pieri
    factors of affine type D.
    """
    def maximal_elements(self):
        r"""
        The current algorithm uses the fact that the maximal Pieri factors
        of affine type A,B,C, or D either contain a finite Weyl group
        element, or contain an affine Weyl group element whose reflection
        by `s_0` gets a finite Weyl group element, and that either of
        these finite group elements will serve as a maximal element for
        finite Pieri factors. A better algorithm is desirable.
        EXAMPLES::
            sage: PF = WeylGroup(['A',5]).pieri_factors()
            sage: [v.reduced_word() for v in PF.maximal_elements()]
            [[5, 4, 3, 2, 1]]
            sage: WeylGroup(['B',4]).pieri_factors().maximal_elements()
            [
            [-1  0  0  0]
            [ 0  1  0  0]
            [ 0  0  1  0]
            [ 0  0  0  1]
            ]
        """
        ct = self.W.cartan_type()
        # The following test may need to be changed when generalizing to
        # more than types A and B.
        if ct.type() not in ('A', 'B'):
            raise NotImplementedError("currently only implemented for finite types A and B")
        # Finite Pieri factors are restrictions of the affine ones of the
        # dual affine type (type A -> A affine, type B -> C affine).
        ct_aff = ct.dual().affine()
        max_elts_affine = WeylGroup(ct_aff).pieri_factors().maximal_elements()
        # First look for an affine maximal element that is already finite,
        # i.e. whose reduced word avoids the affine node 0 ...
        for w in max_elts_affine:
            if 0 not in w.reduced_word():
                return [self.W.from_reduced_word(w.reduced_word())]
        # ... otherwise, reflecting by s_0 must produce a finite element.
        for w in max_elts_affine:
            if 0 not in w.apply_simple_reflection(0).reduced_word():
                return [self.W.from_reduced_word(w.apply_simple_reflection(0).reduced_word())]
        # Previously this method fell off the end and implicitly returned
        # None; fail loudly instead so callers iterating over the result
        # get a clear error rather than a TypeError later.
        raise RuntimeError("unable to compute a maximal Pieri factor "
                           "for %s" % ct)
class PieriFactors_affine_type(PieriFactors):
    def maximal_elements(self):
        r"""
        Return the maximal elements of ``self`` with respect to Bruhat order.
        The current implementation is via a conjectural type-free
        formula. Use :meth:`maximal_elements_combinatorial` for proven
        type-specific implementations. To compare type-free and
        type-specific (combinatorial) implementations, use method
        :meth:`_test_maximal_elements`.
        EXAMPLES::
            sage: W = WeylGroup(['A',4,1])
            sage: PF = W.pieri_factors()
            sage: sorted([w.reduced_word() for w in PF.maximal_elements()], key=str)
            [[0, 4, 3, 2], [1, 0, 4, 3], [2, 1, 0, 4], [3, 2, 1, 0], [4, 3, 2, 1]]
            sage: W = WeylGroup(RootSystem(["C",3,1]).weight_space())
            sage: PF = W.pieri_factors()
            sage: sorted([w.reduced_word() for w in PF.maximal_elements()], key=str)
            [[0, 1, 2, 3, 2, 1], [1, 0, 1, 2, 3, 2], [1, 2, 3, 2, 1, 0],
             [2, 1, 0, 1, 2, 3], [2, 3, 2, 1, 0, 1], [3, 2, 1, 0, 1, 2]]
            sage: W = WeylGroup(RootSystem(["B",3,1]).weight_space())
            sage: PF = W.pieri_factors()
            sage: sorted([w.reduced_word() for w in PF.maximal_elements()], key=str)
            [[0, 2, 3, 2, 0], [1, 0, 2, 3, 2], [1, 2, 3, 2, 1],
             [2, 1, 0, 2, 3], [2, 3, 2, 1, 0], [3, 2, 1, 0, 2]]
            sage: W = WeylGroup(['D',4,1])
            sage: PF = W.pieri_factors()
            sage: sorted([w.reduced_word() for w in PF.maximal_elements()], key=str)
            [[0, 2, 4, 3, 2, 0], [1, 0, 2, 4, 3, 2], [1, 2, 4, 3, 2, 1],
             [2, 1, 0, 2, 4, 3], [2, 4, 3, 2, 1, 0], [3, 2, 1, 0, 2, 3],
             [4, 2, 1, 0, 2, 4], [4, 3, 2, 1, 0, 2]]
        """
        ct = self.W.cartan_type()
        # Type-free recipe: take the reduced words of the translations by
        # the Weyl orbit of s*(Lambda[1] - level*Lambda[0]), where s is the
        # second translation factor of the Cartan type.
        s = ct.translation_factors()[1]
        R = RootSystem(ct).weight_space()
        Lambda = R.fundamental_weights()
        orbit = [R.reduced_word_of_translation(x)
                 for x in (s*(Lambda[1]-Lambda[1].level()*Lambda[0]))._orbit_iter()]
        return [self.W.from_reduced_word(x) for x in orbit]
class PieriFactors_type_A(PieriFactors_finite_type):
    r"""
    The set of Pieri factors for finite type A.
    This is the set of elements of the Weyl group that have a reduced
    word that is strictly decreasing. This may also be viewed as the
    restriction of affine type A Pieri factors to finite Weyl group
    elements.
    """
    def __init__(self, W):
        r"""
        EXAMPLES::
            sage: PF = WeylGroup(['A',5]).pieri_factors()
            sage: PF
            Pieri factors for Weyl Group of type ['A', 5] (as a matrix group acting on the ambient space)
        TESTS::
            sage: PF = WeylGroup(['A',3]).pieri_factors()
            sage: PF.__class__
            <class 'sage.combinat.root_system.pieri_factors.PieriFactors_type_A_with_category'>
            sage: TestSuite(PF).run()
        """
        # Remember the ambient Weyl group and register in the category of
        # finite enumerated sets.
        self.W = W
        Parent.__init__(self, category=FiniteEnumeratedSets())
    def maximal_elements_combinatorial(self):
        r"""
        Return the maximal Pieri factors, using the type A
        combinatorial description.
        EXAMPLES::
            sage: W = WeylGroup(['A',4])
            sage: PF = W.pieri_factors()
            sage: PF.maximal_elements_combinatorial()[0].reduced_word()
            [4, 3, 2, 1]
        """
        # The unique maximal Pieri factor has the strictly decreasing
        # reduced word n, n-1, ..., 1.
        rank = self.W.cartan_type().n
        decreasing_word = range(rank, 0, -1)
        return [self.W.from_reduced_word(decreasing_word)]
    def stanley_symm_poly_weight(self,w):
        r"""
        EXAMPLES::
            sage: W = WeylGroup(['A',4])
            sage: PF = W.pieri_factors()
            sage: PF.stanley_symm_poly_weight(W.from_reduced_word([3,1]))
            0
        """
        # Type A Stanley symmetric function weights are identically zero.
        return 0
class PieriFactors_type_B(PieriFactors_finite_type):
    r"""
    The type B finite Pieri factors are realized as the set of
    elements that have a reduced word that is a subword of
    `12...(n-1)n(n-1)...21`. They are the restriction of the type C
    affine Pieri factors to the set of finite Weyl group elements
    under the usual embedding.
    """
    def __init__(self, W):
        r"""
        EXAMPLES::
            sage: WeylGroup(['B',5]).pieri_factors()
            Pieri factors for Weyl Group of type ['B', 5] (as a matrix group acting on the ambient space)
        TESTS::
            sage: PF = WeylGroup(['B',3]).pieri_factors()
            sage: PF.__class__
            <class 'sage.combinat.root_system.pieri_factors.PieriFactors_type_B_with_category'>
            sage: TestSuite(PF).run()
        """
        Parent.__init__(self, category=FiniteEnumeratedSets())
        self.W = W
    def maximal_elements_combinatorial(self):
        r"""
        Return the maximal Pieri factors, using the type B
        combinatorial description.
        EXAMPLES::
            sage: PF = WeylGroup(['B',4]).pieri_factors()
            sage: PF.maximal_elements_combinatorial()[0].reduced_word()
            [1, 2, 3, 4, 3, 2, 1]
        """
        N = self.W.cartan_type().n
        # The unique maximal element has reduced word 1 2 ... (N-1) N (N-1) ... 1.
        li = list(range(1, N)) + list(range(N, 0, -1))
        return [self.W.from_reduced_word(li)]
    def stanley_symm_poly_weight(self, w):
        r"""
        Weight used in computing Stanley symmetric polynomials of type `B`.
        The weight for finite type B is the number of components
        of the support of an element minus the number of occurrences
        of `n` in a reduced word.
        EXAMPLES::
            sage: W = WeylGroup(['B',5])
            sage: PF = W.pieri_factors()
            sage: PF.stanley_symm_poly_weight(W.from_reduced_word([3,1,5]))
            2
            sage: PF.stanley_symm_poly_weight(W.from_reduced_word([3,4,5]))
            0
            sage: PF.stanley_symm_poly_weight(W.from_reduced_word([1,2,3,4,5,4]))
            0
        """
        # NOTE(review): assumes self.W.n equals the Cartan rank n referenced
        # in the docstring -- confirm against WeylGroup's attribute.
        r = w.reduced_word().count(self.W.n)
        # Delegate to the affine type C Pieri factors (of which finite type
        # B Pieri factors are the restriction) and correct by r.
        return WeylGroup(self.W.cartan_type().dual().affine()).pieri_factors().stanley_symm_poly_weight(w) - r
class PieriFactors_type_A_affine(PieriFactors_affine_type):
    r"""
    The set of Pieri factors for type A affine, that is the set of
    elements of the Weyl Group which are cyclically decreasing.
    Those are used for constructing (affine) Stanley symmetric functions.
    The Pieri factors are in bijection with the proper subsets of the
    ``index_set``. The bijection is given by the support. Namely, let `f`
    be a Pieri factor, and `red` a reduced word for `f`. No simple
    reflection appears twice in red, and the support `S` of `red`
    (that is the `i` such that `s_i` appears in `red`) does not depend
    on the reduced word).
    """
    @staticmethod
    def __classcall__(cls, W, min_length=0, max_length=infinity,
                      min_support=frozenset([]), max_support=None):
        r"""
        TESTS::
            sage: W = WeylGroup(['A',5,1])
            sage: PF1 = sage.combinat.root_system.pieri_factors.PieriFactors_type_A_affine(W)
            sage: PF2 = W.pieri_factors()
            sage: PF3 = W.pieri_factors(min_support = [])
            sage: PF4 = W.pieri_factors(max_support = [0,1,2,3,4,5])
            sage: PF5 = W.pieri_factors(max_length = 10)
            sage: PF6 = W.pieri_factors(min_length = 0)
            sage: PF2 is PF1, PF3 is PF1, PF4 is PF1, PF5 is PF1, PF6 is PF1
            (True, True, True, True, True)
        """
        assert W.cartan_type().is_affine() and W.cartan_type().letter == 'A'
        # We use Python's frozenset's rather than Sage's Set's because
        # the latter do not yet support the issubset method
        min_support = frozenset(min_support)
        if max_support is None:
            max_support = frozenset(W.index_set())
        else:
            max_support = frozenset(max_support)
        # Normalize the bounds so that equivalent argument combinations
        # produce the same (cached) parent, as tested above.
        min_length = max(min_length, len(min_support))
        max_length = min(len(max_support), max_length, len(W.index_set()) - 1)
        return super(PieriFactors_type_A_affine, cls).__classcall__(cls, W, min_length, max_length, min_support, max_support)
    def __init__(self, W, min_length, max_length, min_support, max_support):
        r"""
        INPUT:
        - ``W`` -- a Weyl group of affine type `A`
        - ``min_length``, ``max_length`` -- non negative integers
        - ``min_support``, ``max_support`` -- subsets of the index set of `W`
        EXAMPLES::
            sage: PF = WeylGroup(["A", 3, 1]).pieri_factors(); PF
            Pieri factors for Weyl Group of type ['A', 3, 1] (as a matrix group acting on the root space)
        TESTS::
            sage: PF = WeylGroup(['A',3,1]).pieri_factors()
            sage: PF.__class__
            <class 'sage.combinat.root_system.pieri_factors.PieriFactors_type_A_affine_with_category'>
            sage: TestSuite(PF).run()
            sage: PF = WeylGroup(['A',3,1]).pieri_factors(min_length = 3)
            sage: [w.reduced_word() for w in PF]
            [[2, 1, 0], [1, 0, 3], [0, 3, 2], [3, 2, 1]]
            sage: PF = WeylGroup(['A',4,1]).pieri_factors(min_support = [0,2])
            sage: [w.reduced_word() for w in PF]
            [[2, 0], [2, 1, 0], [3, 2, 0], [0, 4, 2], [3, 2, 1, 0], [2, 1, 0, 4], [0, 4, 3, 2]]
            sage: PF = WeylGroup(['A',5,1]).pieri_factors(min_support = [0,1,2], max_support = [0,1,2,3])
            sage: [w.reduced_word() for w in PF]
            [[2, 1, 0], [3, 2, 1, 0]]
            sage: PF = WeylGroup(['A',5,1]).pieri_factors(min_length = 2, max_length = 5)
            sage: PF.generating_series()
            6*z^5 + 15*z^4 + 20*z^3 + 15*z^2
        """
        Parent.__init__(self, category=FiniteEnumeratedSets())
        self.W = W
        self._min_support = frozenset(min_support)
        self._max_support = frozenset(max_support)
        if not self._min_support.issubset(self._max_support):
            raise ValueError("the min support must be a subset "
                             "of the max support")
        # Indices that may or may not appear in a Pieri factor's support.
        self._extra_support = self._max_support.difference(self._min_support)
        self._min_length = min_length
        self._max_length = max_length
    def subset(self, length):
        r"""
        Return the subset of the elements of ``self`` of length ``length``.
        INPUT:
        - ``length`` -- a non-negative integer
        EXAMPLES::
            sage: PF = WeylGroup(["A", 3, 1]).pieri_factors(); PF
            Pieri factors for Weyl Group of type ['A', 3, 1] (as a matrix group acting on the root space)
            sage: PF3 = PF.subset(length = 2)
            sage: PF3.cardinality()
            6
        TESTS:
        We check that there is no reference effect (there was at some point!)::
            sage: PF.cardinality()
            15
        """
        # Build a fresh parent rather than mutating self (see TESTS above).
        return self.__class__(self.W,
                              min_support=self._min_support,
                              max_support=self._max_support,
                              min_length=length,
                              max_length=length)
    def maximal_elements_combinatorial(self):
        r"""
        Return the maximal Pieri factors, using the affine type A
        combinatorial description.
        EXAMPLES::
            sage: W = WeylGroup(['A',4,1])
            sage: PF = W.pieri_factors()
            sage: [w.reduced_word() for w in PF.maximal_elements_combinatorial()]
            [[3, 2, 1, 0], [2, 1, 0, 4], [1, 0, 4, 3], [0, 4, 3, 2], [4, 3, 2, 1]]
        """
        # The maximal elements are exactly those of maximal length.
        return self.subset(self._max_length)
    def _test_maximal_elements(self, **options):
        r"""
        Same as :meth:`PieriFactors._test_maximal_elements`, but skips
        the tests if ``self`` is not the full set of Pieri factors.
        EXAMPLES::
            sage: W = WeylGroup(['A',4,1])
            sage: W.pieri_factors()._test_maximal_elements(verbose = True)
            sage: W.pieri_factors(min_length = 1)._test_maximal_elements(verbose = True)
              Strict subset of the Pieri factors; skipping test
        """
        tester = self._tester(**options)
        index_set = self.W.index_set()
        # The comparison only makes sense for the unrestricted parent.
        if self._min_length > 0 or self._max_length < len(self.W.index_set())-1 or self._max_support != frozenset(index_set):
            tester.info("\n  Strict subset of the Pieri factors; skipping test")
            return
        return super(PieriFactors_type_A_affine, self)._test_maximal_elements(**options)
    def __contains__(self, w):
        r"""
        Return whether ``w`` is in ``self``.
        EXAMPLES::
            sage: W = WeylGroup(['A',6,1])
            sage: PF = W.pieri_factors()
            sage: w=W.from_reduced_word([4,3,1,0,6])
            sage: w in PF
            True
            sage: w=W.from_reduced_word([4,3,1,0,2])
            sage: w in PF
            False
            sage: w=W.from_reduced_word([4,3,1,0,6,0])
            sage: w in PF
            False
            sage: w=W.from_reduced_word([])
            sage: w in PF
            True
            sage: w=W.from_reduced_word([3,2,1,0])
            sage: w in PF
            True
            sage: W=WeylGroup(['A',3,1])
            sage: PF = W.pieri_factors()
            sage: w=W.from_reduced_word([3,2,1,0])
            sage: w in PF
            False
        """
        if w not in self.W:
            raise ValueError("{} is not an element of the Weyl group".format(w))
        n = len(self.W.index_set()) - 1
        red = w.reduced_word()
        support = set(red)
        if len(support) < len(red): # There should be no repetitions
            return False
        if not(self._min_length <= len(support) and
               len(support) <= self._max_length and
               self._min_support.issubset(support) and
               support.issubset(self._max_support)):
            return False
        # Check that the word is cyclically decreasing: whenever both i and
        # i+1 (mod n+1) occur, i+1 must come first in the reduced word.
        [rank, unrank] = sage.combinat.ranker.from_list(red)
        for i in red:
            j = (i + 1) % (n + 1)
            if j in support:
                if rank(i) < rank(j):
                    return False
        return True
    def __getitem__(self, support):
        r"""
        Return the cyclically decreasing element associated with ``support``.
        INPUT:
        - ``support`` -- a proper subset of the index_set, as a list or set
        EXAMPLES::
            sage: W = WeylGroup(["A", 5, 1])
            sage: W.pieri_factors()[[0,1,2,3,5]].reduced_word()
            [3, 2, 1, 0, 5]
            sage: W.pieri_factors()[[0,1,3,4,5]].reduced_word()
            [1, 0, 5, 4, 3]
            sage: W.pieri_factors()[[0,1,2,3,4]].reduced_word()
            [4, 3, 2, 1, 0]
        """
        index_set = sorted(self.W.index_set())
        support   = sorted(support)
        if not set(support).issubset(set(index_set)) or support == index_set:
            raise ValueError("the support must be a proper subset of the index set")
        if not support:
            return self.W.one()
        s = self.W.simple_reflections()
        i = 0
        while i < len(support) and support[i] == index_set[i]:
            i += 1
        # This finds the first hole: the first position where the support
        # stops matching an initial segment of the index set. The word then
        # reads the initial segment and the remainder, each in reverse.
        return prod((s[j] for j in list(reversed(support[0:i])) + list(reversed(support[i:]))), self.W.one())
    def cardinality(self):
        r"""
        Return the cardinality of ``self``.
        EXAMPLES::
            sage: WeylGroup(["A", 3, 1]).pieri_factors().cardinality()
            15
        """
        # Unrestricted case: all proper subsets of the extra support.
        if self._min_length == len(self._min_support) and self._max_length == len(self._max_support) -1:
            return Integer(2**(len(self._extra_support)) - 1)
        else:
            return self.generating_series(weight = ConstantFunction(1))
    def generating_series(self, weight=None):
        r"""
        Return a length generating series for the elements of ``self``.
        EXAMPLES::
            sage: W = WeylGroup(["A", 3, 1])
            sage: W.pieri_factors().cardinality()
            15
            sage: W.pieri_factors().generating_series()
            4*z^3 + 6*z^2 + 4*z + 1
        """
        if weight is None:
            weight = self.default_weight()
        l_min = len(self._min_support)
        l_max = len(self._max_support)
        # Elements of length l correspond to choices of l - l_min extra
        # support indices, hence the binomial count.
        return sum(binomial(l_max - l_min, l - l_min) * weight(l)
                   for l in range(self._min_length, self._max_length + 1))
    def __iter__(self):
        r"""
        Return an iterator over the elements of ``self``.
        EXAMPLES::
            sage: W = WeylGroup(['A',4,1])
            sage: PF = W.pieri_factors()
            sage: f = PF.__iter__()
            sage: next(f)
            [1 0 0 0 0]
            [0 1 0 0 0]
            [0 0 1 0 0]
            [0 0 0 1 0]
            [0 0 0 0 1]
            sage: [next(f).reduced_word() for i in range(6)]
            [[0], [1], [2], [3], [4], [1, 0]]
        """
        from sage.combinat.subset import Subsets
        # Enumerate by length, realizing each support set via __getitem__.
        for l in range(self._min_length, self._max_length + 1):
            for extra in Subsets(self._extra_support,
                                 l - len(self._min_support)):
                yield self[self._min_support.union(extra)]
    def stanley_symm_poly_weight(self, w):
        r"""
        Weight used in computing (affine) Stanley symmetric polynomials
        for affine type A.
        EXAMPLES::
            sage: W = WeylGroup(['A',5,1])
            sage: PF = W.pieri_factors()
            sage: PF.stanley_symm_poly_weight(W.one())
            0
            sage: PF.stanley_symm_poly_weight(W.from_reduced_word([5,4,2,1,0]))
            0
        """
        return 0
class PieriFactors_type_C_affine(PieriFactors_affine_type):
    r"""
    The type C affine Pieri factors are realized as the order ideal (in Bruhat
    order) generated by cyclic rotations of the element with unique reduced word
    `123...(n-1)n(n-1)...3210`.
    EXAMPLES::
        sage: W = WeylGroup(['C',3,1])
        sage: PF = W.pieri_factors()
        sage: sorted([u.reduced_word() for u in PF.maximal_elements()], key=str)
        [[0, 1, 2, 3, 2, 1], [1, 0, 1, 2, 3, 2], [1, 2, 3, 2, 1, 0],
         [2, 1, 0, 1, 2, 3], [2, 3, 2, 1, 0, 1], [3, 2, 1, 0, 1, 2]]
    """
    def __init__(self, W):
        r"""
        TESTS::
            sage: PF = WeylGroup(['C',3,1]).pieri_factors()
            sage: PF.__class__
            <class 'sage.combinat.root_system.pieri_factors.PieriFactors_type_C_affine_with_category'>
            sage: TestSuite(PF).run() # long time (4s on sage.math, 2011)
        """
        Parent.__init__(self, category=FiniteEnumeratedSets())
        self.W = W
    @cached_method
    def maximal_elements_combinatorial(self):
        r"""
        Return the maximal Pieri factors, using the affine type C
        combinatorial description.
        EXAMPLES::
            sage: PF = WeylGroup(['C',3,1]).pieri_factors()
            sage: [w.reduced_word() for w in PF.maximal_elements_combinatorial()]
            [[0, 1, 2, 3, 2, 1], [1, 0, 1, 2, 3, 2], [2, 1, 0, 1, 2, 3], [3, 2, 1, 0, 1, 2], [2, 3, 2, 1, 0, 1], [1, 2, 3, 2, 1, 0]]
        """
        n = self.W.n
        # Start from the generator with reduced word 1 2 ... (n-1) ... 1 0,
        # then collect its 2(n-1) cyclic rotations.
        rho = self.W.from_reduced_word(range(1, n-1))*self.W.from_reduced_word(range(n-1,-1,-1))
        rotations = []
        for i in range(2 * (n - 1)):
            # Conjugating by the descent set rotates the reduced word.
            rho = rho.apply_simple_reflections(rho.descents()).apply_simple_reflections(rho.descents(), side='left')
            rotations.append(rho)
        return rotations
    def stanley_symm_poly_weight(self, w):
        r"""
        Return the weight of a Pieri factor to be used in the definition of
        Stanley symmetric functions.
        For type C, this weight is the number of connected components
        of the support (the indices appearing in a reduced word) of an
        element.
        EXAMPLES::
            sage: W = WeylGroup(['C',5,1])
            sage: PF = W.pieri_factors()
            sage: PF.stanley_symm_poly_weight(W.from_reduced_word([1,3]))
            2
            sage: PF.stanley_symm_poly_weight(W.from_reduced_word([1,3,2,0]))
            1
            sage: PF.stanley_symm_poly_weight(W.from_reduced_word([5,3,0]))
            3
            sage: PF.stanley_symm_poly_weight(W.one())
            0
        """
        # The algorithm="delete" is a workaround when the set of
        # vertices is empty, in which case subgraph tries another
        # method which turns out to currently fail with Dynkin diagrams
        return DiGraph(DynkinDiagram(w.parent().cartan_type())).subgraph(set(w.reduced_word()), algorithm="delete").connected_components_number()
class PieriFactors_type_B_affine(PieriFactors_affine_type):
    r"""
    The type B affine Pieri factors are realized as the order ideal (in Bruhat
    order) generated by the following elements:
    - cyclic rotations of the element with reduced word
      `234...(n-1)n(n-1)...3210`,
      except for `123...n...320` and `023...n...321`.
    - `123...(n-1)n(n-1)...321`
    - `023...(n-1)n(n-1)...320`
    EXAMPLES::
        sage: W = WeylGroup(['B',4,1])
        sage: PF = W.pieri_factors()
        sage: W.from_reduced_word([2,3,4,3,2,1,0]) in PF.maximal_elements()
        True
        sage: W.from_reduced_word([0,2,3,4,3,2,1]) in PF.maximal_elements()
        False
        sage: W.from_reduced_word([1,0,2,3,4,3,2]) in PF.maximal_elements()
        True
        sage: W.from_reduced_word([0,2,3,4,3,2,0]) in PF.maximal_elements()
        True
        sage: W.from_reduced_word([0,2,0]) in PF
        True
    """
    def __init__(self, W):
        r"""
        TESTS::
            sage: PF = WeylGroup(["B",3,1]).pieri_factors()
            sage: PF.__class__
            <class 'sage.combinat.root_system.pieri_factors.PieriFactors_type_B_affine_with_category'>
            sage: TestSuite(PF).run()
        """
        Parent.__init__(self, category=FiniteEnumeratedSets())
        self.W = W
    @cached_method
    def maximal_elements_combinatorial(self):
        r"""
        Return the maximal Pieri factors, using the affine type B
        combinatorial description.
        EXAMPLES::
            sage: W = WeylGroup(['B',4,1])
            sage: [u.reduced_word() for u in W.pieri_factors().maximal_elements_combinatorial()]
            [[1, 0, 2, 3, 4, 3, 2], [2, 1, 0, 2, 3, 4, 3], [3, 2, 1, 0, 2, 3, 4], [4, 3, 2, 1, 0, 2, 3], [3, 4, 3, 2, 1, 0, 2], [2, 3, 4, 3, 2, 1, 0], [1, 2, 3, 4, 3, 2, 1], [0, 2, 3, 4, 3, 2, 0]]
        """
        n = self.W.n
        # Generator with reduced word 2 3 ... (n-1) ... 1 0, rotated
        # 2(n-2) times (see the class docstring for the excluded words).
        rho = self.W.from_reduced_word(range(2,n-1))*self.W.from_reduced_word(range(n-1,-1,-1))
        rotations = []
        for i in range(2 * (n - 2)):
            # Conjugating by the descent set rotates the reduced word.
            rho = rho.apply_simple_reflections(rho.descents()).apply_simple_reflections(rho.descents(), side='left')
            rotations.append(rho)
        # The two extra generators 1 2 ... 1 and 0 2 ... 2 0.
        rotations.append(self.W.from_reduced_word(range(1,n-1))*self.W.from_reduced_word(range(n-1,0,-1)))
        rotations.append(self.W.from_reduced_word([0])*self.W.from_reduced_word(range(2,n-1))*self.W.from_reduced_word(range(n-1,1,-1))*self.W.from_reduced_word([0]))
        return rotations
    def stanley_symm_poly_weight(self, w):
        r"""
        Return the weight of a Pieri factor to be used in the definition of
        Stanley symmetric functions.
        For type B, this weight involves the number of components of
        the complement of the support of an element, where we consider
        0 and 1 to be one node -- if 1 is in the support, then we
        pretend 0 in the support, and vice versa.  We also consider 0
        and 1 to be one node for the purpose of counting components of
        the complement (as if the Dynkin diagram were that of type C).
        Let n be the rank of the affine Weyl group in question (if
        type ``['B',k,1]`` then we have n = k+1). Let ``chi(v.length() < n-1)``
        be the indicator function that is 1 if the length of v is
        smaller than n-1, and 0 if the length of v is greater than or
        equal to n-1. If we call ``c'(v)`` the number of components of
        the complement of the support of v, then the type B weight is
        given by ``weight = c'(v) - chi(v.length() < n-1)``.
        EXAMPLES::
            sage: W = WeylGroup(['B',5,1])
            sage: PF = W.pieri_factors()
            sage: PF.stanley_symm_poly_weight(W.from_reduced_word([0,3]))
            1
            sage: PF.stanley_symm_poly_weight(W.from_reduced_word([0,1,3]))
            1
            sage: PF.stanley_symm_poly_weight(W.from_reduced_word([2,3]))
            1
            sage: PF.stanley_symm_poly_weight(W.from_reduced_word([2,3,4,5]))
            0
            sage: PF.stanley_symm_poly_weight(W.from_reduced_word([0,5]))
            0
            sage: PF.stanley_symm_poly_weight(W.from_reduced_word([2,4,5,4,3,0]))
            -1
            sage: PF.stanley_symm_poly_weight(W.from_reduced_word([4,5,4,3,0]))
            0
        """
        ct = w.parent().cartan_type()
        support = set(w.reduced_word())
        # Merge nodes 0 and 1 (see docstring): if either is supported, drop
        # both from the complement; otherwise drop only the affine node 0.
        if 1 in support or 0 in support:
            support_complement = set(ct.index_set()).difference(support).difference(set([0, 1]))
        else:
            support_complement = set(ct.index_set()).difference(support).difference(set([0]))
        return DiGraph(DynkinDiagram(ct)).subgraph(support_complement, algorithm="delete").connected_components_number() - 1
class PieriFactors_type_D_affine(PieriFactors_affine_type):
    r"""
    The type D affine Pieri factors are realized as the order ideal
    (in Bruhat order) generated by the following elements:
    * cyclic rotations of the element with reduced word
      `234...(n-2)n(n-1)(n-2)...3210`
    such that 1 and 0 are always adjacent and (n-1) and n are always adjacent.
    * `123...(n-2)n(n-1)(n-2)...321`
    * `023...(n-2)n(n-1)(n-2)...320`
    * `n(n-2)...2102...(n-2)n`
    * `(n-1)(n-2)...2102...(n-2)(n-1)`
    EXAMPLES::
        sage: W = WeylGroup(['D',5,1])
        sage: PF = W.pieri_factors()
        sage: W.from_reduced_word([3,2,1,0]) in PF
        True
        sage: W.from_reduced_word([0,3,2,1]) in PF
        False
        sage: W.from_reduced_word([0,1,3,2]) in PF
        True
        sage: W.from_reduced_word([2,0,1,3]) in PF
        True
        sage: sorted([u.reduced_word() for u in PF.maximal_elements()], key=str)
        [[0, 2, 3, 5, 4, 3, 2, 0], [1, 0, 2, 3, 5, 4, 3, 2], [1, 2, 3, 5, 4, 3, 2, 1],
         [2, 1, 0, 2, 3, 5, 4, 3], [2, 3, 5, 4, 3, 2, 1, 0], [3, 2, 1, 0, 2, 3, 5, 4],
         [3, 5, 4, 3, 2, 1, 0, 2], [4, 3, 2, 1, 0, 2, 3, 4], [5, 3, 2, 1, 0, 2, 3, 5],
         [5, 4, 3, 2, 1, 0, 2, 3]]
    """
    def __init__(self, W):
        r"""
        TESTS::
            sage: PF = WeylGroup(["D",4,1]).pieri_factors()
            sage: PF.__class__
            <class 'sage.combinat.root_system.pieri_factors.PieriFactors_type_D_affine_with_category'>
            sage: TestSuite(PF).run() # long time
        """
        Parent.__init__(self, category=FiniteEnumeratedSets())
        self.W = W
    @cached_method
    def maximal_elements_combinatorial(self):
        r"""
        Return the maximal Pieri factors, using the affine type D
        combinatorial description.
        EXAMPLES::
            sage: W = WeylGroup(['D',5,1])
            sage: PF = W.pieri_factors()
            sage: set(PF.maximal_elements_combinatorial()) == set(PF.maximal_elements())
            True
        """
        n = self.W.n
        # Rotation generator and its 2(n-3) cyclic rotations (obtained by
        # conjugating with the descent set), then the four extra
        # generators listed in the class docstring.
        rho = self.W.from_reduced_word(range(2,n))*self.W.from_reduced_word(range(n-3,-1,-1))
        rotations = []
        for i in range(2 * (n - 3)):
            rho = rho.apply_simple_reflections(rho.descents()).apply_simple_reflections(rho.descents(),side='left')
            rotations.append(rho)
        rotations.append(self.W.from_reduced_word(range(1,n))*self.W.from_reduced_word(range(n-3,0,-1)))
        rotations.append(self.W.from_reduced_word([0])*self.W.from_reduced_word(range(2,n))*self.W.from_reduced_word(range(n-3,1,-1))*self.W.from_reduced_word([0]))
        rotations.append(self.W.from_reduced_word(range(n-2,-1,-1))*self.W.from_reduced_word(range(2,n-1)))
        rotations.append(self.W.from_reduced_word([n-1])*self.W.from_reduced_word(range(n-3,-1,-1))*self.W.from_reduced_word(range(2,n-2))*self.W.from_reduced_word([n-1]))
        return rotations
    def stanley_symm_poly_weight(self, w):
        r"""
        Return the weight of `w`, to be used in the definition of
        Stanley symmetric functions.
        INPUT:
        - ``w`` -- a Pieri factor for this type
        For type `D`, this weight involves
        the number of components of the complement of the support of
        an element, where we consider `0` and `1` to be one node -- if `1`
        is in the support, then we pretend `0` in the support, and vice
        versa. Similarly with `n-1` and `n`.  We also consider `0` and
        `1`, `n-1` and `n` to be one node for the purpose of counting
        components of the complement (as if the Dynkin diagram were
        that of type `C`).
        Type D Stanley symmetric polynomial weights are still
        conjectural. The given weight comes from conditions on
        elements of the affine Fomin-Stanley subalgebra, but work is
        needed to show this weight is correct for affine Stanley
        symmetric functions -- see [LSS2009, Pon2010]_ for details.
        EXAMPLES::
            sage: W = WeylGroup(['D', 5, 1])
            sage: PF = W.pieri_factors()
            sage: PF.stanley_symm_poly_weight(W.from_reduced_word([5,2,1]))
            0
            sage: PF.stanley_symm_poly_weight(W.from_reduced_word([5,2,1,0]))
            0
            sage: PF.stanley_symm_poly_weight(W.from_reduced_word([5,2]))
            1
            sage: PF.stanley_symm_poly_weight(W.from_reduced_word([]))
            0
            sage: W = WeylGroup(['D',7,1])
            sage: PF = W.pieri_factors()
            sage: PF.stanley_symm_poly_weight(W.from_reduced_word([2,4,6]))
            2
        """
        ct = w.parent().cartan_type()
        support = set(w.reduced_word())
        n = w.parent().n
        # Fold the two fork ends of the affine type D diagram: 0 with 1,
        # and n-1 with n (see docstring), before taking the complement.
        if 1 in support or 0 in support:
            support = support.union(set([1])).difference(set([0]))
        if n in support or n - 1 in support:
            support = support.union(set([n - 2])).difference(set([n - 1]))
        support_complement = set(range(1, n - 1)).difference(support)
        return DiGraph(DynkinDiagram(ct)).subgraph(support_complement).connected_components_number() - 1
# Inserts those classes in CartanTypes
# Monkey-patch each CartanType class with the PieriFactors implementation
# for its type; WeylGroups.ParentMethods.pieri_factors looks this up.
from sage.combinat.root_system import type_A_affine, type_B_affine, type_C_affine, type_D_affine, type_A, type_B
type_A_affine.CartanType.PieriFactors = PieriFactors_type_A_affine
type_B_affine.CartanType.PieriFactors = PieriFactors_type_B_affine
type_C_affine.CartanType.PieriFactors = PieriFactors_type_C_affine
type_D_affine.CartanType.PieriFactors = PieriFactors_type_D_affine
type_A.CartanType.PieriFactors = PieriFactors_type_A
type_B.CartanType.PieriFactors = PieriFactors_type_B
# Pieri factors for these types have not yet been mathematically
# introduced rigorously
#
# import type_C, type_D, type_E, type_F, type_G, type_E_affine, type_F_affine, type_G_affine
#type_C.CartanType.PieriFactors = PieriFactors_type_C
#type_D.CartanType.PieriFactors = PieriFactors_type_D
#type_E.CartanType.PieriFactors = PieriFactors_type_E
#type_F.CartanType.PieriFactors = PieriFactors_type_F
#type_G.CartanType.PieriFactors = PieriFactors_type_G
#type_E_affine.CartanType.PieriFactors = PieriFactors_type_E_affine
#type_F_affine.CartanType.PieriFactors = PieriFactors_type_F_affine
#type_G_affine.CartanType.PieriFactors = PieriFactors_type_G_affine
| 37.348168
| 196
| 0.572674
|
e4de6e264c985eb4695d1f9c17c31e9517aa55ad
| 835
|
py
|
Python
|
scripts/ooki-token/ooki-migration/stop-farming-deploy-migrator-polygon.py
|
bZxNetwork/contractsV2
|
e249e73ae6618593633d474236c3a2d45c394ad5
|
[
"Apache-2.0"
] | 177
|
2020-06-13T01:41:04.000Z
|
2022-03-28T06:26:53.000Z
|
scripts/ooki-token/ooki-migration/stop-farming-deploy-migrator-polygon.py
|
MitchellTesla/contractsV2
|
dea0fa3b86bd14616b98f72aa5233e71f7334a63
|
[
"Apache-2.0"
] | 31
|
2020-08-14T14:30:37.000Z
|
2022-03-15T15:36:25.000Z
|
scripts/ooki-token/ooki-migration/stop-farming-deploy-migrator-polygon.py
|
MitchellTesla/contractsV2
|
dea0fa3b86bd14616b98f72aa5233e71f7334a63
|
[
"Apache-2.0"
] | 38
|
2020-06-24T22:24:40.000Z
|
2022-03-26T00:27:14.000Z
|
# Brownie console script: halt PGOV farming on Polygon and deploy the
# BZRX -> PGOV migration converter.  Relies on names (CHEF, BZX, Contract,
# Proxy, ...) injected by the environment script below.
exec(open("./scripts/env/set-matic.py").read())

# All admin transactions are sent from the current MasterChef owner.
deployer = CHEF.owner();

# Upgrade the MasterChef proxy to a fresh implementation.
masterChefProxy = Contract.from_abi("masterChefProxy", address=CHEF, abi=Proxy.abi)
masterChefImpl = MasterChef_Polygon.deploy({'from': deployer})
masterChefProxy.replaceImplementation(masterChefImpl, {'from': deployer})

# Re-initialize reward bookkeeping, then disable vesting/locks and stop
# emissions (GOV per block -> 0) to wind farming down.
CHEF.setInitialAltRewardsPerShare({'from': deployer})
CHEF.toggleVesting(False, {'from': deployer})
CHEF.setLocked(0, False, {'from': deployer})
CHEF.setLocked(2, False, {'from': deployer})
CHEF.setGOVPerBlock(0, {'from': deployer})

# Pause fee extraction while the migration is in flight.
FEE_EXTRACTOR = Contract.from_abi("ext", address=BZX.feesController(), abi=FeeExtractAndDistribute_Polygon.abi)
FEE_EXTRACTOR.togglePause(True, {'from': deployer})

# Fixed-rate swap contract performing the token migration.
govConverter = FixedSwapTokenConverter.deploy(
    [PGOV],
    [1e18/19], #19 gov == 1 bzrx
    BZRX,
    PGOV,
    {'from': deployer}
)
| 32.115385
| 111
| 0.735329
|
417f1473ccefecb5baa2651d74ca337f7e5d22ca
| 360
|
py
|
Python
|
examples/text-classification/run_ipython_with_args.py
|
mithunpaul08/transformers
|
55d5e0a1d88f0922dc2af3be140e077850c66fee
|
[
"Apache-2.0"
] | null | null | null |
examples/text-classification/run_ipython_with_args.py
|
mithunpaul08/transformers
|
55d5e0a1d88f0922dc2af3be140e077850c66fee
|
[
"Apache-2.0"
] | null | null | null |
examples/text-classification/run_ipython_with_args.py
|
mithunpaul08/transformers
|
55d5e0a1d88f0922dc2af3be140e077850c66fee
|
[
"Apache-2.0"
] | null | null | null |
import sys
import os

# Notebook to execute and the side-channel file through which it receives
# its "command line" arguments.
IPYNB_FILENAME = 'load_trained_model_visualize_student_teacher_delex.ipynb'
CONFIG_FILENAME = 'x.config_ipynb'


def main(argv):
    """Persist *argv* for the notebook, then render it to HTML via nbconvert."""
    # The notebook reads its arguments back from this config file.
    with open(CONFIG_FILENAME, 'w') as config_file:
        config_file.write(' '.join(argv))
    command = 'jupyter nbconvert --execute {:s} --to html'.format(IPYNB_FILENAME)
    os.system(command)
    return None


if __name__ == '__main__':
    main(sys.argv)
| 30
| 82
| 0.713889
|
6c4f13c5749d14ffd8979d0de9b4339d69718bf3
| 7,723
|
py
|
Python
|
python/test/grammar_translator/kernelgen/testAdaptiveCufKernel.py
|
ROCmSoftwarePlatform/gpufort
|
b3d392cf28200cd9b3b2f77689d5a81176b3ec42
|
[
"MIT"
] | 57
|
2021-10-04T19:52:55.000Z
|
2022-03-29T17:41:36.000Z
|
python/test/grammar_translator/kernelgen/testAdaptiveCufKernel.py
|
mjklemm/gpufort
|
b3d392cf28200cd9b3b2f77689d5a81176b3ec42
|
[
"MIT"
] | 12
|
2021-09-29T11:32:59.000Z
|
2021-12-09T11:39:54.000Z
|
python/test/grammar_translator/kernelgen/testAdaptiveCufKernel.py
|
ROCmSoftwarePlatform/gpufort
|
b3d392cf28200cd9b3b2f77689d5a81176b3ec42
|
[
"MIT"
] | 5
|
2021-10-05T06:16:28.000Z
|
2022-02-24T14:32:24.000Z
|
#!/usr/bin/env python3
# SPDX-License-Identifier: MIT
# Copyright (c) 2021 Advanced Micro Devices, Inc. All rights reserved.
import addtoplevelpath
import translator.translator as translator
testdata = []
testdata.append("""!$cuf kernel do(3)
DO ibnd = 1, n_starting_atomic_wfc
!
DO ipol = 1, npol
!
DO ig = 1, ngk_ik
!
rnd_idx = 2 * ((ig-1) + ( (ipol-1) + (ibnd-1) * npol ) * ngk_ik) + 1
rr = randy_d(rnd_idx)
arg = tpi * randy_d(rnd_idx+1)
wfcatom_d(ig,ipol,ibnd) = wfcatom_d(ig,ipol,ibnd) &*
( 1.0_DP + 0.05_DP * CMPLX( rr*COS(arg), rr*SIN(arg) ,kind=DP) )
END DO
!
END DO
!
END DO""")
#testdata.clear()
testdata.append("""!$cuf kernel do(2)<<<*,*>>>
DO i = 0, howmany-1
DO j=nsticks_x*nx1+1, nnr_
f_d(j+i*nnr_) = (0.0_DP,0.0_DP)
END DO
END DO""")
testdata.append("""
!$cuf kernel do(2) <<<*,*>>>
DO ih = 1, nhnp
DO jh = 1, nhnp
IF ( jh >= ih ) THEN
!ijh = jh + ((ih-1)*(2*nhnp-ih))/2 is this faster? Does it matter?
ijh=ijtoh_d(ih,jh,np)
IF ( ih == jh ) THEN
fac = 1.0_dp
ELSE
fac = 2.0_dp
END IF
becsum_d(ijh,na,1)= becsum_d(ijh,na,1) + fac * &
DBLE( becsum_nc_d(ih,1,jh,1) + becsum_nc_d(ih,2,jh,2) )
IF (domag) THEN
becsum_d(ijh,na,2)= becsum_d(ijh,na,2) + fac * &
DBLE( becsum_nc_d(ih,1,jh,2) + becsum_nc_d(ih,2,jh,1) )
becsum_d(ijh,na,3)= becsum_d(ijh,na,3) + fac * DBLE( (0.d0,-1.d0)* &
(becsum_nc_d(ih,1,jh,2) - becsum_nc_d(ih,2,jh,1)) )
becsum_d(ijh,na,4)= becsum_d(ijh,na,4) + fac * &
DBLE( becsum_nc_d(ih,1,jh,1) - becsum_nc_d(ih,2,jh,2) )
END IF
END IF
END DO
END DO
""")
testdata.append("""
!$cuf kernel do(1)
DO ih = 1, nhnt
DO jh = 1, nhnt
ijh=ijtoh_d(ih,jh,np)
DO kh = 1, nhnt
IF ( (nhtol_d(kh,np)==nhtol_d(ih,np)).AND. &
(ABS(nhtoj_d(kh,np)-nhtoj_d(ih,np))<1.d8).AND. &
(indv_d(kh,np)==indv_d(ih,np)) ) THEN ! same_lj(kh,ih,np)
DO lh=1,nhnt
IF ( (nhtol_d(lh,np)==nhtol_d(jh,np)).AND. &
(ABS(nhtoj_d(lh,np)-nhtoj_d(jh,np))<1.d8).AND. &
(indv_d(lh,np)==indv_d(jh,np)) ) THEN !same_lj(lh,jh,np)) THEN
DO is1=1,npol
DO is2=1,npol
fac=becsum_nc_d(kh,is1,lh,is2)
becsum_d(ijh,na,1)=becsum_d(ijh,na,1) + DBLE( fac * &
(fcoef_d(kh,ih,is1,1,np)*fcoef_d(jh,lh,1,is2,np) + &
fcoef_d(kh,ih,is1,2,np)*fcoef_d(jh,lh,2,is2,np) ) )
IF (domag) THEN
becsum_d(ijh,na,2)=becsum_d(ijh,na,2) + DBLE( fac * &
(fcoef_d(kh,ih,is1,1,np)*fcoef_d(jh,lh,2,is2,np) +&
fcoef_d(kh,ih,is1,2,np)*fcoef_d(jh,lh,1,is2,np) ) )
becsum_d(ijh,na,3)=becsum_d(ijh,na,3) + DBLE( fac*(0.d0,-1.d0)*&
(fcoef_d(kh,ih,is1,1,np)*fcoef_d(jh,lh,2,is2,np) - &
fcoef_d(kh,ih,is1,2,np)*fcoef_d(jh,lh,1,is2,np) ))
becsum_d(ijh,na,4)=becsum_d(ijh,na,4) + DBLE(fac * &
(fcoef_d(kh,ih,is1,1,np)*fcoef_d(jh,lh,1,is2,np) - &
fcoef_d(kh,ih,is1,2,np)*fcoef_d(jh,lh,2,is2,np) ) )
END IF
END DO
END DO
END IF
END DO
END IF
END DO
END DO
END DO
""")
testdata.clear()
testdata.append("""
!$cuf kernel do(2) <<<*,*>>>
DO ih = 1, nhnp
DO jh = 1, nhnp
IF ( jh >= ih ) THEN
!ijh = jh + ((ih-1)*(2*nhnp-ih))/2 is this faster? Does it matter?
ijh=ijtoh_d(ih,jh,np)
IF ( ih == jh ) THEN
fac = 1.0_dp
ELSE
fac = 2.0_dp
END IF
becsum_d(ijh,na,1)= becsum_d(ijh,na,1) + fac * &
DBLE( becsum_nc_d(ih,1,jh,1) + becsum_nc_d(ih,2,jh,2) )
IF (domag) THEN
becsum_d(ijh,na,2)= becsum_d(ijh,na,2) + fac * &
DBLE( becsum_nc_d(ih,1,jh,2) + becsum_nc_d(ih,2,jh,1) )
!becsum_d(ijh,na,3)= becsum_d(ijh,na,3) + fac * DBLE( (0.d0,-1.d0)* &
! (becsum_nc_d(ih,1,jh,2) - becsum_nc_d(ih,2,jh,1)) )
becsum_d(ijh,na,4)= becsum_d(ijh,na,4) + fac * &
DBLE( becsum_nc_d(ih,1,jh,1) - becsum_nc_d(ih,2,jh,2) )
END IF
END IF
END DO
END DO
""")
testdata.clear()
testdata.append("""
!$cuf kernel do(1)
DO ih = 1, nhnt
DO jh = 1, nhnt
ijh=ijtoh_d(ih,jh,np)
DO kh = 1, nhnt
IF ( (nhtol_d(kh,np)==nhtol_d(ih,np)).AND. &
(ABS(nhtoj_d(kh,np)-nhtoj_d(ih,np))<1.d8).AND. &
(indv_d(kh,np)==indv_d(ih,np)) ) THEN ! same_lj(kh,ih,np)
DO lh=1,nhnt
IF ( (nhtol_d(lh,np)==nhtol_d(jh,np)).AND. &
(ABS(nhtoj_d(lh,np)-nhtoj_d(jh,np))<1.d8).AND. &
(indv_d(lh,np)==indv_d(jh,np)) ) THEN !same_lj(lh,jh,np)) THEN
DO is1=1,npol
DO is2=1,npol
fac=becsum_nc_d(kh,is1,lh,is2)
becsum_d(ijh,na,1)=becsum_d(ijh,na,1) + DBLE( fac * &
(fcoef_d(kh,ih,is1,1,np)*fcoef_d(jh,lh,1,is2,np) + &
fcoef_d(kh,ih,is1,2,np)*fcoef_d(jh,lh,2,is2,np) ) )
IF (domag) THEN
becsum_d(ijh,na,2)=becsum_d(ijh,na,2) + DBLE( fac * &
(fcoef_d(kh,ih,is1,1,np)*fcoef_d(jh,lh,2,is2,np) +&
fcoef_d(kh,ih,is1,2,np)*fcoef_d(jh,lh,1,is2,np) ) )
becsum_d(ijh,na,3)=becsum_d(ijh,na,3) + DBLE( fac*(0.d0,-1.d0)*&
(fcoef_d(kh,ih,is1,1,np)*fcoef_d(jh,lh,2,is2,np) - &
fcoef_d(kh,ih,is1,2,np)*fcoef_d(jh,lh,1,is2,np) ))
becsum_d(ijh,na,4)=becsum_d(ijh,na,4) + DBLE(fac * &
(fcoef_d(kh,ih,is1,1,np)*fcoef_d(jh,lh,1,is2,np) - &
fcoef_d(kh,ih,is1,2,np)*fcoef_d(jh,lh,2,is2,np) ) )
END IF
END DO
END DO
END IF
END DO
END IF
END DO
END DO
END DO
""")
# Drive the translator over every CUF kernel snippet collected above and
# report which ones converted to HIP successfully.
failedToParse = []
successfullyParsed = []
success = True
for v in testdata:
    try:
        #translator.cuf_loop_kernel.parseString(v)
        # Convert the CUF loop kernel to a HIP C snippet (indent width 10).
        c_snippet, problem_size, LoopKernelLaunchInfo, identifier_names, localLValues, loop_vars = translator.convertCufLoopKernel2Hip(v,10)
        print("{} -> {}".format(v,c_snippet))
        successfullyParsed.append(v)
        print(localLValues)
    except Exception as e:
        print("failed to parse {}".format(v))
        success = False
        failedToParse.append(v)
        # NOTE(review): re-raising here aborts on the first failure, so the
        # summary branch below can never run -- confirm whether the re-raise
        # or the summary is the intended behavior.
        raise e
if success:
    print("SUCCESS!")
else:
    print("Summary: Failed to parse {0} of {1} test inputs".format(len(failedToParse),len(failedToParse)+len(successfullyParsed)))
    print("FAILURE!")
| 41.299465
| 139
| 0.454357
|
6f62bda7307f0700e9827c89348978655676d281
| 146
|
py
|
Python
|
cursoemvideo/python3_mundo1/aula_06/ex003.py
|
Tiago-Baptista/CursoEmVideo_Python3
|
381044e66594362a3767a776530c2ba7dc02dcf2
|
[
"MIT"
] | null | null | null |
cursoemvideo/python3_mundo1/aula_06/ex003.py
|
Tiago-Baptista/CursoEmVideo_Python3
|
381044e66594362a3767a776530c2ba7dc02dcf2
|
[
"MIT"
] | null | null | null |
cursoemvideo/python3_mundo1/aula_06/ex003.py
|
Tiago-Baptista/CursoEmVideo_Python3
|
381044e66594362a3767a776530c2ba7dc02dcf2
|
[
"MIT"
] | null | null | null |
# Read two integers from the user and print their sum.
parcela1 = int(input('Digite um valor'))
parcela2 = int(input('Digite outro valor'))
soma = parcela1 + parcela2
print('A soma entre {} e {} é igual a {}'.format(parcela1, parcela2, soma))
| 29.2
| 60
| 0.60274
|
f65467899dcd76a8c51156f0e38d1cb706be979a
| 2,321
|
py
|
Python
|
project/etl_job/etl.py
|
JinglinLi/tweet_posting_slackbot
|
cf9ef1e004093c00a09f4451f89f397459d1f7a0
|
[
"MIT"
] | null | null | null |
project/etl_job/etl.py
|
JinglinLi/tweet_posting_slackbot
|
cf9ef1e004093c00a09f4451f89f397459d1f7a0
|
[
"MIT"
] | null | null | null |
project/etl_job/etl.py
|
JinglinLi/tweet_posting_slackbot
|
cf9ef1e004093c00a09f4451f89f397459d1f7a0
|
[
"MIT"
] | null | null | null |
import pymongo
import time
from sqlalchemy import create_engine
import pandas as pd
import logging
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer
# wait until tweets were all collected and writen in MongoDB before starting etl job
# NOTE(review): a fixed delay assumes the collector finishes within 2 minutes;
# confirm, or replace with an explicit readiness check.
time.sleep(120) # seconds
#----- extract data from MongoDB ------
def extract():
    """Extract data from MongoDB.

    Returns the ``tweet_stream_json`` collection from the
    ``tweets_stream_db`` database (a pymongo Collection, not the documents
    themselves -- callers query it via ``collection.find()``).
    """
    # connect mongodb container of the same composer from python
    # ("mongodb" is the docker-compose service hostname)
    client = pymongo.MongoClient("mongodb")
    # get database
    db = client.tweets_stream_db
    # get collection
    collection = db.tweet_stream_json
    return collection
#----- transform data : sentiment analysis
def transform(collection):
    """
    transform mongodb cursor into dataframe
    perform sentiment analysis
    return dataframe with 'tweets' and 'sentiment' columns

    ``collection`` is a pymongo Collection; each document is assumed to have
    a ``text`` field holding the tweet body -- TODO confirm against the
    collector's schema.
    """
    logging.warning('----------The datatype of collection.find() is ------------')
    logging.warning(type(collection.find())) # collection.find() is of type <class 'pymongo.cursor.Cursor'>
    logging.warning('-----------------------')
    # pointer into dataframe
    df = pd.DataFrame(list(collection.find()))
    # allocate dataframe to return
    tweet_df = pd.DataFrame()
    # assign column 'tweets'
    tweet_df['tweets'] = df['text']
    # sentiment analysis and assign column 'sentiment'
    # (VADER compound score in [-1, 1], one value per tweet)
    s = SentimentIntensityAnalyzer() # vader sentiment analysis
    tweet_df['sentiment'] = [s.polarity_scores(x)['compound'] for x in tweet_df['tweets']]
    logging.warning('----------The table to be writen in psql is dataframe: ------------')
    logging.warning(tweet_df) # collection.find() is of type <class 'pymongo.cursor.Cursor'>
    logging.warning('-----------------------')
    return tweet_df
#------------- load to postgres
def load(tweet_df):
    """Load the transformed tweets into PostgreSQL.

    Writes ``tweet_df`` to the ``tweets`` table, replacing any existing
    contents (``if_exists='replace'``).
    """
    # connect postgresdb container of the same composer from python
    pg_engine = create_engine('postgresql://postgres:password@postgresdb:5432/tweeter', echo=True)
    # create table tweets
    # NOTE(review): to_sql(if_exists='replace') below recreates the table
    # with its own schema anyway, so this CREATE is effectively a no-op.
    pg_engine.execute('''
        CREATE TABLE IF NOT EXISTS tweets (
        text VARCHAR(500),
        sentiment NUMERIC
    );
    ''')
    # write dataframe into postgresdb table tweets
    tweet_df.to_sql('tweets', pg_engine, if_exists='replace')
# ETL
# Run the pipeline once at container start-up: Mongo -> sentiment -> Postgres.
load(transform(extract()))
| 36.265625
| 108
| 0.673417
|
997bd754ee89b890260a01e1bafc8b2049478991
| 7,935
|
py
|
Python
|
yt/frontends/owls_subfind/io.py
|
munkm/yt
|
9c92deaa53459762cb35025bdc8b9048a9faac31
|
[
"BSD-3-Clause-Clear"
] | 1
|
2021-09-15T08:17:43.000Z
|
2021-09-15T08:17:43.000Z
|
yt/frontends/owls_subfind/io.py
|
munkm/yt
|
9c92deaa53459762cb35025bdc8b9048a9faac31
|
[
"BSD-3-Clause-Clear"
] | 2
|
2021-09-15T16:10:39.000Z
|
2021-09-16T14:23:31.000Z
|
yt/frontends/owls_subfind/io.py
|
stonnes/yt
|
aad3cfa3b4ebab7838352ab467275a27c26ff363
|
[
"BSD-3-Clause-Clear"
] | 1
|
2021-04-21T07:01:51.000Z
|
2021-04-21T07:01:51.000Z
|
import numpy as np
from yt.funcs import mylog
from yt.utilities.io_handler import BaseIOHandler
from yt.utilities.on_demand_imports import _h5py as h5py
class IOHandlerOWLSSubfindHDF5(BaseIOHandler):
    """IO handler for OWLS subfind halo catalogs stored in HDF5."""

    _dataset_type = "subfind_hdf5"

    def __init__(self, ds):
        super().__init__(ds)
        # FOF field names whose data actually live in the SUBFIND offset
        # files; populated by _identify_fields() via subfind_field_list().
        self.offset_fields = set()

    def _read_fluid_selection(self, chunks, selector, fields, size):
        # Halo catalogs contain particle data only; there are no fluid fields.
        raise NotImplementedError

    def _read_particle_coords(self, chunks, ptf):
        # This will read chunks and yield the results.
        chunks = list(chunks)
        data_files = set()
        for chunk in chunks:
            for obj in chunk.objs:
                data_files.update(obj.data_files)
        for data_file in sorted(data_files, key=lambda x: (x.filename, x.start)):
            with h5py.File(data_file.filename, mode="r") as f:
                for ptype in sorted(ptf):
                    pcount = data_file.total_particles[ptype]
                    # Halo positions; resized so the array is (pcount, 3).
                    coords = f[ptype]["CenterOfMass"][()].astype("float64")
                    coords = np.resize(coords, (pcount, 3))
                    x = coords[:, 0]
                    y = coords[:, 1]
                    z = coords[:, 2]
                    yield ptype, (x, y, z)

    def _read_offset_particle_field(self, field, data_file, fh):
        # Reassemble a FOF-indexed field whose values are spread across the
        # SUBFIND offset files, mapping each file's sub-range into the
        # global FOF index range of data_file.
        field_data = np.empty(data_file.total_particles["FOF"], dtype="float64")
        fofindex = (
            np.arange(data_file.total_particles["FOF"]) + data_file.index_start["FOF"]
        )
        for offset_file in data_file.offset_files:
            # Reuse the already-open handle when it is the same file.
            if fh.filename == offset_file.filename:
                ofh = fh
            else:
                ofh = h5py.File(offset_file.filename, mode="r")
            subindex = np.arange(offset_file.total_offset) + offset_file.offset_start
            # Overlap of this offset file's index range with ours.
            substart = max(fofindex[0] - subindex[0], 0)
            subend = min(fofindex[-1] - subindex[0], subindex.size - 1)
            fofstart = substart + subindex[0] - fofindex[0]
            fofend = subend + subindex[0] - fofindex[0]
            field_data[fofstart : fofend + 1] = ofh["SUBFIND"][field][
                substart : subend + 1
            ]
        return field_data

    def _read_particle_fields(self, chunks, ptf, selector):
        # Now we have all the sizes, and we can allocate
        chunks = list(chunks)
        data_files = set()
        for chunk in chunks:
            for obj in chunk.objs:
                data_files.update(obj.data_files)
        for data_file in sorted(data_files, key=lambda x: (x.filename, x.start)):
            with h5py.File(data_file.filename, mode="r") as f:
                for ptype, field_list in sorted(ptf.items()):
                    pcount = data_file.total_particles[ptype]
                    if pcount == 0:
                        continue
                    coords = f[ptype]["CenterOfMass"][()].astype("float64")
                    coords = np.resize(coords, (pcount, 3))
                    x = coords[:, 0]
                    y = coords[:, 1]
                    z = coords[:, 2]
                    # Spatial selection mask; radius 0.0 selects points only.
                    mask = selector.select_points(x, y, z, 0.0)
                    del x, y, z
                    if mask is None:
                        continue
                    for field in field_list:
                        if field in self.offset_fields:
                            # Field stored in the SUBFIND offset files.
                            field_data = self._read_offset_particle_field(
                                field, data_file, f
                            )
                        else:
                            if field == "particle_identifier":
                                # Synthesized global index, not stored on disk.
                                field_data = (
                                    np.arange(data_file.total_particles[ptype])
                                    + data_file.index_start[ptype]
                                )
                            elif field in f[ptype]:
                                field_data = f[ptype][field][()].astype("float64")
                            else:
                                # "name_N" fields are column N of the vector
                                # dataset "name".
                                fname = field[: field.rfind("_")]
                                field_data = f[ptype][fname][()].astype("float64")
                                my_div = field_data.size / pcount
                                if my_div > 1:
                                    field_data = np.resize(
                                        field_data, (int(pcount), int(my_div))
                                    )
                                    findex = int(field[field.rfind("_") + 1 :])
                                    field_data = field_data[:, findex]
                        data = field_data[mask]
                        yield (ptype, field), data

    def _count_particles(self, data_file):
        with h5py.File(data_file.filename, mode="r") as f:
            pcount = {"FOF": f["FOF"].attrs["Number_of_groups"]}
            if "SUBFIND" in f:
                # We need this to figure out where the offset fields are stored.
                data_file.total_offset = f["SUBFIND"].attrs["Number_of_groups"]
                pcount["SUBFIND"] = f["FOF"].attrs["Number_of_subgroups"]
            else:
                data_file.total_offset = 0
                pcount["SUBFIND"] = 0
        return pcount

    def _identify_fields(self, data_file):
        fields = []
        pcount = data_file.total_particles
        if sum(pcount.values()) == 0:
            return fields, {}
        with h5py.File(data_file.filename, mode="r") as f:
            for ptype in self.ds.particle_types_raw:
                if data_file.total_particles[ptype] == 0:
                    continue
                fields.append((ptype, "particle_identifier"))
                my_fields, my_offset_fields = subfind_field_list(
                    f[ptype], ptype, data_file.total_particles
                )
                fields.extend(my_fields)
                # Remember which fields must be read via the offset files.
                self.offset_fields = self.offset_fields.union(set(my_offset_fields))
        return fields, {}
def subfind_field_list(fh, ptype, pcount):
    """Recursively enumerate the fields stored under HDF5 group *fh*.

    Parameters
    ----------
    fh : h5py.Group
        Group to scan (e.g. the "FOF" or "SUBFIND" group).
    ptype : str
        Particle type the group corresponds to ("FOF" or "SUBFIND").
    pcount : dict
        Mapping of particle type -> particle count for the data file.

    Returns
    -------
    (fields, offset_fields) : tuple of lists
        ``fields`` holds (ptype, fieldname) pairs; ``offset_fields`` holds
        the names of FOF fields that actually live in SUBFIND offset files.
    """
    fields = []
    offset_fields = []
    for field in fh.keys():
        if "PartType" in field:
            # These are halo member particles
            continue
        elif isinstance(fh[field], h5py.Group):
            my_fields, my_offset_fields = subfind_field_list(fh[field], ptype, pcount)
            fields.extend(my_fields)
            # BUGFIX: accumulate the subgroup's offset fields into our own
            # list.  The original extended the throwaway subgroup list
            # (my_offset_fields.extend(offset_fields)), silently dropping
            # every offset field discovered through recursion.
            offset_fields.extend(my_offset_fields)
        else:
            if not fh[field].size % pcount[ptype]:
                # Size is a multiple of the particle count: a per-particle
                # field, possibly a vector one (my_div columns).
                my_div = fh[field].size / pcount[ptype]
                fname = fh[field].name[fh[field].name.find(ptype) + len(ptype) + 1 :]
                if my_div > 1:
                    for i in range(int(my_div)):
                        fields.append((ptype, "%s_%d" % (fname, i)))
                else:
                    fields.append((ptype, fname))
            elif (
                ptype == "SUBFIND"
                and not fh[field].size % fh["/SUBFIND"].attrs["Number_of_groups"]
            ):
                # These are actually FOF fields, but they were written after
                # a load balancing step moved halos around and thus they do not
                # correspond to the halos stored in the FOF group.
                my_div = fh[field].size / fh["/SUBFIND"].attrs["Number_of_groups"]
                fname = fh[field].name[fh[field].name.find(ptype) + len(ptype) + 1 :]
                if my_div > 1:
                    for i in range(int(my_div)):
                        fields.append(("FOF", "%s_%d" % (fname, i)))
                else:
                    fields.append(("FOF", fname))
                offset_fields.append(fname)
            else:
                mylog.warning(
                    "Cannot add field (%s, %s) with size %d.",
                    ptype,
                    fh[field].name,
                    fh[field].size,
                )
                continue
    return fields, offset_fields
| 44.578652
| 86
| 0.495778
|
75bbb43361a8ed1571419eb2d324d98ac5ab417b
| 765
|
py
|
Python
|
server/server/challenges/views.py
|
Vector35/csaw-2019-pwny-race
|
28f2f0a9f9cdc037756fd4223852970d83bd171e
|
[
"MIT"
] | 13
|
2019-11-08T04:26:44.000Z
|
2020-01-24T14:29:09.000Z
|
server/server/challenges/views.py
|
Vector35/csaw-2019-pwny-race
|
28f2f0a9f9cdc037756fd4223852970d83bd171e
|
[
"MIT"
] | null | null | null |
server/server/challenges/views.py
|
Vector35/csaw-2019-pwny-race
|
28f2f0a9f9cdc037756fd4223852970d83bd171e
|
[
"MIT"
] | 1
|
2019-12-03T15:47:19.000Z
|
2019-12-03T15:47:19.000Z
|
from django.http import HttpResponse
from django.shortcuts import render
from django.views import generic
from django.utils import timezone
from .models import Challenge
def index(request):
    """Render the site landing page."""
    return render(request, "index.html")
def challenge_list(request):
    """Render the challenge index.

    Context provides ``challenge_list`` (challenges released so far) and
    ``next_chal_time`` (POSIX timestamp of the earliest upcoming release,
    or None when nothing is queued).
    """
    # Snapshot the clock once so both queries agree on "now" (the original
    # called timezone.localtime() twice, which could straddle a release).
    now = str(timezone.localtime())
    challenges = Challenge.objects.filter(release_datetime__lte=now)
    next_chals = Challenge.objects.filter(release_datetime__gte=now)
    # Earliest upcoming release; min() with default=None replaces the manual
    # loop and the unused next_chal_time variable / `== None` comparison.
    closest_time = min(
        (chal.release_datetime.timestamp() for chal in next_chals),
        default=None,
    )
    return render(request, "index.html", {"challenge_list": challenges, "next_chal_time": closest_time})
| 31.875
| 104
| 0.738562
|
6c697420687e0cb101f53dfc6a430fb50d84c02d
| 1,419
|
py
|
Python
|
src/mender/scripts/runner.py
|
lluiscampos/mender-python-client
|
67b35fa13c4f01a8b2a990c95b65eacfee7879e3
|
[
"Apache-2.0"
] | null | null | null |
src/mender/scripts/runner.py
|
lluiscampos/mender-python-client
|
67b35fa13c4f01a8b2a990c95b65eacfee7879e3
|
[
"Apache-2.0"
] | null | null | null |
src/mender/scripts/runner.py
|
lluiscampos/mender-python-client
|
67b35fa13c4f01a8b2a990c95b65eacfee7879e3
|
[
"Apache-2.0"
] | 1
|
2020-12-16T15:05:53.000Z
|
2020-12-16T15:05:53.000Z
|
# Copyright 2020 Northern.tech AS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import subprocess
import logging as log
import mender.settings.settings as settings
def run_sub_updater(deployment_id: str) -> bool:
    """Run the /usr/share/mender/install sub-updater script.

    Writes *deployment_id* to the update lockfile first so the sub-updater
    (and a restarted client) can associate the artifact with the deployment.

    :param deployment_id: ID of the deployment being installed.
    :return: True when the install script exits successfully, False otherwise.
    """
    log.info("Running the sub-updater script at /usr/share/mender/install")
    try:
        # Store the deployment ID in the update lockfile
        with open(settings.PATHS.lockfile_path, "w") as f:
            f.write(deployment_id)
        subprocess.run(
            [
                "/usr/share/mender/install",
                settings.PATHS.artifact_download + "/artifact.mender",
            ],
            check=True,
        )
        return True
    # OSError added: a missing/non-executable script or an unwritable
    # lockfile previously escaped as an uncaught exception even though the
    # function's contract is to report failure via its bool return.
    except (subprocess.CalledProcessError, OSError) as e:
        # BUGFIX: the message previously named '/var/lib/mender/install',
        # which is not the script that is actually executed above.
        log.error(f"Failed to run the install script '/usr/share/mender/install' {e}")
        return False
| 37.342105
| 84
| 0.675123
|
888f5fced24763d5415a9933bc9c1cb308d40f95
| 4,208
|
py
|
Python
|
simscale_sdk/models/fixed_gradient_evbc.py
|
slainesimscale/simscale-python-sdk
|
db483eeabe558e55d020f5f829a3bf13c9c287a7
|
[
"MIT"
] | 8
|
2021-01-22T13:41:03.000Z
|
2022-01-03T09:00:10.000Z
|
simscale_sdk/models/fixed_gradient_evbc.py
|
slainesimscale/simscale-python-sdk
|
db483eeabe558e55d020f5f829a3bf13c9c287a7
|
[
"MIT"
] | null | null | null |
simscale_sdk/models/fixed_gradient_evbc.py
|
slainesimscale/simscale-python-sdk
|
db483eeabe558e55d020f5f829a3bf13c9c287a7
|
[
"MIT"
] | 3
|
2021-03-18T15:52:52.000Z
|
2022-01-03T08:59:30.000Z
|
# coding: utf-8
"""
SimScale API
The version of the OpenAPI document: 0.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from simscale_sdk.configuration import Configuration
class FixedGradientEVBC(object):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.
    """

    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Attribute name -> OpenAPI type, used for (de)serialization.
    openapi_types = {
        'type': 'str',
        'gradient': 'DimensionalEddyViscosityGradient'
    }

    # Attribute name -> JSON key on the wire.
    attribute_map = {
        'type': 'type',
        'gradient': 'gradient'
    }

    def __init__(self, type='FIXED_GRADIENT', gradient=None, local_vars_configuration=None):  # noqa: E501
        """FixedGradientEVBC - a model defined in OpenAPI"""  # noqa: E501
        if local_vars_configuration is None:
            local_vars_configuration = Configuration()
        self.local_vars_configuration = local_vars_configuration

        self._type = None
        self._gradient = None
        self.discriminator = None

        self.type = type
        if gradient is not None:
            self.gradient = gradient

    @property
    def type(self):
        """Gets the type of this FixedGradientEVBC.  # noqa: E501

        Schema name: FixedGradientEVBC  # noqa: E501

        :return: The type of this FixedGradientEVBC.  # noqa: E501
        :rtype: str
        """
        return self._type

    @type.setter
    def type(self, type):
        """Sets the type of this FixedGradientEVBC.

        Schema name: FixedGradientEVBC  # noqa: E501

        :param type: The type of this FixedGradientEVBC.  # noqa: E501
        :type: str
        """
        if self.local_vars_configuration.client_side_validation and type is None:  # noqa: E501
            raise ValueError("Invalid value for `type`, must not be `None`")  # noqa: E501

        self._type = type

    @property
    def gradient(self):
        """Gets the gradient of this FixedGradientEVBC.  # noqa: E501


        :return: The gradient of this FixedGradientEVBC.  # noqa: E501
        :rtype: DimensionalEddyViscosityGradient
        """
        return self._gradient

    @gradient.setter
    def gradient(self, gradient):
        """Sets the gradient of this FixedGradientEVBC.


        :param gradient: The gradient of this FixedGradientEVBC.  # noqa: E501
        :type: DimensionalEddyViscosityGradient
        """
        self._gradient = gradient

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        # Recursively serialize nested models, lists and dicts of models.
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, FixedGradientEVBC):
            return False

        return self.to_dict() == other.to_dict()

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        if not isinstance(other, FixedGradientEVBC):
            return True

        return self.to_dict() != other.to_dict()
| 28.432432
| 106
| 0.586027
|
737399c797e1075b127703a183d36e9b4a45ee2b
| 204
|
py
|
Python
|
miniurl/urls.py
|
joannawetesko/django-mini-url
|
5086da1db2db6e8213bc3330fb7a153d206da69f
|
[
"Apache-2.0"
] | null | null | null |
miniurl/urls.py
|
joannawetesko/django-mini-url
|
5086da1db2db6e8213bc3330fb7a153d206da69f
|
[
"Apache-2.0"
] | null | null | null |
miniurl/urls.py
|
joannawetesko/django-mini-url
|
5086da1db2db6e8213bc3330fb7a153d206da69f
|
[
"Apache-2.0"
] | null | null | null |
from django.urls import path
from miniurl import views
urlpatterns = [
    # Catch-all short-link path -- presumably resolves the stored target URL
    # and redirects to it (confirm against RedirectURLView).
    path('<path:link>', views.RedirectURLView.as_view(), name='redirect_url'),
    # Landing page.
    path('', views.MainView.as_view(), name='main')
]
| 29.142857
| 78
| 0.705882
|
a6b2a574c20a7b8f0d1ce7c155ce37771bcfd593
| 1,580
|
py
|
Python
|
scripts/tf_cnn_benchmarks/models/overfeat_model.py
|
MikulasZelinka/benchmarks
|
23b611b27ecb2927dab647c9d1892f5bdfc24c88
|
[
"Apache-2.0"
] | null | null | null |
scripts/tf_cnn_benchmarks/models/overfeat_model.py
|
MikulasZelinka/benchmarks
|
23b611b27ecb2927dab647c9d1892f5bdfc24c88
|
[
"Apache-2.0"
] | 1
|
2018-11-01T06:20:53.000Z
|
2018-11-08T16:37:00.000Z
|
scripts/tf_cnn_benchmarks/models/overfeat_model.py
|
MikulasZelinka/benchmarks
|
23b611b27ecb2927dab647c9d1892f5bdfc24c88
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Overfeat model configuration.
References:
OverFeat: Integrated Recognition, Localization and Detection using
Convolutional Networks
Pierre Sermanet, David Eigen, Xiang Zhang, Michael Mathieu, Rob Fergus,
Yann LeCun, 2014
http://arxiv.org/abs/1312.6229
"""
from models import model
class OverfeatModel(model.CNNModel):
    """OverfeatModel."""

    def __init__(self):
        # Arguments presumably are (name, image_size, batch_size,
        # learning_rate) -- confirm against model.CNNModel.__init__.
        super(OverfeatModel, self).__init__('overfeat', 231, 32, 0.005)

    def add_inference(self, cnn):
        # Builds the OverFeat "fast" topology layer by layer on the cnn
        # builder; call order defines the network.
        # Note: VALID requires padding the images by 3 in width and height
        cnn.conv(96, 11, 11, 4, 4, mode='VALID')
        cnn.mpool(2, 2)
        cnn.conv(256, 5, 5, 1, 1, mode='VALID')
        cnn.mpool(2, 2)
        cnn.conv(512, 3, 3)
        cnn.conv(1024, 3, 3)
        cnn.conv(1024, 3, 3)
        cnn.mpool(2, 2)
        # Flatten the final 1024 x 6 x 6 feature maps for the FC head.
        cnn.reshape([-1, 1024 * 6 * 6])
        cnn.affine(3072)
        cnn.dropout()
        cnn.affine(4096)
        cnn.dropout()
| 31.6
| 80
| 0.667089
|
0424ae5c232828dfc1f65f1828cda9453a2d5060
| 511
|
py
|
Python
|
ngage/config.py
|
djeromov/ngage
|
e816b3cebad6dd6faa1a33e3115136969b44314e
|
[
"Apache-2.0"
] | 8
|
2016-05-12T20:14:22.000Z
|
2021-03-10T00:46:07.000Z
|
ngage/config.py
|
djeromov/ngage
|
e816b3cebad6dd6faa1a33e3115136969b44314e
|
[
"Apache-2.0"
] | 5
|
2016-07-27T03:57:21.000Z
|
2021-08-06T20:03:53.000Z
|
ngage/config.py
|
djeromov/ngage
|
e816b3cebad6dd6faa1a33e3115136969b44314e
|
[
"Apache-2.0"
] | 3
|
2016-12-02T08:33:58.000Z
|
2021-08-06T19:36:49.000Z
|
import munge
class Config(munge.Config):
    """ngage configuration with built-in defaults (munge.Config subclass)."""

    # Default configuration tree merged under whatever the user supplies.
    defaults = {
        "config": {
            "ngage": {
                # Per-device connection defaults; "eznc" is the default
                # driver type, driver_args passes extra driver options.
                "default": {
                    "user": None,
                    "password": None,
                    "port": None,
                    "type": "eznc",
                    "driver_args": {},
                },
                "plugin_path": [],
                "hosts": [],
                "groups": [],
            },
        },
        # No config directory by default; serialized as YAML.
        "config_dir": None,
        "codec": "yaml",
    }
| 22.217391
| 38
| 0.295499
|
ce236a470ff82094f215095ff4ee55a7385a63e2
| 78
|
py
|
Python
|
tree_bark_synthesis/ShapeOptimization/__init__.py
|
laitoch/tree-bark-synthesis
|
0bd43d6699d2e05f62d144f310874f986bbd91d2
|
[
"MIT"
] | null | null | null |
tree_bark_synthesis/ShapeOptimization/__init__.py
|
laitoch/tree-bark-synthesis
|
0bd43d6699d2e05f62d144f310874f986bbd91d2
|
[
"MIT"
] | null | null | null |
tree_bark_synthesis/ShapeOptimization/__init__.py
|
laitoch/tree-bark-synthesis
|
0bd43d6699d2e05f62d144f310874f986bbd91d2
|
[
"MIT"
] | null | null | null |
import sys
# HACK: prepend the parent directory so control_map is importable; relies on
# the process's working directory -- prefer a proper package-relative import.
sys.path.insert(0,'..')
# Re-export shape_optimization as this package's public API.
from control_map import shape_optimization
| 19.5
| 42
| 0.794872
|
89ecc35c24d61e15f09a21ec46ae3a9fa394eeeb
| 5,407
|
py
|
Python
|
src/systemtest/test_vds.py
|
Odin-SMR/odin-api
|
dea75016a6f9e61be0dd64a698b03ded5a6f0f10
|
[
"MIT"
] | null | null | null |
src/systemtest/test_vds.py
|
Odin-SMR/odin-api
|
dea75016a6f9e61be0dd64a698b03ded5a6f0f10
|
[
"MIT"
] | 85
|
2020-04-01T06:24:29.000Z
|
2022-03-28T04:28:56.000Z
|
src/systemtest/test_vds.py
|
Odin-SMR/odin-api
|
dea75016a6f9e61be0dd64a698b03ded5a6f0f10
|
[
"MIT"
] | null | null | null |
import pytest
import requests
def test_vds_file4ace_exists(odinapi_service):
    """The data-files listing advertises the expected ACE example file."""
    endpoint = '{}/rest_api/v4/config_data/data_files/'.format(odinapi_service)
    listing = requests.get(endpoint).json()
    example = listing['data']['vds-files']['ace']['example-file']
    assert example == '/vds-data/ACE_Level2/v2/2004-03/ss2969.nc'
def test_vds_file4mipas_exists(odinapi_service):
    """The data-files listing advertises the expected MIPAS example file."""
    endpoint = '{}/rest_api/v4/config_data/data_files/'.format(odinapi_service)
    listing = requests.get(endpoint).json()
    example = listing['data']['vds-files']['mipas']['example-file']
    assert example == '/vds-data/Envisat_MIPAS_Level2/O3/V5/2007/02/MIPAS-E_IMK.200702.V5R_O3_224.nc'
def test_vds_file4mipas_esa_exists(odinapi_service):
    """The data-files listing advertises the expected MIPAS ESA example file."""
    endpoint = '{}/rest_api/v4/config_data/data_files/'.format(odinapi_service)
    listing = requests.get(endpoint).json()
    example = listing['data']['vds-files']['mipas_esa']['example-file']
    assert example == '/vds-data/MIP_NL__2P/v7.03/2002/07/31/MIP_NL__2PWDSI20020731_121351_000060462008_00124_02182_1000_11.nc'
def test_vds_file4mls_exists(odinapi_service):
    """The data-files listing advertises the expected MLS example file."""
    endpoint = '{}/rest_api/v4/config_data/data_files/'.format(odinapi_service)
    listing = requests.get(endpoint).json()
    example = listing['data']['vds-files']['mls']['example-file']
    assert example == '/vds-data/Aura_MLS_Level2/O3/v04/2009/11/MLS-Aura_L2GP-O3_v04-20-c01_2009d331.he5'
def test_vds_file4osiris_exists(odinapi_service):
    """The OSIRIS example file should be listed in the configured data files."""
    listing = requests.get(
        '{}/rest_api/v4/config_data/data_files/'.format(odinapi_service)
    ).json()
    expected = '/osiris-data/201410/OSIRIS-Odin_L2-O3-Limb-MART_v5-07_2014m1027.he5'  # noqa
    assert listing['data']['vds-files']['osiris']['example-file'] == expected
def test_vds_file4sageIII_exists(odinapi_service):
    """The SAGE III example file should be listed in the configured data files."""
    listing = requests.get(
        '{}/rest_api/v4/config_data/data_files/'.format(odinapi_service)
    ).json()
    expected = '/vds-data/Meteor3M_SAGEIII_Level2/2002/09/v04/g3a.ssp.00386710v04.h5'  # noqa
    assert listing['data']['vds-files']['sageIII']['example-file'] == expected
def test_vds_file4smiles_exists(odinapi_service):
    """The SMILES example file should be listed in the configured data files."""
    listing = requests.get(
        '{}/rest_api/v4/config_data/data_files/'.format(odinapi_service)
    ).json()
    expected = '/vds-data/ISS_SMILES_Level2/O3/v2.4/2009/11/SMILES_L2_O3_B_008-11-0502_20091112.he5'  # noqa
    assert listing['data']['vds-files']['smiles']['example-file'] == expected
def test_vds_file4smr_exists(odinapi_service):
    """The SMR example file should be listed in the configured data files."""
    listing = requests.get(
        '{}/rest_api/v4/config_data/data_files/'.format(odinapi_service)
    ).json()
    expected = '/odin-smr-2-1-data/SM_AC2ab/SCH_5018_C11DC6_021.L2P'
    assert listing['data']['vds-files']['smr']['example-file'] == expected
def test_vds_file4ace_is_readable(odinapi_service):
    """The ACE example file should be readable via the vds_external endpoint."""
    url = ('{}/rest_api/v4/vds_external/'.format(odinapi_service)
           + 'ace/T/2004-03-01/ss2969.nc/0/')
    payload = requests.get(url).json()
    first_temperature = payload['Data-L2_retreival_grid']['T'][0]
    assert first_temperature == pytest.approx(214.420, abs=0.001)
def test_vds_file4mipas_is_readable(odinapi_service):
    """The MIPAS example file should be readable via the vds_external endpoint."""
    url = ('{}/rest_api/v4/vds_external/'.format(odinapi_service)
           + 'mipas/O3/2007-02-01/MIPAS-E_IMK.200702.V5R_O3_224.nc/0/')
    payload = requests.get(url).json()
    first_target = payload['target'][0]
    assert first_target == pytest.approx(0.098, abs=0.001)
def test_vds_file4mipas_esa_is_readable(odinapi_service):
    """The MIPAS-ESA example file should be readable via the vds_external endpoint."""
    url = ('{}/rest_api/v4/vds_external/'.format(odinapi_service)
           + 'mipas_esa/O3/2002-07-31/MIP_NL__2PWDSI20020731_121351_000060462008_00124_02182_1000_11.nc/0/')  # noqa
    payload = requests.get(url).json()
    dsr_time = payload['o3_retrieval_mds']['dsr_time']
    assert dsr_time == pytest.approx(81433778.551209, abs=0.001)
def test_vds_file4mls_is_readable(odinapi_service):
    """The MLS example file should be readable via the vds_external endpoint."""
    url = ('{}/rest_api/v4/vds_external/'.format(odinapi_service)
           + 'mls/O3/2009-11-01/MLS-Aura_L2GP-O3_v04-20-c01_2009d331.he5/0/')
    payload = requests.get(url).json()
    scaled_value = payload['data_fields']['L2gpValue'][0] * 1e8
    assert scaled_value == pytest.approx(1.909, abs=0.001)
def test_vds_file4smiles_is_readable(odinapi_service):
    """The SMILES example file should be readable via the vds_external endpoint."""
    url = ('{}/rest_api/v4/vds_external/'.format(odinapi_service)
           + 'smiles/O3/2009-11-01/SMILES_L2_O3_B_008-11-0502_20091112.he5/0/')
    payload = requests.get(url).json()
    scaled_value = payload['data_fields']['L2Value'][0] * 1e7
    assert scaled_value == pytest.approx(1.310, abs=0.001)
def test_vds_file4sageIII_is_readable(odinapi_service):
    """The SAGE III example file should be readable via the vds_external endpoint."""
    url = ('{}/rest_api/v4/vds_external/'.format(odinapi_service)
           + 'sageIII/O3/2002-09-01/g3a.ssp.00386710v04.h5/0/')
    payload = requests.get(url).json()
    first_temperature = payload['Temperature'][0]
    assert first_temperature == pytest.approx(274.028, abs=0.001)
def test_vds_file4osiris_is_readable(odinapi_service):
    """The OSIRIS example file should be readable via the vds_external endpoint."""
    url = ('{}/rest_api/v4/vds_external/'.format(odinapi_service)
           + 'osiris/O3/2014-10-01/OSIRIS-Odin_L2-O3-Limb-MART_v5-07_2014m1027.he5/0/')  # noqa
    payload = requests.get(url).json()
    scaled_value = payload['data_fields']['O3'][12] * 1e7
    assert scaled_value == pytest.approx(1.717, abs=0.001)
def test_vds_file4smr_is_readable(odinapi_service):
    """The SMR example file should be readable via the vds_external endpoint."""
    url = ('{}/rest_api/v4/vds_external/'.format(odinapi_service)
           + 'smr/O3/2002-09-01/SM_AC2ab-SCH_5018_C11DC6_021.L2P/0/')
    payload = requests.get(url).json()
    scaled_value = payload['Data']['Profiles'][0] * 1e11
    assert scaled_value == pytest.approx(1.250, abs=0.001)
| 34.883871
| 124
| 0.666358
|
31d1e81fab7c319539a6c6775d0fcc2a842ffcca
| 17,994
|
py
|
Python
|
pepper_teleoperation/pepper_gui_old.py
|
FraPorta/pepper_openpose_teloperation
|
a327b717ae11ff84e22a0744b96c72b998a25c84
|
[
"Apache-2.0"
] | 6
|
2021-05-12T02:22:24.000Z
|
2022-03-08T14:08:10.000Z
|
pepper_teleoperation/pepper_gui_old.py
|
elggem/pepper_openpose_teleoperation
|
e31bc0b12bd8511dbce9e4449610a08ebe32c184
|
[
"Apache-2.0"
] | null | null | null |
pepper_teleoperation/pepper_gui_old.py
|
elggem/pepper_openpose_teleoperation
|
e31bc0b12bd8511dbce9e4449610a08ebe32c184
|
[
"Apache-2.0"
] | 3
|
2021-11-24T12:37:43.000Z
|
2022-03-08T07:38:12.000Z
|
import Tkinter as tk
import ttk
import argparse
import qi
# import time
import sys
import pyglet
from PIL import ImageTk, Image
from GUI_material.image_label import ImageLabel
from speech_thread import SpeechThread
from pepper_approach_control_thread import PepperApproachControl
from Queue import Queue
# Colors
# GUI color palette (hex RGB); the same values are re-declared locally in
# PepperGui.__init__ below.
red = '#d63d41'          # window background
dark_red = '#c82b2e'     # active-button background / text boxes
darkest_red = '#52373b'  # idle action buttons, entry fields
light_red = '#eb9ea0'    # disabled foreground / accents
orange = '#ec5633'       # enabled action buttons

# Register the GUI font so Tk can reference it by family name.
# NOTE(review): the path uses a single backslash; '\R' happens not to be an
# escape sequence so the string is intact, but a forward slash (or raw
# string) would be safer and cross-platform.
pyglet.font.add_file('GUI_material\Roboto-Medium.ttf')
class PepperGui:
    """Tkinter GUI for tele-operating a Pepper robot.

    The window offers three controls:
      * "Connect to Pepper" opens a qi session to the given IP/port and,
        on success, starts the speech-recognition thread.
      * "Start Talking"/"Stop Talking" toggles speech-to-text recording.
      * "Start Pepper"/"Stop Pepper" launches/stops the approach and/or
        teleoperation thread, depending on the checkboxes.

    Worker threads communicate with the GUI through Queues that are
    polled every 500 ms with Tk's `after` mechanism (see check_queue).
    """

    def __init__(self, master, session):
        # Create buttons, entries, etc.
        self.master = master
        self.session = session
        self.teleop = tk.IntVar()
        self.approach = tk.IntVar()

        # Queues used to exchange data with the worker threads
        self.q_speech = Queue()       # recognized text from the speech thread
        self.q_record = Queue()       # commands to the speech thread
        self.q_pepper = Queue()       # stop requests for the control thread
        self.q_appr_teleop = Queue()  # feedback from the control thread
        self.st = None                # SpeechThread, created on connect

        self._font = 'Roboto-Medium'
        self._btn_txt_size = 12

        # Window setup
        self.master.title("Pepper Control")
        self.master.geometry("1000x562")
        self.master.configure(bg=red)
        self.master.resizable(False, False)

        self._build_background()
        self._build_buttons()
        # Gif placeholders: static single-frame gifs until activated
        self.gif = self._show_gif(257, 74, 'GUI_material/voice_transp_frame.gif')
        self.gif_load = self._show_gif(257, 240, 'GUI_material/load_white_frame.gif')
        self._build_labels()
        self._build_checkboxes()
        self._build_entries()

    def _build_background(self):
        """Draw the static background image on a full-window canvas."""
        image_path = 'GUI_material/background.png'
        width, heigth = 1000, 562
        self.canvas = tk.Canvas(self.master, width=width, height=heigth,
                                bd=0, highlightthickness=0, relief='ridge')
        self.canvas.pack()
        img = ImageTk.PhotoImage(
            Image.open(image_path).resize((width, heigth), Image.ANTIALIAS))
        # Keep a reference or Tk garbage-collects the image
        self.canvas.background = img
        self.canvas.create_image(0, 0, anchor=tk.NW, image=img)

    def _build_buttons(self):
        """Create the talk, pepper and connect buttons."""
        def action_button(text, command):
            # btn_rec and btn_pepper share every option but text/command;
            # both start disabled until the robot is connected.
            return tk.Button(self.master,
                             text=text,
                             bg=darkest_red,
                             fg='white',
                             font=(self._font, self._btn_txt_size),
                             activebackground=dark_red,
                             activeforeground='white',
                             width=15,
                             height=2,
                             disabledforeground=light_red,
                             relief=tk.FLAT,
                             state=tk.DISABLED,
                             command=command)

        self.btn_rec = action_button("Start Talking", self.start_talk)
        self.btn_rec.pack()
        self.btn_rec.place(x=80, y=80)

        self.btn_pepper = action_button("Start Pepper", self.start_pepper)
        self.btn_pepper.pack()
        self.btn_pepper.place(x=80, y=245)

        self.btn_connect = tk.Button(self.master,
                                     text="Connect to Pepper",
                                     bg=orange,
                                     fg='white',
                                     font=(self._font, self._btn_txt_size),
                                     activebackground=dark_red,
                                     activeforeground='white',
                                     width=20,
                                     height=2,
                                     disabledforeground="white",
                                     anchor=tk.CENTER,
                                     relief=tk.FLAT,
                                     command=self.connect_pepper)
        self.btn_connect.pack()
        self.btn_connect.place(relx=0.4, y=448)

    def _show_gif(self, x, y, path):
        """Create, place and load an animated gif label; return it."""
        gif = ImageLabel(self.master)
        gif.config(relief="flat", borderwidth=0)
        gif.pack()
        gif.place(x=x, y=y)
        gif.load(path)
        return gif

    def _build_labels(self):
        """Create the output labels and the connection-status label."""
        def header(text, y):
            lbl = tk.Label(self.master, bg=red, fg='white',
                           font=(self._font, 12, 'bold'))
            lbl.place(x=350, y=y)
            lbl.configure(text=text)
            return lbl

        def output_box(y):
            box = tk.Label(self.master, bg=dark_red, bd=3, fg='white',
                           font=(self._font, 12), width=62, height=2,
                           relief=tk.FLAT, anchor='w')
            box.place(x=350, y=y)
            box.configure(text=" ")
            return box

        self.txt_1 = header("Recognized text", 50)
        self.txt = output_box(83)
        self.txt_pepper_1 = header("Feedback", 215)
        self.txt_pepper = output_box(248)

        self.lbl_conn = tk.Label(self.master,
                                 bg=darkest_red,
                                 fg=light_red,
                                 font=(self._font, 11),
                                 width=30,
                                 wraplength=0,
                                 relief=tk.FLAT)
        self.lbl_conn.place(x=358, y=520)
        self.lbl_conn.configure(text="Press the button to connect")

    def _build_checkboxes(self):
        """Create the 'Search User' and 'Teleoperate' checkboxes."""
        y = 160
        common = dict(onvalue=1,
                      offvalue=0,
                      font=(self._font, 12),
                      bg=red,
                      fg='white',
                      relief=tk.FLAT,
                      selectcolor=light_red,
                      activebackground=red,
                      activeforeground='white',
                      highlightthickness=0,
                      bd=0)
        self.c_approach = tk.Checkbutton(self.master,
                                         text="Search User",
                                         variable=self.approach,
                                         **common)
        self.c_approach.place(x=80, y=y)
        self.c_teleop = tk.Checkbutton(self.master,
                                       text="Teleoperate",
                                       variable=self.teleop,
                                       indicatoron=True,
                                       **common)
        self.c_teleop.place(x=80, y=y + 30)

    def _build_entries(self):
        """Create the IP/port entry fields with defaults, plus their labels."""
        def entry(width, default, x, y):
            e = tk.Entry(self.master,
                         bg=darkest_red,
                         fg=light_red,
                         font=(self._font, 12),
                         insertbackground=light_red,
                         disabledbackground=darkest_red,
                         width=width,
                         relief=tk.FLAT)
            e.insert(tk.END, default)
            e.place(x=x, y=y)
            return e

        def entry_label(text, y):
            lbl = tk.Label(self.master, bg=darkest_red, fg=light_red,
                           font=(self._font, 12, 'bold'))
            lbl.place(x=395, y=y)
            lbl.configure(text=text)
            return lbl

        self.text_ip = entry(13, "130.251.13.108", 476, 390)
        self.lbl_ip = entry_label("IP", 390)
        self.text_port = entry(4, "9559", 550, 410)
        self.lbl_port = entry_label("Port", 410)

    ## method connect_pepper
    #
    # Starts the Session with given Ip and Port
    def connect_pepper(self):
        """Connect the qi session; on success unlock the other buttons and
        start the speech-recognition thread."""
        self.lbl_conn.configure(text="Trying to connect...")
        try:
            self.ip = self.text_ip.get()
            self.port = int(self.text_port.get())
        except ValueError:
            self.lbl_conn.configure(text="Check the port number")
            return
        try:
            self.session.connect("tcp://" + self.ip + ":" + str(self.port))
        except RuntimeError:
            self.lbl_conn.configure(text="Can't connect, please change ip")
            return
        # Connection successful: unlock the other buttons and start the
        # speech recognition thread
        self.btn_rec.configure(state=tk.NORMAL, bg=orange)
        self.btn_pepper.configure(state=tk.NORMAL, bg=orange)
        self.btn_connect.configure(state=tk.DISABLED, bg="#57aa03",
                                   text="Connected!")
        self.text_ip.configure(state=tk.DISABLED)
        self.text_port.configure(state=tk.DISABLED)
        self.lbl_conn.place_forget()
        self.st = SpeechThread(self.session, self.q_speech, self.q_record)
        self.st.start()

    ## method start_pepper
    #
    # Start Pepper approach/teleoperation
    def start_pepper(self):
        """Launch the approach/teleoperation thread based on the checkboxes."""
        show_plot = True
        approach_requested = self.approach.get() == 1
        teleop_requested = self.teleop.get() == 1
        if not approach_requested and not teleop_requested:
            self.txt_pepper.configure(text="Please select at least one between Search User and Teleoperate")
            return
        # Show the animated loading gif while the thread runs
        self.gif_load = self._show_gif(257, 240, 'GUI_material/load_white.gif')
        # approach_only: search for the user but skip teleoperation
        approach_only = approach_requested and not teleop_requested
        self.pac = PepperApproachControl(self.session, show_plot,
                                         approach_requested, approach_only,
                                         self.q_pepper, self.q_appr_teleop)
        self.pac.start()
        # Flip the button into its "stop" role
        self.btn_pepper.configure(text="Stop Pepper", command=self.stop_pepper)

    ## method stop_pepper
    #
    # Stop Pepper approach/teleoperation
    def stop_pepper(self):
        """Ask the control thread to stop and restore the idle state."""
        self.q_pepper.put(True)
        self.gif_load = self._show_gif(257, 240,
                                       'GUI_material/load_white_frame.gif')
        self.btn_pepper.configure(text="Start Pepper",
                                  command=self.start_pepper)

    ## method start_talk
    #
    # Button callback to start talking
    def start_talk(self):
        """Show the animated mic gif and start recording."""
        self.gif = self._show_gif(257, 74, 'GUI_material/voice_transp.gif')
        self.btn_rec.configure(text="Stop Talking", command=self.stop_talk)
        # Start recording for speech-to-text
        self.q_record.put("Rec")

    ## method stop_talk
    #
    # Stop recognizing voice and hide microphone gif
    def stop_talk(self):
        """Stop recording and restore the static mic frame."""
        self.q_record.put("StopRec")
        self.gif = self._show_gif(257, 74,
                                  'GUI_material/voice_transp_frame.gif')
        self.btn_rec.configure(text="Start Talking", command=self.start_talk)

    ## method on_closing
    #
    # Stop speech recognition thread and close the window
    def on_closing(self):
        """Shut down the speech thread (if running) and destroy the window."""
        self.q_record.put("StopRun")
        if self.st is not None and self.st.is_alive():
            self.st.join()
        self.master.destroy()

    ## method start
    #
    # Start the mainloop
    def start(self):
        """Install the close handler, schedule queue polling, run mainloop."""
        self.master.protocol("WM_DELETE_WINDOW", self.on_closing)
        self.master.after(500, func=self.check_queue)
        self.master.mainloop()

    ## method check_queue
    #
    # Check every half a second if there is an entry in the queue
    def check_queue(self):
        """Poll the worker queues and display any new messages, then
        reschedule itself in 500 ms."""
        if not self.q_speech.empty():
            text = self.q_speech.get(block=False, timeout=None)
            if text is not None:
                self.txt.configure(text=text)
        if not self.q_appr_teleop.empty():
            feedback = self.q_appr_teleop.get(block=False, timeout=None)
            if feedback is not None:
                self.txt_pepper.configure(text=feedback)
        self.master.after(500, func=self.check_queue)
if __name__ == '__main__':
    # Start naoqi session; the actual connection happens later via the
    # GUI's "Connect to Pepper" button.
    session = qi.Session()

    # Start GUI
    root = tk.Tk()
    app = PepperGui(root, session)
    app.start()
| 39.460526
| 139
| 0.477826
|
8b098c9f6456f77e720af387ec3a31ddb4ff2947
| 1,735
|
py
|
Python
|
official/vision/detection/modeling/optimizers.py
|
akshit-protonn/models
|
38c8c6fe4144c93d6aadd19981c2b90570c29eba
|
[
"Apache-2.0"
] | 82,518
|
2016-02-05T12:07:23.000Z
|
2022-03-31T23:09:47.000Z
|
official/vision/detection/modeling/optimizers.py
|
akshit-protonn/models
|
38c8c6fe4144c93d6aadd19981c2b90570c29eba
|
[
"Apache-2.0"
] | 9,021
|
2016-03-08T01:02:05.000Z
|
2022-03-31T08:06:35.000Z
|
official/vision/detection/modeling/optimizers.py
|
akshit-protonn/models
|
38c8c6fe4144c93d6aadd19981c2b90570c29eba
|
[
"Apache-2.0"
] | 54,341
|
2016-02-06T17:19:55.000Z
|
2022-03-31T10:27:44.000Z
|
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Optimizers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import numpy as np
import tensorflow as tf
class OptimizerFactory(object):
  """Class to generate optimizer function."""

  def __init__(self, params):
    """Creates optimized based on the specified flags.

    Args:
      params: config object with a `type` field (one of 'momentum', 'adam',
        'adadelta', 'adagrad', 'rmsprop') plus any type-specific options.

    Raises:
      ValueError: if `params.type` names an unsupported optimizer.
    """
    # Each entry is a zero-arg builder so that type-specific attributes
    # (e.g. params.momentum) are only read for the selected optimizer.
    builders = {
        'momentum': lambda: functools.partial(
            tf.keras.optimizers.SGD,
            momentum=params.momentum,
            nesterov=params.nesterov),
        'adam': lambda: tf.keras.optimizers.Adam,
        'adadelta': lambda: tf.keras.optimizers.Adadelta,
        'adagrad': lambda: tf.keras.optimizers.Adagrad,
        'rmsprop': lambda: functools.partial(
            tf.keras.optimizers.RMSprop, momentum=params.momentum),
    }
    if params.type not in builders:
      raise ValueError('Unsupported optimizer type `{}`.'.format(params.type))
    self._optimizer = builders[params.type]()

  def __call__(self, learning_rate):
    """Instantiate the chosen optimizer with the given learning rate."""
    return self._optimizer(learning_rate=learning_rate)
| 34.019608
| 78
| 0.726225
|
f1ba63c108b25b95b8afd3a7d1769da745ad7e48
| 19,333
|
py
|
Python
|
scripts/choose_subclusters.py
|
jmeppley/np_read_clustering
|
88a23c729f42f2249926d2e3420f3c30e01bfddd
|
[
"MIT"
] | null | null | null |
scripts/choose_subclusters.py
|
jmeppley/np_read_clustering
|
88a23c729f42f2249926d2e3420f3c30e01bfddd
|
[
"MIT"
] | null | null | null |
scripts/choose_subclusters.py
|
jmeppley/np_read_clustering
|
88a23c729f42f2249926d2e3420f3c30e01bfddd
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""
Given:
* cluster reads fasta
* cluster reads faa
* cluster reads faa PFAM hits (non-overlapping)
* cluster lastal results (aggregated)
* subcluster mcl results
* output locations (fasta dir + pdf file names)
* params:
* min_sub_size (10)
* sigma_cutoff (None)
* max_colored_genes (8)
* gene_cmap ('gnuplot2')
* max_synteny_reads (15)
* read_cmap ('cool')
Generate 2 pdf files:
* histograms of read lengths and pctids for subclusters
* read synteny plots for top reads of each subcluster
Generate a fasta file of reads for each subcluster
"""
import numpy
import os
import pandas
import re
from collections import Counter, defaultdict
from functools import partial
from itertools import zip_longest
from scipy import stats
import matplotlib
matplotlib.use('pdf')
from matplotlib import pyplot as plt, cm, colors
from matplotlib.patches import Polygon
from matplotlib.backends.backend_pdf import PdfPages
from snakemake.rules import Namedlist
from Bio import SeqIO
from hit_tables import parse_blast_m8, BLAST_PLUS
from edl import blastm8
# Opaque black as an RGBA tuple; fallback color for genes/labels without
# an assigned PFAM color (see gene_color_dict.get(..., BLACK) below).
BLACK = (0, 0, 0, 1)
def grouper_trim(iterable, n):
    """Collect data into fixed-length chunks, trimming the fill values
    from the final short chunk.

    grouper_trim('ABCDEFG', 3) --> ['A','B','C'] ['D','E','F'] ['G']

    Uses a unique sentinel (instead of None) as the fill value so that
    genuine None items in the data are preserved rather than dropped.
    Returns a generator of lists.
    """
    fill = object()  # unique sentinel; cannot collide with real data
    args = [iter(iterable)] * n
    return ([item for item in group if item is not fill]
            for group in zip_longest(*args, fillvalue=fill))
def main(input, output, params):
    """Drive the subcluster analysis.

    input/output should be namedlists (from snakemake);
    params should be a dict (so we can fall back to defaults).
    """
    # Unpack the snakemake inputs
    faa_file = input.faa        # prodigal amino acid output
    fasta_file = input.fasta    # cluster fasta reads
    dom_tbl_U = input.domtbl    # PFAM results (non-overlapping)
    mcl_file = input.mcl        # mcl results
    lastal_file = input.lastal  # lastal table raw
    agg_file = input.agg        # lastal table aggregated

    # Load clusters: a list of reads in each cluster, sorted by size
    subclusters = load_clusters(mcl_file, params.get('min_sub_size', 10))

    # Load the fasta records; index read lengths by the short name
    # (first '-'-delimited word of the read id)
    cluster_reads = {r.id: r for r in SeqIO.parse(fasta_file, 'fasta')}
    read_lens = {r.id.split('-')[0]: len(r) for r in cluster_reads.values()}

    # Plot all cluster hists, applying the sigma cutoff; returns the ids
    # of the subclusters worth keeping
    subcluster_ids = plot_cluster_hists(subclusters, read_lens, agg_file,
                                        output.stats, output.hist_pdf, params)

    # Make the synteny plots for each good subcluster
    plot_subcluster_synteny(subcluster_ids, subclusters, read_lens,
                            lastal_file, faa_file, dom_tbl_U,
                            output.gene_pdf, params)

    # Write one fasta file of reads per kept subcluster
    os.makedirs(str(output.fasta_dir), exist_ok=True)
    for subcluster_id in subcluster_ids:
        out_path = str(output.fasta_dir) + "/subcluster.{}.fasta".format(subcluster_id)
        with open(out_path, 'wt') as fasta_out:
            for read_id in subclusters[subcluster_id]:
                fasta_out.write(cluster_reads[read_id].format('fasta'))
def plot_cluster_hists(subclusters,
                       read_lens,
                       agg_file,
                       stats_file,
                       pdf_file,
                       params
                       ):
    """
    For each subcluster plot:

     * histogram of all read-read mfracs
     * histogram of all read lengths with overlaid normal dist

    Writes a per-subcluster stats table to stats_file and returns the
    row indices of the subclusters that pass the size/sigma filters.
    """
    # open PDF file
    pdf = PdfPages(pdf_file)

    mx_len = max(read_lens.values())
    mn_len = min(read_lens.values())
    window = [mn_len, mx_len]
    # sigma_cutoff <= 0 disables the length-spread filter
    sigma_cutoff = params.get('sigma_cutoff', -1)

    # first pass to chose subclusters to keep and plot
    cluster_stats = {}
    for i, subcluster in enumerate(subclusters):
        keep = True
        # subclusters are sorted by size, so the first undersized one
        # means all the rest are undersized too
        if len(subcluster) < params.get('min_sub_size', 10):
            break
        # calculate best normal fit to length dist
        cluster_lens = numpy.array([read_lens[r.split('-')[0]] for r in subcluster])
        counts, bins = numpy.histogram(cluster_lens, bins=100, range=window)
        #from scipy import stats
        mu, sigma = stats.norm.fit(cluster_lens)
        if sigma_cutoff > 0 and sigma > sigma_cutoff:
            keep = False

        # calculate the stats: bin centers, then the mean count inside
        # vs outside one sigma of the fitted mean
        # (the comprehension's `i` has its own scope; the loop `i` is safe)
        X = numpy.array([numpy.mean((bins[i], bins[i-1])) for i in range(1,len(bins))])
        tot_in, tot_out, n_in, n_out = numpy.zeros(4)
        for x, count in zip(X, counts):
            if x < mu - sigma or x > mu + sigma:
                tot_out += count
                n_out += 1
            else:
                tot_in += count
                n_in += 1
        mean_in = tot_in / n_in
        mean_out = tot_out / n_out if n_out > 0 else 0
        ratio = mean_in / mean_out
        n_ratio = n_in / (n_out + n_in)

        cluster_stats[i] = dict(zip(
            ['mu', 'sigma', 'ratio', 'n_ratio', 'N', 'keep', 'counts', 'bins', 'X'],
            [mu, sigma, ratio, n_ratio, len(subcluster), keep, counts, bins, X]
        ))

    # build cluster stats table
    write_cols = ['mu', 'sigma', 'ratio', 'n_ratio', 'N', 'keep']
    cl_st_table = pandas.DataFrame([[i,] + [d[k] for k in write_cols]
                                    for i,d in cluster_stats.items()],
                                   columns=['index'] + write_cols)

    # write stats to file
    cl_st_table.to_csv(stats_file, sep='\t', index=None)

    # pull out list of good subclusters
    subcluster_ids = list(cl_st_table.query('keep').index)

    # load agg hits
    agg_table = pandas.read_csv(agg_file, sep='\t')

    # max 8 per page
    mx_rows = 8
    for page_sc_ids in grouper_trim(cluster_stats.keys(), mx_rows):
        N = len(page_sc_ids)
        fig, axes = plt.subplots(N, 4, figsize=[11 * N / mx_rows, 8.5], sharey="col", sharex="col", squeeze=False)
        fig.subplots_adjust(hspace=.7, wspace=.6)
        ax_rows = iter(axes)
        for i, subcluster_id in enumerate(page_sc_ids):
            axs = next(ax_rows)
            # remove axes from top and right
            for ax in axs:
                for side in ['top', 'right']:
                    ax.spines[side].set_visible(False)
            ax_sc_mf, ax_sc_id, ax_h_mf, ax_h_ln = axs

            # get the subset of the agg table for this subcluster
            # (both query and hit must be members)
            subcluster = set(subclusters[subcluster_id])
            sub_slice = (agg_table['query'].apply(lambda q: q in subcluster)
                         & agg_table.hit.apply(lambda h: h in subcluster))
            agg_hits_cluster = agg_table[sub_slice] \
                .eval('mean_len = (hlen + qlen) / 2') \
                .eval('frac = mlen / mean_len')
            mfrac_dict = agg_hits_cluster.set_index(['query','hit']).mfrac.to_dict()

            # scatter plot mfrac and mean length
            ax_sc_mf.scatter(agg_hits_cluster.mfrac.values,
                             agg_hits_cluster.mean_len.values,
                             marker='.',
                             alpha=.5
                             )
            ax_sc_mf.set_ylabel('mean_len')

            # scatter plot of pctid and matched fraction
            ax_sc_id.scatter(agg_hits_cluster.pctid.values,
                             agg_hits_cluster.frac.values,
                             marker='.',
                             alpha=.5
                             )
            ax_sc_id.set_ylabel('frac aln')

            # plot hist of pairwise mfracs
            h = ax_h_mf.hist(get_mfracs(subcluster, mfrac_dict=mfrac_dict), bins=100, range=[50,100])

            # plot hist of read lens (recreated from the saved counts/X)
            sc_stats = cluster_stats[subcluster_id]
            counts = sc_stats['counts']
            X = sc_stats['X']
            ax_h_ln.bar(X, counts, color='blue')
            # overlay norm dist, scaled to the histogram's total count
            best_fit_line = stats.norm.pdf(X, sc_stats['mu'], sc_stats['sigma'])
            best_fit_line = best_fit_line * counts.sum() / best_fit_line.sum()
            p = ax_h_ln.plot(X, best_fit_line, color='red', alpha=.5)

            ax_h_mf.set_ylabel(f"s.cl: {subcluster_id}")
            ax_h_ln.set_ylabel(f"{len(subcluster)} {int(sc_stats['sigma'])}")
            # only label the x axes on the bottom row of the page
            if i == N - 1:
                xl = ax_sc_mf.set_xlabel("score")
                xl = ax_h_ln.set_xlabel("length")
                xl = ax_sc_id.set_xlabel('match %ID')
                xl = ax_h_mf.set_xlabel('score')
        # close plot and go to next pdf page
        pdf.savefig(bbox_inches='tight')
        plt.close()
    pdf.close()

    # save stats to file, but drop extra data first
    # NOTE(review): stats_file was already written above (including the
    # 'keep' column); this second write overwrites it with a table that
    # drops 'keep'. Confirm which schema downstream steps expect.
    write_cols = ['mu', 'sigma', 'ratio', 'n_ratio', 'N']
    pandas.DataFrame([[i,] + [d[k] for k in write_cols]
                      for i,d in cluster_stats.items()],
                     columns=['index'] + write_cols).to_csv(stats_file, sep='\t', index=None)

    return subcluster_ids
def get_N_colors(N, cmap_name='Dark2'):
    """Return N evenly spaced colors sampled from the named colormap."""
    cmap = plt.get_cmap(cmap_name)
    return [cmap(v) for v in numpy.linspace(0, 1, N)]
def get_scaled_color(value, minv=0, maxv=1, alpha=.75, reverse=False, cmap_name='cool'):
    """Map value from [minv, maxv] onto the colormap.

    Returns an RGBA tuple with the requested alpha; reverse=True flips
    the direction of the colormap.
    """
    cmap = plt.get_cmap(cmap_name)
    if reverse:
        minv, maxv = maxv, minv
    fraction = (value - minv) / (maxv - minv)
    rgba = cmap(fraction)
    return rgba[:3] + (alpha,)
def get_mfracs(reads, mfrac_dict):
    """Collect the mfrac score for every read pair (r1, r2) with r2 > r1.

    Pairs missing from mfrac_dict contribute 0.
    """
    scores = []
    for r1 in reads:
        for r2 in reads:
            if r2 > r1:
                scores.append(mfrac_dict.get((r1, r2), 0))
    return scores
def plot_subcluster_synteny(subcluster_ids,
                            subclusters,
                            read_lens,
                            lastal_file,
                            faa_file,
                            dom_tbl_U,
                            pdf_file,
                            params
                            ):
    """
    For each subcluster:

     * identify the N genes that appear in the most reads
     * identify the M reads that have the most of the top genes
     * plot

    One synteny figure per kept subcluster is appended to pdf_file.
    """
    ## load the gene annotations
    # first get positions from faa headers (prodigal-style '#'-separated
    # fields: id # start # end # strand # attributes)
    read_genes = {}
    for gene in SeqIO.parse(faa_file, 'fasta'):
        gene_id, start, end, strand, _ = [b.strip() for b in gene.description.split("#")]
        # gene ids look like <name>-<suffix>_<gene number>
        read, name, gene_no = re.search(r'^((\w+)-[^_]+)_(\d+)', gene_id).groups()
        read_genes.setdefault(name, []).append(dict(
            gene_id=gene_id,
            start=int(start),
            end=int(end),
            strand=int(strand),
            num=int(gene_no),
            pfam=None,
        ))

    # convert to dict of DataFrames from dict of lists of dicts
    read_genes_tables = {read:pandas.DataFrame(genes).set_index('gene_id')
                         for read, genes in read_genes.items()}

    # and add PFAM annotations from the hmmsearch domain table
    for read, hits in blastm8.generate_hits(dom_tbl_U, format='hmmsearchdom'):
        read_id = read.split("-")[0]
        read_genes_table = read_genes_tables[read_id]
        for hit in hits:
            gene_id = hit.read
            # only assign PFAM if it's the first hit for the gene
            if pandas.isna(read_genes_table.loc[gene_id, 'pfam']):
                pfam = hit.hit
                read_genes_table.loc[gene_id, 'pfam'] = pfam

    # load all the read to read hits
    read_hits = parse_blast_m8(lastal_file, format=BLAST_PLUS)

    # now open the PDF file
    pdf = PdfPages(pdf_file)

    # for each good subcluster, draw one synteny figure
    for subcluster_id in subcluster_ids:
        subcluster = set(subclusters[subcluster_id])
        # map short names back to full read ids
        subcluster_names = {r.split('-')[0]:r for r in subcluster}
        fig = plot_subcluster_genes(subcluster_id, subcluster_names, read_genes_tables, read_hits, read_lens, params)
        # close plot and go to next pdf page
        pdf.savefig(bbox_inches='tight')
        plt.close()
    pdf.close()
def plot_subcluster_genes(subcluster_id, subcluster_names, read_genes_tables, read_hits, read_lens, params):
    """
    make a plot of gene positions:
      ax1 has a scatter plot of mean position by pfam
      ax2 has aligned genomes with top pfams colored

    Returns the matplotlib figure (caller saves it into the PDF).
    """
    # get the positions of the named PFAMs, flipping reads whose genes
    # are mostly on the reverse strand so positions are comparable
    pf_positions = defaultdict(list)
    for read, gene_table in read_genes_tables.items():
        if read in subcluster_names:
            # do we want to flip the read dir? (too many strand < 1)
            reverse = gene_table.eval('glen = strand * (end - start)').glen.sum() < 1
            for start, end, pfam in gene_table[['start','end','pfam']].values:
                if pandas.isna(pfam):
                    continue
                if reverse:
                    start, end = [read_lens[read] - p for p in (start, end)]
                # add mean post to list for this pfam
                pf_positions[pfam].append((end + start) / 2)

    # chose which genes to color: the N PFAMs seen in the most reads
    N = params.get('max_colored_genes', 8)
    sorted_genes = sorted(pf_positions.keys(), key=lambda k: len(pf_positions[k]), reverse=True)
    top_N_pfams = sorted_genes[:N]
    gene_color_dict = dict(zip(top_N_pfams, get_N_colors(N, cmap_name=params.get('gene_cmap', 'Dark2'))))

    # chose which reads to draw: those with the most top-N PFAMs
    M = params.get('max_synteny_reads', 20)
    def count_top_pfams_in_read(read):
        # rank key: number of this read's genes annotated with a top PFAM
        if read in read_genes_tables:
            return sum(1 for p in read_genes_tables[read].pfam.values
                       if p in top_N_pfams)
        return 0
    top_M_reads = sorted(subcluster_names,
                         key=count_top_pfams_in_read,
                         reverse=True,
                         )[:M]
    m = len(top_M_reads)

    # calculate the sizes necessary to draw genes using the matplotlib arrow function
    # NOTE(review): division by (M-1) fails if max_synteny_reads is 1 — confirm
    align_height = (7 * (m-1) / (M-1)) #use up to 7 in
    figsize = [8.5, 4 + align_height]
    fig, axes = plt.subplots(2,1, figsize=figsize, gridspec_kw={'height_ratios':[4,align_height]}, sharex='col')
    fig.subplots_adjust(hspace=.1,)

    ## draw gene positions (top axes): one row per PFAM, sorted by mean position
    ax = axes[0]
    ax.set_title(f'PFAM annotations in subcluster {subcluster_id}')
    n = params.get('max_plotted_genes', 18)
    sorted_pf = sorted([p for p in sorted_genes[:n] if len(pf_positions[p]) > 1],
                       key=lambda p: numpy.mean(list(pf_positions[p])))
    for i, p in enumerate(sorted_pf):
        x,y = zip(*((gp,i) for gp in pf_positions[p]))
        ax.scatter(x,y,
                   c=len(y) * [gene_color_dict.get(p, BLACK)],
                   ec=None, alpha=.5)
    yt = ax.set_yticks(range(len(sorted_pf)))
    ytl = ax.set_yticklabels(sorted_pf)
    # tick labels for colored PFAMs use the matching color
    for label in ytl:
        label.set_color(gene_color_dict.get(label.get_text(), BLACK))

    ## draw alignments (bottom axes): one row per read, arrows for genes,
    ## shaded parallelograms connecting matched segments of adjacent rows
    ax = axes[-1]
    min_x = 0
    max_x = max(read_lens[r] for r in subcluster_names)
    range_x = max_x - min_x
    range_y = M
    thickness = .5
    # arrow head length scaled to keep heads visually proportional
    head_length = range_x * (thickness / range_y) * (figsize[1] / figsize[0])
    cmap = params.get('read_cmap','cool')
    min_pctid = read_hits.pctid.min()
    pctid_range = 100 - min_pctid
    # color for a connection scales with its alignment %ID
    get_conn_color = partial(get_scaled_color, minv=min_pctid, maxv=100, alpha=.75, cmap_name=cmap)

    y = 0
    pad = .1
    prev_read = None
    for name in top_M_reads:
        read = subcluster_names[name]
        read_length = read_lens[name]
        if name in read_genes_tables:
            gene_table = read_genes_tables[name]
            # do we want to flip the read dir? (too many strand < 1)
            reverse = gene_table.eval('glen = strand * (end - start)').glen.sum() < 1
            # draw genes as strand-oriented arrows
            for start, end, strand, pfam in gene_table[['start','end','strand','pfam']].values:
                if reverse:
                    strand = -1 * strand
                    start = read_length - start
                    end = read_length - end
                strand = int(strand)
                hl = min(head_length, end-start)
                al = max((end - start) - hl, .0001) * strand
                ast = start if al > 0 else end
                color = gene_color_dict.get(pfam, 'k')
                plt.arrow(ast, y, al, 0, fc=color, ec=color,
                          lw=0,
                          width=thickness, head_width=thickness,
                          head_length=hl,
                          head_starts_at_zero=(int(strand) > 0))
        else:
            reverse=False

        # connect matched segments for read pairs
        if prev_read is not None:
            # get hits between this read and the previous row
            pair_hits = read_hits.query(f'(hit == "{read}" and query == "{prev_read}") or '
                                        f'(query == "{read}" and hit == "{prev_read}")') \
                .query('hit != query') \
                .sort_values('score', ascending=True)
            # loop over hits
            cols = ['query', 'hit', 'qstart', 'qend', 'hstart', 'hend', 'pctid']
            for query, hit, qstart, qend, hstart, hend, pctid in pair_hits[cols].values:
                # if hit was recorded the other way, flip hit/query
                if query == prev_read:
                    qstart, qend, hstart, hend = hstart, hend, qstart, qend
                # if either read is reversed, flip x coordinates
                if reverse:
                    qstart = read_length - qstart
                    qend = read_length - qend
                if prev_rev:
                    hstart = prev_len - hstart
                    hend = prev_len - hend
                # draw connecting paralellogram
                color = get_conn_color(pctid, alpha=.9)
                xy = numpy.array([(hstart, y-1+pad),
                                  (qstart, y-pad),
                                  (qend, y-pad),
                                  (hend, y-1+pad)])
                ax.add_patch(Polygon(xy, fc=(.6,.6,.6,.2), ec=color))

        # save read info for next one
        prev_read = read
        prev_rev = reverse
        prev_len = read_length
        # increment y value
        y += 1

    x = plt.xlim(min_x - 50, max_x + 50)
    y = plt.ylim(-.5, y - .5)
    plt.yticks(list(range(m)), top_M_reads)
    plt.xlabel('read position')
    # colorbar mapping connection color back to alignment %ID
    cax = plt.axes([0.95, 0.15, 0.025, 0.4 * (align_height / 7)])
    plt.colorbar(mappable=cm.ScalarMappable(norm=colors.Normalize(min_pctid, 100), cmap=cmap), cax=cax)
    cl = cax.set_ylabel('alignment %ID')
    return fig
def load_clusters(mcl_file, size_cutoff=10):
    """Read an MCL cluster file and return clusters with >= size_cutoff members.

    Each line of the file is one cluster: whitespace-separated member names.
    """
    clusters = []
    with open(mcl_file) as handle:
        for line in handle:
            members = line.strip().split()
            if len(members) >= size_cutoff:
                clusters.append(members)
    return clusters
# scriptify
if __name__ == "__main__":
    try:
        # Assume this is called from snakemake; `snakemake` is injected into
        # the script's namespace by the snakemake runtime, so a plain script
        # invocation raises NameError below.
        # (Renamed from `input`/`output` to avoid shadowing the builtins.)
        snake_input = snakemake.input
        snake_output = snakemake.output
        snake_params = dict(snakemake.params.items())
    except NameError:
        # TODO: fallback to argparse if we call from the command line (for testing)
        import argparse
        raise Exception("Currently only works only from snakemake, sorry") from None if False else Exception("Currently only works from snakemake, sorry")
    main(snake_input, snake_output, snake_params)
| 36.965583
| 126
| 0.572286
|
7319717d99ce8aa3bc9380e3dcd1cd81a55cd0fc
| 771
|
py
|
Python
|
code.py
|
mohrobati/HiddenMessageInSignal
|
9da4b557074b45def54ef265e0357930d6a39943
|
[
"MIT"
] | null | null | null |
code.py
|
mohrobati/HiddenMessageInSignal
|
9da4b557074b45def54ef265e0357930d6a39943
|
[
"MIT"
] | null | null | null |
code.py
|
mohrobati/HiddenMessageInSignal
|
9da4b557074b45def54ef265e0357930d6a39943
|
[
"MIT"
] | null | null | null |
from scipy.io import wavfile
from scipy.fftpack import fft, ifft
from matplotlib import pyplot as plt
import numpy as np
power = 0.02e7
def string_to_binary_ascii(string):
    """Return *string* as a concatenation of 8-bit binary ASCII codes."""
    return "".join("{:08b}".format(ord(char)) for char in string)
def put_code_in_signal(data, samplerate, songname, lim, length, filename):
    """Hide *songname* as a binary watermark in the spectrum of *data*.

    Each bit of the ASCII-encoded name is written into `length` consecutive
    FFT coefficients starting at index `lim`: +power for a '1' bit, -power
    for a '0' bit (`power` is a module-level constant).  The modified signal
    is written to *filename* as 16-bit PCM and the spectrum is plotted.

    NOTE(review): only the bins at `lim + ...` are overwritten, so conjugate
    symmetry of the spectrum is broken; `ifft(...).real` silently drops the
    resulting imaginary component -- confirm this distortion is acceptable.
    """
    codename = string_to_binary_ascii(songname)
    c = fft(data)
    # Encode each bit into `length` adjacent frequency bins.
    for i in range(0, len(codename)):
        for j in range(length):
            if codename[i] == "1":
                c[lim + i * length + j] = (power)
            else:
                c[lim + i * length + j] = (-power)
    # Back to the time domain; cast to int16 for WAV output.
    p = ifft(c).real.astype(np.int16)
    wavfile.write(filename, samplerate, p)
    # NOTE(review): `c` is complex -- matplotlib warns and plots only the
    # real part here.
    plt.plot(c)
    plt.show()
| 26.586207
| 74
| 0.609598
|
f5f048a414581dd9355e9537829032572ba44e48
| 763
|
py
|
Python
|
dy08/__main__.py
|
Funcoil/SolightDY08-PythonRpi
|
dfe841bee5d52844fd62ff0e466b464523bd70b1
|
[
"MITNFA"
] | 1
|
2019-07-03T20:35:30.000Z
|
2019-07-03T20:35:30.000Z
|
dy08/__main__.py
|
Funcoil/SolightDY08-PythonRpi
|
dfe841bee5d52844fd62ff0e466b464523bd70b1
|
[
"MITNFA"
] | null | null | null |
dy08/__main__.py
|
Funcoil/SolightDY08-PythonRpi
|
dfe841bee5d52844fd62ff0e466b464523bd70b1
|
[
"MITNFA"
] | null | null | null |
#!/usr/bin/python3
import sys
import pigpio
from dy08 import DY08
def main():
    """CLI entry point: parse ADDRESS and on|off, then switch the DY08 socket."""
    argv = sys.argv
    if len(argv) < 3:
        print("Error: insufficient number of arguments", file=sys.stderr)
        print("Usage: dy08 ADDRESS on|off", file=sys.stderr)
        sys.exit(1)

    address = int(argv[1])
    if not 0 <= address <= 1023:
        print("Invalid address. Valid addresses are 0-1023 (inclusive)", file=sys.stderr)
        sys.exit(1)

    # Map the textual action onto the wire value expected by the device.
    actions = {"on": 1, "off": 0}
    if argv[2] not in actions:
        print("Invalid action. Valid actions are \"on\" and \"off\".", file=sys.stderr)
        sys.exit(1)
    action = actions[argv[2]]

    dy08 = DY08(pigpio.pi(), 17)
    dy08.send(address, action)


if __name__ == "__main__":
    main()
| 23.121212
| 89
| 0.591088
|
40ca5237e96dc873763236aa9841dd0c6bdd29bb
| 5,301
|
py
|
Python
|
syn_net/models/rt2.py
|
wenhao-gao/SynNet
|
3a452af123f87c266e3d4dbc7e1bda85886e3c30
|
[
"MIT"
] | 14
|
2021-10-18T06:56:49.000Z
|
2022-03-01T01:32:10.000Z
|
syn_net/models/rt2.py
|
wenhao-gao/SynNet
|
3a452af123f87c266e3d4dbc7e1bda85886e3c30
|
[
"MIT"
] | 3
|
2021-10-19T20:58:09.000Z
|
2022-02-07T18:02:04.000Z
|
syn_net/models/rt2.py
|
wenhao-gao/SynNet
|
3a452af123f87c266e3d4dbc7e1bda85886e3c30
|
[
"MIT"
] | 4
|
2021-10-20T03:02:59.000Z
|
2022-01-25T22:12:47.000Z
|
"""
Reactant2 network (for predicting 2nd reactant).
"""
import time
import numpy as np
import torch
import pytorch_lightning as pl
from pytorch_lightning import loggers as pl_loggers
from syn_net.models.mlp import MLP, load_array
from scipy import sparse
if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("-f", "--featurize", type=str, default='fp',
                        help="Choose from ['fp', 'gin']")
    parser.add_argument("-r", "--rxn_template", type=str, default='hb',
                        help="Choose from ['hb', 'pis']")
    parser.add_argument("--radius", type=int, default=2,
                        help="Radius for Morgan fingerprint.")
    parser.add_argument("--nbits", type=int, default=4096,
                        help="Number of Bits for Morgan fingerprint.")
    parser.add_argument("--out_dim", type=int, default=256,
                        help="Output dimension.")
    parser.add_argument("--ncpu", type=int, default=8,
                        help="Number of cpus")
    parser.add_argument("--batch_size", type=int, default=64,
                        help="Batch size")
    parser.add_argument("--epoch", type=int, default=2000,
                        help="Maximum number of epoches.")
    args = parser.parse_args()

    # The validation metric is tied to the embedding space implied by the
    # output dimension (300: GIN, 4096/256: fingerprints, 200: rdkit2d).
    if args.out_dim == 300:
        validation_option = 'nn_accuracy_gin'
    elif args.out_dim == 4096:
        validation_option = 'nn_accuracy_fp_4096'
    elif args.out_dim == 256:
        validation_option = 'nn_accuracy_fp_256'
    elif args.out_dim == 200:
        validation_option = 'nn_accuracy_rdkit2d'
    else:
        raise ValueError

    main_dir = f'/pool001/whgao/data/synth_net/{args.rxn_template}_{args.featurize}_{args.radius}_{args.nbits}_{validation_option[12:]}/'
    batch_size = args.batch_size
    ncpu = args.ncpu

    # Training data: full training split.
    X = sparse.load_npz(main_dir + 'X_rt2_train.npz')
    y = sparse.load_npz(main_dir + 'y_rt2_train.npz')
    X = torch.Tensor(X.A)
    y = torch.Tensor(y.A)
    train_data_iter = load_array((X, y), batch_size, ncpu=ncpu, is_train=True)

    # Validation data: a random 10% subsample to keep validation cheap.
    X = sparse.load_npz(main_dir + 'X_rt2_valid.npz')
    y = sparse.load_npz(main_dir + 'y_rt2_valid.npz')
    X = torch.Tensor(X.A)
    y = torch.Tensor(y.A)
    _idx = np.random.choice(list(range(X.shape[0])), size=int(X.shape[0]/10), replace=False)
    valid_data_iter = load_array((X[_idx], y[_idx]), batch_size, ncpu=ncpu, is_train=False)

    pl.seed_everything(0)

    # The MLP is identical across configurations except for the input width:
    # 'fp' concatenates 4 fingerprints, 'gin' uses 3 fingerprints plus a GIN
    # embedding of size out_dim; the additive constant depends on the
    # reaction-template set (91 for 'hb', 4700 for 'pis' -- presumably the
    # template encoding size, confirm against the featurization pipeline).
    if args.featurize == 'fp':
        if args.rxn_template == 'hb':
            input_dim = int(4 * args.nbits + 91)
        elif args.rxn_template == 'pis':
            input_dim = int(4 * args.nbits + 4700)
    elif args.featurize == 'gin':
        if args.rxn_template == 'hb':
            input_dim = int(3 * args.nbits + args.out_dim + 91)
        elif args.rxn_template == 'pis':
            input_dim = int(3 * args.nbits + args.out_dim + 4700)

    mlp = MLP(input_dim=input_dim,
              output_dim=args.out_dim,
              hidden_dim=3000,
              num_layers=5,
              dropout=0.5,
              num_dropout_layers=1,
              task='regression',
              loss='mse',
              valid_loss=validation_option,
              optimizer='adam',
              learning_rate=1e-4,
              val_freq=10,
              ncpu=ncpu)

    tb_logger = pl_loggers.TensorBoardLogger(
        f'rt2_{args.rxn_template}_{args.featurize}_{args.radius}_{args.nbits}_{validation_option[12:]}_logs/'
    )

    trainer = pl.Trainer(gpus=[0], max_epochs=args.epoch, progress_bar_refresh_rate=20, logger=tb_logger)
    t = time.time()
    trainer.fit(mlp, train_data_iter, valid_data_iter)
    print(time.time() - t, 's')
    print('Finish!')
| 39.559701
| 139
| 0.528391
|
faedb7593b3fda8461621c6c98b5de97e2ad88e0
| 1,064
|
py
|
Python
|
test/integration/test_natural_language_understanding_v1.py
|
jsstylos/waston-developer-cloud-python-sdk
|
97de097b8c86622ab2f30f5386bb74321d28addf
|
[
"Apache-2.0"
] | 1,579
|
2015-10-08T14:02:17.000Z
|
2022-02-28T10:49:21.000Z
|
test/integration/test_natural_language_understanding_v1.py
|
jsstylos/waston-developer-cloud-python-sdk
|
97de097b8c86622ab2f30f5386bb74321d28addf
|
[
"Apache-2.0"
] | 749
|
2015-10-08T20:00:24.000Z
|
2022-03-21T21:33:17.000Z
|
test/integration/test_natural_language_understanding_v1.py
|
jsstylos/waston-developer-cloud-python-sdk
|
97de097b8c86622ab2f30f5386bb74321d28addf
|
[
"Apache-2.0"
] | 1,006
|
2015-10-24T06:30:58.000Z
|
2022-03-23T07:10:04.000Z
|
# coding: utf-8
from unittest import TestCase
import os
import ibm_watson
import pytest
import json
import time
from ibm_watson.natural_language_understanding_v1 import Features, EntitiesOptions, KeywordsOptions
@pytest.mark.skipif(os.getenv('NATURAL_LANGUAGE_UNDERSTANDING_APIKEY') is None,
                    reason='requires NATURAL_LANGUAGE_UNDERSTANDING_APIKEY')
class TestNaturalLanguageUnderstandingV1(TestCase):
    """Integration tests for Watson NLU v1; skipped when no API key is set."""

    def setUp(self):
        # Headers opt this traffic out of data collection and flag it as
        # test traffic for the Watson service.
        self.natural_language_understanding = ibm_watson.NaturalLanguageUnderstandingV1(version='2018-03-16')
        self.natural_language_understanding.set_default_headers({
            'X-Watson-Learning-Opt-Out': '1',
            'X-Watson-Test': '1'
        })

    def test_analyze(self):
        # analyze() with entity and keyword extraction must return a result.
        response = self.natural_language_understanding.analyze(
            text='Bruce Banner is the Hulk and Bruce Wayne is BATMAN! '
            'Superman fears not Banner, but Wayne.',
            features=Features(entities=EntitiesOptions(), keywords=KeywordsOptions())).get_result()
        assert response is not None
| 39.407407
| 109
| 0.727444
|
fa29d119d7f86b87cac76097e1a7d655170eb3b1
| 776
|
py
|
Python
|
gnes/__init__.py
|
awesome-archive/gnes
|
21311f05747303d0acdc303f2ade830ef971f47d
|
[
"Apache-2.0"
] | null | null | null |
gnes/__init__.py
|
awesome-archive/gnes
|
21311f05747303d0acdc303f2ade830ef971f47d
|
[
"Apache-2.0"
] | null | null | null |
gnes/__init__.py
|
awesome-archive/gnes
|
21311f05747303d0acdc303f2ade830ef971f47d
|
[
"Apache-2.0"
] | null | null | null |
# Tencent is pleased to support the open source community by making GNES available.
#
# Copyright (C) 2019 THL A29 Limited, a Tencent company. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=low-comment-ratio
__version__ = '0.0.14'  # GNES package version string
| 40.842105
| 84
| 0.755155
|
f3d825b207f414ebc58ce7daba6b823336f53418
| 3,531
|
py
|
Python
|
sentence_transformers/models/T5.py
|
danielperezr88/sentence-transformers
|
56a7990c56c484e7948cf6400b54f27114bb267c
|
[
"Apache-2.0"
] | null | null | null |
sentence_transformers/models/T5.py
|
danielperezr88/sentence-transformers
|
56a7990c56c484e7948cf6400b54f27114bb267c
|
[
"Apache-2.0"
] | null | null | null |
sentence_transformers/models/T5.py
|
danielperezr88/sentence-transformers
|
56a7990c56c484e7948cf6400b54f27114bb267c
|
[
"Apache-2.0"
] | null | null | null |
from torch import nn
from transformers import T5Model, T5Tokenizer
import json
from typing import List, Dict, Optional
import os
import numpy as np
import logging
logger = logging.getLogger(__name__)
class T5(nn.Module):
    """DEPRECATED: Please use models.Transformer instead.

    T5 model to generate token embeddings.

    Each token is mapped to an output vector from the T5 encoder.
    """
    def __init__(self, model_name_or_path: str, max_seq_length: int = 128,
                 do_lower_case: Optional[bool] = None,
                 task_identifier: str = 'stsb sentence1: ',
                 model_args: Optional[Dict] = None,
                 tokenizer_args: Optional[Dict] = None):
        """Load a pretrained T5 encoder and its tokenizer.

        :param model_name_or_path: Model name or path for ``from_pretrained``.
        :param max_seq_length: Maximum input length; capped at 512 (T5 limit).
        :param do_lower_case: If not None, forwarded to the tokenizer.
        :param task_identifier: Prefix prepended to every text before encoding.
        :param model_args: Extra kwargs for ``T5Model.from_pretrained``.
        :param tokenizer_args: Extra kwargs for ``T5Tokenizer.from_pretrained``.
        """
        super(T5, self).__init__()
        # These two dicts previously defaulted to a shared mutable `{}` that
        # was mutated below (do_lower_case injection), leaking state across
        # instances. Default to None and copy instead.
        model_args = dict(model_args) if model_args is not None else {}
        tokenizer_args = dict(tokenizer_args) if tokenizer_args is not None else {}

        self.config_keys = ['max_seq_length', 'do_lower_case', 'task_identifier']
        self.do_lower_case = do_lower_case

        if max_seq_length > 512:
            logger.warning("T5 only allows a max_seq_length of 512. Value will be set to 512")
            max_seq_length = 512
        self.max_seq_length = max_seq_length

        if self.do_lower_case is not None:
            tokenizer_args['do_lower_case'] = do_lower_case

        self.t5model = T5Model.from_pretrained(model_name_or_path, **model_args)
        self.tokenizer = T5Tokenizer.from_pretrained(model_name_or_path, **tokenizer_args)
        self.task_identifier = task_identifier

    def forward(self, features):
        """Returns token_embeddings, cls_token"""
        # Only the encoder half of T5 is used for embeddings.
        output_states = self.t5model.encoder(input_ids=features['input_ids'], attention_mask=features['attention_mask'])
        output_tokens = output_states[0]
        cls_tokens = output_tokens[:, 0, :]  # CLS token is first token
        features.update({'token_embeddings': output_tokens, 'cls_token_embeddings': cls_tokens})
        if len(output_states) > 1:
            features.update({'all_layer_embeddings': output_states[1]})
        return features

    def get_word_embedding_dimension(self) -> int:
        """Dimensionality of the token embeddings produced by the encoder."""
        return self.t5model.config.hidden_size

    def tokenize(self, text: str) -> List[int]:
        """
        Tokenizes a text (prefixed with the task identifier) and maps tokens
        to token-ids.
        """
        return self.tokenizer.encode(self.task_identifier + text)

    def get_sentence_features(self, tokens: List[int], pad_seq_length: int):
        """
        Convert tokenized sentence in its embedding ids, segment ids and mask

        :param tokens:
            a tokenized sentence
        :param pad_seq_length:
            the maximal length of the sequence. Cannot be greater than self.sentence_transformer_config.max_seq_length
        :return: embedding ids, segment ids and mask for the sentence
        """
        pad_seq_length = min(pad_seq_length, self.max_seq_length)
        return self.tokenizer.prepare_for_model(tokens, max_length=pad_seq_length, padding='max_length', return_tensors='pt', truncation=True, prepend_batch_axis=True)

    def get_config_dict(self):
        """Serializable constructor arguments (the keys in self.config_keys)."""
        return {key: self.__dict__[key] for key in self.config_keys}

    def save(self, output_path: str):
        """Persist model weights, tokenizer, and this wrapper's config."""
        self.t5model.save_pretrained(output_path)
        self.tokenizer.save_pretrained(output_path)

        with open(os.path.join(output_path, 'sentence_T5_config.json'), 'w') as fOut:
            json.dump(self.get_config_dict(), fOut, indent=2)

    @staticmethod
    def load(input_path: str):
        """Recreate a T5 module previously stored with save()."""
        with open(os.path.join(input_path, 'sentence_T5_config.json')) as fIn:
            config = json.load(fIn)
        return T5(model_name_or_path=input_path, **config)
| 38.380435
| 207
| 0.672897
|
1766e80508f634c0df0f5cfd10c2addefd79f650
| 2,735
|
py
|
Python
|
settings.py
|
zz7a5pe4/webvirtmgr
|
92d6bbd2d05073fd3a9535f9b7c0d41b45efb41c
|
[
"Apache-2.0"
] | 1
|
2019-05-29T00:21:56.000Z
|
2019-05-29T00:21:56.000Z
|
settings.py
|
zz7a5pe4/webvirtmgr
|
92d6bbd2d05073fd3a9535f9b7c0d41b45efb41c
|
[
"Apache-2.0"
] | null | null | null |
settings.py
|
zz7a5pe4/webvirtmgr
|
92d6bbd2d05073fd3a9535f9b7c0d41b45efb41c
|
[
"Apache-2.0"
] | null | null | null |
# Django settings for webvirtmgr project.

import os
import sys

# Make the project root importable regardless of how the app is launched.
ROOT_PATH = os.path.dirname(os.path.abspath(__file__))
if ROOT_PATH not in sys.path:
    sys.path.append(ROOT_PATH)

DEBUG = False
TEMPLATE_DEBUG = DEBUG

# Recipients for error emails (see LOGGING below).
ADMINS = (
    ('Admin', 'root@localhost'),
)

MANAGERS = ADMINS

# SQLite database file lives next to this settings module.
DB_PATH = os.path.join(ROOT_PATH, 'webvirtmgr.db')
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': DB_PATH, # Or path to database file if using sqlite3.
        'USER': '', # Not used with sqlite3.
        'PASSWORD': '', # Not used with sqlite3.
        'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '', # Set to empty string for default. Not used with sqlite3.
    }
}

TIME_ZONE = 'Europe/Zaporozhye'
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
USE_I18N = False
USE_L10N = True

# Media and static assets are served from inside the polls app directory.
MEDIA_ROOT = os.path.abspath(os.path.join(ROOT_PATH, '..', 'webvirtmgr/polls/media'))
MEDIA_URL = 'media/'
STATIC_ROOT = os.path.abspath(os.path.join(ROOT_PATH, '..', 'webvirtmgr/polls/static'))
STATIC_URL = 'static/'
ADMIN_MEDIA_PREFIX = 'static/admin/'

STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)

# NOTE(review): SECRET_KEY is hard-coded in source control -- it should be
# loaded from the environment (or a secrets file) in any real deployment.
SECRET_KEY = 'fc*a)88#3de3-a@=qrb3ip=vob00nt1jcx*=a%by^*302=6x96'

TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
)

MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
)

ROOT_URLCONF = 'webvirtmgr.urls'

TEMPLATE_DIRS = (
    os.path.abspath(os.path.join(ROOT_PATH, '..', 'webvirtmgr/polls/templates')),
)

INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.admin',
    'webvirtmgr.polls',
)

# Mail unhandled request errors (HTTP 500s) to ADMINS.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'class': 'django.utils.log.AdminEmailHandler'
        }
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
    }
}
| 32.176471
| 123
| 0.632907
|
be5d998c76f0fff251398d19d59e59da810c3324
| 2,064
|
py
|
Python
|
Baselines/OLTR/Codes/algorithms/baselines/pairwise.py
|
nju-websoft/TRAVERS
|
54b58e29bebc3cd9c01a2d47efcc25b2d2b98876
|
[
"Apache-2.0"
] | 55
|
2018-08-31T13:13:08.000Z
|
2021-12-09T06:12:04.000Z
|
Baselines/OLTR/Codes/algorithms/baselines/pairwise.py
|
nju-websoft/TRAVERS
|
54b58e29bebc3cd9c01a2d47efcc25b2d2b98876
|
[
"Apache-2.0"
] | 1
|
2018-10-03T12:52:10.000Z
|
2020-09-10T12:57:57.000Z
|
Baselines/OLTR/Codes/algorithms/baselines/pairwise.py
|
nju-websoft/TRAVERS
|
54b58e29bebc3cd9c01a2d47efcc25b2d2b98876
|
[
"Apache-2.0"
] | 7
|
2018-11-28T13:34:47.000Z
|
2021-02-12T23:45:48.000Z
|
# -*- coding: utf-8 -*-
import sys
import os
sys.path.append(os.path.join(os.path.dirname(__file__), '../..'))
import numpy as np
import utils.rankings as rnk
from algorithms.PDGD.pdgd import PDGD
# Pairwise Baseline from Hofmann
class Pairwise(PDGD):
    """Epsilon-greedy pairwise baseline built on PDGD.

    Each rank position shows the model's best remaining document with
    probability 1 - epsilon, and a uniformly random remaining document
    otherwise.  Updates use all (clicked, observed-but-not-clicked) pairs.
    """

    def __init__(self, epsilon,
                 *args, **kargs):
        super(Pairwise, self).__init__(*args, **kargs)
        # Per-position probability of exploring (showing a random document).
        self.epsilon = epsilon

    def _create_train_ranking(self, query_id, query_feat, inverted):
        # Build the ranking shown to the user for this query.
        assert inverted == False
        n_docs = query_feat.shape[0]
        k = np.minimum(self.n_results, n_docs)
        self.doc_scores = self.model.score(query_feat)
        # Exploitation order: documents sorted by model score.
        exploit = rnk.rank_query(self.doc_scores, inverted=False, n_results=k)
        # Exploration order: a uniformly random permutation of all documents.
        explore = np.random.permutation(np.arange(n_docs))
        # True -> exploit at this position, False -> explore.
        coinflips = np.random.uniform(size=k) > self.epsilon
        self.ranking = -np.ones(k, dtype=np.int32)
        exploit_i = 0
        explore_i = 0
        for i in range(k):
            if coinflips[i]:
                # Take the highest-scored document not already placed.
                while exploit[exploit_i] in self.ranking:
                    exploit_i += 1
                self.ranking[i] = exploit[exploit_i]
                exploit_i += 1
            else:
                # Take the next random document not already placed.
                while explore[explore_i] in self.ranking:
                    explore_i += 1
                self.ranking[i] = explore[explore_i]
                explore_i += 1
        self._last_query_feat = query_feat
        return self.ranking

    def _update_to_clicks(self, clicks):
        # Derive pairwise preferences from the click pattern and update the model.
        n_docs = self.ranking.shape[0]
        cur_k = np.minimum(n_docs, self.n_results)
        included = np.ones(cur_k, dtype=np.int32)
        if not clicks[-1]:
            # If the last result was not clicked, only positions up to one
            # past the last click count as observed (reversed cumulative
            # click count marks them).
            included[1:] = np.cumsum(clicks[::-1])[:0:-1]
        # Negatives: observed but not clicked; positives: clicked.
        neg_ind = np.where(np.logical_xor(clicks, included))[0]
        pos_ind = np.where(clicks)[0]
        n_pos = pos_ind.shape[0]
        n_neg = neg_ind.shape[0]
        n_pairs = n_pos*n_neg
        if n_pairs == 0:
            # No (clicked, non-clicked) pair to learn from.
            return
        pos_r_ind = self.ranking[pos_ind]
        neg_r_ind = self.ranking[neg_ind]
        # Per-document weights: each positive is weighted by the number of
        # negatives it beats, each negative by (minus) the positive count.
        all_w = np.zeros(n_pos + n_neg)
        all_w[:n_pos] = n_neg
        all_w[n_pos:] = -n_pos
        all_ind = np.concatenate([pos_r_ind, neg_r_ind])
        self.model.update_to_documents(all_ind,
                                       all_w)
| 28.273973
| 74
| 0.643411
|
b6dc91367413d82ed1b9a693482d683ef17943e5
| 36
|
py
|
Python
|
src/JenkinsLibrary/__init__.py
|
okgolove/robotframework-jenkins
|
11882bea09dd29875b9a3df4591c7e4c43a188e4
|
[
"MIT"
] | 8
|
2018-03-02T21:43:10.000Z
|
2020-08-25T01:37:22.000Z
|
src/JenkinsLibrary/__init__.py
|
okgolove/robotframework-jenkins
|
11882bea09dd29875b9a3df4591c7e4c43a188e4
|
[
"MIT"
] | 6
|
2018-03-06T19:18:45.000Z
|
2021-06-02T04:35:15.000Z
|
src/JenkinsLibrary/__init__.py
|
okgolove/robotframework-jenkins
|
11882bea09dd29875b9a3df4591c7e4c43a188e4
|
[
"MIT"
] | 9
|
2019-02-12T16:55:47.000Z
|
2022-01-30T02:42:49.000Z
|
from .library import JenkinsLibrary
| 18
| 35
| 0.861111
|
f9662e0fc1e34d2cc8fc5de9416b7db1ca54ef52
| 8,858
|
py
|
Python
|
app/controllers/dns/zones.py
|
white8086/SnitchDNS
|
e4fd92bd06f5ad7a9a514f11ab29817b8be5c415
|
[
"MIT"
] | 1
|
2021-04-14T12:06:40.000Z
|
2021-04-14T12:06:40.000Z
|
app/controllers/dns/zones.py
|
rosscdh/SnitchDNS
|
e4fd92bd06f5ad7a9a514f11ab29817b8be5c415
|
[
"MIT"
] | null | null | null |
app/controllers/dns/zones.py
|
rosscdh/SnitchDNS
|
e4fd92bd06f5ad7a9a514f11ab29817b8be5c415
|
[
"MIT"
] | null | null | null |
from . import bp
from flask_login import current_user, login_required
from flask import render_template, redirect, url_for, flash, request, send_file
from app.lib.base.provider import Provider
@bp.route('/', methods=['GET'])
@login_required
def index():
    """Paginated, searchable, tag-filterable list of the user's DNS zones."""
    per_page = 20
    provider = Provider()
    zone_manager = provider.dns_zones()
    tag_manager = provider.tags()

    search = request.args.get('search', '').strip()
    search_tags = request.args.getlist('tags')
    page = max(int(request.args.get('page', 1)), 1)

    # Admins see every zone; everyone else only their own.
    user_id = None if current_user.admin else current_user.id

    # Query-string prefix reused by the pagination links in the template.
    page_url = 'tags=' + '&tags='.join(search_tags)
    page_url += "&search={0}&page=".format(search)

    return render_template(
        'dns/zones/index.html',
        zones=zone_manager.get_user_zones_paginated(user_id, order_by='domain', page=page, per_page=per_page, search=search, tags=search_tags),
        page=page,
        per_page=per_page,
        page_url=page_url,
        search=search,
        search_tags=search_tags,
        tags=tag_manager.all(user_id=user_id, order_by='asc', order_column='name')
    )
@bp.route('/<int:dns_zone_id>/view', methods=['GET'])
@login_required
def zone_view(dns_zone_id):
    """Show a single zone together with its DNS records."""
    provider = Provider()
    zone_manager = provider.dns_zones()
    record_manager = provider.dns_records()

    if not zone_manager.can_access(dns_zone_id, current_user.id):
        flash('Access Denied', 'error')
        return redirect(url_for('home.index'))

    zone = zone_manager.get(dns_zone_id)
    if not zone:
        flash('Zone not found', 'error')
        return redirect(url_for('home.index'))

    return render_template(
        'dns/zones/view.html',
        zone=zone,
        records=record_manager.get_zone_records(dns_zone_id, order_column='type'),
        section='records',
        tab='records'
    )
@bp.route('/<int:dns_zone_id>/edit', methods=['GET'])
@login_required
def zone_edit(dns_zone_id):
    """Render the zone form: id 0 (or negative) creates, otherwise edits."""
    provider = Provider()
    zone_manager = provider.dns_zones()
    tag_manager = provider.tags()

    dns_zone_id = max(dns_zone_id, 0)
    zone = None
    if dns_zone_id > 0:
        if not zone_manager.can_access(dns_zone_id, current_user.id):
            flash('Access Denied', 'error')
            return redirect(url_for('home.index'))
        zone = zone_manager.get(dns_zone_id)
        if not zone:
            flash('Zone not found', 'error')
            return redirect(url_for('home.index'))

    # New zones belong to the current user; existing ones keep their owner.
    username = current_user.username if zone is None else zone.username
    user_id = zone.user_id if dns_zone_id > 0 else current_user.id

    return render_template(
        'dns/zones/edit.html',
        dns_zone_id=dns_zone_id,
        user_domain=zone_manager.get_user_base_domain(username),
        zone=zone,
        tags=tag_manager.all(user_id=user_id, order_column='name', order_by='asc')
    )
@bp.route('/<int:dns_zone_id>/edit/save', methods=['POST'])
@login_required
def zone_edit_save(dns_zone_id):
    """Dispatch the edit form: id <= 0 creates a zone, otherwise updates it."""
    if max(dns_zone_id, 0) == 0:
        return __zone_create()
    return __zone_update(dns_zone_id)
@bp.route('/<int:dns_zone_id>/delete', methods=['POST'])
@login_required
def zone_delete(dns_zone_id):
    """Delete a non-master zone via the manager (which removes child records)."""
    zone_manager = Provider().dns_zones()

    if not zone_manager.can_access(dns_zone_id, current_user.id):
        flash('Access Denied', 'error')
        return redirect(url_for('home.index'))

    zone = zone_manager.get(dns_zone_id)
    if not zone:
        flash('Could not get zone', 'error')
        return redirect(url_for('dns.index'))

    if zone.master:
        flash('You cannot delete a master zone', 'error')
        return redirect(url_for('dns.index'))

    # Not using the instance's .delete() attribute because we first need to
    # delete all child records.
    if not zone_manager.delete(dns_zone_id):
        flash('Could not delete zone', 'error')
        return redirect(url_for('dns.index'))

    flash('Zone deleted', 'success')
    return redirect(url_for('dns.index'))
@bp.route('/delete', methods=['POST'])
@login_required
def zone_group_delete():
    """Bulk-delete the current user's zones matching the search/tag filter."""
    zone_manager = Provider().dns_zones()

    filter_text = request.form['search'].strip()
    filter_tags = request.form['tags'].strip().split(',')

    zone_manager.group_delete(current_user.id, search=filter_text, tags=filter_tags)

    flash('Zone(s) deleted', 'success')
    return redirect(url_for('dns.index'))
def __zone_create():
    """Create a zone from the submitted form and redirect to its view page.

    Checkbox-style fields arrive as '1' when enabled; anything else (or
    absence) means disabled.
    """
    provider = Provider()
    zones = provider.dns_zones()
    dns_zone_id = 0

    domain = request.form['domain'].strip().lower()
    active = int(request.form.get('active', 0)) == 1
    catch_all = int(request.form.get('catch_all', 0)) == 1
    forwarding = int(request.form.get('forwarding', 0)) == 1
    tags = request.form.getlist('tags')

    zone = zones.new(domain, active, catch_all, forwarding, current_user.id, update_old_logs=True)
    if isinstance(zone, list):
        # The zone manager returns a list of error messages on failure.
        for error in zone:
            flash(error, 'error')
        return redirect(url_for('dns.zone_edit', dns_zone_id=dns_zone_id))

    zone = zones.save_tags(zone, tags)
    if not zone:
        flash('Could not save zone tags', 'error')
        return redirect(url_for('dns.zone_edit', dns_zone_id=dns_zone_id))

    flash('Zone created', 'success')
    return redirect(url_for('dns.zone_view', dns_zone_id=zone.id))
def __zone_update(dns_zone_id):
    """Update an existing zone from the submitted form.

    Master zones keep their domain name; all zones get their flags and tags
    refreshed from the form.
    """
    provider = Provider()
    zones = provider.dns_zones()

    if not zones.can_access(dns_zone_id, current_user.id):
        flash('Access Denied', 'error')
        return redirect(url_for('home.index'))

    zone = zones.get(dns_zone_id)
    if not zone:
        flash('Could not get zone', 'error')
        return redirect(url_for('dns.zone_edit', dns_zone_id=dns_zone_id))

    # A master zone's domain cannot be renamed.
    domain = request.form['domain'].strip().lower() if not zone.master else zone.domain
    active = int(request.form.get('active', 0)) == 1
    catch_all = int(request.form.get('catch_all', 0)) == 1
    forwarding = int(request.form.get('forwarding', 0)) == 1
    tags = request.form.getlist('tags')

    if len(domain) == 0:
        flash('Invalid domain', 'error')
        return redirect(url_for('dns.zone_edit', dns_zone_id=dns_zone_id))

    if zones.has_duplicate(dns_zone_id, domain):
        flash('This domain already exists.', 'error')
        return redirect(url_for('dns.zone_edit', dns_zone_id=dns_zone_id))

    zone = zones.update(zone.id, domain, active, catch_all, forwarding, zone.user_id, master=zone.master, update_old_logs=True)
    if isinstance(zone, list):
        # The zone manager returns a list of error messages on failure.
        for error in zone:
            flash(error, 'error')
        return redirect(url_for('dns.zone_edit', dns_zone_id=dns_zone_id))

    zone = zones.save_tags(zone, tags)
    if not zone:
        flash('Could not save zone tags', 'error')
        return redirect(url_for('dns.zone_edit', dns_zone_id=dns_zone_id))

    flash('Zone saved', 'success')
    return redirect(url_for('dns.zone_view', dns_zone_id=zone.id))
@bp.route('/create/log/<int:query_log_id>', methods=['POST'])
@login_required
def zone_create_from_log(query_log_id):
    """Create a zone for the domain recorded in a captured DNS query log entry."""
    provider = Provider()
    logging = provider.dns_logs()
    zones = provider.dns_zones()

    log = logging.get(query_log_id)
    if not log:
        flash('Could not retrieve log record', 'error')
        return redirect(url_for('home.index'))

    if log.dns_zone_id > 0:
        # This means that the zone exists.
        if not zones.can_access(log.dns_zone_id, current_user.id):
            # This error is misleading on purpose to prevent zone enumeration. Not that it's important by meh.
            flash('Could not retrieve log record', 'error')
            return redirect(url_for('home.index'))
        flash('Zone already exists', 'error')
        return redirect(url_for('dns.zone_view', dns_zone_id=log.dns_zone_id))

    # Create an active, non-catch-all, non-forwarding zone for the logged domain.
    zone = zones.new(log.domain, True, False, False, current_user.id, update_old_logs=True)
    if isinstance(zone, list):
        # The zone manager returns a list of error messages on failure.
        for error in zone:
            flash(error, 'error')
        return redirect(url_for('dns.zone_edit', dns_zone_id=0))

    flash('Zone created', 'success')
    return redirect(url_for('dns.zone_view', dns_zone_id=zone.id))
@bp.route('/export', methods=['POST'])
@login_required
def zones_export():
    """Export matching zones and records as a downloadable zip archive."""
    zone_manager = Provider().dns_zones()

    filter_text = request.form['search'].strip()
    filter_tags = request.form['tags'].strip().split(',')

    result = zone_manager.export(user_id=current_user.id, export_zones=True, export_records=True, compress_export=True, search=filter_text, tags=filter_tags)
    if not result:
        flash('Could not generate export file.', 'error')
        return redirect(url_for('dns.index'))

    # And download.
    return send_file(result['zip'], attachment_filename='snitch_export.zip', as_attachment=True)
| 33.80916
| 145
| 0.666968
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.