Dataset schema (one record per source file):

| Column | Type | Range / values |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 4 to 1.02M |
| ext | string | 8 distinct values |
| lang | string | 1 distinct value |
| max_stars_repo_path | string | length 4 to 209 |
| max_stars_repo_name | string | length 5 to 121 |
| max_stars_repo_head_hexsha | string | length 40 |
| max_stars_repo_licenses | list | length 1 to 10 |
| max_stars_count | int64, nullable | 1 to 191k |
| max_stars_repo_stars_event_min_datetime | string, nullable | length 24 |
| max_stars_repo_stars_event_max_datetime | string, nullable | length 24 |
| max_issues_repo_path | string | length 4 to 209 |
| max_issues_repo_name | string | length 5 to 121 |
| max_issues_repo_head_hexsha | string | length 40 |
| max_issues_repo_licenses | list | length 1 to 10 |
| max_issues_count | int64, nullable | 1 to 67k |
| max_issues_repo_issues_event_min_datetime | string, nullable | length 24 |
| max_issues_repo_issues_event_max_datetime | string, nullable | length 24 |
| max_forks_repo_path | string | length 4 to 209 |
| max_forks_repo_name | string | length 5 to 121 |
| max_forks_repo_head_hexsha | string | length 40 |
| max_forks_repo_licenses | list | length 1 to 10 |
| max_forks_count | int64, nullable | 1 to 105k |
| max_forks_repo_forks_event_min_datetime | string, nullable | length 24 |
| max_forks_repo_forks_event_max_datetime | string, nullable | length 24 |
| content | string | length 4 to 1.02M |
| avg_line_length | float64 | 1.07 to 66.1k |
| max_line_length | int64 | 4 to 266k |
| alphanum_fraction | float64 | 0.01 to 1 |
hexsha: 0f47a357985cff6f4eeac24a15d22f1bc05ba0d0 | size: 617 | ext: py | lang: Python
repo path: leapvision/solpixelator.py | repo: isai-solis/sync-week-austin-2018 | head hexsha: ac2477aff4b2f78a9d7b7249fd0512c5bb1d1fc0 | licenses: ["MIT"]
stars / issues / forks counts and event datetimes: null (path, repo name, head hexsha, and licenses repeat identically across the stars, issues, and forks columns)
content:
import cv2


class Pixelator(object):
    '''
    Pixelates an image by shrinking it and then growing it back with
    nearest-neighbour interpolation, so the enlarged pixels stay blocky.
    '''

    def __init__(self, shrinkRatio=None, growRatio=None):
        # Fall back to the default ratios when none are given.
        self.shrinkRatio = shrinkRatio if shrinkRatio else 0.1
        self.growRatio = growRatio if growRatio else 10.0

    def pixelate(self, src):
        # Shrink the image, then grow it back with nearest-neighbour
        # interpolation so each low-resolution pixel becomes a visible block.
        resized = cv2.resize(src, (0, 0), fx=self.shrinkRatio, fy=self.shrinkRatio,
                             interpolation=cv2.INTER_NEAREST)
        pixelated = cv2.resize(resized, (0, 0), fx=self.growRatio, fy=self.growRatio,
                               interpolation=cv2.INTER_NEAREST)
        return pixelated
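# --- Illustrative usage sketch (not part of the original file) ---
# Reads an image from a hypothetical path, pixelates it with the default
# ratios, and writes the result; cv2.imread returns None on failure, so guard.
def _example_pixelate_file(in_path="input.png", out_path="pixelated.png"):
    img = cv2.imread(in_path)
    if img is None:
        raise IOError("could not read " + in_path)
    result = Pixelator().pixelate(img)
    cv2.imwrite(out_path, result)
    return result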
avg_line_length: 29.380952 | max_line_length: 99 | alphanum_fraction: 0.612642
hexsha: 018938023755c5b57f218b90260956c183d7ccdb | size: 105,451 | ext: py | lang: Python
repo path: mavsdk/generated/telemetry.py | repo: huynhthehainam/MAVSDK-Python | head hexsha: b68abc53d2b08086f75859565f5600044b68a0d2 | licenses: ["BSD-3-Clause"]
stars / issues / forks counts and event datetimes: null (path, repo name, head hexsha, and licenses repeat identically across the stars, issues, and forks columns)
content:
# -*- coding: utf-8 -*-
from .._base import AsyncBase
from ..generated import telemetry_pb2, telemetry_pb2_grpc
from enum import Enum
class FixType(Enum):
"""
GPS fix type.
Values
------
NO_GPS
No GPS connected
NO_FIX
No position information, GPS is connected
FIX_2D
2D position
FIX_3D
3D position
FIX_DGPS
DGPS/SBAS aided 3D position
RTK_FLOAT
RTK float, 3D position
RTK_FIXED
RTK Fixed, 3D position
"""
NO_GPS = 0
NO_FIX = 1
FIX_2D = 2
FIX_3D = 3
FIX_DGPS = 4
RTK_FLOAT = 5
RTK_FIXED = 6
def translate_to_rpc(self, rpcFixType):
if self == FixType.NO_GPS:
return telemetry_pb2.FIX_TYPE_NO_GPS
if self == FixType.NO_FIX:
return telemetry_pb2.FIX_TYPE_NO_FIX
if self == FixType.FIX_2D:
return telemetry_pb2.FIX_TYPE_FIX_2D
if self == FixType.FIX_3D:
return telemetry_pb2.FIX_TYPE_FIX_3D
if self == FixType.FIX_DGPS:
return telemetry_pb2.FIX_TYPE_FIX_DGPS
if self == FixType.RTK_FLOAT:
return telemetry_pb2.FIX_TYPE_RTK_FLOAT
if self == FixType.RTK_FIXED:
return telemetry_pb2.FIX_TYPE_RTK_FIXED
@staticmethod
def translate_from_rpc(rpc_enum_value):
""" Parses a gRPC response """
if rpc_enum_value == telemetry_pb2.FIX_TYPE_NO_GPS:
return FixType.NO_GPS
if rpc_enum_value == telemetry_pb2.FIX_TYPE_NO_FIX:
return FixType.NO_FIX
if rpc_enum_value == telemetry_pb2.FIX_TYPE_FIX_2D:
return FixType.FIX_2D
if rpc_enum_value == telemetry_pb2.FIX_TYPE_FIX_3D:
return FixType.FIX_3D
if rpc_enum_value == telemetry_pb2.FIX_TYPE_FIX_DGPS:
return FixType.FIX_DGPS
if rpc_enum_value == telemetry_pb2.FIX_TYPE_RTK_FLOAT:
return FixType.RTK_FLOAT
if rpc_enum_value == telemetry_pb2.FIX_TYPE_RTK_FIXED:
return FixType.RTK_FIXED
def __str__(self):
return self.name
class FlightMode(Enum):
"""
Flight modes.
For more information about flight modes, check out
https://docs.px4.io/master/en/config/flight_mode.html.
Values
------
UNKNOWN
Mode not known
READY
Armed and ready to take off
TAKEOFF
Taking off
HOLD
Holding (hovering in place, or circling for fixed-wing vehicles)
MISSION
In mission
RETURN_TO_LAUNCH
Returning to launch position (then landing)
LAND
Landing
OFFBOARD
In 'offboard' mode
FOLLOW_ME
In 'follow-me' mode
MANUAL
In 'Manual' mode
ALTCTL
In 'Altitude Control' mode
POSCTL
In 'Position Control' mode
ACRO
In 'Acro' mode
STABILIZED
In 'Stabilize' mode
RATTITUDE
In 'Rattitude' mode
"""
UNKNOWN = 0
READY = 1
TAKEOFF = 2
HOLD = 3
MISSION = 4
RETURN_TO_LAUNCH = 5
LAND = 6
OFFBOARD = 7
FOLLOW_ME = 8
MANUAL = 9
ALTCTL = 10
POSCTL = 11
ACRO = 12
STABILIZED = 13
RATTITUDE = 14
def translate_to_rpc(self, rpcFlightMode):
if self == FlightMode.UNKNOWN:
return telemetry_pb2.FLIGHT_MODE_UNKNOWN
if self == FlightMode.READY:
return telemetry_pb2.FLIGHT_MODE_READY
if self == FlightMode.TAKEOFF:
return telemetry_pb2.FLIGHT_MODE_TAKEOFF
if self == FlightMode.HOLD:
return telemetry_pb2.FLIGHT_MODE_HOLD
if self == FlightMode.MISSION:
return telemetry_pb2.FLIGHT_MODE_MISSION
if self == FlightMode.RETURN_TO_LAUNCH:
return telemetry_pb2.FLIGHT_MODE_RETURN_TO_LAUNCH
if self == FlightMode.LAND:
return telemetry_pb2.FLIGHT_MODE_LAND
if self == FlightMode.OFFBOARD:
return telemetry_pb2.FLIGHT_MODE_OFFBOARD
if self == FlightMode.FOLLOW_ME:
return telemetry_pb2.FLIGHT_MODE_FOLLOW_ME
if self == FlightMode.MANUAL:
return telemetry_pb2.FLIGHT_MODE_MANUAL
if self == FlightMode.ALTCTL:
return telemetry_pb2.FLIGHT_MODE_ALTCTL
if self == FlightMode.POSCTL:
return telemetry_pb2.FLIGHT_MODE_POSCTL
if self == FlightMode.ACRO:
return telemetry_pb2.FLIGHT_MODE_ACRO
if self == FlightMode.STABILIZED:
return telemetry_pb2.FLIGHT_MODE_STABILIZED
if self == FlightMode.RATTITUDE:
return telemetry_pb2.FLIGHT_MODE_RATTITUDE
@staticmethod
def translate_from_rpc(rpc_enum_value):
""" Parses a gRPC response """
if rpc_enum_value == telemetry_pb2.FLIGHT_MODE_UNKNOWN:
return FlightMode.UNKNOWN
if rpc_enum_value == telemetry_pb2.FLIGHT_MODE_READY:
return FlightMode.READY
if rpc_enum_value == telemetry_pb2.FLIGHT_MODE_TAKEOFF:
return FlightMode.TAKEOFF
if rpc_enum_value == telemetry_pb2.FLIGHT_MODE_HOLD:
return FlightMode.HOLD
if rpc_enum_value == telemetry_pb2.FLIGHT_MODE_MISSION:
return FlightMode.MISSION
if rpc_enum_value == telemetry_pb2.FLIGHT_MODE_RETURN_TO_LAUNCH:
return FlightMode.RETURN_TO_LAUNCH
if rpc_enum_value == telemetry_pb2.FLIGHT_MODE_LAND:
return FlightMode.LAND
if rpc_enum_value == telemetry_pb2.FLIGHT_MODE_OFFBOARD:
return FlightMode.OFFBOARD
if rpc_enum_value == telemetry_pb2.FLIGHT_MODE_FOLLOW_ME:
return FlightMode.FOLLOW_ME
if rpc_enum_value == telemetry_pb2.FLIGHT_MODE_MANUAL:
return FlightMode.MANUAL
if rpc_enum_value == telemetry_pb2.FLIGHT_MODE_ALTCTL:
return FlightMode.ALTCTL
if rpc_enum_value == telemetry_pb2.FLIGHT_MODE_POSCTL:
return FlightMode.POSCTL
if rpc_enum_value == telemetry_pb2.FLIGHT_MODE_ACRO:
return FlightMode.ACRO
if rpc_enum_value == telemetry_pb2.FLIGHT_MODE_STABILIZED:
return FlightMode.STABILIZED
if rpc_enum_value == telemetry_pb2.FLIGHT_MODE_RATTITUDE:
return FlightMode.RATTITUDE
def __str__(self):
return self.name
class StatusTextType(Enum):
"""
Status types.
Values
------
INFO
Information or other
WARNING
Warning
CRITICAL
Critical
"""
INFO = 0
WARNING = 1
CRITICAL = 2
def translate_to_rpc(self, rpcStatusTextType):
if self == StatusTextType.INFO:
return telemetry_pb2.STATUS_TEXT_TYPE_INFO
if self == StatusTextType.WARNING:
return telemetry_pb2.STATUS_TEXT_TYPE_WARNING
if self == StatusTextType.CRITICAL:
return telemetry_pb2.STATUS_TEXT_TYPE_CRITICAL
@staticmethod
def translate_from_rpc(rpc_enum_value):
""" Parses a gRPC response """
if rpc_enum_value == telemetry_pb2.STATUS_TEXT_TYPE_INFO:
return StatusTextType.INFO
if rpc_enum_value == telemetry_pb2.STATUS_TEXT_TYPE_WARNING:
return StatusTextType.WARNING
if rpc_enum_value == telemetry_pb2.STATUS_TEXT_TYPE_CRITICAL:
return StatusTextType.CRITICAL
def __str__(self):
return self.name
class LandedState(Enum):
"""
Landed State enumeration.
Values
------
UNKNOWN
Landed state is unknown
ON_GROUND
The vehicle is on the ground
IN_AIR
The vehicle is in the air
TAKING_OFF
The vehicle is taking off
LANDING
The vehicle is landing
"""
UNKNOWN = 0
ON_GROUND = 1
IN_AIR = 2
TAKING_OFF = 3
LANDING = 4
def translate_to_rpc(self, rpcLandedState):
if self == LandedState.UNKNOWN:
return telemetry_pb2.LANDED_STATE_UNKNOWN
if self == LandedState.ON_GROUND:
return telemetry_pb2.LANDED_STATE_ON_GROUND
if self == LandedState.IN_AIR:
return telemetry_pb2.LANDED_STATE_IN_AIR
if self == LandedState.TAKING_OFF:
return telemetry_pb2.LANDED_STATE_TAKING_OFF
if self == LandedState.LANDING:
return telemetry_pb2.LANDED_STATE_LANDING
@staticmethod
def translate_from_rpc(rpc_enum_value):
""" Parses a gRPC response """
if rpc_enum_value == telemetry_pb2.LANDED_STATE_UNKNOWN:
return LandedState.UNKNOWN
if rpc_enum_value == telemetry_pb2.LANDED_STATE_ON_GROUND:
return LandedState.ON_GROUND
if rpc_enum_value == telemetry_pb2.LANDED_STATE_IN_AIR:
return LandedState.IN_AIR
if rpc_enum_value == telemetry_pb2.LANDED_STATE_TAKING_OFF:
return LandedState.TAKING_OFF
if rpc_enum_value == telemetry_pb2.LANDED_STATE_LANDING:
return LandedState.LANDING
def __str__(self):
return self.name
class Position:
"""
Position type in global coordinates.
Parameters
----------
latitude_deg : double
Latitude in degrees (range: -90 to +90)
longitude_deg : double
Longitude in degrees (range: -180 to +180)
absolute_altitude_m : float
Altitude AMSL (above mean sea level) in metres
relative_altitude_m : float
Altitude relative to takeoff altitude in metres
"""
def __init__(
self,
latitude_deg,
longitude_deg,
absolute_altitude_m,
relative_altitude_m):
""" Initializes the Position object """
self.latitude_deg = latitude_deg
self.longitude_deg = longitude_deg
self.absolute_altitude_m = absolute_altitude_m
self.relative_altitude_m = relative_altitude_m
def __equals__(self, to_compare):
""" Checks if two Position are the same """
try:
# Try to compare - this likely fails when it is compared to a non
# Position object
return \
(self.latitude_deg == to_compare.latitude_deg) and \
(self.longitude_deg == to_compare.longitude_deg) and \
(self.absolute_altitude_m == to_compare.absolute_altitude_m) and \
(self.relative_altitude_m == to_compare.relative_altitude_m)
except AttributeError:
return False
def __str__(self):
""" Position in string representation """
struct_repr = ", ".join([
"latitude_deg: " + str(self.latitude_deg),
"longitude_deg: " + str(self.longitude_deg),
"absolute_altitude_m: " + str(self.absolute_altitude_m),
"relative_altitude_m: " + str(self.relative_altitude_m)
])
return f"Position: [{struct_repr}]"
@staticmethod
def translate_from_rpc(rpcPosition):
""" Translates a gRPC struct to the SDK equivalent """
return Position(
rpcPosition.latitude_deg,
rpcPosition.longitude_deg,
rpcPosition.absolute_altitude_m,
rpcPosition.relative_altitude_m
)
def translate_to_rpc(self, rpcPosition):
""" Translates this SDK object into its gRPC equivalent """
rpcPosition.latitude_deg = self.latitude_deg
rpcPosition.longitude_deg = self.longitude_deg
rpcPosition.absolute_altitude_m = self.absolute_altitude_m
rpcPosition.relative_altitude_m = self.relative_altitude_m
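# Illustrative sketch (not part of the generated file): constructing a Position by
# hand; the coordinates below are made-up demo values. absolute_altitude_m is AMSL,
# relative_altitude_m is relative to the takeoff point.
def _example_make_position():
    return Position(47.397742, 8.545594, 488.0, 0.5)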
class Quaternion:
"""
Quaternion type.
All rotations and axis systems follow the right-hand rule.
The Hamilton quaternion product definition is used.
A zero-rotation quaternion is represented by (1,0,0,0).
The quaternion could also be written as w + xi + yj + zk.
For more info see: https://en.wikipedia.org/wiki/Quaternion
Parameters
----------
w : float
Quaternion entry 0, also denoted as a
x : float
Quaternion entry 1, also denoted as b
y : float
Quaternion entry 2, also denoted as c
z : float
Quaternion entry 3, also denoted as d
"""
def __init__(
self,
w,
x,
y,
z):
""" Initializes the Quaternion object """
self.w = w
self.x = x
self.y = y
self.z = z
def __equals__(self, to_compare):
""" Checks if two Quaternion are the same """
try:
# Try to compare - this likely fails when it is compared to a non
# Quaternion object
return \
(self.w == to_compare.w) and \
(self.x == to_compare.x) and \
(self.y == to_compare.y) and \
(self.z == to_compare.z)
except AttributeError:
return False
def __str__(self):
""" Quaternion in string representation """
struct_repr = ", ".join([
"w: " + str(self.w),
"x: " + str(self.x),
"y: " + str(self.y),
"z: " + str(self.z)
])
return f"Quaternion: [{struct_repr}]"
@staticmethod
def translate_from_rpc(rpcQuaternion):
""" Translates a gRPC struct to the SDK equivalent """
return Quaternion(
rpcQuaternion.w,
rpcQuaternion.x,
rpcQuaternion.y,
rpcQuaternion.z
)
def translate_to_rpc(self, rpcQuaternion):
""" Translates this SDK object into its gRPC equivalent """
rpcQuaternion.w = self.w
rpcQuaternion.x = self.x
rpcQuaternion.y = self.y
rpcQuaternion.z = self.z
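# Illustrative sketch (not part of the generated file): converting a Quaternion to
# 3-2-1 (yaw-pitch-roll) Tait-Bryan angles in degrees, matching the convention the
# EulerAngle docstring below describes. Returns a plain (roll, pitch, yaw) tuple.
def _example_quaternion_to_euler_deg(q):
    import math
    roll = math.atan2(2.0 * (q.w * q.x + q.y * q.z), 1.0 - 2.0 * (q.x * q.x + q.y * q.y))
    pitch = math.asin(max(-1.0, min(1.0, 2.0 * (q.w * q.y - q.z * q.x))))
    yaw = math.atan2(2.0 * (q.w * q.z + q.x * q.y), 1.0 - 2.0 * (q.y * q.y + q.z * q.z))
    return math.degrees(roll), math.degrees(pitch), math.degrees(yaw)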
class EulerAngle:
"""
Euler angle type.
All rotations and axis systems follow the right-hand rule.
The Euler angles follow the convention of a 3-2-1 intrinsic Tait-Bryan rotation sequence.
For more info see https://en.wikipedia.org/wiki/Euler_angles
Parameters
----------
roll_deg : float
Roll angle in degrees, positive is banking to the right
pitch_deg : float
Pitch angle in degrees, positive is pitching nose up
yaw_deg : float
Yaw angle in degrees, positive is clockwise seen from above
"""
def __init__(
self,
roll_deg,
pitch_deg,
yaw_deg):
""" Initializes the EulerAngle object """
self.roll_deg = roll_deg
self.pitch_deg = pitch_deg
self.yaw_deg = yaw_deg
def __equals__(self, to_compare):
""" Checks if two EulerAngle are the same """
try:
# Try to compare - this likely fails when it is compared to a non
# EulerAngle object
return \
(self.roll_deg == to_compare.roll_deg) and \
(self.pitch_deg == to_compare.pitch_deg) and \
(self.yaw_deg == to_compare.yaw_deg)
except AttributeError:
return False
def __str__(self):
""" EulerAngle in string representation """
struct_repr = ", ".join([
"roll_deg: " + str(self.roll_deg),
"pitch_deg: " + str(self.pitch_deg),
"yaw_deg: " + str(self.yaw_deg)
])
return f"EulerAngle: [{struct_repr}]"
@staticmethod
def translate_from_rpc(rpcEulerAngle):
""" Translates a gRPC struct to the SDK equivalent """
return EulerAngle(
rpcEulerAngle.roll_deg,
rpcEulerAngle.pitch_deg,
rpcEulerAngle.yaw_deg
)
def translate_to_rpc(self, rpcEulerAngle):
""" Translates this SDK object into its gRPC equivalent """
rpcEulerAngle.roll_deg = self.roll_deg
rpcEulerAngle.pitch_deg = self.pitch_deg
rpcEulerAngle.yaw_deg = self.yaw_deg
class AngularVelocityBody:
"""
Angular velocity type.
Parameters
----------
roll_rad_s : float
Roll angular velocity
pitch_rad_s : float
Pitch angular velocity
yaw_rad_s : float
Yaw angular velocity
"""
def __init__(
self,
roll_rad_s,
pitch_rad_s,
yaw_rad_s):
""" Initializes the AngularVelocityBody object """
self.roll_rad_s = roll_rad_s
self.pitch_rad_s = pitch_rad_s
self.yaw_rad_s = yaw_rad_s
def __equals__(self, to_compare):
""" Checks if two AngularVelocityBody are the same """
try:
# Try to compare - this likely fails when it is compared to a non
# AngularVelocityBody object
return \
(self.roll_rad_s == to_compare.roll_rad_s) and \
(self.pitch_rad_s == to_compare.pitch_rad_s) and \
(self.yaw_rad_s == to_compare.yaw_rad_s)
except AttributeError:
return False
def __str__(self):
""" AngularVelocityBody in string representation """
struct_repr = ", ".join([
"roll_rad_s: " + str(self.roll_rad_s),
"pitch_rad_s: " + str(self.pitch_rad_s),
"yaw_rad_s: " + str(self.yaw_rad_s)
])
return f"AngularVelocityBody: [{struct_repr}]"
@staticmethod
def translate_from_rpc(rpcAngularVelocityBody):
""" Translates a gRPC struct to the SDK equivalent """
return AngularVelocityBody(
rpcAngularVelocityBody.roll_rad_s,
rpcAngularVelocityBody.pitch_rad_s,
rpcAngularVelocityBody.yaw_rad_s
)
def translate_to_rpc(self, rpcAngularVelocityBody):
""" Translates this SDK object into its gRPC equivalent """
rpcAngularVelocityBody.roll_rad_s = self.roll_rad_s
rpcAngularVelocityBody.pitch_rad_s = self.pitch_rad_s
rpcAngularVelocityBody.yaw_rad_s = self.yaw_rad_s
class GpsInfo:
"""
GPS information type.
Parameters
----------
num_satellites : int32_t
Number of visible satellites in use
fix_type : FixType
Fix type
"""
def __init__(
self,
num_satellites,
fix_type):
""" Initializes the GpsInfo object """
self.num_satellites = num_satellites
self.fix_type = fix_type
def __equals__(self, to_compare):
""" Checks if two GpsInfo are the same """
try:
# Try to compare - this likely fails when it is compared to a non
# GpsInfo object
return \
(self.num_satellites == to_compare.num_satellites) and \
(self.fix_type == to_compare.fix_type)
except AttributeError:
return False
def __str__(self):
""" GpsInfo in string representation """
struct_repr = ", ".join([
"num_satellites: " + str(self.num_satellites),
"fix_type: " + str(self.fix_type)
])
return f"GpsInfo: [{struct_repr}]"
@staticmethod
def translate_from_rpc(rpcGpsInfo):
""" Translates a gRPC struct to the SDK equivalent """
return GpsInfo(
rpcGpsInfo.num_satellites,
FixType.translate_from_rpc(rpcGpsInfo.fix_type)
)
def translate_to_rpc(self, rpcGpsInfo):
""" Translates this SDK object into its gRPC equivalent """
rpcGpsInfo.num_satellites = self.num_satellites
self.fix_type.translate_to_rpc(rpcGpsInfo.fix_type)
class Battery:
"""
Battery type.
Parameters
----------
voltage_v : float
Voltage in volts
remaining_percent : float
Estimated battery remaining (range: 0.0 to 1.0)
"""
def __init__(
self,
voltage_v,
remaining_percent):
""" Initializes the Battery object """
self.voltage_v = voltage_v
self.remaining_percent = remaining_percent
def __equals__(self, to_compare):
""" Checks if two Battery are the same """
try:
# Try to compare - this likely fails when it is compared to a non
# Battery object
return \
(self.voltage_v == to_compare.voltage_v) and \
(self.remaining_percent == to_compare.remaining_percent)
except AttributeError:
return False
def __str__(self):
""" Battery in string representation """
struct_repr = ", ".join([
"voltage_v: " + str(self.voltage_v),
"remaining_percent: " + str(self.remaining_percent)
])
return f"Battery: [{struct_repr}]"
@staticmethod
def translate_from_rpc(rpcBattery):
""" Translates a gRPC struct to the SDK equivalent """
return Battery(
rpcBattery.voltage_v,
rpcBattery.remaining_percent
)
def translate_to_rpc(self, rpcBattery):
""" Translates this SDK object into its gRPC equivalent """
rpcBattery.voltage_v = self.voltage_v
rpcBattery.remaining_percent = self.remaining_percent
class Health:
"""
Health type.
Parameters
----------
is_gyrometer_calibration_ok : bool
True if the gyrometer is calibrated
is_accelerometer_calibration_ok : bool
True if the accelerometer is calibrated
is_magnetometer_calibration_ok : bool
True if the magnetometer is calibrated
is_level_calibration_ok : bool
True if the vehicle has a valid level calibration
is_local_position_ok : bool
True if the local position estimate is good enough to fly in 'position control' mode
is_global_position_ok : bool
True if the global position estimate is good enough to fly in 'position control' mode
is_home_position_ok : bool
True if the home position has been initialized properly
"""
def __init__(
self,
is_gyrometer_calibration_ok,
is_accelerometer_calibration_ok,
is_magnetometer_calibration_ok,
is_level_calibration_ok,
is_local_position_ok,
is_global_position_ok,
is_home_position_ok):
""" Initializes the Health object """
self.is_gyrometer_calibration_ok = is_gyrometer_calibration_ok
self.is_accelerometer_calibration_ok = is_accelerometer_calibration_ok
self.is_magnetometer_calibration_ok = is_magnetometer_calibration_ok
self.is_level_calibration_ok = is_level_calibration_ok
self.is_local_position_ok = is_local_position_ok
self.is_global_position_ok = is_global_position_ok
self.is_home_position_ok = is_home_position_ok
def __equals__(self, to_compare):
""" Checks if two Health are the same """
try:
# Try to compare - this likely fails when it is compared to a non
# Health object
return \
(self.is_gyrometer_calibration_ok == to_compare.is_gyrometer_calibration_ok) and \
(self.is_accelerometer_calibration_ok == to_compare.is_accelerometer_calibration_ok) and \
(self.is_magnetometer_calibration_ok == to_compare.is_magnetometer_calibration_ok) and \
(self.is_level_calibration_ok == to_compare.is_level_calibration_ok) and \
(self.is_local_position_ok == to_compare.is_local_position_ok) and \
(self.is_global_position_ok == to_compare.is_global_position_ok) and \
(self.is_home_position_ok == to_compare.is_home_position_ok)
except AttributeError:
return False
def __str__(self):
""" Health in string representation """
struct_repr = ", ".join([
"is_gyrometer_calibration_ok: " + str(self.is_gyrometer_calibration_ok),
"is_accelerometer_calibration_ok: " + str(self.is_accelerometer_calibration_ok),
"is_magnetometer_calibration_ok: " + str(self.is_magnetometer_calibration_ok),
"is_level_calibration_ok: " + str(self.is_level_calibration_ok),
"is_local_position_ok: " + str(self.is_local_position_ok),
"is_global_position_ok: " + str(self.is_global_position_ok),
"is_home_position_ok: " + str(self.is_home_position_ok)
])
return f"Health: [{struct_repr}]"
@staticmethod
def translate_from_rpc(rpcHealth):
""" Translates a gRPC struct to the SDK equivalent """
return Health(
rpcHealth.is_gyrometer_calibration_ok,
rpcHealth.is_accelerometer_calibration_ok,
rpcHealth.is_magnetometer_calibration_ok,
rpcHealth.is_level_calibration_ok,
rpcHealth.is_local_position_ok,
rpcHealth.is_global_position_ok,
rpcHealth.is_home_position_ok
)
def translate_to_rpc(self, rpcHealth):
""" Translates this SDK object into its gRPC equivalent """
rpcHealth.is_gyrometer_calibration_ok = self.is_gyrometer_calibration_ok
rpcHealth.is_accelerometer_calibration_ok = self.is_accelerometer_calibration_ok
rpcHealth.is_magnetometer_calibration_ok = self.is_magnetometer_calibration_ok
rpcHealth.is_level_calibration_ok = self.is_level_calibration_ok
rpcHealth.is_local_position_ok = self.is_local_position_ok
rpcHealth.is_global_position_ok = self.is_global_position_ok
rpcHealth.is_home_position_ok = self.is_home_position_ok
class RcStatus:
"""
Remote control status type.
Parameters
----------
was_available_once : bool
True if an RC signal has been available once
is_available : bool
True if the RC signal is available now
signal_strength_percent : float
Signal strength (range: 0 to 100)
"""
def __init__(
self,
was_available_once,
is_available,
signal_strength_percent):
""" Initializes the RcStatus object """
self.was_available_once = was_available_once
self.is_available = is_available
self.signal_strength_percent = signal_strength_percent
def __equals__(self, to_compare):
""" Checks if two RcStatus are the same """
try:
# Try to compare - this likely fails when it is compared to a non
# RcStatus object
return \
(self.was_available_once == to_compare.was_available_once) and \
(self.is_available == to_compare.is_available) and \
(self.signal_strength_percent == to_compare.signal_strength_percent)
except AttributeError:
return False
def __str__(self):
""" RcStatus in string representation """
struct_repr = ", ".join([
"was_available_once: " + str(self.was_available_once),
"is_available: " + str(self.is_available),
"signal_strength_percent: " + str(self.signal_strength_percent)
])
return f"RcStatus: [{struct_repr}]"
@staticmethod
def translate_from_rpc(rpcRcStatus):
""" Translates a gRPC struct to the SDK equivalent """
return RcStatus(
rpcRcStatus.was_available_once,
rpcRcStatus.is_available,
rpcRcStatus.signal_strength_percent
)
def translate_to_rpc(self, rpcRcStatus):
""" Translates this SDK object into its gRPC equivalent """
rpcRcStatus.was_available_once = self.was_available_once
rpcRcStatus.is_available = self.is_available
rpcRcStatus.signal_strength_percent = self.signal_strength_percent
class StatusText:
"""
StatusText information type.
Parameters
----------
type : StatusTextType
Message type
text : std::string
MAVLink status message
"""
def __init__(
self,
type,
text):
""" Initializes the StatusText object """
self.type = type
self.text = text
def __equals__(self, to_compare):
""" Checks if two StatusText are the same """
try:
# Try to compare - this likely fails when it is compared to a non
# StatusText object
return \
(self.type == to_compare.type) and \
(self.text == to_compare.text)
except AttributeError:
return False
def __str__(self):
""" StatusText in string representation """
struct_repr = ", ".join([
"type: " + str(self.type),
"text: " + str(self.text)
])
return f"StatusText: [{struct_repr}]"
@staticmethod
def translate_from_rpc(rpcStatusText):
""" Translates a gRPC struct to the SDK equivalent """
return StatusText(
StatusTextType.translate_from_rpc(rpcStatusText.type),
rpcStatusText.text
)
def translate_to_rpc(self, rpcStatusText):
""" Translates this SDK object into its gRPC equivalent """
self.type.translate_to_rpc(rpcStatusText.type)
rpcStatusText.text = self.text
class ActuatorControlTarget:
"""
Actuator control target type.
Parameters
----------
group : int32_t
An actuator control group is e.g. 'attitude' for the core flight controls, or 'gimbal' for a payload.
controls : [float]
Controls normed from -1 to 1, where 0 is neutral position.
"""
def __init__(
self,
group,
controls):
""" Initializes the ActuatorControlTarget object """
self.group = group
self.controls = controls
def __equals__(self, to_compare):
""" Checks if two ActuatorControlTarget are the same """
try:
# Try to compare - this likely fails when it is compared to a non
# ActuatorControlTarget object
return \
(self.group == to_compare.group) and \
(self.controls == to_compare.controls)
except AttributeError:
return False
def __str__(self):
""" ActuatorControlTarget in string representation """
struct_repr = ", ".join([
"group: " + str(self.group),
"controls: " + str(self.controls)
])
return f"ActuatorControlTarget: [{struct_repr}]"
@staticmethod
def translate_from_rpc(rpcActuatorControlTarget):
""" Translates a gRPC struct to the SDK equivalent """
return ActuatorControlTarget(
rpcActuatorControlTarget.group,
rpcActuatorControlTarget.controls
)
def translate_to_rpc(self, rpcActuatorControlTarget):
""" Translates this SDK object into its gRPC equivalent """
rpcActuatorControlTarget.group = self.group
for elem in self.controls:
rpcActuatorControlTarget.controls.append(elem)
class ActuatorOutputStatus:
"""
Actuator output status type.
Parameters
----------
active : uint32_t
Active outputs
actuator : [float]
Servo/motor output values
"""
def __init__(
self,
active,
actuator):
""" Initializes the ActuatorOutputStatus object """
self.active = active
self.actuator = actuator
def __equals__(self, to_compare):
""" Checks if two ActuatorOutputStatus are the same """
try:
# Try to compare - this likely fails when it is compared to a non
# ActuatorOutputStatus object
return \
(self.active == to_compare.active) and \
(self.actuator == to_compare.actuator)
except AttributeError:
return False
def __str__(self):
""" ActuatorOutputStatus in string representation """
struct_repr = ", ".join([
"active: " + str(self.active),
"actuator: " + str(self.actuator)
])
return f"ActuatorOutputStatus: [{struct_repr}]"
@staticmethod
def translate_from_rpc(rpcActuatorOutputStatus):
""" Translates a gRPC struct to the SDK equivalent """
return ActuatorOutputStatus(
rpcActuatorOutputStatus.active,
rpcActuatorOutputStatus.actuator
)
def translate_to_rpc(self, rpcActuatorOutputStatus):
""" Translates this SDK object into its gRPC equivalent """
rpcActuatorOutputStatus.active = self.active
for elem in self.actuator:
rpcActuatorOutputStatus.actuator.append(elem)
class Covariance:
"""
Covariance type.
Row-major representation of a 6x6 cross-covariance matrix
upper right triangle.
Set first to NaN if unknown.
Parameters
----------
covariance_matrix : [float]
Representation of a covariance matrix.
"""
def __init__(
self,
covariance_matrix):
""" Initializes the Covariance object """
self.covariance_matrix = covariance_matrix
def __equals__(self, to_compare):
""" Checks if two Covariance are the same """
try:
# Try to compare - this likely fails when it is compared to a non
# Covariance object
return \
(self.covariance_matrix == to_compare.covariance_matrix)
except AttributeError:
return False
def __str__(self):
""" Covariance in string representation """
struct_repr = ", ".join([
"covariance_matrix: " + str(self.covariance_matrix)
])
return f"Covariance: [{struct_repr}]"
@staticmethod
def translate_from_rpc(rpcCovariance):
""" Translates a gRPC struct to the SDK equivalent """
return Covariance(
rpcCovariance.covariance_matrix
)
def translate_to_rpc(self, rpcCovariance):
""" Translates this SDK object into its gRPC equivalent """
for elem in self.covariance_matrix:
rpcCovariance.covariance_matrix.append(elem)
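# Illustrative sketch (not part of the generated file): the upper-right triangle of a
# symmetric 6x6 matrix has 21 entries, and per the docstring above the first entry is
# set to NaN to mark the whole covariance as unknown.
def _example_unknown_covariance():
    return Covariance([float("nan")] + [0.0] * 20)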
class VelocityBody:
"""
Velocity type, represented in the Body (X Y Z) frame and in metres/second.
Parameters
----------
x_m_s : float
Velocity in X in metres/second
y_m_s : float
Velocity in Y in metres/second
z_m_s : float
Velocity in Z in metres/second
"""
def __init__(
self,
x_m_s,
y_m_s,
z_m_s):
""" Initializes the VelocityBody object """
self.x_m_s = x_m_s
self.y_m_s = y_m_s
self.z_m_s = z_m_s
def __equals__(self, to_compare):
""" Checks if two VelocityBody are the same """
try:
# Try to compare - this likely fails when it is compared to a non
# VelocityBody object
return \
(self.x_m_s == to_compare.x_m_s) and \
(self.y_m_s == to_compare.y_m_s) and \
(self.z_m_s == to_compare.z_m_s)
except AttributeError:
return False
def __str__(self):
""" VelocityBody in string representation """
struct_repr = ", ".join([
"x_m_s: " + str(self.x_m_s),
"y_m_s: " + str(self.y_m_s),
"z_m_s: " + str(self.z_m_s)
])
return f"VelocityBody: [{struct_repr}]"
@staticmethod
def translate_from_rpc(rpcVelocityBody):
""" Translates a gRPC struct to the SDK equivalent """
return VelocityBody(
rpcVelocityBody.x_m_s,
rpcVelocityBody.y_m_s,
rpcVelocityBody.z_m_s
)
def translate_to_rpc(self, rpcVelocityBody):
""" Translates this SDK object into its gRPC equivalent """
rpcVelocityBody.x_m_s = self.x_m_s
rpcVelocityBody.y_m_s = self.y_m_s
rpcVelocityBody.z_m_s = self.z_m_s
class PositionBody:
"""
Position type, represented in the Body (X Y Z) frame
Parameters
----------
x_m : float
X Position in metres.
y_m : float
Y Position in metres.
z_m : float
Z Position in metres.
"""
def __init__(
self,
x_m,
y_m,
z_m):
""" Initializes the PositionBody object """
self.x_m = x_m
self.y_m = y_m
self.z_m = z_m
def __equals__(self, to_compare):
""" Checks if two PositionBody are the same """
try:
# Try to compare - this likely fails when it is compared to a non
# PositionBody object
return \
(self.x_m == to_compare.x_m) and \
(self.y_m == to_compare.y_m) and \
(self.z_m == to_compare.z_m)
except AttributeError:
return False
def __str__(self):
""" PositionBody in string representation """
struct_repr = ", ".join([
"x_m: " + str(self.x_m),
"y_m: " + str(self.y_m),
"z_m: " + str(self.z_m)
])
return f"PositionBody: [{struct_repr}]"
@staticmethod
def translate_from_rpc(rpcPositionBody):
""" Translates a gRPC struct to the SDK equivalent """
return PositionBody(
rpcPositionBody.x_m,
rpcPositionBody.y_m,
rpcPositionBody.z_m
)
def translate_to_rpc(self, rpcPositionBody):
""" Translates this SDK object into its gRPC equivalent """
rpcPositionBody.x_m = self.x_m
rpcPositionBody.y_m = self.y_m
rpcPositionBody.z_m = self.z_m
class Odometry:
"""
Odometry message type.
Parameters
----------
time_usec : uint64_t
Timestamp (0 to use Backend timestamp).
frame_id : MavFrame
Coordinate frame of reference for the pose data.
child_frame_id : MavFrame
Coordinate frame of reference for the velocity in free space (twist) data.
position_body : PositionBody
Position.
q : Quaternion
Quaternion components, w, x, y, z (1 0 0 0 is the null-rotation).
velocity_body : VelocityBody
Linear velocity (m/s).
angular_velocity_body : AngularVelocityBody
Angular velocity (rad/s).
pose_covariance : Covariance
Pose cross-covariance matrix.
velocity_covariance : Covariance
Velocity cross-covariance matrix.
"""
class MavFrame(Enum):
"""
Mavlink frame id
Values
------
UNDEF
Frame is undefined.
BODY_NED
Setpoint in body NED frame. This makes sense if all position control is externalized - e.g. useful to command 2 m/s^2 acceleration to the right.
VISION_NED
Odometry local coordinate frame of data given by a vision estimation system, Z-down (x: north, y: east, z: down).
ESTIM_NED
Odometry local coordinate frame of data given by an estimator running onboard the vehicle, Z-down (x: north, y: east, z: down).
"""
UNDEF = 0
BODY_NED = 1
VISION_NED = 2
ESTIM_NED = 3
def translate_to_rpc(self, rpcMavFrame):
if self == Odometry.MavFrame.UNDEF:
return telemetry_pb2.Odometry.MAV_FRAME_UNDEF
if self == Odometry.MavFrame.BODY_NED:
return telemetry_pb2.Odometry.MAV_FRAME_BODY_NED
if self == Odometry.MavFrame.VISION_NED:
return telemetry_pb2.Odometry.MAV_FRAME_VISION_NED
if self == Odometry.MavFrame.ESTIM_NED:
return telemetry_pb2.Odometry.MAV_FRAME_ESTIM_NED
@staticmethod
def translate_from_rpc(rpc_enum_value):
""" Parses a gRPC response """
if rpc_enum_value == telemetry_pb2.Odometry.MAV_FRAME_UNDEF:
return Odometry.MavFrame.UNDEF
if rpc_enum_value == telemetry_pb2.Odometry.MAV_FRAME_BODY_NED:
return Odometry.MavFrame.BODY_NED
if rpc_enum_value == telemetry_pb2.Odometry.MAV_FRAME_VISION_NED:
return Odometry.MavFrame.VISION_NED
if rpc_enum_value == telemetry_pb2.Odometry.MAV_FRAME_ESTIM_NED:
return Odometry.MavFrame.ESTIM_NED
def __str__(self):
return self.name
def __init__(
self,
time_usec,
frame_id,
child_frame_id,
position_body,
q,
velocity_body,
angular_velocity_body,
pose_covariance,
velocity_covariance):
""" Initializes the Odometry object """
self.time_usec = time_usec
self.frame_id = frame_id
self.child_frame_id = child_frame_id
self.position_body = position_body
self.q = q
self.velocity_body = velocity_body
self.angular_velocity_body = angular_velocity_body
self.pose_covariance = pose_covariance
self.velocity_covariance = velocity_covariance
def __equals__(self, to_compare):
""" Checks if two Odometry are the same """
try:
# Try to compare - this likely fails when it is compared to a non
# Odometry object
return \
(self.time_usec == to_compare.time_usec) and \
(self.frame_id == to_compare.frame_id) and \
(self.child_frame_id == to_compare.child_frame_id) and \
(self.position_body == to_compare.position_body) and \
(self.q == to_compare.q) and \
(self.velocity_body == to_compare.velocity_body) and \
(self.angular_velocity_body == to_compare.angular_velocity_body) and \
(self.pose_covariance == to_compare.pose_covariance) and \
(self.velocity_covariance == to_compare.velocity_covariance)
except AttributeError:
return False
def __str__(self):
""" Odometry in string representation """
struct_repr = ", ".join([
"time_usec: " + str(self.time_usec),
"frame_id: " + str(self.frame_id),
"child_frame_id: " + str(self.child_frame_id),
"position_body: " + str(self.position_body),
"q: " + str(self.q),
"velocity_body: " + str(self.velocity_body),
"angular_velocity_body: " + str(self.angular_velocity_body),
"pose_covariance: " + str(self.pose_covariance),
"velocity_covariance: " + str(self.velocity_covariance)
])
return f"Odometry: [{struct_repr}]"
@staticmethod
def translate_from_rpc(rpcOdometry):
""" Translates a gRPC struct to the SDK equivalent """
return Odometry(
rpcOdometry.time_usec,
Odometry.MavFrame.translate_from_rpc(rpcOdometry.frame_id),
Odometry.MavFrame.translate_from_rpc(rpcOdometry.child_frame_id),
PositionBody.translate_from_rpc(rpcOdometry.position_body),
Quaternion.translate_from_rpc(rpcOdometry.q),
VelocityBody.translate_from_rpc(rpcOdometry.velocity_body),
AngularVelocityBody.translate_from_rpc(rpcOdometry.angular_velocity_body),
Covariance.translate_from_rpc(rpcOdometry.pose_covariance),
Covariance.translate_from_rpc(rpcOdometry.velocity_covariance)
)
def translate_to_rpc(self, rpcOdometry):
""" Translates this SDK object into its gRPC equivalent """
rpcOdometry.time_usec = self.time_usec
self.frame_id.translate_to_rpc(rpcOdometry.frame_id)
self.child_frame_id.translate_to_rpc(rpcOdometry.child_frame_id)
self.position_body.translate_to_rpc(rpcOdometry.position_body)
self.q.translate_to_rpc(rpcOdometry.q)
self.velocity_body.translate_to_rpc(rpcOdometry.velocity_body)
self.angular_velocity_body.translate_to_rpc(rpcOdometry.angular_velocity_body)
self.pose_covariance.translate_to_rpc(rpcOdometry.pose_covariance)
self.velocity_covariance.translate_to_rpc(rpcOdometry.velocity_covariance)
class PositionNed:
"""
PositionNed message type.
Parameters
----------
north_m : float
Position along north direction in metres
east_m : float
Position along east direction in metres
down_m : float
Position along down direction in metres
"""
def __init__(
self,
north_m,
east_m,
down_m):
""" Initializes the PositionNed object """
self.north_m = north_m
self.east_m = east_m
self.down_m = down_m
def __equals__(self, to_compare):
""" Checks if two PositionNed are the same """
try:
# Try to compare - this likely fails when it is compared to a non
# PositionNed object
return \
(self.north_m == to_compare.north_m) and \
(self.east_m == to_compare.east_m) and \
(self.down_m == to_compare.down_m)
except AttributeError:
return False
def __str__(self):
""" PositionNed in string representation """
struct_repr = ", ".join([
"north_m: " + str(self.north_m),
"east_m: " + str(self.east_m),
"down_m: " + str(self.down_m)
])
return f"PositionNed: [{struct_repr}]"
@staticmethod
def translate_from_rpc(rpcPositionNed):
""" Translates a gRPC struct to the SDK equivalent """
return PositionNed(
rpcPositionNed.north_m,
rpcPositionNed.east_m,
rpcPositionNed.down_m
)
def translate_to_rpc(self, rpcPositionNed):
""" Translates this SDK object into its gRPC equivalent """
rpcPositionNed.north_m = self.north_m
rpcPositionNed.east_m = self.east_m
rpcPositionNed.down_m = self.down_m
class VelocityNed:
"""
VelocityNed message type.
Parameters
----------
north_m_s : float
Velocity along north direction in metres per second
east_m_s : float
Velocity along east direction in metres per second
down_m_s : float
Velocity along down direction in metres per second
"""
def __init__(
self,
north_m_s,
east_m_s,
down_m_s):
""" Initializes the VelocityNed object """
self.north_m_s = north_m_s
self.east_m_s = east_m_s
self.down_m_s = down_m_s
def __equals__(self, to_compare):
""" Checks if two VelocityNed are the same """
try:
# Try to compare - this likely fails when it is compared to a non
# VelocityNed object
return \
(self.north_m_s == to_compare.north_m_s) and \
(self.east_m_s == to_compare.east_m_s) and \
(self.down_m_s == to_compare.down_m_s)
except AttributeError:
return False
def __str__(self):
""" VelocityNed in string representation """
struct_repr = ", ".join([
"north_m_s: " + str(self.north_m_s),
"east_m_s: " + str(self.east_m_s),
"down_m_s: " + str(self.down_m_s)
])
return f"VelocityNed: [{struct_repr}]"
@staticmethod
def translate_from_rpc(rpcVelocityNed):
""" Translates a gRPC struct to the SDK equivalent """
return VelocityNed(
rpcVelocityNed.north_m_s,
rpcVelocityNed.east_m_s,
rpcVelocityNed.down_m_s
)
def translate_to_rpc(self, rpcVelocityNed):
""" Translates this SDK object into its gRPC equivalent """
rpcVelocityNed.north_m_s = self.north_m_s
rpcVelocityNed.east_m_s = self.east_m_s
rpcVelocityNed.down_m_s = self.down_m_s
class PositionVelocityNed:
"""
PositionVelocityNed message type.
Parameters
----------
position : PositionNed
Position (NED)
velocity : VelocityNed
Velocity (NED)
"""
def __init__(
self,
position,
velocity):
""" Initializes the PositionVelocityNed object """
self.position = position
self.velocity = velocity
def __equals__(self, to_compare):
""" Checks if two PositionVelocityNed are the same """
try:
# Try to compare - this likely fails when it is compared to a non
# PositionVelocityNed object
return \
(self.position == to_compare.position) and \
(self.velocity == to_compare.velocity)
except AttributeError:
return False
def __str__(self):
""" PositionVelocityNed in string representation """
struct_repr = ", ".join([
"position: " + str(self.position),
"velocity: " + str(self.velocity)
])
return f"PositionVelocityNed: [{struct_repr}]"
@staticmethod
def translate_from_rpc(rpcPositionVelocityNed):
""" Translates a gRPC struct to the SDK equivalent """
return PositionVelocityNed(
PositionNed.translate_from_rpc(rpcPositionVelocityNed.position),
VelocityNed.translate_from_rpc(rpcPositionVelocityNed.velocity)
)
def translate_to_rpc(self, rpcPositionVelocityNed):
""" Translates this SDK object into its gRPC equivalent """
self.position.translate_to_rpc(rpcPositionVelocityNed.position)
self.velocity.translate_to_rpc(rpcPositionVelocityNed.velocity)
class GroundTruth:
"""
GroundTruth message type.
Parameters
----------
latitude_deg : double
Latitude in degrees (range: -90 to +90)
longitude_deg : double
Longitude in degrees (range: -180 to 180)
absolute_altitude_m : float
Altitude AMSL (above mean sea level) in metres
"""
def __init__(
self,
latitude_deg,
longitude_deg,
absolute_altitude_m):
""" Initializes the GroundTruth object """
self.latitude_deg = latitude_deg
self.longitude_deg = longitude_deg
self.absolute_altitude_m = absolute_altitude_m
def __equals__(self, to_compare):
""" Checks if two GroundTruth are the same """
try:
# Try to compare - this likely fails when it is compared to a non
# GroundTruth object
return \
(self.latitude_deg == to_compare.latitude_deg) and \
(self.longitude_deg == to_compare.longitude_deg) and \
(self.absolute_altitude_m == to_compare.absolute_altitude_m)
except AttributeError:
return False
def __str__(self):
""" GroundTruth in string representation """
struct_repr = ", ".join([
"latitude_deg: " + str(self.latitude_deg),
"longitude_deg: " + str(self.longitude_deg),
"absolute_altitude_m: " + str(self.absolute_altitude_m)
])
return f"GroundTruth: [{struct_repr}]"
@staticmethod
def translate_from_rpc(rpcGroundTruth):
""" Translates a gRPC struct to the SDK equivalent """
return GroundTruth(
rpcGroundTruth.latitude_deg,
rpcGroundTruth.longitude_deg,
rpcGroundTruth.absolute_altitude_m
)
def translate_to_rpc(self, rpcGroundTruth):
""" Translates this SDK object into its gRPC equivalent """
rpcGroundTruth.latitude_deg = self.latitude_deg
rpcGroundTruth.longitude_deg = self.longitude_deg
rpcGroundTruth.absolute_altitude_m = self.absolute_altitude_m
class FixedwingMetrics:
"""
FixedwingMetrics message type.
Parameters
----------
airspeed_m_s : float
Current indicated airspeed (IAS) in metres per second
throttle_percentage : float
Current throttle setting (0 to 100)
climb_rate_m_s : float
Current climb rate in metres per second
"""
def __init__(
self,
airspeed_m_s,
throttle_percentage,
climb_rate_m_s):
""" Initializes the FixedwingMetrics object """
self.airspeed_m_s = airspeed_m_s
self.throttle_percentage = throttle_percentage
self.climb_rate_m_s = climb_rate_m_s
def __equals__(self, to_compare):
""" Checks if two FixedwingMetrics are the same """
try:
# Try to compare - this likely fails when it is compared to a non
# FixedwingMetrics object
return \
(self.airspeed_m_s == to_compare.airspeed_m_s) and \
(self.throttle_percentage == to_compare.throttle_percentage) and \
(self.climb_rate_m_s == to_compare.climb_rate_m_s)
except AttributeError:
return False
def __str__(self):
""" FixedwingMetrics in string representation """
struct_repr = ", ".join([
"airspeed_m_s: " + str(self.airspeed_m_s),
"throttle_percentage: " + str(self.throttle_percentage),
"climb_rate_m_s: " + str(self.climb_rate_m_s)
])
return f"FixedwingMetrics: [{struct_repr}]"
@staticmethod
def translate_from_rpc(rpcFixedwingMetrics):
""" Translates a gRPC struct to the SDK equivalent """
return FixedwingMetrics(
rpcFixedwingMetrics.airspeed_m_s,
rpcFixedwingMetrics.throttle_percentage,
rpcFixedwingMetrics.climb_rate_m_s
)
def translate_to_rpc(self, rpcFixedwingMetrics):
""" Translates this SDK object into its gRPC equivalent """
rpcFixedwingMetrics.airspeed_m_s = self.airspeed_m_s
rpcFixedwingMetrics.throttle_percentage = self.throttle_percentage
rpcFixedwingMetrics.climb_rate_m_s = self.climb_rate_m_s
class AccelerationFrd:
"""
AccelerationFrd message type.
Parameters
----------
forward_m_s2 : float
Acceleration in forward direction in metres per second^2
right_m_s2 : float
Acceleration in right direction in metres per second^2
down_m_s2 : float
Acceleration in down direction in metres per second^2
"""
def __init__(
self,
forward_m_s2,
right_m_s2,
down_m_s2):
""" Initializes the AccelerationFrd object """
self.forward_m_s2 = forward_m_s2
self.right_m_s2 = right_m_s2
self.down_m_s2 = down_m_s2
def __equals__(self, to_compare):
""" Checks if two AccelerationFrd are the same """
try:
# Try to compare - this likely fails when it is compared to a non
# AccelerationFrd object
return \
(self.forward_m_s2 == to_compare.forward_m_s2) and \
(self.right_m_s2 == to_compare.right_m_s2) and \
(self.down_m_s2 == to_compare.down_m_s2)
except AttributeError:
return False
def __str__(self):
""" AccelerationFrd in string representation """
struct_repr = ", ".join([
"forward_m_s2: " + str(self.forward_m_s2),
"right_m_s2: " + str(self.right_m_s2),
"down_m_s2: " + str(self.down_m_s2)
])
return f"AccelerationFrd: [{struct_repr}]"
@staticmethod
def translate_from_rpc(rpcAccelerationFrd):
""" Translates a gRPC struct to the SDK equivalent """
return AccelerationFrd(
rpcAccelerationFrd.forward_m_s2,
rpcAccelerationFrd.right_m_s2,
rpcAccelerationFrd.down_m_s2
)
def translate_to_rpc(self, rpcAccelerationFrd):
""" Translates this SDK object into its gRPC equivalent """
rpcAccelerationFrd.forward_m_s2 = self.forward_m_s2
rpcAccelerationFrd.right_m_s2 = self.right_m_s2
rpcAccelerationFrd.down_m_s2 = self.down_m_s2
class AngularVelocityFrd:
"""
AngularVelocityFrd message type.
Parameters
----------
forward_rad_s : float
Angular velocity in forward direction in radians per second
right_rad_s : float
Angular velocity in right direction in radians per second
down_rad_s : float
Angular velocity in Down direction in radians per second
"""
def __init__(
self,
forward_rad_s,
right_rad_s,
down_rad_s):
""" Initializes the AngularVelocityFrd object """
self.forward_rad_s = forward_rad_s
self.right_rad_s = right_rad_s
self.down_rad_s = down_rad_s
def __equals__(self, to_compare):
""" Checks if two AngularVelocityFrd are the same """
try:
# Try to compare - this likely fails when it is compared to a non
# AngularVelocityFrd object
return \
(self.forward_rad_s == to_compare.forward_rad_s) and \
(self.right_rad_s == to_compare.right_rad_s) and \
(self.down_rad_s == to_compare.down_rad_s)
except AttributeError:
return False
def __str__(self):
""" AngularVelocityFrd in string representation """
struct_repr = ", ".join([
"forward_rad_s: " + str(self.forward_rad_s),
"right_rad_s: " + str(self.right_rad_s),
"down_rad_s: " + str(self.down_rad_s)
])
return f"AngularVelocityFrd: [{struct_repr}]"
@staticmethod
def translate_from_rpc(rpcAngularVelocityFrd):
""" Translates a gRPC struct to the SDK equivalent """
return AngularVelocityFrd(
rpcAngularVelocityFrd.forward_rad_s,
rpcAngularVelocityFrd.right_rad_s,
rpcAngularVelocityFrd.down_rad_s
)
def translate_to_rpc(self, rpcAngularVelocityFrd):
""" Translates this SDK object into its gRPC equivalent """
rpcAngularVelocityFrd.forward_rad_s = self.forward_rad_s
rpcAngularVelocityFrd.right_rad_s = self.right_rad_s
rpcAngularVelocityFrd.down_rad_s = self.down_rad_s
class MagneticFieldFrd:
"""
MagneticFieldFrd message type.
Parameters
----------
forward_gauss : float
Magnetic field in forward direction measured in Gauss
right_gauss : float
Magnetic field in right direction measured in Gauss
down_gauss : float
Magnetic field in Down direction measured in Gauss
"""
def __init__(
self,
forward_gauss,
right_gauss,
down_gauss):
""" Initializes the MagneticFieldFrd object """
self.forward_gauss = forward_gauss
self.right_gauss = right_gauss
self.down_gauss = down_gauss
def __equals__(self, to_compare):
""" Checks if two MagneticFieldFrd are the same """
try:
# Try to compare - this likely fails when it is compared to a non
# MagneticFieldFrd object
return \
(self.forward_gauss == to_compare.forward_gauss) and \
(self.right_gauss == to_compare.right_gauss) and \
(self.down_gauss == to_compare.down_gauss)
except AttributeError:
return False
def __str__(self):
""" MagneticFieldFrd in string representation """
struct_repr = ", ".join([
"forward_gauss: " + str(self.forward_gauss),
"right_gauss: " + str(self.right_gauss),
"down_gauss: " + str(self.down_gauss)
])
return f"MagneticFieldFrd: [{struct_repr}]"
@staticmethod
def translate_from_rpc(rpcMagneticFieldFrd):
""" Translates a gRPC struct to the SDK equivalent """
return MagneticFieldFrd(
rpcMagneticFieldFrd.forward_gauss,
rpcMagneticFieldFrd.right_gauss,
rpcMagneticFieldFrd.down_gauss
)
def translate_to_rpc(self, rpcMagneticFieldFrd):
""" Translates this SDK object into its gRPC equivalent """
rpcMagneticFieldFrd.forward_gauss = self.forward_gauss
rpcMagneticFieldFrd.right_gauss = self.right_gauss
rpcMagneticFieldFrd.down_gauss = self.down_gauss
class Imu:
"""
Imu message type.
Parameters
----------
acceleration_frd : AccelerationFrd
Acceleration
angular_velocity_frd : AngularVelocityFrd
Angular velocity
magnetic_field_frd : MagneticFieldFrd
Magnetic field
temperature_degc : float
Temperature
"""
def __init__(
self,
acceleration_frd,
angular_velocity_frd,
magnetic_field_frd,
temperature_degc):
""" Initializes the Imu object """
self.acceleration_frd = acceleration_frd
self.angular_velocity_frd = angular_velocity_frd
self.magnetic_field_frd = magnetic_field_frd
self.temperature_degc = temperature_degc
def __equals__(self, to_compare):
""" Checks if two Imu are the same """
try:
# Try to compare - this likely fails when it is compared to a non
# Imu object
return \
(self.acceleration_frd == to_compare.acceleration_frd) and \
(self.angular_velocity_frd == to_compare.angular_velocity_frd) and \
(self.magnetic_field_frd == to_compare.magnetic_field_frd) and \
(self.temperature_degc == to_compare.temperature_degc)
except AttributeError:
return False
def __str__(self):
""" Imu in string representation """
struct_repr = ", ".join([
"acceleration_frd: " + str(self.acceleration_frd),
"angular_velocity_frd: " + str(self.angular_velocity_frd),
"magnetic_field_frd: " + str(self.magnetic_field_frd),
"temperature_degc: " + str(self.temperature_degc)
])
return f"Imu: [{struct_repr}]"
@staticmethod
def translate_from_rpc(rpcImu):
""" Translates a gRPC struct to the SDK equivalent """
return Imu(
AccelerationFrd.translate_from_rpc(rpcImu.acceleration_frd),
AngularVelocityFrd.translate_from_rpc(rpcImu.angular_velocity_frd),
MagneticFieldFrd.translate_from_rpc(rpcImu.magnetic_field_frd),
rpcImu.temperature_degc
)
def translate_to_rpc(self, rpcImu):
""" Translates this SDK object into its gRPC equivalent """
self.acceleration_frd.translate_to_rpc(rpcImu.acceleration_frd)
self.angular_velocity_frd.translate_to_rpc(rpcImu.angular_velocity_frd)
self.magnetic_field_frd.translate_to_rpc(rpcImu.magnetic_field_frd)
rpcImu.temperature_degc = self.temperature_degc
class TelemetryResult:
"""
Result type.
Parameters
----------
result : Result
Result enum value
result_str : std::string
Human-readable English string describing the result
"""
class Result(Enum):
"""
Possible results returned for telemetry requests.
Values
------
UNKNOWN
Unknown result
SUCCESS
Success: the telemetry command was accepted by the vehicle
NO_SYSTEM
No system connected
CONNECTION_ERROR
Connection error
BUSY
Vehicle is busy
COMMAND_DENIED
Command refused by vehicle
TIMEOUT
Request timed out
"""
UNKNOWN = 0
SUCCESS = 1
NO_SYSTEM = 2
CONNECTION_ERROR = 3
BUSY = 4
COMMAND_DENIED = 5
TIMEOUT = 6
def translate_to_rpc(self, rpcResult):
if self == TelemetryResult.Result.UNKNOWN:
return telemetry_pb2.TelemetryResult.RESULT_UNKNOWN
if self == TelemetryResult.Result.SUCCESS:
return telemetry_pb2.TelemetryResult.RESULT_SUCCESS
if self == TelemetryResult.Result.NO_SYSTEM:
return telemetry_pb2.TelemetryResult.RESULT_NO_SYSTEM
if self == TelemetryResult.Result.CONNECTION_ERROR:
return telemetry_pb2.TelemetryResult.RESULT_CONNECTION_ERROR
if self == TelemetryResult.Result.BUSY:
return telemetry_pb2.TelemetryResult.RESULT_BUSY
if self == TelemetryResult.Result.COMMAND_DENIED:
return telemetry_pb2.TelemetryResult.RESULT_COMMAND_DENIED
if self == TelemetryResult.Result.TIMEOUT:
return telemetry_pb2.TelemetryResult.RESULT_TIMEOUT
@staticmethod
def translate_from_rpc(rpc_enum_value):
""" Parses a gRPC response """
if rpc_enum_value == telemetry_pb2.TelemetryResult.RESULT_UNKNOWN:
return TelemetryResult.Result.UNKNOWN
if rpc_enum_value == telemetry_pb2.TelemetryResult.RESULT_SUCCESS:
return TelemetryResult.Result.SUCCESS
if rpc_enum_value == telemetry_pb2.TelemetryResult.RESULT_NO_SYSTEM:
return TelemetryResult.Result.NO_SYSTEM
if rpc_enum_value == telemetry_pb2.TelemetryResult.RESULT_CONNECTION_ERROR:
return TelemetryResult.Result.CONNECTION_ERROR
if rpc_enum_value == telemetry_pb2.TelemetryResult.RESULT_BUSY:
return TelemetryResult.Result.BUSY
if rpc_enum_value == telemetry_pb2.TelemetryResult.RESULT_COMMAND_DENIED:
return TelemetryResult.Result.COMMAND_DENIED
if rpc_enum_value == telemetry_pb2.TelemetryResult.RESULT_TIMEOUT:
return TelemetryResult.Result.TIMEOUT
def __str__(self):
return self.name
def __init__(
self,
result,
result_str):
""" Initializes the TelemetryResult object """
self.result = result
self.result_str = result_str
def __equals__(self, to_compare):
""" Checks if two TelemetryResult are the same """
try:
# Try to compare - this likely fails when it is compared to a non
# TelemetryResult object
return \
(self.result == to_compare.result) and \
(self.result_str == to_compare.result_str)
except AttributeError:
return False
def __str__(self):
""" TelemetryResult in string representation """
struct_repr = ", ".join([
"result: " + str(self.result),
"result_str: " + str(self.result_str)
])
return f"TelemetryResult: [{struct_repr}]"
@staticmethod
def translate_from_rpc(rpcTelemetryResult):
""" Translates a gRPC struct to the SDK equivalent """
return TelemetryResult(
TelemetryResult.Result.translate_from_rpc(rpcTelemetryResult.result),
rpcTelemetryResult.result_str
)
def translate_to_rpc(self, rpcTelemetryResult):
""" Translates this SDK object into its gRPC equivalent """
self.result.translate_to_rpc(rpcTelemetryResult.result)
rpcTelemetryResult.result_str = self.result_str
class TelemetryError(Exception):
""" Raised when a TelemetryResult is a fail code """
def __init__(self, result, origin, *params):
self._result = result
self._origin = origin
self._params = params
def __str__(self):
return f"{self._result.result}: '{self._result.result_str}'; origin: {self._origin}; params: {self._params}"
class Telemetry(AsyncBase):
"""
Allow users to get vehicle telemetry and state information
(e.g. battery, GPS, RC connection, flight mode etc.) and set telemetry update rates.
Generated by dcsdkgen - MAVSDK Telemetry API
"""
# Plugin name
name = "Telemetry"
def _setup_stub(self, channel):
""" Setups the api stub """
self._stub = telemetry_pb2_grpc.TelemetryServiceStub(channel)
def _extract_result(self, response):
""" Returns the response status and description """
return TelemetryResult.translate_from_rpc(response.telemetry_result)
async def position(self):
"""
Subscribe to 'position' updates.
Yields
-------
position : Position
The next position
"""
request = telemetry_pb2.SubscribePositionRequest()
position_stream = self._stub.SubscribePosition(request)
try:
async for response in position_stream:
yield Position.translate_from_rpc(response.position)
finally:
position_stream.cancel()
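# Illustrative sketch (not part of the generated file): a typical way to consume this
# async generator, assuming 'drone' is a connected mavsdk.System instance:
#
#     async for position in drone.telemetry.position():
#         print(position)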
async def home(self):
"""
Subscribe to 'home position' updates.
Yields
-------
home : Position
The next home position
"""
request = telemetry_pb2.SubscribeHomeRequest()
home_stream = self._stub.SubscribeHome(request)
try:
async for response in home_stream:
yield Position.translate_from_rpc(response.home)
finally:
home_stream.cancel()
async def in_air(self):
"""
Subscribe to in-air updates.
Yields
-------
is_in_air : bool
The next 'in-air' state
"""
request = telemetry_pb2.SubscribeInAirRequest()
in_air_stream = self._stub.SubscribeInAir(request)
try:
async for response in in_air_stream:
yield response.is_in_air
finally:
in_air_stream.cancel()
async def landed_state(self):
"""
Subscribe to landed state updates
Yields
-------
landed_state : LandedState
The next 'landed' state
"""
request = telemetry_pb2.SubscribeLandedStateRequest()
landed_state_stream = self._stub.SubscribeLandedState(request)
try:
async for response in landed_state_stream:
yield LandedState.translate_from_rpc(response.landed_state)
finally:
landed_state_stream.cancel()
async def armed(self):
"""
Subscribe to armed updates.
Yields
-------
is_armed : bool
The next 'armed' state
"""
request = telemetry_pb2.SubscribeArmedRequest()
armed_stream = self._stub.SubscribeArmed(request)
try:
async for response in armed_stream:
yield response.is_armed
finally:
armed_stream.cancel()
async def attitude_quaternion(self):
"""
Subscribe to 'attitude' updates (quaternion).
Yields
-------
attitude_quaternion : Quaternion
The next attitude (quaternion)
"""
request = telemetry_pb2.SubscribeAttitudeQuaternionRequest()
attitude_quaternion_stream = self._stub.SubscribeAttitudeQuaternion(request)
try:
async for response in attitude_quaternion_stream:
yield Quaternion.translate_from_rpc(response.attitude_quaternion)
finally:
attitude_quaternion_stream.cancel()
async def attitude_euler(self):
"""
Subscribe to 'attitude' updates (Euler).
Yields
-------
attitude_euler : EulerAngle
The next attitude (Euler)
"""
request = telemetry_pb2.SubscribeAttitudeEulerRequest()
attitude_euler_stream = self._stub.SubscribeAttitudeEuler(request)
try:
async for response in attitude_euler_stream:
yield EulerAngle.translate_from_rpc(response.attitude_euler)
finally:
attitude_euler_stream.cancel()
async def attitude_angular_velocity_body(self):
"""
Subscribe to 'attitude' updates (angular velocity)
Yields
-------
attitude_angular_velocity_body : AngularVelocityBody
The next angular velocity (rad/s)
"""
request = telemetry_pb2.SubscribeAttitudeAngularVelocityBodyRequest()
attitude_angular_velocity_body_stream = self._stub.SubscribeAttitudeAngularVelocityBody(request)
try:
async for response in attitude_angular_velocity_body_stream:
yield AngularVelocityBody.translate_from_rpc(response.attitude_angular_velocity_body)
finally:
attitude_angular_velocity_body_stream.cancel()
async def camera_attitude_quaternion(self):
"""
Subscribe to 'camera attitude' updates (quaternion).
Yields
-------
attitude_quaternion : Quaternion
The next camera attitude (quaternion)
"""
request = telemetry_pb2.SubscribeCameraAttitudeQuaternionRequest()
camera_attitude_quaternion_stream = self._stub.SubscribeCameraAttitudeQuaternion(request)
try:
async for response in camera_attitude_quaternion_stream:
yield Quaternion.translate_from_rpc(response.attitude_quaternion)
finally:
camera_attitude_quaternion_stream.cancel()
async def camera_attitude_euler(self):
"""
Subscribe to 'camera attitude' updates (Euler).
Yields
-------
attitude_euler : EulerAngle
The next camera attitude (Euler)
"""
request = telemetry_pb2.SubscribeCameraAttitudeEulerRequest()
camera_attitude_euler_stream = self._stub.SubscribeCameraAttitudeEuler(request)
try:
async for response in camera_attitude_euler_stream:
yield EulerAngle.translate_from_rpc(response.attitude_euler)
finally:
camera_attitude_euler_stream.cancel()
async def velocity_ned(self):
"""
Subscribe to 'ground speed' updates (NED).
Yields
-------
velocity_ned : VelocityNed
The next velocity (NED)
"""
request = telemetry_pb2.SubscribeVelocityNedRequest()
velocity_ned_stream = self._stub.SubscribeVelocityNed(request)
try:
async for response in velocity_ned_stream:
yield VelocityNed.translate_from_rpc(response.velocity_ned)
finally:
velocity_ned_stream.cancel()
async def gps_info(self):
"""
Subscribe to 'GPS info' updates.
Yields
-------
gps_info : GpsInfo
The next 'GPS info' state
"""
request = telemetry_pb2.SubscribeGpsInfoRequest()
gps_info_stream = self._stub.SubscribeGpsInfo(request)
try:
async for response in gps_info_stream:
yield GpsInfo.translate_from_rpc(response.gps_info)
finally:
gps_info_stream.cancel()
async def battery(self):
"""
Subscribe to 'battery' updates.
Yields
-------
battery : Battery
The next 'battery' state
"""
request = telemetry_pb2.SubscribeBatteryRequest()
battery_stream = self._stub.SubscribeBattery(request)
try:
async for response in battery_stream:
yield Battery.translate_from_rpc(response.battery)
finally:
battery_stream.cancel()
async def flight_mode(self):
"""
Subscribe to 'flight mode' updates.
Yields
-------
flight_mode : FlightMode
The next flight mode
"""
request = telemetry_pb2.SubscribeFlightModeRequest()
flight_mode_stream = self._stub.SubscribeFlightMode(request)
try:
async for response in flight_mode_stream:
yield FlightMode.translate_from_rpc(response.flight_mode)
finally:
flight_mode_stream.cancel()
async def health(self):
"""
Subscribe to 'health' updates.
Yields
-------
health : Health
The next 'health' state
"""
request = telemetry_pb2.SubscribeHealthRequest()
health_stream = self._stub.SubscribeHealth(request)
try:
async for response in health_stream:
yield Health.translate_from_rpc(response.health)
finally:
health_stream.cancel()
async def rc_status(self):
"""
Subscribe to 'RC status' updates.
Yields
-------
rc_status : RcStatus
The next RC status
"""
request = telemetry_pb2.SubscribeRcStatusRequest()
rc_status_stream = self._stub.SubscribeRcStatus(request)
try:
async for response in rc_status_stream:
yield RcStatus.translate_from_rpc(response.rc_status)
finally:
rc_status_stream.cancel()
async def status_text(self):
"""
Subscribe to 'status text' updates.
Yields
-------
status_text : StatusText
The next 'status text'
"""
request = telemetry_pb2.SubscribeStatusTextRequest()
status_text_stream = self._stub.SubscribeStatusText(request)
try:
async for response in status_text_stream:
yield StatusText.translate_from_rpc(response.status_text)
finally:
status_text_stream.cancel()
async def actuator_control_target(self):
"""
Subscribe to 'actuator control target' updates.
Yields
-------
actuator_control_target : ActuatorControlTarget
The next actuator control target
"""
request = telemetry_pb2.SubscribeActuatorControlTargetRequest()
actuator_control_target_stream = self._stub.SubscribeActuatorControlTarget(request)
try:
async for response in actuator_control_target_stream:
yield ActuatorControlTarget.translate_from_rpc(response.actuator_control_target)
finally:
actuator_control_target_stream.cancel()
async def actuator_output_status(self):
"""
Subscribe to 'actuator output status' updates.
Yields
-------
actuator_output_status : ActuatorOutputStatus
The next actuator output status
"""
request = telemetry_pb2.SubscribeActuatorOutputStatusRequest()
actuator_output_status_stream = self._stub.SubscribeActuatorOutputStatus(request)
try:
async for response in actuator_output_status_stream:
yield ActuatorOutputStatus.translate_from_rpc(response.actuator_output_status)
finally:
actuator_output_status_stream.cancel()
async def odometry(self):
"""
Subscribe to 'odometry' updates.
Yields
-------
odometry : Odometry
The next odometry status
"""
request = telemetry_pb2.SubscribeOdometryRequest()
odometry_stream = self._stub.SubscribeOdometry(request)
try:
async for response in odometry_stream:
yield Odometry.translate_from_rpc(response.odometry)
finally:
odometry_stream.cancel()
async def position_velocity_ned(self):
"""
Subscribe to 'position velocity' updates.
Yields
-------
position_velocity_ned : PositionVelocityNed
The next position and velocity status
"""
request = telemetry_pb2.SubscribePositionVelocityNedRequest()
position_velocity_ned_stream = self._stub.SubscribePositionVelocityNed(request)
try:
async for response in position_velocity_ned_stream:
yield PositionVelocityNed.translate_from_rpc(response.position_velocity_ned)
finally:
position_velocity_ned_stream.cancel()
async def ground_truth(self):
"""
Subscribe to 'ground truth' updates.
Yields
-------
ground_truth : GroundTruth
Ground truth position information available in simulation
"""
request = telemetry_pb2.SubscribeGroundTruthRequest()
ground_truth_stream = self._stub.SubscribeGroundTruth(request)
try:
async for response in ground_truth_stream:
yield GroundTruth.translate_from_rpc(response.ground_truth)
finally:
ground_truth_stream.cancel()
async def fixedwing_metrics(self):
"""
Subscribe to 'fixedwing metrics' updates.
Yields
-------
fixedwing_metrics : FixedwingMetrics
The next fixedwing metrics
"""
request = telemetry_pb2.SubscribeFixedwingMetricsRequest()
fixedwing_metrics_stream = self._stub.SubscribeFixedwingMetrics(request)
try:
async for response in fixedwing_metrics_stream:
yield FixedwingMetrics.translate_from_rpc(response.fixedwing_metrics)
finally:
fixedwing_metrics_stream.cancel()
async def imu(self):
"""
Subscribe to 'IMU' updates.
Yields
-------
imu : Imu
The next IMU status
"""
request = telemetry_pb2.SubscribeImuRequest()
imu_stream = self._stub.SubscribeImu(request)
try:
async for response in imu_stream:
yield Imu.translate_from_rpc(response.imu)
finally:
imu_stream.cancel()
async def health_all_ok(self):
"""
Subscribe to 'HealthAllOk' updates.
Yields
-------
is_health_all_ok : bool
The next 'health all ok' status
"""
request = telemetry_pb2.SubscribeHealthAllOkRequest()
health_all_ok_stream = self._stub.SubscribeHealthAllOk(request)
try:
async for response in health_all_ok_stream:
yield response.is_health_all_ok
finally:
health_all_ok_stream.cancel()
async def unix_epoch_time(self):
"""
Subscribe to 'unix epoch time' updates.
Yields
-------
time_us : uint64_t
The next 'unix epoch time' status
"""
request = telemetry_pb2.SubscribeUnixEpochTimeRequest()
unix_epoch_time_stream = self._stub.SubscribeUnixEpochTime(request)
try:
async for response in unix_epoch_time_stream:
yield response.time_us
finally:
unix_epoch_time_stream.cancel()
async def set_rate_position(self, rate_hz):
"""
Set rate to 'position' updates.
Parameters
----------
rate_hz : double
The requested rate (in Hertz)
Raises
------
TelemetryError
If the request fails. The error contains the reason for the failure.
"""
request = telemetry_pb2.SetRatePositionRequest()
request.rate_hz = rate_hz
response = await self._stub.SetRatePosition(request)
result = self._extract_result(response)
if result.result is not TelemetryResult.Result.SUCCESS:
raise TelemetryError(result, "set_rate_position()", rate_hz)
async def set_rate_home(self, rate_hz):
"""
Set rate to 'home position' updates.
Parameters
----------
rate_hz : double
The requested rate (in Hertz)
Raises
------
TelemetryError
If the request fails. The error contains the reason for the failure.
"""
request = telemetry_pb2.SetRateHomeRequest()
request.rate_hz = rate_hz
response = await self._stub.SetRateHome(request)
result = self._extract_result(response)
if result.result is not TelemetryResult.Result.SUCCESS:
raise TelemetryError(result, "set_rate_home()", rate_hz)
async def set_rate_in_air(self, rate_hz):
"""
Set rate to in-air updates.
Parameters
----------
rate_hz : double
The requested rate (in Hertz)
Raises
------
TelemetryError
If the request fails. The error contains the reason for the failure.
"""
request = telemetry_pb2.SetRateInAirRequest()
request.rate_hz = rate_hz
response = await self._stub.SetRateInAir(request)
result = self._extract_result(response)
if result.result is not TelemetryResult.Result.SUCCESS:
raise TelemetryError(result, "set_rate_in_air()", rate_hz)
async def set_rate_landed_state(self, rate_hz):
"""
Set rate to landed state updates
Parameters
----------
rate_hz : double
The requested rate (in Hertz)
Raises
------
TelemetryError
If the request fails. The error contains the reason for the failure.
"""
request = telemetry_pb2.SetRateLandedStateRequest()
request.rate_hz = rate_hz
response = await self._stub.SetRateLandedState(request)
result = self._extract_result(response)
if result.result is not TelemetryResult.Result.SUCCESS:
raise TelemetryError(result, "set_rate_landed_state()", rate_hz)
async def set_rate_attitude(self, rate_hz):
"""
Set rate to 'attitude' updates.
Parameters
----------
rate_hz : double
The requested rate (in Hertz)
Raises
------
TelemetryError
If the request fails. The error contains the reason for the failure.
"""
request = telemetry_pb2.SetRateAttitudeRequest()
request.rate_hz = rate_hz
response = await self._stub.SetRateAttitude(request)
result = self._extract_result(response)
if result.result is not TelemetryResult.Result.SUCCESS:
raise TelemetryError(result, "set_rate_attitude()", rate_hz)
async def set_rate_camera_attitude(self, rate_hz):
"""
Set rate of camera attitude updates.
Parameters
----------
rate_hz : double
The requested rate (in Hertz)
Raises
------
TelemetryError
If the request fails. The error contains the reason for the failure.
"""
request = telemetry_pb2.SetRateCameraAttitudeRequest()
request.rate_hz = rate_hz
response = await self._stub.SetRateCameraAttitude(request)
result = self._extract_result(response)
if result.result is not TelemetryResult.Result.SUCCESS:
raise TelemetryError(result, "set_rate_camera_attitude()", rate_hz)
async def set_rate_velocity_ned(self, rate_hz):
"""
Set rate to 'ground speed' updates (NED).
Parameters
----------
rate_hz : double
The requested rate (in Hertz)
Raises
------
TelemetryError
If the request fails. The error contains the reason for the failure.
"""
request = telemetry_pb2.SetRateVelocityNedRequest()
request.rate_hz = rate_hz
response = await self._stub.SetRateVelocityNed(request)
result = self._extract_result(response)
if result.result is not TelemetryResult.Result.SUCCESS:
raise TelemetryError(result, "set_rate_velocity_ned()", rate_hz)
async def set_rate_gps_info(self, rate_hz):
"""
Set rate to 'GPS info' updates.
Parameters
----------
rate_hz : double
The requested rate (in Hertz)
Raises
------
TelemetryError
If the request fails. The error contains the reason for the failure.
"""
request = telemetry_pb2.SetRateGpsInfoRequest()
request.rate_hz = rate_hz
response = await self._stub.SetRateGpsInfo(request)
result = self._extract_result(response)
if result.result is not TelemetryResult.Result.SUCCESS:
raise TelemetryError(result, "set_rate_gps_info()", rate_hz)
async def set_rate_battery(self, rate_hz):
"""
Set rate to 'battery' updates.
Parameters
----------
rate_hz : double
The requested rate (in Hertz)
Raises
------
TelemetryError
If the request fails. The error contains the reason for the failure.
"""
request = telemetry_pb2.SetRateBatteryRequest()
request.rate_hz = rate_hz
response = await self._stub.SetRateBattery(request)
result = self._extract_result(response)
if result.result is not TelemetryResult.Result.SUCCESS:
raise TelemetryError(result, "set_rate_battery()", rate_hz)
async def set_rate_rc_status(self, rate_hz):
"""
Set rate to 'RC status' updates.
Parameters
----------
rate_hz : double
The requested rate (in Hertz)
Raises
------
TelemetryError
If the request fails. The error contains the reason for the failure.
"""
request = telemetry_pb2.SetRateRcStatusRequest()
request.rate_hz = rate_hz
response = await self._stub.SetRateRcStatus(request)
result = self._extract_result(response)
if result.result is not TelemetryResult.Result.SUCCESS:
raise TelemetryError(result, "set_rate_rc_status()", rate_hz)
async def set_rate_actuator_control_target(self, rate_hz):
"""
Set rate to 'actuator control target' updates.
Parameters
----------
rate_hz : double
The requested rate (in Hertz)
Raises
------
TelemetryError
If the request fails. The error contains the reason for the failure.
"""
request = telemetry_pb2.SetRateActuatorControlTargetRequest()
request.rate_hz = rate_hz
response = await self._stub.SetRateActuatorControlTarget(request)
result = self._extract_result(response)
if result.result is not TelemetryResult.Result.SUCCESS:
raise TelemetryError(result, "set_rate_actuator_control_target()", rate_hz)
async def set_rate_actuator_output_status(self, rate_hz):
"""
Set rate to 'actuator output status' updates.
Parameters
----------
rate_hz : double
The requested rate (in Hertz)
Raises
------
TelemetryError
If the request fails. The error contains the reason for the failure.
"""
request = telemetry_pb2.SetRateActuatorOutputStatusRequest()
request.rate_hz = rate_hz
response = await self._stub.SetRateActuatorOutputStatus(request)
result = self._extract_result(response)
if result.result is not TelemetryResult.Result.SUCCESS:
raise TelemetryError(result, "set_rate_actuator_output_status()", rate_hz)
async def set_rate_odometry(self, rate_hz):
"""
Set rate to 'odometry' updates.
Parameters
----------
rate_hz : double
The requested rate (in Hertz)
Raises
------
TelemetryError
If the request fails. The error contains the reason for the failure.
"""
request = telemetry_pb2.SetRateOdometryRequest()
request.rate_hz = rate_hz
response = await self._stub.SetRateOdometry(request)
result = self._extract_result(response)
if result.result is not TelemetryResult.Result.SUCCESS:
raise TelemetryError(result, "set_rate_odometry()", rate_hz)
async def set_rate_position_velocity_ned(self, rate_hz):
"""
Set rate to 'position velocity' updates.
Parameters
----------
rate_hz : double
The requested rate (in Hertz)
Raises
------
TelemetryError
If the request fails. The error contains the reason for the failure.
"""
request = telemetry_pb2.SetRatePositionVelocityNedRequest()
request.rate_hz = rate_hz
response = await self._stub.SetRatePositionVelocityNed(request)
result = self._extract_result(response)
if result.result is not TelemetryResult.Result.SUCCESS:
raise TelemetryError(result, "set_rate_position_velocity_ned()", rate_hz)
async def set_rate_ground_truth(self, rate_hz):
"""
Set rate to 'ground truth' updates.
Parameters
----------
rate_hz : double
The requested rate (in Hertz)
Raises
------
TelemetryError
If the request fails. The error contains the reason for the failure.
"""
request = telemetry_pb2.SetRateGroundTruthRequest()
request.rate_hz = rate_hz
response = await self._stub.SetRateGroundTruth(request)
result = self._extract_result(response)
if result.result is not TelemetryResult.Result.SUCCESS:
raise TelemetryError(result, "set_rate_ground_truth()", rate_hz)
async def set_rate_fixedwing_metrics(self, rate_hz):
"""
Set rate to 'fixedwing metrics' updates.
Parameters
----------
rate_hz : double
The requested rate (in Hertz)
Raises
------
TelemetryError
If the request fails. The error contains the reason for the failure.
"""
request = telemetry_pb2.SetRateFixedwingMetricsRequest()
request.rate_hz = rate_hz
response = await self._stub.SetRateFixedwingMetrics(request)
result = self._extract_result(response)
if result.result is not TelemetryResult.Result.SUCCESS:
raise TelemetryError(result, "set_rate_fixedwing_metrics()", rate_hz)
async def set_rate_imu(self, rate_hz):
"""
Set rate to 'IMU' updates.
Parameters
----------
rate_hz : double
The requested rate (in Hertz)
Raises
------
TelemetryError
If the request fails. The error contains the reason for the failure.
"""
request = telemetry_pb2.SetRateImuRequest()
request.rate_hz = rate_hz
response = await self._stub.SetRateImu(request)
result = self._extract_result(response)
if result.result is not TelemetryResult.Result.SUCCESS:
raise TelemetryError(result, "set_rate_imu()", rate_hz)
async def set_rate_unix_epoch_time(self, rate_hz):
"""
Set rate to 'unix epoch time' updates.
Parameters
----------
rate_hz : double
The requested rate (in Hertz)
Raises
------
TelemetryError
If the request fails. The error contains the reason for the failure.
"""
request = telemetry_pb2.SetRateUnixEpochTimeRequest()
request.rate_hz = rate_hz
response = await self._stub.SetRateUnixEpochTime(request)
result = self._extract_result(response)
if result.result is not TelemetryResult.Result.SUCCESS:
raise TelemetryError(result, "set_rate_unix_epoch_time()", rate_hz)
| 26.935121
| 158
| 0.557178
|
9a8ff570b30caf63e530ae9382f589c51a00531a
| 11,260
|
py
|
Python
|
alf/utils/dist_utils_test.py
|
zhuboli/alf
|
b357565638c9336ebd88cecb9766a17d72d5d0c3
|
[
"Apache-2.0"
] | null | null | null |
alf/utils/dist_utils_test.py
|
zhuboli/alf
|
b357565638c9336ebd88cecb9766a17d72d5d0c3
|
[
"Apache-2.0"
] | null | null | null |
alf/utils/dist_utils_test.py
|
zhuboli/alf
|
b357565638c9336ebd88cecb9766a17d72d5d0c3
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2019 Horizon Robotics. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from absl import logging
from absl.testing import parameterized
from collections import namedtuple
import torch
import torch.distributions as td
import alf
import alf.utils.dist_utils as dist_utils
ActionDistribution = namedtuple('ActionDistribution', ['a', 'b'])
class EstimatedEntropyTest(parameterized.TestCase, alf.test.TestCase):
def setUp(self):
self.skipTest("estimate_entropy is not implemented yet")
def assertArrayAlmostEqual(self, x, y, eps):
self.assertLess(tf.reduce_max(tf.abs(x - y)), eps)
@parameterized.parameters(False, True)
def test_estimated_entropy(self, assume_reparametrization):
logging.info("assume_reparametrization=%s" % assume_reparametrization)
num_samples = 1000000
seed_stream = tfp.util.SeedStream(
seed=1, salt='test_estimated_entropy')
batch_shape = (2, )
loc = tf.random.normal(shape=batch_shape, seed=seed_stream())
scale = tf.abs(tf.random.normal(shape=batch_shape, seed=seed_stream()))
class DistributionSpecTest(alf.test.TestCase):
def test_normal(self):
dist = td.Normal(
loc=torch.tensor([1., 2.]), scale=torch.tensor([0.5, 0.25]))
spec = dist_utils.DistributionSpec.from_distribution(dist)
params1 = {
'loc': torch.tensor([0.5, 1.5]),
'scale': torch.tensor([2., 4.])
}
dist1 = spec.build_distribution(params1)
self.assertEqual(type(dist1), td.Normal)
self.assertEqual(dist1.mean, params1['loc'])
self.assertEqual(dist1.stddev, params1['scale'])
def test_categorical(self):
dist = td.Categorical(logits=torch.tensor([1., 2.]))
spec = dist_utils.DistributionSpec.from_distribution(dist)
params1 = {'logits': torch.tensor([0.5, 1.5])}
dist1 = spec.build_distribution(params1)
self.assertEqual(type(dist1), td.Categorical)
        # The Categorical distribution subtracts logsumexp(logits) from logits,
        # so dist1.logits is not equal to the supplied logits.
d = dist1.logits - params1['logits']
self.assertAlmostEqual(d[0], d[1])
def test_diag_multivariate_normal(self):
dist = dist_utils.DiagMultivariateNormal(
torch.tensor([[1., 2.], [2., 2.]]),
torch.tensor([[2., 3.], [1., 1.]]))
spec = dist_utils.DistributionSpec.from_distribution(dist)
params1 = {
'loc': torch.tensor([[0.5, 1.5], [1.0, 1.0]]),
'scale': torch.tensor([[2., 4.], [2., 1.]])
}
dist1 = spec.build_distribution(params1)
self.assertEqual(dist1.event_shape, dist.event_shape)
self.assertEqual(type(dist1), dist_utils.DiagMultivariateNormal)
self.assertEqual(type(dist1.base_dist), td.Normal)
self.assertEqual(dist1.base_dist.mean, params1['loc'])
self.assertEqual(dist1.base_dist.stddev, params1['scale'])
self.assertRaises(RuntimeError, spec.build_distribution,
{'loc': torch.tensor([1., 2.])})
def test_diag_multivariate_cauchy(self):
dist = dist_utils.DiagMultivariateCauchy(
torch.tensor([[1., 2.], [2., 2.]]),
torch.tensor([[2., 3.], [1., 1.]]))
spec = dist_utils.DistributionSpec.from_distribution(dist)
params1 = {
'loc': torch.tensor([[0.5, 1.5], [1.0, 1.0]]),
'scale': torch.tensor([[2., 4.], [2., 1.]])
}
dist1 = spec.build_distribution(params1)
self.assertEqual(dist1.event_shape, dist.event_shape)
self.assertEqual(type(dist1), dist_utils.DiagMultivariateCauchy)
self.assertEqual(type(dist1.base_dist), dist_utils.StableCauchy)
self.assertEqual(dist1.base_dist.loc, params1['loc'])
self.assertEqual(dist1.base_dist.scale, params1['scale'])
self.assertRaises(RuntimeError, spec.build_distribution,
{'loc': torch.tensor([1., 2.])})
def test_transformed(self):
normal_dist = dist_utils.DiagMultivariateNormal(
torch.tensor([[1., 2.], [2., 2.]]),
torch.tensor([[2., 3.], [1., 1.]]))
transforms = [dist_utils.SigmoidTransform()]
dist = td.TransformedDistribution(
base_distribution=normal_dist, transforms=transforms)
spec = dist_utils.DistributionSpec.from_distribution(dist)
params1 = {
'loc': torch.tensor([[0.5, 1.5], [1.0, 1.0]]),
'scale': torch.tensor([[2., 4.], [2., 1.]])
}
dist1 = spec.build_distribution(params1)
self.assertEqual(type(dist1), td.TransformedDistribution)
self.assertEqual(dist1.event_shape, dist.event_shape)
self.assertEqual(dist1.transforms, transforms)
self.assertEqual(
type(dist1.base_dist), dist_utils.DiagMultivariateNormal)
self.assertEqual(type(dist1.base_dist.base_dist), td.Normal)
self.assertEqual(dist1.base_dist.base_dist.mean, params1['loc'])
self.assertEqual(dist1.base_dist.base_dist.stddev, params1['scale'])
def test_inversion(self):
x = torch.tensor([-10.0, -8.6, -2.0, 0, 2, 8.6, 10.0])
loc = torch.tensor([0.5])
scale = torch.tensor([1.5])
transforms = [
dist_utils.StableTanh(),
dist_utils.AffineTransform(loc=loc, scale=scale)
]
y = x
# forward
for transform in transforms:
y = transform(y)
# inverse
x_recovered = y
for transform in reversed(transforms):
x_recovered = transform.inv(x_recovered)
self.assertTensorEqual(x, x_recovered)
self.assertTrue(x is x_recovered)
class TestConversions(alf.test.TestCase):
def test_conversions(self):
dists = {
't':
torch.tensor([[1., 2., 4.], [3., 3., 1.]]),
'd':
dist_utils.DiagMultivariateNormal(
torch.tensor([[1., 2.], [2., 2.]]),
torch.tensor([[2., 3.], [1., 1.]]))
}
params = dist_utils.distributions_to_params(dists)
dists_spec = dist_utils.extract_spec(dists, from_dim=1)
self.assertEqual(dists_spec['t'],
alf.TensorSpec(shape=(3, ), dtype=torch.float32))
self.assertEqual(type(dists_spec['d']), dist_utils.DistributionSpec)
self.assertEqual(len(params), 2)
self.assertEqual(dists['t'], params['t'])
self.assertEqual(dists['d'].base_dist.mean, params['d']['loc'])
self.assertEqual(dists['d'].base_dist.stddev, params['d']['scale'])
dists1 = dist_utils.params_to_distributions(params, dists_spec)
self.assertEqual(len(dists1), 2)
self.assertEqual(dists1['t'], dists['t'])
self.assertEqual(type(dists1['d']), type(dists['d']))
params_spec = dist_utils.to_distribution_param_spec(dists_spec)
alf.nest.assert_same_structure(params_spec, params)
params1_spec = dist_utils.extract_spec(params)
self.assertEqual(params_spec, params1_spec)
class TestActionSamplingCategorical(alf.test.TestCase):
def test_action_sampling_categorical(self):
m = torch.distributions.categorical.Categorical(
torch.Tensor([0.25, 0.75]))
M = m.expand([10])
epsilon = 0.0
action_expected = torch.Tensor([1]).repeat(10)
action_obtained = dist_utils.epsilon_greedy_sample(M, epsilon)
self.assertTrue((action_expected == action_obtained).all())
class TestActionSamplingNormal(alf.test.TestCase):
def test_action_sampling_normal(self):
m = torch.distributions.normal.Normal(
torch.Tensor([0.3, 0.7]), torch.Tensor([1.0, 1.0]))
M = m.expand([10, 2])
epsilon = 0.0
action_expected = torch.Tensor([0.3, 0.7]).repeat(10, 1)
action_obtained = dist_utils.epsilon_greedy_sample(M, epsilon)
self.assertTrue((action_expected == action_obtained).all())
class TestActionSamplingTransformedNormal(alf.test.TestCase):
def test_action_sampling_transformed_normal(self):
def _get_transformed_normal(means, stds):
normal_dist = td.Independent(td.Normal(loc=means, scale=stds), 1)
transforms = [
dist_utils.StableTanh(),
dist_utils.AffineTransform(
loc=torch.tensor(0.), scale=torch.tensor(5.0))
]
squashed_dist = td.TransformedDistribution(
base_distribution=normal_dist, transforms=transforms)
return squashed_dist, transforms
means = torch.Tensor([0.3, 0.7])
dist, transforms = _get_transformed_normal(
means=means, stds=torch.Tensor([1.0, 1.0]))
mode = dist_utils.get_mode(dist)
transformed_mode = means
for transform in transforms:
transformed_mode = transform(transformed_mode)
self.assertTrue((transformed_mode == mode).all())
epsilon = 0.0
action_obtained = dist_utils.epsilon_greedy_sample(dist, epsilon)
self.assertTrue((transformed_mode == action_obtained).all())
class TestActionSamplingTransformedCategorical(alf.test.TestCase):
def test_action_sampling_transformed_categorical(self):
def _get_transformed_categorical(probs):
categorical_dist = td.Independent(td.Categorical(probs=probs), 1)
return categorical_dist
probs = torch.Tensor([[0.3, 0.5, 0.2], [0.6, 0.4, 0.0]])
dist = _get_transformed_categorical(probs=probs)
mode = dist_utils.get_mode(dist)
expected_mode = torch.argmax(probs, dim=1)
self.assertTensorEqual(expected_mode, mode)
epsilon = 0.0
action_obtained = dist_utils.epsilon_greedy_sample(dist, epsilon)
self.assertTensorEqual(expected_mode, action_obtained)
class TestRSampleActionDistribution(alf.test.TestCase):
def test_rsample_action_distribution(self):
c = torch.distributions.categorical.Categorical(
torch.Tensor([0.25, 0.75]))
C = c.expand([10])
self.assertRaises(AssertionError,
dist_utils.rsample_action_distribution, C)
n = torch.distributions.normal.Normal(
torch.Tensor([0.3, 0.7]), torch.Tensor([1.0, 1.0]))
N = n.expand([10, 2])
action_distribution = ActionDistribution(a=C, b=N)
self.assertRaises(AssertionError,
dist_utils.rsample_action_distribution,
action_distribution)
if __name__ == '__main__':
logging.set_verbosity(logging.INFO)
alf.test.main()
| 40.503597
| 80
| 0.637567
|
d46d37ec27ed35c9a1d5822903b257152ebccfa8
| 2,945
|
py
|
Python
|
ownpaste/__init__.py
|
rafaelmartins/ownpaste
|
b9a2d8765173facfc778b4074853c7204e9f2d43
|
[
"BSD-3-Clause"
] | 3
|
2016-12-08T18:40:33.000Z
|
2019-08-29T15:21:36.000Z
|
ownpaste/__init__.py
|
rafaelmartins/ownpaste
|
b9a2d8765173facfc778b4074853c7204e9f2d43
|
[
"BSD-3-Clause"
] | null | null | null |
ownpaste/__init__.py
|
rafaelmartins/ownpaste
|
b9a2d8765173facfc778b4074853c7204e9f2d43
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
ownpaste
~~~~~~~~
Main package.
:copyright: (c) 2012-2013 by Rafael Goncalves Martins
:license: BSD, see LICENSE for more details.
"""
# ignore useless warnings about modules already imported.
import warnings
warnings.filterwarnings('ignore', r'module.*already imported', UserWarning)
from flask import Flask, _request_ctx_stack
from flask_script import Manager
from werkzeug.exceptions import default_exceptions
from ownpaste.auth import HTTPDigestAuth
from ownpaste.script import GeneratePw, DbVersionControl, DbUpgrade, \
DbDowngrade, DbVersion
from ownpaste.models import Ip, Paste, db
from ownpaste.utils import error_handler
from ownpaste.views import views
import ownpaste.version
__version__ = ownpaste.version.version
api_version = ownpaste.version.api_version
def create_app(config_file=None):
app = Flask(__name__)
auth = HTTPDigestAuth()
app.config.setdefault('PYGMENTS_STYLE', 'friendly')
app.config.setdefault('PYGMENTS_LINENOS', True)
app.config.setdefault('PER_PAGE', 20)
app.config.setdefault('SQLALCHEMY_DATABASE_URI',
'sqlite:////tmp/ownpaste.db')
app.config.setdefault('SQLALCHEMY_TRACK_MODIFICATIONS', False)
app.config.setdefault('REALM', 'ownpaste')
app.config.setdefault('USERNAME', 'ownpaste')
app.config.setdefault('PASSWORD', auth.a1('test', app.config['USERNAME'],
app.config['REALM']))
app.config.setdefault('IP_BLOCK_HITS', 10)
app.config.setdefault('IP_BLOCK_TIMEOUT', 60) # in minutes
app.config.setdefault('TIMEZONE', 'UTC')
app.config.from_envvar('OWNPASTE_SETTINGS', True)
if config_file is not None:
app.config.from_pyfile(config_file)
db.init_app(app)
# register default error handler
# based on: http://flask.pocoo.org/snippets/15/
for _exc in default_exceptions:
app.register_error_handler(_exc, error_handler)
del _exc
app.register_error_handler(401, auth.challenge)
app.register_blueprint(views)
@app.before_first_request
def before_first_request():
        if (not app.debug) and app.config['PASSWORD'] == \
                auth.a1('test', app.config['USERNAME'], app.config['REALM']):
raise RuntimeError('You should provide a password!!')
return app
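# Illustrative usage sketch (not part of the package API; the config path is
# hypothetical): build the WSGI app and run the development server.
#
#     app = create_app('/etc/ownpaste.cfg')
#     app.run(debug=True)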
def create_script():
manager = Manager(create_app, with_default_commands=True)
manager.add_option('-c', '--config-file', dest='config_file',
required=False)
@manager.shell
def _make_context():
return dict(app=_request_ctx_stack.top.app, db=db, Paste=Paste, Ip=Ip)
manager.add_command('generatepw', GeneratePw())
manager.add_command('db_version_control', DbVersionControl())
manager.add_command('db_upgrade', DbUpgrade())
manager.add_command('db_downgrade', DbDowngrade())
manager.add_command('db_version', DbVersion())
return manager
def main():
create_script().run()
| 33.089888
| 78
| 0.703226
|
97377439f481f5bde93225112dfffbc2644f45ee
| 7,944
|
py
|
Python
|
api/views.py
|
KolibriSolutions/BepMarketplace
|
c47d252fd744cde6b927e37c34d7a103c6162be5
|
[
"BSD-3-Clause"
] | 1
|
2019-06-29T15:24:24.000Z
|
2019-06-29T15:24:24.000Z
|
api/views.py
|
KolibriSolutions/BepMarketplace
|
c47d252fd744cde6b927e37c34d7a103c6162be5
|
[
"BSD-3-Clause"
] | 2
|
2020-01-12T17:47:33.000Z
|
2020-01-12T17:47:45.000Z
|
api/views.py
|
KolibriSolutions/BepMarketplace
|
c47d252fd744cde6b927e37c34d7a103c6162be5
|
[
"BSD-3-Clause"
] | 2
|
2019-06-29T15:24:26.000Z
|
2020-01-08T15:15:03.000Z
|
# Bep Marketplace ELE
# Copyright (c) 2016-2021 Kolibri Solutions
# License: See LICENSE file or https://github.com/KolibriSolutions/BepMarketplace/blob/master/LICENSE
#
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.core.cache import cache
from django.db.models import Q
from django.http import HttpResponse, JsonResponse, HttpResponseBadRequest
from django.shortcuts import get_object_or_404, render
from django.urls import reverse
from django.conf import settings
from index.decorators import group_required
from proposals.decorators import can_edit_project, can_downgrade_project, can_upgrade_project
from api.utils import get_status_str
from general_mail import mail_proposal_all, send_mail
from general_view import get_grouptype
from proposals.models import Proposal
from proposals.utils import get_all_proposals
from support.models import GroupAdministratorThrough, CapacityGroup
from timeline.utils import get_timeslot, get_timephase_number
from tracking.models import ProposalStatusChange
@login_required
def api_info(request):
return render(request, 'api/api.html')
@group_required('type1staff', 'type2staff', 'type2staffunverified', 'type3staff', 'type4staff')
@can_upgrade_project
def upgrade_status_api(request, pk):
"""
API call to increase the status of a proposal.
:param request:
:param pk: id of proposal
:return:
"""
obj = get_object_or_404(Proposal, pk=pk)
oldstatus = obj.Status
if oldstatus == 2:
        # by default go straight to published (4); status 3 is only reached via an explicit downgrade
newstatus = 4
else:
newstatus = obj.Status + 1
obj.Status = newstatus
obj.save()
mail_proposal_all(request, obj)
notification = ProposalStatusChange()
notification.Subject = obj
notification.Actor = request.user
notification.StatusFrom = oldstatus
notification.StatusTo = newstatus
notification.save()
if obj.Status > 3:
for assistant in obj.Assistants.all():
if get_grouptype("2u") in assistant.groups.all():
verify_assistant_fn(assistant)
if obj.Status == 4:
        # cache the object once it becomes published (status 4)
cache.set('proposal_{}'.format(pk), obj, settings.PROJECT_OBJECT_CACHE_DURATION)
cache.delete('listproposalsbodyhtml')
return HttpResponse(get_status_str(obj.Status))
@group_required('type1staff', 'type2staff', 'type2staffunverified', 'type3staff', 'type4staff')
@can_downgrade_project
def downgrade_status_api(request, pk, message=''):
"""
API call to decrease the status of a proposal.
:param request:
:param pk: id of proposal
:param message: message why the proposal was downgraded
:return:
"""
obj = get_object_or_404(Proposal, pk=pk)
oldstatus = obj.Status
if oldstatus == 4:
        # a downgrade by the track head goes to 3, by anyone else (e.g. the owner) to 2
if request.user == obj.Track.Head:
newstatus = 3
else:
newstatus = 2
else:
newstatus = oldstatus - 1
obj.Status = newstatus
obj.save()
mail_proposal_all(request, obj, message)
notification = ProposalStatusChange()
notification.Subject = obj
notification.Message = message
notification.Actor = request.user
notification.StatusFrom = oldstatus
notification.StatusTo = newstatus
notification.save()
# destroy the cache for this if oldstatus was published
if oldstatus == 4:
if 'listproposalsbodyhtml' in cache:
cache.delete('listproposalsbodyhtml')
if 'proposal_{}'.format(pk) in cache:
cache.delete('proposal_{}'.format(pk))
if 'proposaldetail{}'.format(pk) in cache:
cache.delete('proposaldetail{}'.format(pk))
return HttpResponse(get_status_str(obj.Status))
@group_required('type3staff')
def verify_assistant(request, pk):
"""
    API call to verify a type2staffunverified assistant as a type2staff.
:param request:
:param pk: id of the assistant-user
:return:
"""
account = get_object_or_404(User, pk=pk)
if get_grouptype("2u") not in account.groups.all():
return HttpResponse("This account is already verified")
if verify_assistant_fn(account):
return HttpResponse("Account verified!")
else:
return HttpResponse("Verify failed!")
def verify_assistant_fn(user):
"""
Verify an unverified user and mail a confirmation.
:param user:
:return:
"""
account_group = User.groups.through.objects.get(user=user)
account_group.group = get_grouptype("2")
account_group.save()
# inform the user of verification.
send_mail("user groups changed", "email/user_groups_changed.html",
{'oldgroups': 'type2staff unverified',
'newgroups': 'type2staff',
'message': 'Your account is now verified!',
'user': user},
user.email)
return True
@login_required
def list_public_projects_api(request):
"""
    Return all public proposals (=status 4) grouped by capacity group as JSON
:param request:
:return: JSON response
"""
data = {}
for group in CapacityGroup.objects.all():
data[group.ShortName] = {
"name": group.ShortName,
"projects": [prop.id for prop in
get_all_proposals().filter(Q(Status=4) & Q(Group=group) & Q(Private__isnull=True))]
}
return JsonResponse(data)
@login_required
def list_public_projects_titles_api(request):
"""
Get all public proposals (=status 4) titles as JSON
:param request:
:return: JSON response
"""
data = {}
for prop in get_all_proposals().filter(Q(Status=4) & Q(Private__isnull=True)):
data[prop.id] = prop.Title
return JsonResponse(data)
@login_required
def detail_proposal_api(request, pk):
"""
Get detailed information of given proposal as JSON
:param request:
:param pk: id of the proposal
:return:
"""
prop = get_object_or_404(Proposal, pk=pk)
if prop.Status != 4 or prop.Private.exists():
return HttpResponse("Not allowed", status=403)
return JsonResponse({
"id": prop.id,
"detaillink": reverse("proposals:details", kwargs={'pk': prop.id}),
"title": prop.Title,
"group": prop.Group.ShortName,
"track": str(prop.Track),
"reponsible": str(prop.ResponsibleStaff),
"assistants": [str(u) for u in list(prop.Assistants.all())],
"generaldescription": prop.GeneralDescription,
"taskdescription": prop.StudentsTaskDescription,
})
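# Illustrative response shape for detail_proposal_api (values are made up; the
# keys mirror the dict built above, including the original 'reponsible' key):
#
#     {
#         "id": 12,
#         "detaillink": "/proposals/details/12/",
#         "title": "Example project",
#         "group": "ES",
#         "track": "Automotive",
#         "reponsible": "J. Doe",
#         "assistants": ["A. Assistant"],
#         "generaldescription": "...",
#         "taskdescription": "..."
#     }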
@login_required
def list_published_api(request):
"""
JSON list of all published proposals with some detail info.
:param request:
:return:
"""
props = get_all_proposals().filter(Q(Status=4) & Q(Private__isnull=True))
prop_list = []
for prop in props:
prop_list.append({
"id": prop.id,
"detaillink": reverse("proposals:details", args=[prop.id]),
"title": prop.Title,
"group": prop.Group.ShortName,
"track": str(prop.Track),
"reponsible": str(prop.ResponsibleStaff),
"assistants": [str(u) for u in list(prop.Assistants.all())],
})
return JsonResponse(prop_list, safe=False)
@group_required('type3staff')
def get_group_admins(request, pk, type):
group = get_object_or_404(CapacityGroup, pk=pk)
if type == 'read':
return JsonResponse([g.User.id for g in GroupAdministratorThrough.objects.filter(Group=group, Super=False)],
safe=False)
elif type == 'write':
return JsonResponse([g.User.id for g in GroupAdministratorThrough.objects.filter(Group=group, Super=True)],
safe=False)
else:
return HttpResponseBadRequest()
| 31.275591
| 116
| 0.67145
|
19ee001917f09312bbbc23f776c126a1d65e2eda
| 5,434
|
py
|
Python
|
mmflow/models/flow_estimators/flownet.py
|
hologerry/mmflow
|
40caf064851bd95317424e31cc137c0007a2bece
|
[
"Apache-2.0"
] | 481
|
2021-11-16T07:04:23.000Z
|
2022-03-31T22:21:21.000Z
|
mmflow/models/flow_estimators/flownet.py
|
hologerry/mmflow
|
40caf064851bd95317424e31cc137c0007a2bece
|
[
"Apache-2.0"
] | 72
|
2021-11-16T12:25:55.000Z
|
2022-03-28T13:10:45.000Z
|
mmflow/models/flow_estimators/flownet.py
|
hologerry/mmflow
|
40caf064851bd95317424e31cc137c0007a2bece
|
[
"Apache-2.0"
] | 48
|
2021-11-16T06:48:46.000Z
|
2022-03-30T12:46:40.000Z
|
# Copyright (c) OpenMMLab. All rights reserved.
from typing import Dict, Optional, Sequence, Tuple, Union
from mmcv.utils import Config
from numpy import ndarray
from torch import Tensor
from ..builder import FLOW_ESTIMATORS, build_encoder
from .pwcnet import PWCNet
@FLOW_ESTIMATORS.register_module()
class FlowNetS(PWCNet):
"""FlowNetS flow estimator."""
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
def forward_train(
self,
imgs: Tensor,
flow_gt: Tensor,
valid: Optional[Tensor] = None,
img_metas: Optional[Sequence[dict]] = None) -> Dict[str, Tensor]:
"""Forward function for FlowNetS when model training.
Args:
imgs (Tensor): The concatenated input images.
            flow_gt (Tensor): The ground truth of optical flow.
valid (Tensor, optional): The valid mask. Defaults to None.
img_metas (Sequence[dict], optional): meta data of image to revert
the flow to original ground truth size. Defaults to None.
Returns:
Dict[str, Tensor]: The losses of output.
"""
feat = self.encoder(imgs)
return self.decoder.forward_train(
feat,
flow_gt=flow_gt,
valid=valid,
return_multi_level_flow=self.freeze_net)
def forward_test(
self,
imgs: Tensor,
img_metas: Optional[Sequence[dict]] = None
) -> Sequence[Dict[str, ndarray]]:
"""Forward function for FlowNetS when model testing.
Args:
imgs (Tensor): The concatenated input images.
img_metas (Sequence[dict], optional): meta data of image to revert
the flow to original ground truth size. Defaults to None.
Returns:
            Sequence[Dict[str, ndarray]]: The batch of predicted optical flow
                with the same size as the images after augmentation.
"""
H, W = imgs.shape[2:]
feat = self.encoder(imgs)
return self.decoder.forward_test(
feat,
H=H,
W=W,
return_multi_level_flow=self.freeze_net,
img_metas=img_metas)
@FLOW_ESTIMATORS.register_module()
class FlowNetC(PWCNet):
"""FlowNetC flow estimator.
Args:
        corr_level (str): The feature pyramid level at which the correlation is computed.
        corr_encoder (Config): The config of the correlation encoder.
"""
def __init__(self, corr_level: str, corr_encoder: Config, *args,
**kwargs) -> None:
super().__init__(*args, **kwargs)
self.corr_level = corr_level
self.corr_encoder = build_encoder(corr_encoder)
def extract_feat(
self, imgs: Tensor) -> Tuple[Dict[str, Tensor], Dict[str, Tensor]]:
"""Extract features from images.
Args:
imgs (Tensor): The concatenated input images.
Returns:
Tuple[Dict[str, Tensor], Dict[str, Tensor]]: The feature pyramid
from the first image and the feature pyramid from feature
correlation.
"""
in_channels = self.encoder.in_channels
img1 = imgs[:, :in_channels, ...]
img2 = imgs[:, in_channels:, ...]
feat1 = self.encoder(img1)
feat2 = self.encoder(img2)
return feat1, self.corr_encoder(feat1[self.corr_level],
feat2[self.corr_level])
def forward_train(
self,
imgs: Tensor,
flow_gt: Tensor,
valid: Optional[Tensor] = None,
img_metas: Optional[Sequence[dict]] = None) -> Dict[str, Tensor]:
"""Forward function for FlowNetC when model training.
Args:
imgs (Tensor): The concatenated input images.
            flow_gt (Tensor): The ground truth of optical flow.
valid (Tensor, optional): The valid mask. Defaults to None.
img_metas (Sequence[dict], optional): meta data of image to revert
the flow to original ground truth size. Defaults to None.
Returns:
Dict[str, Tensor]: The losses of output.
"""
feat1, corr_feat = self.extract_feat(imgs)
return self.decoder.forward_train(
feat1,
corr_feat,
flow_gt=flow_gt,
valid=valid,
return_multi_level_flow=self.freeze_net)
def forward_test(
self,
imgs: Tensor,
img_metas: Optional[Sequence[dict]] = None
) -> Union[Dict[str, Tensor], Sequence[ndarray]]:
"""Forward function for FlowNetC when model testing.
Args:
imgs (Tensor): The concatenated input images.
img_metas (Sequence[dict], optional): meta data of image to revert
the flow to original ground truth size. Defaults to None.
Returns:
            Sequence[Dict[str, ndarray]]: The batch of predicted optical flow
                with the same size as the images after augmentation.
"""
H, W = imgs.shape[2:]
feat1, corr_feat = self.extract_feat(imgs)
return self.decoder.forward_test(
feat1,
corr_feat,
H=H,
W=W,
return_multi_level_flow=self.freeze_net,
img_metas=img_metas)
| 32.538922
| 79
| 0.586676
|
96d9af06f78769e7c79fe178aae4325d13461d17
| 6,564
|
py
|
Python
|
sib_api_v3_sdk/models/get_contact_campaign_stats_links.py
|
Kalesberg/APIv3-python-library
|
f65e7ea1ddd94a0c095a983dd40233fd8472c08a
|
[
"MIT"
] | null | null | null |
sib_api_v3_sdk/models/get_contact_campaign_stats_links.py
|
Kalesberg/APIv3-python-library
|
f65e7ea1ddd94a0c095a983dd40233fd8472c08a
|
[
"MIT"
] | null | null | null |
sib_api_v3_sdk/models/get_contact_campaign_stats_links.py
|
Kalesberg/APIv3-python-library
|
f65e7ea1ddd94a0c095a983dd40233fd8472c08a
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
SendinBlue API
SendinBlue provide a RESTFul API that can be used with any languages. With this API, you will be able to : - Manage your campaigns and get the statistics - Manage your contacts - Send transactional Emails and SMS - and much more... You can download our wrappers at https://github.com/orgs/sendinblue **Possible responses** | Code | Message | | :-------------: | ------------- | | 200 | OK. Successful Request | | 201 | OK. Successful Creation | | 202 | OK. Request accepted | | 204 | OK. Successful Update/Deletion | | 400 | Error. Bad Request | | 401 | Error. Authentication Needed | | 402 | Error. Not enough credit, plan upgrade needed | | 403 | Error. Permission denied | | 404 | Error. Object does not exist | | 405 | Error. Method not allowed | # noqa: E501
OpenAPI spec version: 3.0.0
Contact: contact@sendinblue.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class GetContactCampaignStatsLinks(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'count': 'int',
'event_time': 'datetime',
'ip': 'str',
'url': 'str'
}
attribute_map = {
'count': 'count',
'event_time': 'eventTime',
'ip': 'ip',
'url': 'url'
}
def __init__(self, count=None, event_time=None, ip=None, url=None): # noqa: E501
"""GetContactCampaignStatsLinks - a model defined in Swagger""" # noqa: E501
self._count = None
self._event_time = None
self._ip = None
self._url = None
self.discriminator = None
self.count = count
self.event_time = event_time
self.ip = ip
self.url = url
@property
def count(self):
"""Gets the count of this GetContactCampaignStatsLinks. # noqa: E501
Number of clicks on this link for the campaign # noqa: E501
:return: The count of this GetContactCampaignStatsLinks. # noqa: E501
:rtype: int
"""
return self._count
@count.setter
def count(self, count):
"""Sets the count of this GetContactCampaignStatsLinks.
Number of clicks on this link for the campaign # noqa: E501
:param count: The count of this GetContactCampaignStatsLinks. # noqa: E501
:type: int
"""
if count is None:
raise ValueError("Invalid value for `count`, must not be `None`") # noqa: E501
self._count = count
@property
def event_time(self):
"""Gets the event_time of this GetContactCampaignStatsLinks. # noqa: E501
UTC date-time of the event # noqa: E501
:return: The event_time of this GetContactCampaignStatsLinks. # noqa: E501
:rtype: datetime
"""
return self._event_time
@event_time.setter
def event_time(self, event_time):
"""Sets the event_time of this GetContactCampaignStatsLinks.
UTC date-time of the event # noqa: E501
:param event_time: The event_time of this GetContactCampaignStatsLinks. # noqa: E501
:type: datetime
"""
if event_time is None:
raise ValueError("Invalid value for `event_time`, must not be `None`") # noqa: E501
self._event_time = event_time
@property
def ip(self):
"""Gets the ip of this GetContactCampaignStatsLinks. # noqa: E501
IP from which the user has clicked on the link # noqa: E501
:return: The ip of this GetContactCampaignStatsLinks. # noqa: E501
:rtype: str
"""
return self._ip
@ip.setter
def ip(self, ip):
"""Sets the ip of this GetContactCampaignStatsLinks.
IP from which the user has clicked on the link # noqa: E501
:param ip: The ip of this GetContactCampaignStatsLinks. # noqa: E501
:type: str
"""
if ip is None:
raise ValueError("Invalid value for `ip`, must not be `None`") # noqa: E501
self._ip = ip
@property
def url(self):
"""Gets the url of this GetContactCampaignStatsLinks. # noqa: E501
URL of the clicked link # noqa: E501
:return: The url of this GetContactCampaignStatsLinks. # noqa: E501
:rtype: str
"""
return self._url
@url.setter
def url(self, url):
"""Sets the url of this GetContactCampaignStatsLinks.
URL of the clicked link # noqa: E501
:param url: The url of this GetContactCampaignStatsLinks. # noqa: E501
:type: str
"""
if url is None:
raise ValueError("Invalid value for `url`, must not be `None`") # noqa: E501
self._url = url
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, GetContactCampaignStatsLinks):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
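# Illustrative usage sketch (values are made up):
#
#     import datetime
#     link = GetContactCampaignStatsLinks(
#         count=3,
#         event_time=datetime.datetime(2020, 1, 1, 12, 0, 0),
#         ip='203.0.113.1',
#         url='https://example.com/landing')
#     print(link.to_dict())  # {'count': 3, 'event_time': datetime.datetime(...), ...}
#     print(link == GetContactCampaignStatsLinks(3, link.event_time, link.ip, link.url))  # True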
| 32.334975
| 820
| 0.584857
|
f8004f59dcbfffbc586f0bebdad89400a7170a75
| 2,684
|
py
|
Python
|
setup.py
|
jpeyret/pynoorm
|
d6f7e0e102bb0eb4865beff75cf671b560ebc8b2
|
[
"MIT"
] | 2
|
2016-04-14T23:11:06.000Z
|
2016-06-04T22:39:10.000Z
|
setup.py
|
jpeyret/pynoorm
|
d6f7e0e102bb0eb4865beff75cf671b560ebc8b2
|
[
"MIT"
] | null | null | null |
setup.py
|
jpeyret/pynoorm
|
d6f7e0e102bb0eb4865beff75cf671b560ebc8b2
|
[
"MIT"
] | 1
|
2022-01-16T15:19:16.000Z
|
2022-01-16T15:19:16.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import os
import re
if sys.version_info < (2, 7):
raise Exception("PyNoORM requires Python 2.7 or higher.")
if sys.version_info >= (3,):
if sys.version_info < (3, 3):
raise Exception("PyNoORM requires Python 3.3 or higher")
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
# with open('README.rst') as readme_file:
# readme = readme_file.read()
with open("HISTORY.rst") as history_file:
history = history_file.read()
requirements = [
# TODO: put package requirements here
]
test_requirements = [
# TODO: put package test requirements here
]
def parse_readme(text):
"""start on reStructuredText banner and end at software declaration"""
start = re.compile("~~~~~~~", re.IGNORECASE)
end = re.compile("Free software:", re.IGNORECASE)
from_ = to_ = description = None
lines = text.split("\n")
for lineno, line in enumerate(lines):
if from_ is None and start.search(line):
from_ = lineno - 1
description = lines[from_].strip()
if to_ is None and end.search(line):
to_ = lineno
return description, "\n".join(lines[from_:to_])
with open(os.path.join(os.path.dirname(__file__), "README.rst")) as r_file:
description, readme = parse_readme(r_file.read())
assert description.strip()
assert readme.strip()
setup(
name="pynoorm",
version="1.0.2",
description=description,
long_description=readme + "\n\n" + history,
author="JL Peyret",
author_email="jpeyret@gmail.com",
url="https://github.com/jpeyret/pynoorm",
packages=["pynoorm"],
package_dir={"pynoorm": "pynoorm"},
include_package_data=True,
install_requires=requirements,
license="MIT License",
zip_safe=False,
keywords="sql database multiplatform",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Topic :: Database :: Front-Ends",
"Topic :: Utilities",
"Operating System :: OS Independent",
],
test_suite="tests",
tests_require=test_requirements,
)
| 26.313725
| 75
| 0.63152
|
e5249f1949f6091b0db785b8738bfcf51e426cb9
| 3,312
|
py
|
Python
|
rl_agent/qnetwork.py
|
juheyne/minesweeper
|
89cc07a2ce5f8b6699eebd32d4d4197483184299
|
[
"MIT"
] | 1
|
2018-05-22T09:45:31.000Z
|
2018-05-22T09:45:31.000Z
|
rl_agent/qnetwork.py
|
juheyne/minesweeper
|
89cc07a2ce5f8b6699eebd32d4d4197483184299
|
[
"MIT"
] | null | null | null |
rl_agent/qnetwork.py
|
juheyne/minesweeper
|
89cc07a2ce5f8b6699eebd32d4d4197483184299
|
[
"MIT"
] | null | null | null |
import numpy as np
import random
import tensorflow as tf
import tensorflow.contrib.slim as slim
class QNetwork:
def __init__(self, field_size, num_actions):
# None shapes are for batch sizes
size_final_layer = 256
self.input = tf.placeholder(shape=[None, field_size, field_size, 2], dtype=tf.float32)
self.conv1 = slim.conv2d(inputs=self.input,
num_outputs=64,
kernel_size=[5, 5],
stride=[1, 1],
padding='VALID',
biases_initializer=None)
self.conv2 = slim.conv2d(inputs=self.conv1,
num_outputs=128,
kernel_size=[2, 2],
stride=[1, 1],
padding='VALID',
biases_initializer=None)
self.conv3 = slim.conv2d(inputs=self.conv2,
num_outputs=size_final_layer,
kernel_size=[3, 3],
stride=[1, 1],
padding='VALID',
biases_initializer=None)
        # Flatten the convolution output so it can feed the fully connected layer below
self.stream = slim.flatten(self.conv3)
# self.stream = slim.flatten(self.input) # Test for directly learning on the input
xavier_init = tf.contrib.layers.xavier_initializer()
# First dimension is batch_size
self.W = tf.Variable(xavier_init([self.stream.get_shape().as_list()[1], num_actions]))
self.Qout = tf.matmul(self.stream, self.W)
self.predict = tf.argmax(self.Qout, 1)
# Create action layer
self.targetQ = tf.placeholder(shape=[None], dtype=tf.float32)
self.actions = tf.placeholder(shape=[None], dtype=tf.int32)
self.actions_onehot = tf.one_hot(self.actions, num_actions, dtype=tf.float32)
self.Q = tf.reduce_sum(tf.multiply(self.Qout, self.actions_onehot), axis=1)
self.td_error = tf.square(self.targetQ - self.Q)
self.loss = tf.reduce_mean(self.td_error)
self.trainer = tf.train.AdamOptimizer(learning_rate=0.0001)
self.updateModel = self.trainer.minimize(self.loss)
class ExperienceBuffer:
"""Experience Buffer contains samples with (state, action, reward, next_state, done)"""
def __init__(self, buffer_size=10000):
self.buffer = []
self.buffer_size = buffer_size
def add(self, experience):
if len(self.buffer) + len(experience) >= self.buffer_size:
self.buffer[0:(len(experience)+len(self.buffer))-self.buffer_size] = []
self.buffer.extend(experience)
def sample(self, size):
return np.reshape(np.array(random.sample(self.buffer, size)), [size, 5])
def update_target_graph(tfvars, tau):
total_vars = len(tfvars)
op_holder = []
for idx, var in enumerate(tfvars[0:total_vars//2]):
op_holder.append(tfvars[idx+total_vars//2].assign((var.value()*tau) + ((1-tau)*tfvars[idx+total_vars//2].value())))
return op_holder
def update_target(op_holder, sess):
for op in op_holder:
sess.run(op)
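# Illustrative training-loop sketch (TF1-style; sizes and names are hypothetical):
# the usual DQN wiring pairs a main and a target QNetwork and periodically
# soft-updates the target weights via update_target_graph()/update_target().
#
#     main_qn = QNetwork(field_size=8, num_actions=64)
#     target_qn = QNetwork(field_size=8, num_actions=64)
#     target_ops = update_target_graph(tf.trainable_variables(), tau=0.001)
#     buffer = ExperienceBuffer()
#     with tf.Session() as sess:
#         sess.run(tf.global_variables_initializer())
#         update_target(target_ops, sess)  # sync target to main once at start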
| 41.4
| 123
| 0.580012
|
cba382737c1792c67c0a8796df9218d2306205b0
| 30,877
|
py
|
Python
|
services/core/Darksky/tests/test_darksky.py
|
craig8/volttron
|
2a954311d323effa3b79c2a53f6e8c3bb9664e1c
|
[
"Apache-2.0",
"BSD-2-Clause"
] | 1
|
2020-06-08T16:54:28.000Z
|
2020-06-08T16:54:28.000Z
|
services/core/Darksky/tests/test_darksky.py
|
craig8/volttron
|
2a954311d323effa3b79c2a53f6e8c3bb9664e1c
|
[
"Apache-2.0",
"BSD-2-Clause"
] | 8
|
2016-10-07T22:49:28.000Z
|
2022-02-23T00:57:58.000Z
|
services/core/Darksky/tests/test_darksky.py
|
craig8/volttron
|
2a954311d323effa3b79c2a53f6e8c3bb9664e1c
|
[
"Apache-2.0",
"BSD-2-Clause"
] | null | null | null |
# -*- coding: utf-8 -*- {{{
# vim: set fenc=utf-8 ft=python sw=4 ts=4 sts=4 et:
#
# Copyright 2020, Battelle Memorial Institute.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This material was prepared as an account of work sponsored by an agency of
# the United States Government. Neither the United States Government nor the
# United States Department of Energy, nor Battelle, nor any of their
# employees, nor any jurisdiction or organization that has cooperated in the
# development of these materials, makes any warranty, express or
# implied, or assumes any legal liability or responsibility for the accuracy,
# completeness, or usefulness or any information, apparatus, product,
# software, or process disclosed, or represents that its use would not infringe
# privately owned rights. Reference herein to any specific commercial product,
# process, or service by trade name, trademark, manufacturer, or otherwise
# does not necessarily constitute or imply its endorsement, recommendation, or
# favoring by the United States Government or any agency thereof, or
# Battelle Memorial Institute. The views and opinions of authors expressed
# herein do not necessarily state or reflect those of the
# United States Government or any agency thereof.
#
# PACIFIC NORTHWEST NATIONAL LABORATORY operated by
# BATTELLE for the UNITED STATES DEPARTMENT OF ENERGY
# under Contract DE-AC05-76RL01830
# }}}
import pytest
import os
import copy
import gevent
import sqlite3
import json
import logging
from datetime import datetime, timedelta
from mock import MagicMock
from volttron.platform.agent.utils import get_aware_utc_now, format_timestamp
from volttron.platform.messaging.health import STATUS_GOOD
from volttron.platform import get_services_core
from volttron.platform.agent import utils
__version__ = "0.1.0"
utils.setup_logging()
_log = logging.getLogger(__name__)
API_KEY = os.environ.get('DARKSKY_KEY')
darksky_service = {
'weather_service': get_services_core('Darksky'),
'identity': 'platform.darksky',
'max_size_gb': None,
'api_key': API_KEY,
'poll_locations': [],
'poll_interval': 5,
'performance_mode': False,
'api_calls_limit': 100
}
darksky_perf = {
'weather_service': get_services_core('Darksky'),
'identity': 'platform.darksky_perf',
'max_size_gb': None,
'api_key': API_KEY,
'poll_locations': [],
'poll_interval': 5,
'performance_mode': True,
'api_calls_limit': 100
}
# TODO add test case for testing api_call_limit: -1
polling_service = {
'weather_service': get_services_core('DarkskyAgent'),
'max_size_gb': None,
'api_key': '902a708bcc2c20fdcd91962640ef5d1b',
'poll_interval': 5
}
# global variable. Set to skip the module
pytestmark = pytest.mark.skipif(not API_KEY, reason="No API key found. Darksky weather API key needs to be set in "
"the environment variable DARKSKY_KEY")
@pytest.fixture(scope="function")
def cleanup_cache(volttron_instance, query_agent, weather):
weather_uuid = weather[0]
identity = weather[1]
tables = ["get_current_weather", "get_hourly_forecast", "get_minutely_forecast", "get_daily_forecast"]
version = query_agent.vip.rpc.call(identity, 'get_version').get(timeout=3)
cwd = volttron_instance.volttron_home
database_file = "/".join([cwd, "agents", weather_uuid, "darkskyagent-" + version, "darkskyagent-" + version +
".agent-data", "weather.sqlite"])
_log.debug(database_file)
sqlite_connection = sqlite3.connect(database_file)
cursor = sqlite_connection.cursor()
for table in tables:
query = "DELETE FROM {};".format(table)
_log.debug(query)
cursor.execute(query)
try:
cursor.execute("DELETE FROM API_CALLS;")
except Exception as e:
print(e)
sqlite_connection.commit()
@pytest.fixture(scope="module")
def query_agent(request, volttron_instance):
# 1: Start a fake agent to query the historian agent in volttron_instance2
agent = volttron_instance.build_agent()
agent.poll_callback = MagicMock(name="poll_callback")
# subscribe to weather poll results
agent.vip.pubsub.subscribe(
peer='pubsub',
prefix="weather/poll/current",
callback=agent.poll_callback).get()
# 2: add a tear down method to stop the fake
# agent that published to message bus
def stop_agent():
print("In teardown method of query_agent")
agent.core.stop()
request.addfinalizer(stop_agent)
return agent
@pytest.fixture(scope="module", params=[darksky_service, darksky_perf])
def weather(request, volttron_instance):
print("** Setting up weather agent module **")
print("request param", request.param)
config = copy.copy(request.param)
source = config.pop('weather_service')
identity = config.pop('identity')
agent = volttron_instance.install_agent(
vip_identity=identity,
agent_dir=source,
start=False,
config_file=config)
volttron_instance.start_agent(agent)
gevent.sleep(3)
def stop_agent():
print("stopping weather service")
if volttron_instance.is_running():
volttron_instance.stop_agent(agent)
request.addfinalizer(stop_agent)
return agent, identity
@pytest.mark.parametrize("locations", [
[{"lat": 39.7555, "long": -105.2211}],
[{"lat": 39.7555, "long": -105.2211}, {"lat": 46.2804, "long": -119.2752}]
])
@pytest.mark.darksky
def test_success_current(volttron_instance, cleanup_cache, query_agent, weather, locations):
weather_uuid = weather[0]
identity = weather[1]
version = query_agent.vip.rpc.call(identity, 'get_version').get(timeout=3)
cwd = volttron_instance.volttron_home
database_file = "/".join([cwd, "agents", weather_uuid, "darkskyagent-" + version, "darkskyagent-" + version +
".agent-data", "weather.sqlite"])
sqlite_connection = sqlite3.connect(database_file)
cursor = sqlite_connection.cursor()
api_calls_query = 'SELECT COUNT(*) FROM API_CALLS'
cursor.execute(api_calls_query)
current_api_calls = cursor.fetchone()[0]
query_data = query_agent.vip.rpc.call(identity, 'get_current_weather', locations).get(timeout=30)
if query_data[0].get("weather_error"):
error = query_data[0].get("weather_error")
if error.endswith("Remote API returned Code 403"):
pytest.skip("API key has exceeded daily call limit")
print(query_data)
cursor.execute(api_calls_query)
new_api_calls = cursor.fetchone()[0]
assert new_api_calls == current_api_calls + len(locations)
current_api_calls = new_api_calls
assert len(query_data) == len(locations)
for record in query_data:
# check format here
assert record.get("observation_time")
assert (record.get("lat") and record.get("long"))
results = record.get("weather_results")
if results:
assert isinstance(results, dict)
assert "data" not in results
assert results["attribution"] == "Powered by Dark Sky"
else:
results = record.get("weather_error")
if results.startswith("Remote API returned no data") or \
results.startswith("Remote API redirected request, but redirect failed") \
or results.startswith("Remote API returned invalid response") \
or results.startswith("API request failed with unexpected response"):
assert True
else:
assert False
services = {"get_minutely_forecast": 60,
"get_hourly_forecast": 48,
"get_daily_forecast": 7}
for service, records_amount in services.items():
query = 'SELECT COUNT(*) FROM {service}'.format(service=service)
cursor.execute(query)
num_records = cursor.fetchone()[0]
        if identity == 'platform.darksky_perf':
            assert num_records == 0
        else:
            assert num_records == records_amount*len(locations)
cache_data = query_agent.vip.rpc.call(identity, 'get_current_weather', locations).get(timeout=30)
cursor.execute(api_calls_query)
new_api_calls = cursor.fetchone()[0]
assert new_api_calls == current_api_calls
# check names returned are valid
assert len(query_data) == len(cache_data)
for x in range(0, len(cache_data)):
assert len(cache_data[x]) == len(query_data[x])
for key in query_data[x]:
assert query_data[x].get(key) == cache_data[x].get(key)
for service, records_amount in services.items():
query = 'SELECT COUNT(*) FROM {service}'.format(service=service)
cursor.execute(query)
num_records = cursor.fetchone()[0]
        if identity == 'platform.darksky_perf':
            assert num_records == 0
        else:
            assert num_records == records_amount*len(locations)
@pytest.mark.darksky
def test_calls_exceeded(volttron_instance, cleanup_cache, query_agent, weather):
weather_uuid = weather[0]
identity = weather[1]
version = query_agent.vip.rpc.call(identity, 'get_version').get(timeout=3)
cwd = volttron_instance.volttron_home
database_file = "/".join([cwd, "agents", weather_uuid, "darkskyagent-" + version, "darkskyagent-" + version +
".agent-data", "weather.sqlite"])
sqlite_connection = sqlite3.connect(database_file)
cursor = sqlite_connection.cursor()
for i in range(0, 100):
time = format_timestamp(get_aware_utc_now() + timedelta(seconds=i))
insert_query = """INSERT INTO API_CALLS (CALL_TIME) VALUES (?);"""
cursor.execute(insert_query, (time,))
sqlite_connection.commit()
locations = [{"lat": 39.7555, "long": -105.2211}]
query_data = query_agent.vip.rpc.call(identity, 'get_current_weather', locations).get(timeout=30)
assert query_data[0]['weather_error'] == 'No calls currently available for the configured API key'
assert not query_data[0].get('weather_results')
query_data = query_agent.vip.rpc.call(identity, 'get_hourly_forecast', locations).get(timeout=30)
assert query_data[0]['weather_error'] == 'No calls currently available for the configured API key'
assert not query_data[0].get('weather_results')
delete_query = "DROP TABLE IF EXISTS API_CALLS;"
cursor.execute(delete_query)
create_query = """CREATE TABLE API_CALLS (CALL_TIME TIMESTAMP NOT NULL);"""
cursor.execute(create_query)
sqlite_connection.commit()
@pytest.mark.parametrize("locations", [
["fail"],
[{"lat": 39.7555}],
()
])
@pytest.mark.darksky
def test_current_fail(weather, query_agent, locations):
identity = weather[1]
query_data = query_agent.vip.rpc.call(identity, 'get_current_weather', locations).get(timeout=30)
for record in query_data:
error = record.get("weather_error")
assert error.startswith("Invalid location format.") or error.startswith("Invalid location")
assert record.get("weather_results") is None
@pytest.mark.parametrize("locations, service", [
([{"lat": 39.7555, "long": -105.2211}], 'get_minutely_forecast'),
([{"lat": 39.7555, "long": -105.2211}, {"lat": 46.2804, "long": -119.2752}],
'get_minutely_forecast'),
([{"lat": 39.7555, "long": -105.2211}], 'get_daily_forecast'),
([{"lat": 39.7555, "long": -105.2211}, {"lat": 46.2804, "long": -119.2752}],
'get_daily_forecast'),
([{"lat": 39.7555, "long": -105.2211}], 'get_hourly_forecast'),
([{"lat": 39.7555, "long": -105.2211}, {"lat": 46.2804, "long": -119.2752}],
'get_hourly_forecast'),
])
@pytest.mark.darksky
def test_success_forecast(volttron_instance, cleanup_cache, weather, query_agent, locations, service):
weather_uuid = weather[0]
identity = weather[1]
version = query_agent.vip.rpc.call(identity, 'get_version').get(timeout=3)
cwd = volttron_instance.volttron_home
database_file = "/".join([cwd, "agents", weather_uuid, "darkskyagent-" + version, "darkskyagent-" + version +
".agent-data", "weather.sqlite"])
sqlite_connection = sqlite3.connect(database_file)
cursor = sqlite_connection.cursor()
api_calls_query = 'SELECT COUNT(*) FROM API_CALLS'
cursor.execute(api_calls_query)
current_api_calls = cursor.fetchone()[0]
query_data = []
if service == "get_minutely_forecast":
query_data = query_agent.vip.rpc.call(identity, service, locations).get(timeout=30)
if service == "get_hourly_forecast":
query_data = query_agent.vip.rpc.call(identity, service, locations).get(timeout=30)
if service == "get_daily_forecast":
query_data = query_agent.vip.rpc.call(identity, service, locations).get(timeout=30)
if query_data[0].get("weather_error"):
error = query_data[0].get("weather_error")
if error.endswith("Remote API returned Code 403"):
pytest.skip("API key has exceeded daily call limit")
cursor.execute(api_calls_query)
new_api_calls = cursor.fetchone()[0]
# For daily forecast, when request time is on the same day but earlier hour as first forecast, the agent discards
# the forecast entry of current day and makes a second call for the 8th day forecast.
if service == "get_daily_forecast":
number = current_api_calls + len(locations)
assert new_api_calls == number or new_api_calls == number + 1
else:
assert new_api_calls == current_api_calls + len(locations)
current_api_calls = new_api_calls
services = {
"get_minutely_forecast": 60,
"get_hourly_forecast": 48,
"get_current_weather": 1,
"get_daily_forecast": 7}
for service_name, records_amount in services.items():
query = 'SELECT COUNT(*) FROM {service}'.format(service=service_name)
print(query)
cursor.execute(query)
num_records = cursor.fetchone()[0]
        if service_name == service:
            assert num_records == records_amount * len(locations)
        else:
            if identity == 'platform.darksky_perf':
                assert num_records == 0
            else:
                assert num_records == records_amount * len(locations)
assert len(query_data) == len(locations)
for x in range(0, len(query_data)):
location_data = query_data[x]
assert location_data.get("lat") and location_data.get("long")
results = location_data.get("weather_results")
error = location_data.get("weather_error")
if error and not results:
if error.startswith("Remote API returned no data") \
or error.startswith("Remote API redirected request, but redirect failed") \
or error.startswith("Remote API returned invalid response") \
or error.startswith("API request failed with unexpected response"):
assert True
else:
assert False
if results:
assert location_data.get("generation_time")
for record in results:
forecast_time = utils.parse_timestamp_string(record[0])
assert isinstance(forecast_time, datetime)
if not service == "get_minutely_forecast":
assert 'summary' in record[1]
else:
assert 'summary' not in record[1]
assert record[1]["attribution"] == "Powered by Dark Sky"
cache_data = []
# default quantity
if service == 'get_minutely_forecast':
cache_data = query_agent.vip.rpc.call(identity, service, locations).get(timeout=30)
if service == 'get_hourly_forecast':
cache_data = query_agent.vip.rpc.call(identity, service, locations).get(timeout=30)
if service == 'get_daily_forecast':
cache_data = query_agent.vip.rpc.call(identity, service, locations).get(timeout=30)
cursor.execute(api_calls_query)
new_api_calls = cursor.fetchone()[0]
assert new_api_calls == current_api_calls
assert len(cache_data) == len(query_data)
for x in range(0, len(cache_data)):
query_location_data = query_data[x]
print(query_location_data)
cache_location_data = cache_data[x]
print(cache_location_data)
assert cache_location_data.get("generation_time") == query_location_data.get("generation_time")
assert cache_location_data.get("lat") == query_location_data.get("lat")
assert cache_location_data.get("long") == query_location_data.get("long")
if cache_location_data.get("weather_results"):
query_weather_results = query_location_data.get("weather_results")
cache_weather_results = cache_location_data.get("weather_results")
for y in range(0, len(query_weather_results)):
result = query_weather_results[y]
cache_result = cache_weather_results[y]
query_time, oldtz = utils.process_timestamp(result[0])
query_time = utils.format_timestamp(query_time)
assert query_time == cache_result[0]
for key in cache_result[1]:
assert cache_result[1][key] == result[1][key]
else:
results = cache_location_data.get("weather_error")
if results.startswith("Remote API returned no data") \
or results.startswith("Remote API redirected request, but redirect failed") \
or results.startswith("Remote API returned invalid response") \
or results.startswith("API request failed with unexpected response"):
assert True
else:
assert False
for service_name, records_amount in services.items():
if not service_name == service:
query = 'SELECT COUNT(*) FROM {service}'.format(service=service_name)
cursor.execute(query)
num_records = cursor.fetchone()[0]
            if identity == 'platform.darksky_perf':
                assert num_records == 0
            else:
                assert num_records == records_amount*len(locations)
@pytest.mark.parametrize("locations, service", [
([{"lat": 39.7555, "long": -105.2211}], 'get_minutely_forecast'),
([{"lat": 39.7555, "long": -105.2211}, {"lat": 46.2804, "long": -119.2752}],
'get_minutely_forecast'),
([{"lat": 39.7555, "long": -105.2211}], 'get_daily_forecast'),
([{"lat": 39.7555, "long": -105.2211}, {"lat": 46.2804, "long": -119.2752}],
'get_daily_forecast'),
([{"lat": 39.7555, "long": -105.2211}], 'get_hourly_forecast'),
([{"lat": 39.7555, "long": -105.2211}, {"lat": 46.2804, "long": -119.2752}],
'get_hourly_forecast'),
])
@pytest.mark.darksky
def test_less_than_default_forecast(volttron_instance, cleanup_cache, weather, query_agent, locations, service):
query_data = []
cache_data = []
identity = weather[1]
if service == 'get_minutely_forecast':
query_data = query_agent.vip.rpc.call(identity, service, locations, minutes=2).get(timeout=30)
elif service == 'get_hourly_forecast':
query_data = query_agent.vip.rpc.call(identity, service, locations, hours=2).get(timeout=30)
elif service == 'get_daily_forecast':
query_data = query_agent.vip.rpc.call(identity, service, locations, days=2).get(timeout=30)
else:
pytest.fail('invalid request type')
if query_data[0].get("weather_error"):
error = query_data[0].get("weather_error")
if error.endswith("Remote API returned Code 403"):
pytest.skip("API key has exceeded daily call limit")
assert len(query_data) == len(locations)
for record in query_data:
assert len(record['weather_results']) == 2
if service == 'get_minutely_forecast':
cache_data = query_agent.vip.rpc.call(identity, service, locations, minutes=2).get(timeout=30)
elif service == 'get_hourly_forecast':
cache_data = query_agent.vip.rpc.call(identity, service, locations, hours=2).get(timeout=30)
elif service == 'get_daily_forecast':
cache_data = query_agent.vip.rpc.call(identity, service, locations, days=2).get(timeout=30)
assert len(cache_data) == len(query_data)
for x in range(0, len(cache_data)):
query_location_data = query_data[x]
print(query_location_data)
cache_location_data = cache_data[x]
print(cache_location_data)
assert cache_location_data.get("generation_time") == query_location_data.get("generation_time")
assert cache_location_data.get("lat") == query_location_data.get("lat")
assert cache_location_data.get("long") == query_location_data.get("long")
if cache_location_data.get("weather_results"):
query_weather_results = query_location_data.get("weather_results")
cache_weather_results = cache_location_data.get("weather_results")
for y in range(0, len(query_weather_results)):
result = query_weather_results[y]
cache_result = cache_weather_results[y]
query_time, oldtz = utils.process_timestamp(result[0])
query_time = utils.format_timestamp(query_time)
assert query_time == cache_result[0]
for key in cache_result[1]:
assert cache_result[1][key] == result[1][key]
@pytest.mark.parametrize("locations, service", [
([{"lat": 39.7555, "long": -105.2211}], 'get_minutely_forecast'),
([{"lat": 39.7555, "long": -105.2211}, {"lat": 46.2804, "long": -119.2752}],
'get_minutely_forecast'),
([{"lat": 39.7555, "long": -105.2211}], 'get_daily_forecast'),
([{"lat": 39.7555, "long": -105.2211}, {"lat": 46.2804, "long": -119.2752}],
'get_daily_forecast'),
([{"lat": 39.7555, "long": -105.2211}], 'get_hourly_forecast'),
([{"lat": 39.7555, "long": -105.2211}, {"lat": 46.2804, "long": -119.2752}],
'get_hourly_forecast'),
])
@pytest.mark.darksky
def test_more_than_default_forecast(volttron_instance, cleanup_cache, weather, query_agent, locations, service):
identity = weather[1]
big_request = 0
query_data = []
cache_data = []
if service == 'get_minutely_forecast':
big_request = 61
query_data = query_agent.vip.rpc.call(identity, service, locations, minutes=big_request).get(timeout=30)
if big_request > 60:
big_request = 60 # dark sky provides 60 minutes max.
elif service == 'get_hourly_forecast':
big_request = 50
query_data = query_agent.vip.rpc.call(identity, service, locations, hours=big_request).get(timeout=30)
elif service == 'get_daily_forecast':
big_request = 9
query_data = query_agent.vip.rpc.call(identity, service, locations, days=big_request).get(timeout=30)
else:
pytest.fail('invalid request type')
if query_data[0].get("weather_error"):
error = query_data[0].get("weather_error")
if error.endswith("Remote API returned Code 403"):
pytest.skip("API key has exceeded daily call limit")
assert len(query_data) == len(locations)
for record in query_data:
assert len(record['weather_results']) == big_request
if service == 'get_minutely_forecast':
cache_data = query_agent.vip.rpc.call(identity, service, locations, minutes=big_request).get(timeout=30)
elif service == 'get_hourly_forecast':
cache_data = query_agent.vip.rpc.call(identity, service, locations, hours=big_request).get(timeout=30)
elif service == 'get_daily_forecast':
cache_data = query_agent.vip.rpc.call(identity, service, locations, days=big_request).get(timeout=30)
assert len(cache_data) == len(query_data)
print("Query data: \n {}".format(query_data))
print("Cache data: \n {}".format(cache_data))
# TODO: verify that we get the right forecast times
for x in range(0, len(cache_data)):
query_location_data = query_data[x]
cache_location_data = cache_data[x]
assert cache_location_data.get("generation_time") == query_location_data.get("generation_time")
assert cache_location_data.get("lat") == query_location_data.get("lat")
assert cache_location_data.get("long") == query_location_data.get("long")
if cache_location_data.get("weather_results"):
query_weather_results = query_location_data.get("weather_results")
cache_weather_results = cache_location_data.get("weather_results")
for y in range(0, len(query_weather_results)):
result = query_weather_results[y]
cache_result = cache_weather_results[y]
query_time, oldtz = utils.process_timestamp(result[0])
query_time = utils.format_timestamp(query_time)
assert query_time == cache_result[0]
for key in cache_result[1]:
assert cache_result[1][key] == result[1][key]
@pytest.mark.parametrize("locations, service", [
(["fail"], 'get_minutely_forecast'),
([{"lat": 39.7555}], 'get_minutely_forecast'),
([], 'get_minutely_forecast'),
(["fail"], 'get_hourly_forecast'),
([{"lat": 39.7555}], 'get_hourly_forecast'),
([], 'get_hourly_forecast'),
(["fail"], 'get_daily_forecast'),
([{"lat": 39.7555}], 'get_daily_forecast'),
([], 'get_daily_forecast')
])
@pytest.mark.darksky
def test_forecast_fail(weather, query_agent, locations, service):
identity = weather[1]
query_data = query_agent.vip.rpc.call(identity, service, locations).get(timeout=30)
for record in query_data:
error = record.get("weather_error")
if error.startswith("Invalid location format."):
assert error.startswith("Invalid location format.")
elif error.startswith("Invalid location"):
assert error.startswith("Invalid location")
else:
assert False
assert record.get("weather_results") is None
@pytest.mark.darksky
@pytest.mark.parametrize('config, result_topics', [
({'poll_locations': [{"lat": 39.7555, "long": -105.2211},
{"lat": 46.2804, "long": 119.2752}],
'poll_interval': 5,
'api_key': API_KEY
},
['weather/poll/current/all']),
({'poll_locations': [{"lat": 39.7555, "long": -105.2211},
{"lat": 46.2804, "long": 119.2752}],
'poll_interval': 5,
'api_key': API_KEY,
'poll_topic_suffixes': ['test1', 'test2']
},
['weather/poll/current/test1', 'weather/poll/current/test2'])
])
def test_polling_locations_valid_config(volttron_instance, query_agent, config, result_topics):
agent_uuid = None
query_agent.poll_callback.reset_mock()
try:
agent_uuid = volttron_instance.install_agent(
vip_identity="poll.weather",
agent_dir=get_services_core("Darksky"),
start=False,
config_file=config)
volttron_instance.start_agent(agent_uuid)
gevent.sleep(3)
print(query_agent.poll_callback.call_args_list)
assert len(result_topics) == query_agent.poll_callback.call_count
assert "poll.weather" == query_agent.poll_callback.call_args[0][1]
i = 0
for topic in result_topics:
arguments = query_agent.poll_callback.call_args_list[i][0]
assert topic == arguments[3]
# header
assert isinstance(arguments[4], dict)
results1 = arguments[5]
if len(result_topics) > 1:
assert isinstance(results1, dict)
assert results1['observation_time']
assert results1['weather_results']
else:
assert isinstance(results1, list)
assert len(results1) == len(config["poll_locations"])
i = i + 1
assert query_agent.vip.rpc.call("poll.weather", "health.get_status").get(timeout=10).get('status') == \
STATUS_GOOD
finally:
if agent_uuid:
volttron_instance.stop_agent(agent_uuid)
volttron_instance.remove_agent(agent_uuid)
@pytest.mark.darksky
def test_default_config(volttron_instance, query_agent, cleanup_cache):
"""
Test the default configuration file included with the agent
"""
locations = [{"lat": 39.7555, "long": -105.2211}]
publish_agent = volttron_instance.build_agent(identity="test_agent")
gevent.sleep(1)
config_path = os.path.join(get_services_core("Darksky"), "config")
with open(config_path, "r") as config_file:
config_json = json.load(config_file)
assert isinstance(config_json, dict)
config_json["api_key"] = API_KEY
volttron_instance.install_agent(
agent_dir=get_services_core("Darksky"),
config_file=config_json,
start=True,
vip_identity="health_test")
assert publish_agent.vip.rpc.call("health_test", "health.get_status").get(timeout=10).get('status') == STATUS_GOOD
query_data = query_agent.vip.rpc.call("health_test", 'get_current_weather', locations).get(timeout=30)
if query_data[0].get("weather_error"):
error = query_data[0].get("weather_error")
if error.endswith("Remote API returned Code 403"):
pytest.skip("API key has exceeded daily call limit")
print(query_data)
assert len(query_data) == len(locations)
for record in query_data:
# check format here
assert record.get("observation_time")
assert (record.get("lat") and record.get("long"))
results = record.get("weather_results")
if results:
assert isinstance(results, dict)
assert "data" not in results
assert results["attribution"] == "Powered by Dark Sky"
else:
results = record.get("weather_error")
if results.startswith("Remote API returned no data") or \
results.startswith("Remote API redirected request, but redirect failed") \
or results.startswith("Remote API returned invalid response") \
or results.startswith("API request failed with unexpected response"):
assert True
else:
assert False
| 42.706777
| 118
| 0.661107
|
8cecb17ad9b6f53f7d3411c33eadbaa1010925f7
| 437
|
py
|
Python
|
primer/first_program.py
|
YunYouJun/python-learn
|
e41ce8ca289fbb6e1a14e07aee6d4b797e6d5d8c
|
[
"MIT"
] | null | null | null |
primer/first_program.py
|
YunYouJun/python-learn
|
e41ce8ca289fbb6e1a14e07aee6d4b797e6d5d8c
|
[
"MIT"
] | null | null | null |
primer/first_program.py
|
YunYouJun/python-learn
|
e41ce8ca289fbb6e1a14e07aee6d4b797e6d5d8c
|
[
"MIT"
] | null | null | null |
def isEqual(num1, num2):
if num1 < num2:
print(str(num1) + ' is too small!')
return False
elif num1 > num2:
print(str(num1) + ' is too big!')
return False
else:
print('Bingo!')
return True
from random import randint
num = randint(1, 100)
print('Guess what I think?')
bingo = False
while not bingo:
    answer = input()
    if answer:
        bingo = isEqual(int(answer), num)
| 19
| 43
| 0.567506
|
31e1beeddbba1d9cfa87276d266b6f232e49a938
| 7,143
|
py
|
Python
|
app_config.py
|
nprapps/books16
|
59a15a09d903f1690d2dbe7d766e05e2c8256a7c
|
[
"MIT"
] | 6
|
2016-12-08T07:29:23.000Z
|
2018-10-04T17:35:24.000Z
|
app_config.py
|
axiompiper/books16
|
59a15a09d903f1690d2dbe7d766e05e2c8256a7c
|
[
"MIT"
] | 2
|
2016-12-09T15:15:16.000Z
|
2017-01-27T21:29:13.000Z
|
app_config.py
|
axiompiper/books16
|
59a15a09d903f1690d2dbe7d766e05e2c8256a7c
|
[
"MIT"
] | 5
|
2016-12-27T16:27:23.000Z
|
2021-02-23T10:44:18.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Project-wide application configuration.
DO NOT STORE SECRETS, PASSWORDS, ETC. IN THIS FILE.
They will be exposed to users. Use environment variables instead.
See get_secrets() below for a fast way to access them.
"""
import os
import logging
from authomatic.providers import oauth2
from authomatic import Authomatic
"""
NAMES
"""
# Project name to be used in urls
# Use dashes, not underscores!
PROJECT_SLUG = 'best-books-2016'
# Allow override from local settings to test random_prod locally
try:
from local_settings import PROJECT_SLUG
except ImportError:
pass
# Project name to be used in file paths
PROJECT_FILENAME = 'books16'
# The name of the repository containing the source
REPOSITORY_NAME = 'books16'
GITHUB_USERNAME = 'nprapps'
REPOSITORY_URL = 'git@github.com:%s/%s.git' % (GITHUB_USERNAME, REPOSITORY_NAME)
REPOSITORY_ALT_URL = None # 'git@bitbucket.org:nprapps/%s.git' % REPOSITORY_NAME'
# Project name used for assets rig
# Should stay the same, even if PROJECT_SLUG changes
ASSETS_SLUG = 'books16'
"""
DEPLOYMENT
"""
PRODUCTION_S3_BUCKET = 'apps.npr.org'
STAGING_S3_BUCKET = 'stage-apps.npr.org'
ASSETS_S3_BUCKET = 'assets.apps.npr.org'
DEFAULT_MAX_AGE = 20
RELOAD_TRIGGER = False
RELOAD_CHECK_INTERVAL = 60
PRODUCTION_SERVERS = ['cron.nprapps.org']
STAGING_SERVERS = ['cron-staging.nprapps.org']
# Should code be deployed to the web/cron servers?
DEPLOY_TO_SERVERS = False
SERVER_USER = 'ubuntu'
SERVER_PYTHON = 'python2.7'
SERVER_PROJECT_PATH = '/home/%s/apps/%s' % (SERVER_USER, PROJECT_FILENAME)
SERVER_REPOSITORY_PATH = '%s/repository' % SERVER_PROJECT_PATH
SERVER_VIRTUALENV_PATH = '%s/virtualenv' % SERVER_PROJECT_PATH
# Should the crontab file be installed on the servers?
# If True, DEPLOY_TO_SERVERS must also be True
DEPLOY_CRONTAB = False
# Should the service configurations be installed on the servers?
# If True, DEPLOY_TO_SERVERS must also be True
DEPLOY_SERVICES = False
UWSGI_SOCKET_PATH = '/tmp/%s.uwsgi.sock' % PROJECT_FILENAME
# Services are the server-side services we want to enable and configure.
# A three-tuple following this format:
# (service name, service deployment path, service config file extension)
SERVER_SERVICES = [
('app', SERVER_REPOSITORY_PATH, 'ini'),
('uwsgi', '/etc/init', 'conf'),
('nginx', '/etc/nginx/locations-enabled', 'conf'),
]
# These variables will be set at runtime. See configure_targets() below
S3_BUCKET = None
S3_BASE_URL = None
S3_DEPLOY_URL = None
SERVERS = []
SERVER_BASE_URL = None
SERVER_LOG_PATH = None
DEBUG = True
"""
COPY EDITING
"""
COPY_GOOGLE_DOC_KEY = '1D7z6AocqErij7D8GMGMfxltxweu9yzPN60EuRDeaLNw'
COPY_PATH = 'data/copy.xlsx'
DATA_GOOGLE_DOC_KEY = '1frkTY_2BeCXsf0Uie9P3Pccx8eulCrr6bWkCOOYSTEU'
# Override
try:
from local_settings import DATA_GOOGLE_DOC_KEY
except ImportError:
pass
# Provide a csv path for testing locally if DATA_GOOGLE_DOC_KEY fails
LOCAL_CSV_PATH = None
try:
from local_settings import LOCAL_CSV_PATH
except ImportError:
pass
LINK_CATEGORY_MAP = {
'Author Interviews': 'Interview',
'Book Reviews': 'Review',
}
LINK_CATEGORY_DEFAULT = 'Feature'
USE_ITUNES_ID = True
try:
from local_settings import USE_ITUNES_ID
except ImportError:
pass
"""
SHARING
"""
SHARE_URL = 'http://%s/%s/' % (PRODUCTION_S3_BUCKET, PROJECT_SLUG)
"""
ADS
"""
NPR_DFP = {
'STORY_ID': '1002',
'TARGET': 'homepage',
'ENVIRONMENT': 'NPRTEST',
'TESTSERVER': 'false'
}
"""
SERVICES
"""
NPR_GOOGLE_ANALYTICS = {
'ACCOUNT_ID': 'UA-5828686-4',
'DOMAIN': PRODUCTION_S3_BUCKET,
'TOPICS': '[1032,1008,1002]',
}
VIZ_GOOGLE_ANALYTICS = {
'ACCOUNT_ID': 'UA-5828686-75'
}
"""
Logging
"""
LOG_FORMAT = '%(levelname)s:%(name)s:%(asctime)s: %(message)s'
LOG_LEVEL = None
"""
OAUTH
"""
GOOGLE_OAUTH_CREDENTIALS_PATH = '~/.google_oauth_credentials'
authomatic_config = {
'google': {
'id': 1,
'class_': oauth2.Google,
'consumer_key': os.environ.get('GOOGLE_OAUTH_CLIENT_ID'),
'consumer_secret': os.environ.get('GOOGLE_OAUTH_CONSUMER_SECRET'),
'scope': ['https://www.googleapis.com/auth/drive', 'https://www.googleapis.com/auth/userinfo.email'],
'offline': True,
},
}
authomatic = Authomatic(authomatic_config, os.environ.get('AUTHOMATIC_SALT'))
"""
Utilities
"""
def get_secrets():
"""
A method for accessing our secrets.
"""
secrets_dict = {}
for k,v in os.environ.items():
if k.startswith(PROJECT_FILENAME):
k = k[len(PROJECT_FILENAME) + 1:]
secrets_dict[k] = v
return secrets_dict
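# Illustrative example (not part of the original file): with PROJECT_FILENAME set
# to 'books16', an environment variable such as
#     books16_RANDOM_SUFFIX=abc123
# comes back from get_secrets() as {'RANDOM_SUFFIX': 'abc123'}; the project prefix
# and the following underscore are stripped from the key. The value shown is a placeholder.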
def configure_targets(deployment_target):
"""
Configure deployment targets. Abstracted so this can be
overriden for rendering before deployment.
"""
global S3_BUCKET
global S3_BASE_URL
global S3_DEPLOY_URL
global SERVERS
global SERVER_BASE_URL
global SERVER_LOG_PATH
global DEBUG
global DEPLOYMENT_TARGET
global ASSETS_MAX_AGE
global LOG_LEVEL
global PROJECT_SLUG
global SHARE_URL
if deployment_target == 'production':
S3_BUCKET = PRODUCTION_S3_BUCKET
S3_BASE_URL = 'https://%s/%s' % (S3_BUCKET, PROJECT_SLUG)
S3_DEPLOY_URL = 's3://%s/%s' % (S3_BUCKET, PROJECT_SLUG)
SERVERS = PRODUCTION_SERVERS
SERVER_BASE_URL = 'https://%s/%s' % (SERVERS[0], PROJECT_SLUG)
SERVER_LOG_PATH = '/var/log/%s' % PROJECT_FILENAME
DEBUG = False
LOG_LEVEL = logging.ERROR
ASSETS_MAX_AGE = 86400
elif deployment_target == 'random_prod':
secrets = get_secrets()
S3_BUCKET = PRODUCTION_S3_BUCKET
PROJECT_SLUG = '%s-%s' % (PROJECT_SLUG, secrets['RANDOM_SUFFIX'])
SHARE_URL = 'https://%s/%s/' % (PRODUCTION_S3_BUCKET, PROJECT_SLUG)
S3_BASE_URL = 'https://%s/%s' % (S3_BUCKET, PROJECT_SLUG)
S3_DEPLOY_URL = 's3://%s/%s' % (S3_BUCKET, PROJECT_SLUG)
SERVERS = PRODUCTION_SERVERS
SERVER_BASE_URL = 'https://%s/%s' % (SERVERS[0], PROJECT_SLUG)
SERVER_LOG_PATH = '/var/log/%s' % PROJECT_FILENAME
DEBUG = False
LOG_LEVEL = logging.ERROR
ASSETS_MAX_AGE = 86400
elif deployment_target == 'staging':
S3_BUCKET = STAGING_S3_BUCKET
S3_BASE_URL = 'http://%s/%s' % (S3_BUCKET, PROJECT_SLUG)
S3_DEPLOY_URL = 's3://%s/%s' % (S3_BUCKET, PROJECT_SLUG)
SERVERS = STAGING_SERVERS
SERVER_BASE_URL = 'https://%s/%s' % (SERVERS[0], PROJECT_SLUG)
SERVER_LOG_PATH = '/var/log/%s' % PROJECT_FILENAME
DEBUG = False
LOG_LEVEL = logging.INFO
ASSETS_MAX_AGE = 20
else:
S3_BUCKET = None
S3_BASE_URL = 'http://127.0.0.1:8000'
S3_DEPLOY_URL = None
SERVERS = []
SERVER_BASE_URL = '//127.0.0.1:8001/%s' % PROJECT_SLUG
SERVER_LOG_PATH = '/tmp'
DEBUG = True
LOG_LEVEL = logging.INFO
ASSETS_MAX_AGE = 20
DEPLOYMENT_TARGET = deployment_target
"""
Run automated configuration
"""
DEPLOYMENT_TARGET = os.environ.get('DEPLOYMENT_TARGET', None)
configure_targets(DEPLOYMENT_TARGET)
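# Illustrative example (not part of the original file): because the target is read
# from the environment at import time, running for instance
#     DEPLOYMENT_TARGET=staging python -c "import app_config; print(app_config.S3_BUCKET)"
# should print the staging bucket ('stage-apps.npr.org'), while leaving
# DEPLOYMENT_TARGET unset falls through to the local-development defaults.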
| 26.261029
| 109
| 0.692286
|
b3c8985caafd34e4798b11c67518d93ee70eab66
| 40
|
py
|
Python
|
tests/components/opnsense/__init__.py
|
domwillcode/home-assistant
|
f170c80bea70c939c098b5c88320a1c789858958
|
[
"Apache-2.0"
] | 30,023
|
2016-04-13T10:17:53.000Z
|
2020-03-02T12:56:31.000Z
|
tests/components/opnsense/__init__.py
|
jagadeeshvenkatesh/core
|
1bd982668449815fee2105478569f8e4b5670add
|
[
"Apache-2.0"
] | 31,101
|
2020-03-02T13:00:16.000Z
|
2022-03-31T23:57:36.000Z
|
tests/components/opnsense/__init__.py
|
jagadeeshvenkatesh/core
|
1bd982668449815fee2105478569f8e4b5670add
|
[
"Apache-2.0"
] | 11,956
|
2016-04-13T18:42:31.000Z
|
2020-03-02T09:32:12.000Z
|
"""Tests for the opnsense component."""
| 20
| 39
| 0.7
|
1607ba4b44908747c7f65f0b07183fee755a3372
| 25,681
|
py
|
Python
|
src/encoders.py
|
jhunhwang/goldenretriever
|
08df451c2d726678d91bab372936e95b6cf88732
|
[
"Apache-2.0"
] | 8
|
2020-03-06T02:22:24.000Z
|
2022-03-08T04:18:42.000Z
|
src/encoders.py
|
jhunhwang/goldenretriever
|
08df451c2d726678d91bab372936e95b6cf88732
|
[
"Apache-2.0"
] | 7
|
2020-11-13T18:54:23.000Z
|
2022-02-10T02:29:15.000Z
|
src/encoders.py
|
jhunhwang/goldenretriever
|
08df451c2d726678d91bab372936e95b6cf88732
|
[
"Apache-2.0"
] | 3
|
2020-11-12T13:18:13.000Z
|
2021-10-15T05:50:44.000Z
|
import logging
import os
import tensorflow as tf
import tensorflow_addons as tfa  # needed for the contrastive loss option used below
import tensorflow_hub as hub
from abc import ABC, abstractmethod
from transformers import AlbertTokenizer, TFAlbertModel
from .loss_functions import triplet_loss
from .tokenizers.bert_tokenization import FullTokenizer, preprocess_str
logger = logging.getLogger(__name__)
class Encoder(ABC):
"""a shared encoder interface
Each encoder should provide an encode() method"""
@abstractmethod
def __init__(self):
pass
@abstractmethod
def encode(self):
pass
@abstractmethod
def finetune_weights(self):
pass
@abstractmethod
def save_weights(self):
pass
@abstractmethod
def restore_weights(self):
pass
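# --- Illustrative sketch (not part of the original module): a minimal concrete
# Encoder that only demonstrates the interface contract. It returns zero vectors
# instead of real embeddings and performs no training, saving or loading.
class _DummyEncoder(Encoder):
    def __init__(self, dim=768):
        self.dim = dim
    def encode(self, text, context=None, string_type='response'):
        # one zero vector per input string
        texts = [text] if isinstance(text, str) else list(text)
        return tf.zeros([len(texts), self.dim], dtype=tf.float32)
    def finetune_weights(self, question, answer, **kwargs):
        return 0.0  # nothing to finetune in this stub
    def save_weights(self, save_dir=None):
        pass
    def restore_weights(self, save_dir=None):
        pass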
class USEEncoder(Encoder):
def __init__(self, max_seq_length=None, **kwargs):
# Not used for USEEncoder but included for standardization across encoders
self.max_seq_length = max_seq_length
# variables to be finetuned
# self.v=['QA/Final/Response_tuning/ResidualHidden_1/dense/kernel','QA/Final/Response_tuning/ResidualHidden_0/dense/kernel', 'QA/Final/Response_tuning/ResidualHidden_1/AdjustDepth/projection/kernel']
self.v = ['QA/Final/Response_tuning/ResidualHidden_1/AdjustDepth/projection/kernel']
if kwargs:
self.opt_params = kwargs
else:
# good defaults for params
self.opt_params = {
'learning_rate': 0.001,
'beta_1': 0.9,
'beta_2': 0.999,
'epsilon': 1e-07
}
# init saved model
self.embed = hub.load('https://tfhub.dev/google/universal-sentence-encoder-qa/3')
self.init_signatures()
def init_signatures(self):
# re-initialize the references to the model signatures
self.question_encoder = self.embed.signatures['question_encoder']
self.response_encoder = self.embed.signatures['response_encoder']
self.neg_response_encoder = self.embed.signatures['response_encoder']
print('model initiated!')
# optimizer
self.optimizer = tf.keras.optimizers.Adam(**self.opt_params)
# retrieve the weights we want to finetune.
self.var_finetune = [x for x in self.embed.variables
for vv in self.v if vv in x.name]
def encode(self, text, context=None, string_type=None):
if string_type == 'query':
if isinstance(text, str):
return self.question_encoder(tf.constant([text]))['outputs']
elif hasattr(text, '__iter__'):
return tf.concat(
[self.question_encoder(tf.constant([one_text]))['outputs']
for one_text in text], axis=0
)
elif string_type == 'response':
"""
A frequent error is OOM - Error recorded below.
The fix is to encode each entry separately.
This is implemented in a list comprehension.
"""
if not context:
context = text
if isinstance(text, str):
return self.response_encoder(
input=tf.constant([text]),
context=tf.constant([context])
)['outputs']
elif hasattr(text, '__iter__'):
encoded_responses = [self.response_encoder(input=tf.constant([t]),
context=tf.constant([c]))['outputs']
for t, c in zip(text, context)]
encoded_responses_tensor = tf.concat(encoded_responses, axis=0)
return encoded_responses_tensor
else:
print('Type of prediction not defined')
def finetune_weights(self, question, answer, margin=0.3,
loss='triplet', context=[], neg_answer=[],
neg_answer_context=[], label=[]):
"""
Finetune the model with GradientTape
:type question: list of str
:type answer: list of str
:type context: list of str
:type neg_answer: list of str
:type neg_answer_context: list of str
:type margin: float
:type label: list of int
:type loss: str
:param question: List of string queries
:param answer: List of string responses
:param context: List of string response contexts, this is applicable to the USE model
:param neg_answer: List of string responses that do not match with the queries. This is applicable for triplet / contrastive loss.
:param neg_answer_context: Similar to neg_answer for the USE model to ingest
:param label: List of int
        :param margin: Margin tuning parameter for triplet / contrastive loss
:param loss: Specify loss function
:return: numpy array of mean loss value
"""
self.cost_history = []
with tf.GradientTape() as tape:
# get encodings
question_embeddings = self.question_encoder(
tf.constant(question)
)['outputs']
response_embeddings = self.response_encoder(
input=tf.constant(answer),
context=tf.constant(context)
)['outputs']
if loss == 'cosine':
"""
# https://www.tensorflow.org/api_docs/python/tf/keras/losses/CosineSimilarity
"""
self.cost = tf.keras.losses.CosineSimilarity(axis=1)
cost_value = self.cost(question_embeddings,
response_embeddings)
elif loss == 'contrastive':
"""
https://www.tensorflow.org/addons/api_docs/python/tfa/losses/ContrastiveLoss
y_true to be a vector of binary labels
y_hat to be the respective distances
"""
self.cosine_dist = tf.keras.losses.CosineSimilarity(axis=1)
cosine_dist_value = self.cosine_dist(question_embeddings,
response_embeddings)
self.cost = tfa.losses.contrastive.ContrastiveLoss(margin=margin)
cost_value = self.cost(label, cosine_dist_value)
elif loss == 'triplet':
"""
                Triplet loss uses a non-official, self-implemented loss function outside of TF based on cosine distance
"""
neg_response_embeddings = self.neg_response_encoder(
input=tf.constant(neg_answer),
context=tf.constant(neg_answer_context)
)['outputs']
cost_value = triplet_loss(
question_embeddings,
response_embeddings,
neg_response_embeddings,
margin=margin
)
# record loss
self.cost_history.append(cost_value.numpy().mean())
# apply gradient
grads = tape.gradient(cost_value, self.var_finetune)
self.optimizer.apply_gradients(zip(grads, self.var_finetune))
return cost_value.numpy().mean()
def save_weights(self, save_dir=None):
'''
Save model weights in folder directory
'''
tf.saved_model.save(
self.embed,
save_dir,
signatures={
'default': self.embed.signatures['default'],
'response_encoder':self.embed.signatures['response_encoder'],
'question_encoder':self.embed.signatures['question_encoder']
}
)
def restore_weights(self, save_dir=None):
"""
Signatures need to be re-init after weights are loaded.
"""
self.embed = tf.saved_model.load(save_dir)
self.init_signatures()
class ALBERTEncoder(Encoder):
def __init__(self, max_seq_length=512):
# ALBERT unique params
self.max_seq_length = max_seq_length
# GR params
self.vectorized_knowledge = {}
self.text = {}
self.questions = {}
self.opt_params = {
'learning_rate': 0.001,
'beta_1': 0.9,
'beta_2': 0.999,
'epsilon': 1e-07
}
# init saved model
self.albert_layer = TFAlbertModel.from_pretrained('albert-base-v2')
# writing the model for the training tasks
# get inputs
res_id = tf.keras.layers.Input(
shape=(self.max_seq_length,),
name="input_ids",
dtype='int32'
)
res_mask = tf.keras.layers.Input(
shape=(self.max_seq_length,),
name="input_masks",
dtype='int32'
)
res_segment = tf.keras.layers.Input(
shape=(self.max_seq_length,),
name="input_seg",
dtype='int32'
)
# encode the three inputs
_, res_pooled = self.albert_layer([res_id, res_mask, res_segment])
# dense layer specifically for
self.response_encoder = tf.keras.layers.Dense(
768, input_shape=(768,),
name='response_dense_layer'
)
encoded_response = self.response_encoder(res_pooled)
# init model
self.albert_model = tf.keras.Model(
inputs=[res_id, res_mask, res_segment],
outputs=encoded_response
)
print("Initializing tokenizer and optimizer")
self.init_signatures()
def init_signatures(self):
"""
Re-init references to layers and model attributes
When restoring the model, the references to the vocab file / layers would be lost.
"""
self.tokenizer = AlbertTokenizer.from_pretrained('albert-base-v2')
# init optimizer
self.optimizer = tf.keras.optimizers.Adam(**self.opt_params)
self.cost_history = []
        # TF-Hub page recommends finetuning all weights
# "All parameters in the module are trainable,
# and fine-tuning all parameters is the recommended practice."
self.var_finetune = self.albert_model.variables
print('model initiated!')
def _encode_one_str(self, text, string_type='response'):
"""
Return the tensor representing embedding of input text.
Type can be 'query' or 'response'
:type text: str or iterable of str
:type type: str
:param text: This contains the text that is required to be encoded
        :param type: Either 'response' or 'query'. Default is 'response'. This selects whether the query encoder or the response encoder (with its extra dense layer) is used
:return: tf.tensor that contains the 768 dim encoding of the input text
"""
if string_type == 'query':
question_id_mask_seg = preprocess_str(
text, self.max_seq_length,
self.tokenizer
)
question_embedding = self.albert_layer(
[tf.constant(question_id_mask_seg[0]),
tf.constant(question_id_mask_seg[1]),
tf.constant(question_id_mask_seg[2])]
)[1]
return question_embedding
if string_type == 'response':
response_id_mask_seg = preprocess_str(
text, self.max_seq_length,
self.tokenizer
)
response_embedding = self.albert_model(
[tf.constant(response_id_mask_seg[0]),
tf.constant(response_id_mask_seg[1]),
tf.constant(response_id_mask_seg[2])]
)
return response_embedding
def encode(self, text, context=None, string_type='response'):
"""
Encode an iterable of strings
"""
encoded_strings = [self._encode_one_str(t, string_type=string_type)
for t in text]
encoded_tensor = tf.concat(encoded_strings, axis=0)
return encoded_tensor
def finetune_weights(self, question, answer, margin=0.3,
loss='triplet', context=[], neg_answer=[],
neg_answer_context=[], label=[]):
"""
Finetune the model with GradientTape
:type question: list of str
:type answer: list of str
:type context: list of str
:type neg_answer: list of str
:type neg_answer_context: list of str
:type margin: float
:type label: list of int
:type loss: str
:param question: List of string queries
:param answer: List of string responses
:param context: List of string response contexts, this is applicable to the USE model
:param neg_answer: List of string responses that do not match with the queries. This is applicable for triplet / contrastive loss.
:param neg_answer_context: Similar to neg_answer for the USE model to ingest
:param label: List of int
        :param margin: Margin tuning parameter for triplet / contrastive loss
:param loss: Specify loss function
:return: numpy array of mean loss value
"""
question_id_mask_seg = preprocess_str(
question, self.max_seq_length,
self.tokenizer
)
response_id_mask_seg = preprocess_str(
answer, self.max_seq_length,
self.tokenizer
)
# for eager execution finetuning
with tf.GradientTape() as tape:
# tf-hub's keras layer can take the lists directly
# but the bert_model object needs the inputs to be tf.constants
question_embeddings = self.albert_layer(
[tf.constant(question_id_mask_seg[0]),
tf.constant(question_id_mask_seg[1]),
tf.constant(question_id_mask_seg[2])]
)[1]
response_embeddings = self.albert_model(
[tf.constant(response_id_mask_seg[0]),
tf.constant(response_id_mask_seg[1]),
tf.constant(response_id_mask_seg[2])]
)
if loss == 'cosine':
self.cost = tf.keras.losses.CosineSimilarity(axis=1)
cost_value = self.cost(question_embeddings,
response_embeddings)
elif loss == 'contrastive':
"""
https://www.tensorflow.org/addons/api_docs/python/tfa/losses/ContrastiveLoss
y_true to be a vector of binary labels
y_hat to be the respective distances
"""
self.cosine_dist = tf.keras.losses.CosineSimilarity(axis=1)
cosine_dist_value = self.cosine_dist(question_embeddings,
response_embeddings)
self.cost = tfa.losses.contrastive.ContrastiveLoss(margin=margin)
cost_value = self.cost(label, cosine_dist_value)
elif loss == 'triplet':
"""
                Triplet loss uses a non-official, self-implemented loss function outside of TF based on cosine distance
"""
# encode the negative response
neg_answer_id_mask_seg = preprocess_str(
neg_answer,
self.max_seq_length,
self.tokenizer
)
neg_response_embeddings = self.albert_model(
[tf.constant(neg_answer_id_mask_seg[0]),
tf.constant(neg_answer_id_mask_seg[1]),
tf.constant(neg_answer_id_mask_seg[2])]
)
cost_value = triplet_loss(question_embeddings,
response_embeddings,
neg_response_embeddings)
# record loss
self.cost_history.append(cost_value.numpy().mean())
# apply gradient
self.grads = tape.gradient(cost_value, self.var_finetune)
self.optimizer.apply_gradients(zip(self.grads, self.var_finetune))
return cost_value.numpy().mean()
def save_weights(self, save_dir=None):
'''Save the BERT model weights into a directory'''
# model.save does not work if there are layers that are subclassed (eg. huggingface models)
save_path = os.path.join(save_dir, 'model')
self.albert_model.save_weights(save_path)
def restore_weights(self, save_dir=None):
"""Load weights from savepath"""
save_path = os.path.join(save_dir, 'model')
self.albert_model.load_weights(save_path)
class BERTEncoder(Encoder):
def __init__(self, max_seq_length=512):
# BERT unique params
self.max_seq_length = max_seq_length
# GR params
self.vectorized_knowledge = {}
self.text = {}
self.questions = {}
self.opt_params = {
'learning_rate': 0.001,
'beta_1': 0.9,
'beta_2': 0.999,
'epsilon': 1e-07
}
# init saved model
# self.bert_layer = hub.KerasLayer("https://tfhub.dev/tensorflow/bert_en_uncased_L-12_H-768_A-12/1", trainable=True) # uncased and smaller model
self.bert_layer = hub.KerasLayer(
"https://tfhub.dev/tensorflow/bert_en_uncased_L-12_H-768_A-12/1",
trainable=True
)
self.vocab_file = self.bert_layer.resolved_object.vocab_file.asset_path.numpy()
self.do_lower_case = self.bert_layer.resolved_object.do_lower_case.numpy()
# writing the model for the training tasks
# get inputs
res_id = tf.keras.layers.Input(
shape=(self.max_seq_length,),
name="input_ids",
dtype='int32'
)
res_mask = tf.keras.layers.Input(
shape=(self.max_seq_length,),
name="input_masks",
dtype='int32'
)
res_segment = tf.keras.layers.Input(
shape=(self.max_seq_length,),
name="input_seg",dtype='int32'
)
# encode the three inputs
res_pooled, res_seq = self.bert_layer([res_id, res_mask, res_segment])
# dense layer specifically for
self.response_encoder = tf.keras.layers.Dense(
768, input_shape=(768,),
name='response_dense_layer'
)
encoded_response = self.response_encoder(res_pooled)
# init model
self.bert_model = tf.keras.Model(
inputs=[res_id, res_mask, res_segment],
outputs=encoded_response
)
print("Downloaded model from Hub, initializing tokenizer and optimizer")
self.init_signatures()
def init_signatures(self):
"""
Re-init references to layers and model attributes
When restoring the model, the references to the vocab file / layers would be lost.
"""
# init tokenizer from hub layer
self.tokenizer = FullTokenizer(self.vocab_file, self.do_lower_case)
# init optimizer
self.optimizer = tf.keras.optimizers.Adam(**self.opt_params)
self.cost_history = []
# bert layer name
self.bert_layer_name = [layer.name for layer in self.bert_model.layers
if layer.name.startswith('keras_layer')][0]
        # TF-Hub page recommends finetuning all weights
# "All parameters in the module are trainable,
# and fine-tuning all parameters is the recommended practice."
self.var_finetune = self.bert_model.variables
print('model initiated!')
def encode(self, text, context=None, string_type='response'):
"""
Return the tensor representing embedding of input text.
Type can be 'query' or 'response'
:type text: str or iterable of str
:type type: str
:param text: This contains the text that is required to be encoded
        :param type: Either 'response' or 'query'. Default is 'response'. This selects whether the query encoder or the response encoder (with its extra dense layer) is used
:return: a tf.tensor that contains the 768 dim encoding of the input text
"""
if string_type == 'query':
question_id_mask_seg = preprocess_str(
text, self.max_seq_length,
self.tokenizer
)
question_embeddings, q_sequence_output = self.bert_model.get_layer(self.bert_layer_name)(question_id_mask_seg)
return question_embeddings
elif string_type == 'response':
response_id_mask_seg = preprocess_str(
text, self.max_seq_length,
self.tokenizer
)
response_embeddings = self.bert_model(
[tf.constant(response_id_mask_seg[0]),
tf.constant(response_id_mask_seg[1]),
tf.constant(response_id_mask_seg[2])]
)
return response_embeddings
def finetune_weights(self, question, answer, margin=0.3,
loss='triplet', context=[], neg_answer=[],
neg_answer_context=[], label=[]):
"""
Finetune the model with GradientTape
:type question: list of str
:type answer: list of str
:type context: list of str
:type neg_answer: list of str
:type neg_answer_context: list of str
:type margin: float
:type label: list of int
:type loss: str
:param question: List of string queries
:param answer: List of string responses
:param context: List of string response contexts, this is applicable to the USE model
:param neg_answer: List of string responses that do not match with the queries. This is applicable for triplet / contrastive loss.
:param neg_answer_context: Similar to neg_answer for the USE model to ingest
:param label: List of int
        :param margin: Margin tuning parameter for triplet / contrastive loss
:param loss: Specify loss function
:return: numpy array of mean loss value
"""
question_id_mask_seg = preprocess_str(
question, self.max_seq_length,
self.tokenizer
)
response_id_mask_seg = preprocess_str(
answer, self.max_seq_length,
self.tokenizer
)
# for eager execution finetuning
with tf.GradientTape() as tape:
# tf-hub's keras layer can take the lists directly
# but the bert_model object needs the inputs to be tf.constants
question_embeddings, q_sequence_output = self.bert_model.get_layer(self.bert_layer_name)(question_id_mask_seg)
response_embeddings = self.bert_model(
[tf.constant(response_id_mask_seg[0]),
tf.constant(response_id_mask_seg[1]),
tf.constant(response_id_mask_seg[2])]
)
if loss == 'cosine':
self.cost = tf.keras.losses.CosineSimilarity(axis=1)
cost_value = self.cost(question_embeddings,
response_embeddings)
elif loss == 'contrastive':
"""
https://www.tensorflow.org/addons/api_docs/python/tfa/losses/ContrastiveLoss
y_true to be a vector of binary labels
y_hat to be the respective distances
"""
self.cosine_dist = tf.keras.losses.CosineSimilarity(axis=1)
cosine_dist_value = self.cosine_dist(question_embeddings,
response_embeddings)
self.cost = tfa.losses.contrastive.ContrastiveLoss(margin=margin)
cost_value = self.cost(label, cosine_dist_value)
elif loss == 'triplet':
"""
                Triplet loss uses a non-official, self-implemented loss function outside of TF based on cosine distance
"""
# encode the negative response
neg_answer_id_mask_seg = preprocess_str(
neg_answer,
self.max_seq_length,
self.tokenizer
)
neg_response_embeddings = self.bert_model(
[tf.constant(neg_answer_id_mask_seg[0]),
tf.constant(neg_answer_id_mask_seg[1]),
tf.constant(neg_answer_id_mask_seg[2])])
cost_value = triplet_loss(question_embeddings,
response_embeddings,
neg_response_embeddings)
# record loss
self.cost_history.append(cost_value.numpy().mean())
# apply gradient
self.grads = tape.gradient(cost_value, self.var_finetune)
self.optimizer.apply_gradients(zip(self.grads, self.var_finetune))
return cost_value.numpy().mean()
def save_weights(self, save_dir=None):
'''Save the BERT model into a directory'''
self.bert_model.vocab_file = self.vocab_file
self.bert_model.do_lower_case = self.do_lower_case
self.bert_model.save(save_dir, include_optimizer=False)
def restore_weights(self, save_dir=None):
"""Load saved model from savepath"""
self.bert_model = tf.keras.models.load_model(
save_dir,
custom_objects={'KerasLayer': hub.KerasLayer}
)
self.init_signatures()
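# --- Illustrative usage sketch (not part of the original module) ---
# Shows the encode/finetune/save flow shared by the encoders above. The example
# strings and the save directory are placeholders; USEEncoder() downloads the
# universal-sentence-encoder-qa model from TF Hub on first use.
if __name__ == '__main__':
    enc = USEEncoder()
    query_vectors = enc.encode(['how do I reset my password?'], string_type='query')
    response_vectors = enc.encode(['Use the "Forgot password" link on the login page.'],
                                  context=['account help'], string_type='response')
    mean_loss = enc.finetune_weights(
        question=['how do I reset my password?'],
        answer=['Use the "Forgot password" link on the login page.'],
        context=['account help'],
        neg_answer=['Our office is open from 9am to 5pm.'],
        neg_answer_context=['general'],
        loss='triplet')
    enc.save_weights(save_dir='/tmp/use_finetuned')  # placeholder directory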
| 36.530583
| 207
| 0.586504
|
b517b6ee5854bb7f9ac4a6c845a73aeb5b209be8
| 64,857
|
py
|
Python
|
pybind/nos/v6_0_2c/rbridge_id/router/router_bgp/router_bgp_attributes/__init__.py
|
shivharis/pybind
|
4e1c6d54b9fd722ccec25546ba2413d79ce337e6
|
[
"Apache-2.0"
] | null | null | null |
pybind/nos/v6_0_2c/rbridge_id/router/router_bgp/router_bgp_attributes/__init__.py
|
shivharis/pybind
|
4e1c6d54b9fd722ccec25546ba2413d79ce337e6
|
[
"Apache-2.0"
] | null | null | null |
pybind/nos/v6_0_2c/rbridge_id/router/router_bgp/router_bgp_attributes/__init__.py
|
shivharis/pybind
|
4e1c6d54b9fd722ccec25546ba2413d79ce337e6
|
[
"Apache-2.0"
] | 1
|
2021-11-05T22:15:42.000Z
|
2021-11-05T22:15:42.000Z
|
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
import cluster_id
import distance
import capability
import maxas_limit
import timers
import confederation
import bfd
import neighbor
class router_bgp_attributes(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module brocade-rbridge - based on the path /rbridge-id/router/router-bgp/router-bgp-attributes. Each member element of
the container is represented as a class variable - with a specific
YANG type.
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__local_as','__always_compare_med','__compare_med_empty_aspath','__med_missing_as_worst','__as_path_ignore','__compare_routerid','__install_igp_cost','__cluster_id','__default_local_preference','__distance','__capability','__maxas_limit','__enforce_first_as','__fast_external_fallover','__timers','__log_dampening_debug','__confederation','__bfd','__neighbor',)
_yang_name = 'router-bgp-attributes'
_rest_name = ''
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
path_helper_ = kwargs.pop("path_helper", None)
if path_helper_ is False:
self._path_helper = False
elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
self._path_helper = path_helper_
elif hasattr(self, "_parent"):
path_helper_ = getattr(self._parent, "_path_helper", False)
self._path_helper = path_helper_
else:
self._path_helper = False
extmethods = kwargs.pop("extmethods", None)
if extmethods is False:
self._extmethods = False
elif extmethods is not None and isinstance(extmethods, dict):
self._extmethods = extmethods
elif hasattr(self, "_parent"):
extmethods = getattr(self._parent, "_extmethods", None)
self._extmethods = extmethods
else:
self._extmethods = False
self.__distance = YANGDynClass(base=distance.distance, is_container='container', presence=False, yang_name="distance", rest_name="distance", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-compact-syntax': None, u'info': u'Define an administrative distance', u'cli-sequence-commands': None, u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
self.__compare_med_empty_aspath = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="compare-med-empty-aspath", rest_name="compare-med-empty-aspath", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Allow comparing MED from different neighbors even with empty as-path\nattribute'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
self.__default_local_preference = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..65535']}), is_leaf=True, yang_name="default-local-preference", rest_name="default-local-preference", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure default local preference value'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='local-preference-number', is_config=True)
self.__med_missing_as_worst = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="med-missing-as-worst", rest_name="med-missing-as-worst", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Consider routes missing MED attribute as least desirable'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
self.__bfd = YANGDynClass(base=bfd.bfd, is_container='container', presence=False, yang_name="bfd", rest_name="bfd", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set BFD global parameters for BGP', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
self.__enforce_first_as = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="enforce-first-as", rest_name="enforce-first-as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Enforce the first AS for EBGP routes'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
self.__always_compare_med = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="always-compare-med", rest_name="always-compare-med", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Allow comparing MED from different neighbors'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
self.__local_as = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'((([0-9][0-9]{0,3})|([1-5][0-9]{4})|(6[0-4][0-9]{3})|(65[0-4][0-9]{2})|(655[0-2][0-9])|(6553[0-5]))\\.(([0-9][0-9]{0,3})|([1-5][0-9]{4})|(6[0-4][0-9]{3})|(65[0-4][0-9]{2})|(655[0-2][0-9])|(6553[0-5])))|([1-9][0-9]{0,8})|([1-3][0-9]{9})|(4[0-1][0-9]{8})|(42[0-8][0-9]{7})|(429[0-3][0-9]{6})|(4294[0-8][0-9]{5})|(42949[0-5][0-9]{4})|(429496[0-6][0-9]{3})|(4294967[0-1][0-9]{2})|(42949672[0-8][0-9])|(429496729[0-5])'}), is_leaf=True, yang_name="local-as", rest_name="local-as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure local AS number'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='local-as-type', is_config=True)
self.__timers = YANGDynClass(base=timers.timers, is_container='container', presence=False, yang_name="timers", rest_name="timers", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Adjust routing timers', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
self.__log_dampening_debug = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="log-dampening-debug", rest_name="log-dampening-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Log dampening debug messages'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
self.__install_igp_cost = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="install-igp-cost", rest_name="install-igp-cost", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Install igp cost to nexthop instead of MED value as BGP route cost'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
self.__fast_external_fallover = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="fast-external-fallover", rest_name="fast-external-fallover", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Reset session if link to EBGP peer goes down'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
self.__cluster_id = YANGDynClass(base=cluster_id.cluster_id, is_container='container', presence=False, yang_name="cluster-id", rest_name="cluster-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Route-Reflector Cluster-ID'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
self.__neighbor = YANGDynClass(base=neighbor.neighbor, is_container='container', presence=False, yang_name="neighbor", rest_name="neighbor", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Specify a neighbor router', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
self.__compare_routerid = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="compare-routerid", rest_name="compare-routerid", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Compare router-id for identical BGP paths'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
self.__capability = YANGDynClass(base=capability.capability, is_container='container', presence=False, yang_name="capability", rest_name="capability", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set capability', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
self.__as_path_ignore = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="as-path-ignore", rest_name="as-path-ignore", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Ignore AS_PATH length for best route selection'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
self.__confederation = YANGDynClass(base=confederation.confederation, is_container='container', presence=False, yang_name="confederation", rest_name="confederation", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure AS confederation parameters', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
self.__maxas_limit = YANGDynClass(base=maxas_limit.maxas_limit, is_container='container', presence=False, yang_name="maxas-limit", rest_name="maxas-limit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Impose limit on number of ASes in AS-PATH attribute', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
load = kwargs.pop("load", None)
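    # Optionally copy-construct from another object exposing the same
    # pyangbind elements; only elements flagged as changed are copied over.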
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'rbridge-id', u'router', u'router-bgp', u'router-bgp-attributes']
def _rest_path(self):
if hasattr(self, "_parent"):
if self._rest_name:
return self._parent._rest_path()+[self._rest_name]
else:
return self._parent._rest_path()
else:
return [u'rbridge-id', u'router', u'bgp']
def _get_local_as(self):
"""
Getter method for local_as, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/local_as (local-as-type)
"""
return self.__local_as
def _set_local_as(self, v, load=False):
"""
Setter method for local_as, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/local_as (local-as-type)
If this variable is read-only (config: false) in the
source YANG file, then _set_local_as is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_local_as() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'((([0-9][0-9]{0,3})|([1-5][0-9]{4})|(6[0-4][0-9]{3})|(65[0-4][0-9]{2})|(655[0-2][0-9])|(6553[0-5]))\\.(([0-9][0-9]{0,3})|([1-5][0-9]{4})|(6[0-4][0-9]{3})|(65[0-4][0-9]{2})|(655[0-2][0-9])|(6553[0-5])))|([1-9][0-9]{0,8})|([1-3][0-9]{9})|(4[0-1][0-9]{8})|(42[0-8][0-9]{7})|(429[0-3][0-9]{6})|(4294[0-8][0-9]{5})|(42949[0-5][0-9]{4})|(429496[0-6][0-9]{3})|(4294967[0-1][0-9]{2})|(42949672[0-8][0-9])|(429496729[0-5])'}), is_leaf=True, yang_name="local-as", rest_name="local-as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure local AS number'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='local-as-type', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """local_as must be of a type compatible with local-as-type""",
'defined-type': "brocade-bgp:local-as-type",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'((([0-9][0-9]{0,3})|([1-5][0-9]{4})|(6[0-4][0-9]{3})|(65[0-4][0-9]{2})|(655[0-2][0-9])|(6553[0-5]))\\.(([0-9][0-9]{0,3})|([1-5][0-9]{4})|(6[0-4][0-9]{3})|(65[0-4][0-9]{2})|(655[0-2][0-9])|(6553[0-5])))|([1-9][0-9]{0,8})|([1-3][0-9]{9})|(4[0-1][0-9]{8})|(42[0-8][0-9]{7})|(429[0-3][0-9]{6})|(4294[0-8][0-9]{5})|(42949[0-5][0-9]{4})|(429496[0-6][0-9]{3})|(4294967[0-1][0-9]{2})|(42949672[0-8][0-9])|(429496729[0-5])'}), is_leaf=True, yang_name="local-as", rest_name="local-as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure local AS number'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='local-as-type', is_config=True)""",
})
self.__local_as = t
if hasattr(self, '_set'):
self._set()
def _unset_local_as(self):
self.__local_as = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'((([0-9][0-9]{0,3})|([1-5][0-9]{4})|(6[0-4][0-9]{3})|(65[0-4][0-9]{2})|(655[0-2][0-9])|(6553[0-5]))\\.(([0-9][0-9]{0,3})|([1-5][0-9]{4})|(6[0-4][0-9]{3})|(65[0-4][0-9]{2})|(655[0-2][0-9])|(6553[0-5])))|([1-9][0-9]{0,8})|([1-3][0-9]{9})|(4[0-1][0-9]{8})|(42[0-8][0-9]{7})|(429[0-3][0-9]{6})|(4294[0-8][0-9]{5})|(42949[0-5][0-9]{4})|(429496[0-6][0-9]{3})|(4294967[0-1][0-9]{2})|(42949672[0-8][0-9])|(429496729[0-5])'}), is_leaf=True, yang_name="local-as", rest_name="local-as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure local AS number'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='local-as-type', is_config=True)
def _get_always_compare_med(self):
"""
Getter method for always_compare_med, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/always_compare_med (empty)
"""
return self.__always_compare_med
def _set_always_compare_med(self, v, load=False):
"""
Setter method for always_compare_med, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/always_compare_med (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_always_compare_med is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_always_compare_med() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="always-compare-med", rest_name="always-compare-med", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Allow comparing MED from different neighbors'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """always_compare_med must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="always-compare-med", rest_name="always-compare-med", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Allow comparing MED from different neighbors'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)""",
})
self.__always_compare_med = t
if hasattr(self, '_set'):
self._set()
def _unset_always_compare_med(self):
self.__always_compare_med = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="always-compare-med", rest_name="always-compare-med", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Allow comparing MED from different neighbors'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
def _get_compare_med_empty_aspath(self):
"""
Getter method for compare_med_empty_aspath, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/compare_med_empty_aspath (empty)
"""
return self.__compare_med_empty_aspath
def _set_compare_med_empty_aspath(self, v, load=False):
"""
Setter method for compare_med_empty_aspath, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/compare_med_empty_aspath (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_compare_med_empty_aspath is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_compare_med_empty_aspath() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="compare-med-empty-aspath", rest_name="compare-med-empty-aspath", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Allow comparing MED from different neighbors even with empty as-path\nattribute'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """compare_med_empty_aspath must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="compare-med-empty-aspath", rest_name="compare-med-empty-aspath", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Allow comparing MED from different neighbors even with empty as-path\nattribute'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)""",
})
self.__compare_med_empty_aspath = t
if hasattr(self, '_set'):
self._set()
def _unset_compare_med_empty_aspath(self):
self.__compare_med_empty_aspath = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="compare-med-empty-aspath", rest_name="compare-med-empty-aspath", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Allow comparing MED from different neighbors even with empty as-path\nattribute'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
def _get_med_missing_as_worst(self):
"""
Getter method for med_missing_as_worst, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/med_missing_as_worst (empty)
"""
return self.__med_missing_as_worst
def _set_med_missing_as_worst(self, v, load=False):
"""
Setter method for med_missing_as_worst, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/med_missing_as_worst (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_med_missing_as_worst is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_med_missing_as_worst() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="med-missing-as-worst", rest_name="med-missing-as-worst", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Consider routes missing MED attribute as least desirable'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """med_missing_as_worst must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="med-missing-as-worst", rest_name="med-missing-as-worst", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Consider routes missing MED attribute as least desirable'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)""",
})
self.__med_missing_as_worst = t
if hasattr(self, '_set'):
self._set()
def _unset_med_missing_as_worst(self):
self.__med_missing_as_worst = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="med-missing-as-worst", rest_name="med-missing-as-worst", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Consider routes missing MED attribute as least desirable'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
def _get_as_path_ignore(self):
"""
Getter method for as_path_ignore, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/as_path_ignore (empty)
"""
return self.__as_path_ignore
def _set_as_path_ignore(self, v, load=False):
"""
Setter method for as_path_ignore, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/as_path_ignore (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_as_path_ignore is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_as_path_ignore() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="as-path-ignore", rest_name="as-path-ignore", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Ignore AS_PATH length for best route selection'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """as_path_ignore must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="as-path-ignore", rest_name="as-path-ignore", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Ignore AS_PATH length for best route selection'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)""",
})
self.__as_path_ignore = t
if hasattr(self, '_set'):
self._set()
def _unset_as_path_ignore(self):
self.__as_path_ignore = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="as-path-ignore", rest_name="as-path-ignore", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Ignore AS_PATH length for best route selection'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
def _get_compare_routerid(self):
"""
Getter method for compare_routerid, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/compare_routerid (empty)
"""
return self.__compare_routerid
def _set_compare_routerid(self, v, load=False):
"""
Setter method for compare_routerid, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/compare_routerid (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_compare_routerid is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_compare_routerid() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="compare-routerid", rest_name="compare-routerid", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Compare router-id for identical BGP paths'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """compare_routerid must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="compare-routerid", rest_name="compare-routerid", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Compare router-id for identical BGP paths'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)""",
})
self.__compare_routerid = t
if hasattr(self, '_set'):
self._set()
def _unset_compare_routerid(self):
self.__compare_routerid = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="compare-routerid", rest_name="compare-routerid", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Compare router-id for identical BGP paths'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
def _get_install_igp_cost(self):
"""
Getter method for install_igp_cost, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/install_igp_cost (empty)
"""
return self.__install_igp_cost
def _set_install_igp_cost(self, v, load=False):
"""
Setter method for install_igp_cost, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/install_igp_cost (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_install_igp_cost is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_install_igp_cost() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="install-igp-cost", rest_name="install-igp-cost", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Install igp cost to nexthop instead of MED value as BGP route cost'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """install_igp_cost must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="install-igp-cost", rest_name="install-igp-cost", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Install igp cost to nexthop instead of MED value as BGP route cost'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)""",
})
self.__install_igp_cost = t
if hasattr(self, '_set'):
self._set()
def _unset_install_igp_cost(self):
self.__install_igp_cost = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="install-igp-cost", rest_name="install-igp-cost", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Install igp cost to nexthop instead of MED value as BGP route cost'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
def _get_cluster_id(self):
"""
Getter method for cluster_id, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/cluster_id (container)
"""
return self.__cluster_id
def _set_cluster_id(self, v, load=False):
"""
Setter method for cluster_id, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/cluster_id (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_cluster_id is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_cluster_id() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=cluster_id.cluster_id, is_container='container', presence=False, yang_name="cluster-id", rest_name="cluster-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Route-Reflector Cluster-ID'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """cluster_id must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=cluster_id.cluster_id, is_container='container', presence=False, yang_name="cluster-id", rest_name="cluster-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Route-Reflector Cluster-ID'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""",
})
self.__cluster_id = t
if hasattr(self, '_set'):
self._set()
def _unset_cluster_id(self):
self.__cluster_id = YANGDynClass(base=cluster_id.cluster_id, is_container='container', presence=False, yang_name="cluster-id", rest_name="cluster-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Route-Reflector Cluster-ID'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
def _get_default_local_preference(self):
"""
Getter method for default_local_preference, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/default_local_preference (local-preference-number)
"""
return self.__default_local_preference
def _set_default_local_preference(self, v, load=False):
"""
Setter method for default_local_preference, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/default_local_preference (local-preference-number)
If this variable is read-only (config: false) in the
source YANG file, then _set_default_local_preference is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_default_local_preference() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..65535']}), is_leaf=True, yang_name="default-local-preference", rest_name="default-local-preference", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure default local preference value'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='local-preference-number', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """default_local_preference must be of a type compatible with local-preference-number""",
'defined-type': "brocade-bgp:local-preference-number",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..65535']}), is_leaf=True, yang_name="default-local-preference", rest_name="default-local-preference", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure default local preference value'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='local-preference-number', is_config=True)""",
})
self.__default_local_preference = t
if hasattr(self, '_set'):
self._set()
def _unset_default_local_preference(self):
self.__default_local_preference = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..65535']}), is_leaf=True, yang_name="default-local-preference", rest_name="default-local-preference", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure default local preference value'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='local-preference-number', is_config=True)
def _get_distance(self):
"""
Getter method for distance, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/distance (container)
"""
return self.__distance
def _set_distance(self, v, load=False):
"""
Setter method for distance, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/distance (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_distance is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_distance() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=distance.distance, is_container='container', presence=False, yang_name="distance", rest_name="distance", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-compact-syntax': None, u'info': u'Define an administrative distance', u'cli-sequence-commands': None, u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """distance must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=distance.distance, is_container='container', presence=False, yang_name="distance", rest_name="distance", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-compact-syntax': None, u'info': u'Define an administrative distance', u'cli-sequence-commands': None, u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""",
})
self.__distance = t
if hasattr(self, '_set'):
self._set()
def _unset_distance(self):
self.__distance = YANGDynClass(base=distance.distance, is_container='container', presence=False, yang_name="distance", rest_name="distance", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-compact-syntax': None, u'info': u'Define an administrative distance', u'cli-sequence-commands': None, u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
def _get_capability(self):
"""
Getter method for capability, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/capability (container)
"""
return self.__capability
def _set_capability(self, v, load=False):
"""
Setter method for capability, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/capability (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_capability is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_capability() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=capability.capability, is_container='container', presence=False, yang_name="capability", rest_name="capability", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set capability', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """capability must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=capability.capability, is_container='container', presence=False, yang_name="capability", rest_name="capability", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set capability', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""",
})
self.__capability = t
if hasattr(self, '_set'):
self._set()
def _unset_capability(self):
self.__capability = YANGDynClass(base=capability.capability, is_container='container', presence=False, yang_name="capability", rest_name="capability", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set capability', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
def _get_maxas_limit(self):
"""
Getter method for maxas_limit, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/maxas_limit (container)
"""
return self.__maxas_limit
def _set_maxas_limit(self, v, load=False):
"""
Setter method for maxas_limit, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/maxas_limit (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_maxas_limit is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_maxas_limit() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=maxas_limit.maxas_limit, is_container='container', presence=False, yang_name="maxas-limit", rest_name="maxas-limit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Impose limit on number of ASes in AS-PATH attribute', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """maxas_limit must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=maxas_limit.maxas_limit, is_container='container', presence=False, yang_name="maxas-limit", rest_name="maxas-limit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Impose limit on number of ASes in AS-PATH attribute', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""",
})
self.__maxas_limit = t
if hasattr(self, '_set'):
self._set()
def _unset_maxas_limit(self):
self.__maxas_limit = YANGDynClass(base=maxas_limit.maxas_limit, is_container='container', presence=False, yang_name="maxas-limit", rest_name="maxas-limit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Impose limit on number of ASes in AS-PATH attribute', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
def _get_enforce_first_as(self):
"""
Getter method for enforce_first_as, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/enforce_first_as (empty)
"""
return self.__enforce_first_as
def _set_enforce_first_as(self, v, load=False):
"""
Setter method for enforce_first_as, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/enforce_first_as (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_enforce_first_as is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_enforce_first_as() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="enforce-first-as", rest_name="enforce-first-as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Enforce the first AS for EBGP routes'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """enforce_first_as must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="enforce-first-as", rest_name="enforce-first-as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Enforce the first AS for EBGP routes'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)""",
})
self.__enforce_first_as = t
if hasattr(self, '_set'):
self._set()
def _unset_enforce_first_as(self):
self.__enforce_first_as = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="enforce-first-as", rest_name="enforce-first-as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Enforce the first AS for EBGP routes'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
def _get_fast_external_fallover(self):
"""
Getter method for fast_external_fallover, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/fast_external_fallover (empty)
"""
return self.__fast_external_fallover
def _set_fast_external_fallover(self, v, load=False):
"""
Setter method for fast_external_fallover, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/fast_external_fallover (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_fast_external_fallover is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_fast_external_fallover() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="fast-external-fallover", rest_name="fast-external-fallover", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Reset session if link to EBGP peer goes down'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """fast_external_fallover must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="fast-external-fallover", rest_name="fast-external-fallover", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Reset session if link to EBGP peer goes down'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)""",
})
self.__fast_external_fallover = t
if hasattr(self, '_set'):
self._set()
def _unset_fast_external_fallover(self):
self.__fast_external_fallover = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="fast-external-fallover", rest_name="fast-external-fallover", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Reset session if link to EBGP peer goes down'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
def _get_timers(self):
"""
Getter method for timers, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/timers (container)
"""
return self.__timers
def _set_timers(self, v, load=False):
"""
Setter method for timers, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/timers (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_timers is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_timers() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=timers.timers, is_container='container', presence=False, yang_name="timers", rest_name="timers", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Adjust routing timers', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """timers must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=timers.timers, is_container='container', presence=False, yang_name="timers", rest_name="timers", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Adjust routing timers', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""",
})
self.__timers = t
if hasattr(self, '_set'):
self._set()
def _unset_timers(self):
self.__timers = YANGDynClass(base=timers.timers, is_container='container', presence=False, yang_name="timers", rest_name="timers", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Adjust routing timers', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
def _get_log_dampening_debug(self):
"""
Getter method for log_dampening_debug, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/log_dampening_debug (empty)
"""
return self.__log_dampening_debug
def _set_log_dampening_debug(self, v, load=False):
"""
Setter method for log_dampening_debug, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/log_dampening_debug (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_log_dampening_debug is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_log_dampening_debug() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="log-dampening-debug", rest_name="log-dampening-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Log dampening debug messages'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """log_dampening_debug must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="log-dampening-debug", rest_name="log-dampening-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Log dampening debug messages'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)""",
})
self.__log_dampening_debug = t
if hasattr(self, '_set'):
self._set()
def _unset_log_dampening_debug(self):
self.__log_dampening_debug = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="log-dampening-debug", rest_name="log-dampening-debug", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Log dampening debug messages'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
def _get_confederation(self):
"""
Getter method for confederation, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/confederation (container)
"""
return self.__confederation
def _set_confederation(self, v, load=False):
"""
Setter method for confederation, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/confederation (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_confederation is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_confederation() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=confederation.confederation, is_container='container', presence=False, yang_name="confederation", rest_name="confederation", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure AS confederation parameters', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """confederation must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=confederation.confederation, is_container='container', presence=False, yang_name="confederation", rest_name="confederation", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure AS confederation parameters', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""",
})
self.__confederation = t
if hasattr(self, '_set'):
self._set()
def _unset_confederation(self):
self.__confederation = YANGDynClass(base=confederation.confederation, is_container='container', presence=False, yang_name="confederation", rest_name="confederation", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure AS confederation parameters', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
def _get_bfd(self):
"""
Getter method for bfd, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/bfd (container)
"""
return self.__bfd
def _set_bfd(self, v, load=False):
"""
Setter method for bfd, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/bfd (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_bfd is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_bfd() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=bfd.bfd, is_container='container', presence=False, yang_name="bfd", rest_name="bfd", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set BFD global parameters for BGP', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """bfd must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=bfd.bfd, is_container='container', presence=False, yang_name="bfd", rest_name="bfd", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set BFD global parameters for BGP', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""",
})
self.__bfd = t
if hasattr(self, '_set'):
self._set()
def _unset_bfd(self):
self.__bfd = YANGDynClass(base=bfd.bfd, is_container='container', presence=False, yang_name="bfd", rest_name="bfd", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set BFD global parameters for BGP', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
def _get_neighbor(self):
"""
Getter method for neighbor, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/neighbor (container)
"""
return self.__neighbor
def _set_neighbor(self, v, load=False):
"""
Setter method for neighbor, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/neighbor (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_neighbor is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_neighbor() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=neighbor.neighbor, is_container='container', presence=False, yang_name="neighbor", rest_name="neighbor", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Specify a neighbor router', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """neighbor must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=neighbor.neighbor, is_container='container', presence=False, yang_name="neighbor", rest_name="neighbor", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Specify a neighbor router', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""",
})
self.__neighbor = t
if hasattr(self, '_set'):
self._set()
def _unset_neighbor(self):
self.__neighbor = YANGDynClass(base=neighbor.neighbor, is_container='container', presence=False, yang_name="neighbor", rest_name="neighbor", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Specify a neighbor router', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
local_as = __builtin__.property(_get_local_as, _set_local_as)
always_compare_med = __builtin__.property(_get_always_compare_med, _set_always_compare_med)
compare_med_empty_aspath = __builtin__.property(_get_compare_med_empty_aspath, _set_compare_med_empty_aspath)
med_missing_as_worst = __builtin__.property(_get_med_missing_as_worst, _set_med_missing_as_worst)
as_path_ignore = __builtin__.property(_get_as_path_ignore, _set_as_path_ignore)
compare_routerid = __builtin__.property(_get_compare_routerid, _set_compare_routerid)
install_igp_cost = __builtin__.property(_get_install_igp_cost, _set_install_igp_cost)
cluster_id = __builtin__.property(_get_cluster_id, _set_cluster_id)
default_local_preference = __builtin__.property(_get_default_local_preference, _set_default_local_preference)
distance = __builtin__.property(_get_distance, _set_distance)
capability = __builtin__.property(_get_capability, _set_capability)
maxas_limit = __builtin__.property(_get_maxas_limit, _set_maxas_limit)
enforce_first_as = __builtin__.property(_get_enforce_first_as, _set_enforce_first_as)
fast_external_fallover = __builtin__.property(_get_fast_external_fallover, _set_fast_external_fallover)
timers = __builtin__.property(_get_timers, _set_timers)
log_dampening_debug = __builtin__.property(_get_log_dampening_debug, _set_log_dampening_debug)
confederation = __builtin__.property(_get_confederation, _set_confederation)
bfd = __builtin__.property(_get_bfd, _set_bfd)
neighbor = __builtin__.property(_get_neighbor, _set_neighbor)
_pyangbind_elements = {'local_as': local_as, 'always_compare_med': always_compare_med, 'compare_med_empty_aspath': compare_med_empty_aspath, 'med_missing_as_worst': med_missing_as_worst, 'as_path_ignore': as_path_ignore, 'compare_routerid': compare_routerid, 'install_igp_cost': install_igp_cost, 'cluster_id': cluster_id, 'default_local_preference': default_local_preference, 'distance': distance, 'capability': capability, 'maxas_limit': maxas_limit, 'enforce_first_as': enforce_first_as, 'fast_external_fallover': fast_external_fallover, 'timers': timers, 'log_dampening_debug': log_dampening_debug, 'confederation': confederation, 'bfd': bfd, 'neighbor': neighbor, }
| 85.226018
| 910
| 0.740136
|
21721d6d9f54fe41aac45f4d7b2969f6c75fd76e
| 200
|
py
|
Python
|
models/account_validation.py
|
allengblack/pay-with-capture
|
6ce0036ec77b92d3c5bd201e3bdb0b3e07b53bb3
|
[
"MIT"
] | 1
|
2017-03-23T20:25:57.000Z
|
2017-03-23T20:25:57.000Z
|
models/account_validation.py
|
allengblack/pay-with-capture
|
6ce0036ec77b92d3c5bd201e3bdb0b3e07b53bb3
|
[
"MIT"
] | null | null | null |
models/account_validation.py
|
allengblack/pay-with-capture
|
6ce0036ec77b92d3c5bd201e3bdb0b3e07b53bb3
|
[
"MIT"
] | null | null | null |
"""pay_with_capture.models"""
import json
class AccountValidation():
"""default response model"""
_type = ""
data = None
def getJson(self):
return json.dumps(self.__dict__)
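# Hedged usage sketch (added for illustration; the values below are hypothetical
# and not part of the original project):
if __name__ == "__main__":
    result = AccountValidation()
    result._type = "resolve-account"                    # example label
    result.data = {"account_number": "0690000031"}      # example payload
    print(result.getJson())  # -> {"_type": "resolve-account", "data": {...}}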
| 16.666667
| 40
| 0.64
|
3aecce8ec89d855deb53e57f3bbb78a3391186fc
| 3,501
|
py
|
Python
|
nltk_trainer/scripts/analyze_tagged_corpus.py
|
escherba/nltk-trainer
|
2c2964ea8948550f30e8a58e8e6d267833bcfa82
|
[
"Apache-2.0"
] | null | null | null |
nltk_trainer/scripts/analyze_tagged_corpus.py
|
escherba/nltk-trainer
|
2c2964ea8948550f30e8a58e8e6d267833bcfa82
|
[
"Apache-2.0"
] | null | null | null |
nltk_trainer/scripts/analyze_tagged_corpus.py
|
escherba/nltk-trainer
|
2c2964ea8948550f30e8a58e8e6d267833bcfa82
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
import argparse
import collections
from nltk_trainer import basestring, load_corpus_reader, simplify_wsj_tag
########################################
## command options & argument parsing ##
########################################
parser = argparse.ArgumentParser(description='Analyze a part-of-speech tagged corpus',
formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument('corpus',
help='''The name of a tagged corpus included with NLTK, such as treebank,
brown, cess_esp, floresta, or the root path to a corpus directory,
which can be either an absolute path or relative to a nltk_data directory.''')
parser.add_argument('--trace', default=1, type=int,
help='How much trace output you want, defaults to %(default)d. 0 is no trace output.')
corpus_group = parser.add_argument_group('Corpus Reader Options')
corpus_group.add_argument('--reader', default=None,
help='''Full module path to a corpus reader class, such as
nltk.corpus.reader.tagged.TaggedCorpusReader''')
corpus_group.add_argument('--fileids', default=None,
help='Specify fileids to load from corpus')
if simplify_wsj_tag:
corpus_group.add_argument('--simplify_tags', action='store_true', default=False,
help='Use simplified tags')
else:
corpus_group.add_argument('--tagset', default=None,
help='Map tags to a given tagset, such as "universal"')
sort_group = parser.add_argument_group('Tag Count Sorting Options')
sort_group.add_argument('--sort', default='tag', choices=['tag', 'count'],
help='Sort key, defaults to %(default)s')
sort_group.add_argument('--reverse', action='store_true', default=False,
	help='Sort in reverse order')
args = parser.parse_args()
###################
## corpus reader ##
###################
tagged_corpus = load_corpus_reader(args.corpus, reader=args.reader, fileids=args.fileids)
if not tagged_corpus:
	raise ValueError('%s is an unknown corpus' % args.corpus)
if args.trace:
print('loading %s' % args.corpus)
##############
## counting ##
##############
wc = 0
tag_counts = collections.defaultdict(int)
taglen = 7
word_set = set()
if simplify_wsj_tag and args.simplify_tags and args.corpus not in ['conll2000', 'switchboard']:
kwargs = {'simplify_tags': True}
elif not simplify_wsj_tag and args.tagset:
kwargs = {'tagset': args.tagset}
else:
kwargs = {}
for word, tag in tagged_corpus.tagged_words(fileids=args.fileids, **kwargs):
if not tag:
continue
if len(tag) > taglen:
taglen = len(tag)
if args.corpus in ['conll2000', 'switchboard'] and simplify_wsj_tag and args.simplify_tags:
tag = simplify_wsj_tag(tag)
wc += 1
# loading corpora/treebank/tagged with ChunkedCorpusReader produces None tags
if not isinstance(tag, basestring): tag = str(tag)
tag_counts[tag] += 1
word_set.add(word)
############
## output ##
############
print('%d total words\n%d unique words\n%d tags\n' % (wc, len(word_set), len(tag_counts)))
if args.sort == 'tag':
sort_key = lambda tc: tc[0]
elif args.sort == 'count':
sort_key = lambda tc: tc[1]
else:
raise ValueError('%s is not a valid sort option' % args.sort)
sorted_tag_counts = sorted(tag_counts.items(), key=sort_key, reverse=args.reverse)
countlen = max(len(str(sorted_tag_counts[0][1])) + 2, 9)
# simple reSt table format
print(' '.join(['Tag'.center(taglen), 'Count'.center(countlen)]))
print(' '.join(['='*taglen, '='*(countlen)]))
for tag, count in sorted_tag_counts:
print(' '.join([tag.ljust(taglen), str(count).rjust(countlen)]))
print(' '.join(['='*taglen, '='*(countlen)]))
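# Example invocations (illustrative only; corpus availability depends on the
# local nltk_data installation):
#   python analyze_tagged_corpus.py treebank
#   python analyze_tagged_corpus.py brown --sort count --reverse
#   python analyze_tagged_corpus.py /path/to/corpus --reader nltk.corpus.reader.tagged.TaggedCorpusReader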
| 32.119266
| 95
| 0.696658
|
979450b99196429e21e6d024523452793f61de4e
| 28,493
|
py
|
Python
|
clients/binance-client/binance_client/client.py
|
DanielLavinV/criptovidente
|
202140a03f63404fad7107edc3f0127323b82966
|
[
"Apache-2.0"
] | null | null | null |
clients/binance-client/binance_client/client.py
|
DanielLavinV/criptovidente
|
202140a03f63404fad7107edc3f0127323b82966
|
[
"Apache-2.0"
] | null | null | null |
clients/binance-client/binance_client/client.py
|
DanielLavinV/criptovidente
|
202140a03f63404fad7107edc3f0127323b82966
|
[
"Apache-2.0"
] | null | null | null |
from requests import Request, Session, Response # noqa: F401
import json
from . import constants
from datetime import datetime as dtt
import math
from .signatures import sign
from .endpoints import endpoints_config
import logging
from typing import Optional, List
import time
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
class BaseClient:
def __init__(
self, keys_file: str, client_name: str, weight_manager, test_net: bool = False
):
self.endpoints_config = endpoints_config[client_name]
with open(keys_file) as f:
keys = json.load(f)
accessor = "REAL" if not test_net else "TEST"
self._api_key = keys[accessor]["API_KEY"]
self._secret_key = keys[accessor]["SECRET_KEY"]
self._session = Session()
self._test_net = test_net
self._base_url = (
constants.BASE_ENDPOINT if not test_net else constants.BASE_TEST_ENDPOINT
)
self._weight_manager = weight_manager
def _forge_request_and_send(self, endpoint: str, params: dict) -> Request:
cfg = self.endpoints_config[endpoint]
url = self._forge_url(cfg)
if url is None: # nonexistant endpoint in the test api
return self._request_result(69, {}) # dummy http code
method = cfg["method"]
security_headers, params = self._check_security(cfg, params)
r = Request(method=method, url=url, params=params, headers=security_headers)
return self._send(r)
def _check_security(self, endpoint_config: dict, params: dict) -> dict:
security_headers = {}
security = endpoint_config["security"]
if security["requires_api_key"]:
security_headers["X-MBX-APIKEY"] = self._api_key
if security["requires_signature"]:
params = self._add_signature(params)
return security_headers, params
def _add_signature(self, total_params: dict) -> dict:
r = Request("", "http://ayy.lmao.com", data=total_params)
prep = r.prepare()
signature = sign(self._secret_key, prep.body)
total_params["signature"] = signature
return total_params
def _timestamp(self) -> int:
return int(math.floor(dtt.now().timestamp() * 1000))
def _forge_url(self, endpoint_config: dict) -> str:
path = endpoint_config["path"]
if self._test_net:
path = path.replace("v1", "v3")
if "wapi" in path or "sapi" in path:
return None # endpoints with wapi and sapi do not exist in the test api
return self._base_url + path
def _resolve_optional_arguments(self, params: dict, **kwargs) -> dict:
for arg, val in kwargs.items():
if val:
if "qty" in arg.lower() or "quantity" in arg.lower():
val = "{:.8f}".format(val)
params[arg] = val
return params
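    # Illustrative behaviour of _resolve_optional_arguments (hypothetical values,
    # not part of the original file): None arguments are dropped and quantity-like
    # fields are formatted to 8 decimal places, e.g.
    #   self._resolve_optional_arguments({}, limit=None, quantity=0.5)
    #   -> {'quantity': '0.50000000'}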
def _send(self, req: Request) -> dict:
logger.info(f"Reaching {req.url}")
try:
response = self._session.send(req.prepare())
except Exception as e:
time.sleep(2)
return self._send(req)
if self._parse_weight_response(response):
return self._send(req)
try:
result = self._request_result(response.status_code, response.json())
except ValueError as e:
logger.error(f"Error when decoding json - {e}")
result = self._request_result(response.status_code, {})
return result
def _request_result(self, http_code, content):
return {"http_code": http_code, "content": content}
def _parse_weight_response(self, response):
code = response.status_code
if code == 429 or code == 418:
logger.info(f"HTTP {code} received: {constants.HTTP_RESPONSE_CODES[code]}")
sleep_time = int(response.headers["Retry-After"])
time.sleep(sleep_time)
return True
# TODO: implement weight management
else:
weight = 0
for h in response.headers:
if "used-weight-" in h:
weight = int(response.headers[h])
break
else:
return
self._weight_manager(method="update", weight=weight)
class WalletClient(BaseClient):
def __init__(self, keys_file: str, weight_manager, test_net: bool = False):
super().__init__(
keys_file=keys_file,
client_name="wallet",
weight_manager=weight_manager,
test_net=test_net,
)
def system_status(self) -> dict:
return self._forge_request_and_send("system_status", {})
def all_coins_information(self, recv_window: int = 5000) -> dict:
return self._forge_request_and_send(
"all_coins_information",
{"recvWindow": recv_window, "timestamp": self._timestamp()},
)
def daily_account_snapshot(
self,
snapshot_type: str,
start_time: Optional[int] = None,
end_time: Optional[int] = None,
limit: Optional[int] = None,
recv_window: int = 5000,
) -> dict:
        # clamp limit into the accepted [5, 30] range, leaving None untouched
        limit = 5 if limit and limit < 5 else limit
        limit = 30 if limit and limit > 30 else limit
params = {
"type": snapshot_type,
"recvWindow": recv_window,
"timestamp": self._timestamp(),
}
params = self._resolve_optional_arguments(
params, startTime=start_time, endTime=end_time, limit=limit
)
return self._forge_request_and_send("daily_account_snapshot", params=params)
def disable_fast_withdraw_switch(self, recv_window: int = 5000):
logger.info("Endpoint is not implemented")
def enable_fast_withdraw_switch(self, recv_window: int = 5000):
logger.info("Endpoint is not implemented")
def withdraw_sapi(
self,
coin: str,
address: str,
amount: float,
address_tag: Optional[str] = None,
transaction_fee_flag: Optional[bool] = None,
name: Optional[str] = None,
withdraw_order_id: Optional[str] = None,
network: Optional[str] = None,
recv_window: int = 5000,
) -> dict:
params = {
"coin": coin,
"address": address,
"amount": amount,
"recvWindow": recv_window,
"timestamp": self._timestamp(),
}
params = self._resolve_optional_arguments(
params,
addressTag=address_tag,
transactionFeeFlag=transaction_fee_flag,
name=name,
withdrawOrderId=withdraw_order_id,
network=network,
)
return self._forge_request_and_send("withdraw_sapi", params=params)
def withdraw_wapi(
self,
coin: str,
address: str,
amount: float,
address_tag: Optional[str] = None,
transaction_fee_flag: Optional[bool] = None,
name: Optional[str] = None,
withdraw_order_id: Optional[str] = None,
network: Optional[str] = None,
recv_window: int = 5000,
) -> dict:
params = {
"coin": coin,
"address": address,
"amount": amount,
"recvWindow": recv_window,
"timestamp": self._timestamp(),
}
params = self._resolve_optional_arguments(
params,
addressTag=address_tag,
transactionFeeFlag=transaction_fee_flag,
name=name,
            withdrawOrderId=withdraw_order_id,
network=network,
)
return self._forge_request_and_send("withdraw_wapi", params=params)
def deposit_history_sapi(
self,
coin: Optional[str] = None,
status: Optional[int] = None,
start_time: Optional[int] = None,
end_time: Optional[int] = None,
offset: Optional[int] = None,
limit: Optional[int] = None,
recv_window: int = 5000,
) -> dict:
params = {"timestamp": self._timestamp(), "recvWindow": recv_window}
params = self._resolve_optional_arguments(
params,
coin=coin,
status=status,
startTime=start_time,
endTime=end_time,
offset=offset,
limit=limit,
)
return self._forge_request_and_send("deposit_history_sapi", params=params)
def deposit_history_wapi(
self,
asset: Optional[str] = None,
status: Optional[int] = None,
start_time: Optional[int] = None,
end_time: Optional[int] = None,
recv_window: int = 5000,
) -> dict:
params = {"timestamp": self._timestamp(), "recvWindow": recv_window}
params = self._resolve_optional_arguments(
params, asset=asset, status=status, startTime=start_time, endTime=end_time
)
return self._forge_request_and_send(
endpoint="deposit_history_wapi", params=params
)
def withdraw_history_sapi(
self,
coin: Optional[str] = None,
status: Optional[int] = None,
start_time: Optional[int] = None,
end_time: Optional[int] = None,
offset: Optional[int] = None,
limit: Optional[int] = None,
recv_window: int = 5000,
) -> dict:
params = {"timestamp": self._timestamp(), "recvWindow": recv_window}
params = self._resolve_optional_arguments(
params,
coin=coin,
status=status,
startTime=start_time,
endTime=end_time,
offset=offset,
limit=limit,
)
return self._forge_request_and_send("withdraw_history_sapi", params=params)
def withdraw_history_wapi(
self,
asset: Optional[str] = None,
status: Optional[int] = None,
start_time: Optional[int] = None,
end_time: Optional[int] = None,
recv_window: int = 5000,
) -> dict:
params = {"timestamp": self._timestamp(), "recvWindow": recv_window}
params = self._resolve_optional_arguments(
params, asset=asset, status=status, startTime=start_time, endTime=end_time
)
return self._forge_request_and_send(
endpoint="withdraw_history_wapi", params=params
)
def deposit_address_sapi(
self, coin: str, network: Optional[str] = None, recv_window: int = 5000
) -> dict:
params = {
"coin": coin,
"timestamp": self._timestamp(),
"recvWindow": recv_window,
}
params = self._resolve_optional_arguments(params, network=network)
return self._forge_request_and_send(
endpoint="deposit_address_sapi", params=params
)
def deposit_address_wapi(
self, asset: str, status: Optional[bool] = None, recv_window: int = 5000
) -> dict:
params = {
"asset": asset,
"timestamp": self._timestamp(),
"recvWindow": recv_window,
}
params = self._resolve_optional_arguments(params, status=status)
return self._forge_request_and_send(
endpoint="deposit_address_wapi", params=params
)
def account_status(self, recv_window: int = 5000) -> dict:
params = {"timestamp": self._timestamp(), "recvWindow": recv_window}
return self._forge_request_and_send(endpoint="account_status", params=params)
def account_api_trading_status(self, recv_window: int = 5000) -> dict:
params = {"timestamp": self._timestamp(), "recvWindow": recv_window}
return self._forge_request_and_send(
endpoint="account_api_trading_status", params=params
)
def dustlog(self, recv_window: int = 5000):
logger.info("Endpoint is not implemented.")
def dust_transfer(self, asset: List[str], recv_window: int = 5000):
logger.info("Endpoint is not implemented.")
def asset_dividend_record(
self,
asset: Optional[str] = None,
start_time: Optional[int] = None,
end_time: Optional[int] = None,
limit: Optional[int] = None,
recv_window: int = 5000,
) -> dict:
params = {"timestamp": self._timestamp(), "recvWindow": recv_window}
params = self._resolve_optional_arguments(
params, asset=asset, startTime=start_time, endTime=end_time, limit=limit
)
return self._forge_request_and_send(
endpoint="asset_dividend_record", params=params
)
def asset_detail(self, recv_window: int = 5000) -> dict:
params = {"timestamp": self._timestamp(), "recvWindow": recv_window}
return self._forge_request_and_send(endpoint="asset_detail", params=params)
def trade_fee(self, symbol: Optional[str] = None, recv_window: int = 5000) -> dict:
params = {"timestamp": self._timestamp(), "recvWindow": recv_window}
params = self._resolve_optional_arguments(params, symbol=symbol)
return self._forge_request_and_send(endpoint="trade_fee", params=params)
def user_universal_transfer(self):
logger.info("Endpoint is not implemented.")
def query_user_universal_transfer_history(self):
logger.info("Endpoint is not implemented.")
class MarketDataClient(BaseClient):
def __init__(self, keys_file: str, weight_manager, test_net: bool = False):
super().__init__(
keys_file=keys_file,
client_name="market_data",
weight_manager=weight_manager,
test_net=test_net,
)
def test_connectivity(self) -> dict:
return self._forge_request_and_send("test_connectivity", params={})
def check_server_time(self) -> dict:
return self._forge_request_and_send("check_server_time", params={})
def exchange_information(self) -> dict:
return self._forge_request_and_send("exchange_information", params={})
def order_book(self, symbol: str, limit: Optional[int] = None) -> dict:
params = {"symbol": symbol}
params = self._resolve_optional_arguments(params, limit=limit)
return self._forge_request_and_send("order_book", params)
def recent_trades_list(self, symbol: str, limit: Optional[int] = None) -> dict:
params = {"symbol": symbol}
params = self._resolve_optional_arguments(params, limit=limit)
return self._forge_request_and_send("recent_trades_list", params)
def old_trade_lookup(
self, symbol: str, limit: Optional[int] = None, from_id: Optional[int] = None
) -> dict:
params = {"symbol": symbol}
params = self._resolve_optional_arguments(params, limit=limit, fromId=from_id)
return self._forge_request_and_send("old_trade_lookup", params)
def compressed_aggregate_trades_list(
self,
symbol: str,
from_id: Optional[int] = None,
start_time: Optional[int] = None,
end_time: Optional[int] = None,
limit: Optional[int] = None,
) -> dict:
params = {"symbol": symbol}
params = self._resolve_optional_arguments(
params, limit=limit, fromId=from_id, startTime=start_time, endTime=end_time
)
return self._forge_request_and_send("compressed_aggregate_trades_list", params)
def kline_candlestick_data(
self,
symbol: str,
interval: str,
start_time: Optional[int],
end_time: Optional[int],
limit: Optional[int],
) -> dict:
params = {"symbol": symbol, "interval": interval}
params = self._resolve_optional_arguments(
params, limit=limit, startTime=start_time, endTime=end_time
)
return self._forge_request_and_send("kline_candlestick_data", params)
def current_average_price(self, symbol: str) -> dict:
params = {"symbol": symbol}
return self._forge_request_and_send("current_average_price", params)
def twentyfourhour_ticker_price_change_statistics(
self, symbol: Optional[str] = None
) -> dict:
params = {}
params = self._resolve_optional_arguments(params, symbol=symbol)
return self._forge_request_and_send(
"twentyfourhour_ticker_price_change_statistics", params
)
def symbol_price_ticker(self, symbol: str) -> dict:
params = {"symbol": symbol}
return self._forge_request_and_send("symbol_price_ticker", params)
def symbol_order_book_ticker(self, symbol: str) -> dict:
params = {"symbol": symbol}
return self._forge_request_and_send("symbol_order_book_ticker", params)
class SpotAccountTradeClient(BaseClient):
def __init__(self, keys_file: str, weight_manager, test_net: bool = False):
super().__init__(
keys_file=keys_file,
client_name="spot_account_trade",
weight_manager=weight_manager,
test_net=test_net,
)
def test_new_order(
self,
symbol: str,
side: List[str],
order_type: List[str],
time_in_force: Optional[List[int]] = None,
quantity: Optional[float] = None,
quote_order_qty: Optional[float] = None,
price: Optional[float] = None,
new_client_order_id: Optional[str] = None,
stop_price: Optional[float] = None,
iceberg_qty: Optional[float] = None,
new_order_resp_type: List[str] = None,
recv_window: float = 5000,
) -> dict:
params = {
"symbol": symbol,
"side": side,
"type": order_type,
"recvWindow": recv_window,
"timestamp": self._timestamp(),
}
params = self._resolve_optional_arguments(
params,
timeInForce=time_in_force,
quantity=quantity,
quoteOrderQty=quote_order_qty,
price=price,
newClientOrderId=new_client_order_id,
stopPrice=stop_price,
icebergQty=iceberg_qty,
newOrderRespType=new_order_resp_type,
)
return self._forge_request_and_send("test_new_order", params)
def new_order(
self,
symbol: str,
side: List[str],
order_type: List[str],
time_in_force: Optional[List[int]] = None,
quantity: Optional[float] = None,
quote_order_qty: Optional[float] = None,
price: Optional[float] = None,
new_client_order_id: Optional[str] = None,
stop_price: Optional[float] = None,
iceberg_qty: Optional[float] = None,
new_order_resp_type: List[str] = None,
recv_window: int = 5000,
) -> dict:
params = {
"symbol": symbol,
"side": side,
"type": order_type,
"recvWindow": recv_window,
"timestamp": self._timestamp(),
}
params = self._resolve_optional_arguments(
params,
timeInForce=time_in_force,
quantity=quantity,
quoteOrderQty=quote_order_qty,
price=price,
newClientOrderId=new_client_order_id,
stopPrice=stop_price,
icebergQty=iceberg_qty,
newOrderRespType=new_order_resp_type,
)
return self._forge_request_and_send("new_order", params)
def cancel_order(
self,
symbol: str,
order_id: Optional[int],
orig_client_order_id: Optional[str],
new_client_order_id: Optional[str],
recv_window: int = 5000,
) -> dict:
params = {
"symbol": symbol,
"recvWindow": recv_window,
"timestamp": self._timestamp(),
}
params = self._resolve_optional_arguments(
params,
orderId=order_id,
origClientOrderId=orig_client_order_id,
newClientOrderId=new_client_order_id,
)
return self._forge_request_and_send("cancel_order", params)
def cancel_all_open_orders_on_symbol(
self, symbol: str, recv_window: int = 5000
) -> dict:
params = {
"symbol": symbol,
"recvWindow": recv_window,
"timestamp": self._timestamp(),
}
return self._forge_request_and_send("cancel_all_open_orders_on_symbol", params)
def query_order(
self,
symbol: str,
order_id: Optional[int] = None,
orig_client_order_id: Optional[str] = None,
recv_window: int = 5000,
) -> dict:
params = {
"symbol": symbol,
"recvWindow": recv_window,
"timestamp": self._timestamp(),
}
params = self._resolve_optional_arguments(
params, orderId=order_id, origClientOrderId=orig_client_order_id
)
return self._forge_request_and_send("query_order", params)
def current_open_orders(self, symbol: str, recv_window: int = 5000) -> dict:
params = {
"symbol": symbol,
"recvWindow": recv_window,
"timestamp": self._timestamp(),
}
return self._forge_request_and_send("current_open_orders", params)
def all_orders(
self,
symbol: str,
order_id: Optional[int],
start_time: Optional[int],
end_time: Optional[int],
limit: Optional[int],
recv_window: int = 5000,
) -> dict:
params = {
"symbol": symbol,
"recvWindow": recv_window,
"timestamp": self._timestamp(),
}
params = self._resolve_optional_arguments(
params,
orderId=order_id,
startTime=start_time,
endTime=end_time,
limit=limit,
)
return self._forge_request_and_send("all_orders", params)
def new_oco(
self,
side: str,
quantity: float,
price: float,
stop_price: float,
list_client_order_id: Optional[str],
limit_client_order_id: Optional[str],
limit_iceberg_qty: Optional[float],
stop_client_order_id: Optional[str],
stop_limit_price: Optional[float],
stop_iceberg_qty: Optional[float],
stop_limit_time_in_force: List[str],
new_order_resp_type: List[str],
recv_window: int = 5000,
) -> dict:
params = {
"recvWindow": recv_window,
"timestamp": self._timestamp(),
"side": side,
"quantity": quantity,
"price": price,
"stopPrice": stop_price,
}
params = self._resolve_optional_arguments(
params,
listClientOrderId=list_client_order_id,
limitClientOrderId=limit_client_order_id,
limitIcebergQty=limit_iceberg_qty,
stopClientOrderId=stop_client_order_id,
stopLimitPrice=stop_limit_price,
stopIcebergQty=stop_iceberg_qty,
stopLimitTimeInForce=stop_limit_time_in_force,
newOrderRespType=new_order_resp_type,
)
return self._forge_request_and_send("new_oco", params)
def cancel_oco(
self,
symbol: str,
order_list_id: Optional[int],
list_client_order_id: Optional[str],
new_client_order_id: Optional[str],
recv_window: int = 5000,
) -> dict:
params = {
"recvWindow": recv_window,
"timestamp": self._timestamp(),
"symbol": symbol,
}
params = self._resolve_optional_arguments(
params,
orderListId=order_list_id,
listClientOrderId=list_client_order_id,
newClientOrderId=new_client_order_id,
)
return self._forge_request_and_send("cancel_oco", params)
def query_oco(
self,
order_list_id: Optional[int],
orig_client_order_id: Optional[str],
recv_window: int = 5000,
) -> dict:
params = {"recvWindow": recv_window, "timestamp": self._timestamp()}
params = self._resolve_optional_arguments(
params, orderListId=order_list_id, origClientOrderId=orig_client_order_id
)
return self._forge_request_and_send("query_oco", params)
def query_all_oco(
self,
from_id: Optional[int],
start_time: Optional[int],
end_time: Optional[int],
limit: Optional[int],
recv_window: int = 5000,
) -> dict:
params = {"recvWindow": recv_window, "timestamp": self._timestamp()}
params = self._resolve_optional_arguments(
            params, fromId=from_id, startTime=start_time, endTime=end_time, limit=limit
)
return self._forge_request_and_send("query_all_oco", params)
def query_open_oco(self, recv_window: int = 5000) -> dict:
params = {"recvWindow": recv_window, "timestamp": self._timestamp()}
return self._forge_request_and_send("query_open_oco", params)
def account_information(self, recv_window: int = 5000) -> dict:
params = {"recvWindow": recv_window, "timestamp": self._timestamp()}
return self._forge_request_and_send("account_information", params)
def account_trade_list(
self,
symbol: str,
from_id: Optional[int],
start_time: Optional[int],
end_time: Optional[int],
limit: Optional[int],
recv_window: int = 5000,
) -> dict:
params = {
"symbol": symbol,
"recvWindow": recv_window,
"timestamp": self._timestamp(),
}
params = self._resolve_optional_arguments(
            params, fromId=from_id, startTime=start_time, endTime=end_time, limit=limit
)
return self._forge_request_and_send("account_trade_list", params)
class UserDataClient(BaseClient):
def __init__(self, keys_file: str, weight_manager, test_net: bool = False):
super().__init__(
keys_file=keys_file,
client_name="user_data",
weight_manager=weight_manager,
test_net=test_net,
)
def create_listen_key(self) -> dict:
return self._forge_request_and_send("create_listen_key", params={})
def ping_listen_key(self, listen_key: str) -> dict:
params = {"listenKey": listen_key}
return self._forge_request_and_send("ping_listen_key", params)
def close_listen_key(self, listen_key: str) -> dict:
params = {"listenKey": listen_key}
return self._forge_request_and_send("close_listen_key", params)
class BinanceClient(BaseClient):
def __init__(self, keys_file: str, test_net: bool = False):
logger.info("Initializing Binance Client...")
self.wallet = WalletClient(
keys_file=keys_file, weight_manager=self._weight_manager, test_net=test_net
)
self.market_data = MarketDataClient(
keys_file=keys_file, weight_manager=self._weight_manager, test_net=test_net
)
self.spot_account_trade = SpotAccountTradeClient(
keys_file=keys_file, weight_manager=self._weight_manager, test_net=test_net
)
self.user_data = UserDataClient(
keys_file=keys_file, weight_manager=self._weight_manager, test_net=test_net
)
self._request_weight_limit = 3000000 # only for initialization
self._order_limit = 0 # unused
self.update_rate_limits()
logger.info("Client is ready to go!")
def update_rate_limits(self):
res = self.market_data.exchange_information()
for limit in res["content"]["rateLimits"]:
if limit["rateLimitType"] == constants.RATE_LIMIT_TYPE_REQUEST_WEIGHT:
self._weight_manager(method="set_limit", weight=limit["limit"])
# Currently only tracks the "REQUEST_WEIGHT" limit, not "ORDERS" nor "RAW_REQUESTS"
# TODO: make sure you update weight limits at least once every 1000 requests
def _weight_manager(self, method: str, **kwargs):
if method == "set_limit":
self._request_weight_limit = kwargs["weight"]
if method == "update":
self._used_weight = kwargs["weight"]
if self._used_weight > self._request_weight_limit * 0.9:
logger.warning("Used weight reaching limit. Taking a rest...")
time.sleep(30)
logger.warning(
f"Used weight: {self._used_weight}, limit: {self._request_weight_limit}"
)
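# Hedged usage sketch (illustrative only; "keys.json" is a hypothetical path whose
# layout must match what BaseClient expects, i.e. {"REAL"|"TEST": {"API_KEY": ...,
# "SECRET_KEY": ...}}; the calls below perform real HTTP requests):
if __name__ == "__main__":
    client = BinanceClient(keys_file="keys.json", test_net=True)
    ticker = client.market_data.symbol_price_ticker("BTCUSDT")
    print(ticker["http_code"], ticker["content"])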
| 36.389527
| 88
| 0.61387
|
a847afb63881e3aa45e9b98140bc8afe6fffb8e4
| 1,681
|
py
|
Python
|
app/main/views.py
|
JECINTA534521/Blog-project
|
1514b92a8999bd25f7304c004747bc9260d17387
|
[
"MIT"
] | null | null | null |
app/main/views.py
|
JECINTA534521/Blog-project
|
1514b92a8999bd25f7304c004747bc9260d17387
|
[
"MIT"
] | null | null | null |
app/main/views.py
|
JECINTA534521/Blog-project
|
1514b92a8999bd25f7304c004747bc9260d17387
|
[
"MIT"
] | null | null | null |
from flask import render_template,url_for,request,abort,redirect
from flask_login import login_required
from . import main
from .forms import Blog,Comment
from .. import db
from ..models import Users,Blogs,Comments
@main.route('/',methods=['GET','POST'])
def index():
blog_form=Blog()
users = Users.query.filter_by(username='uname').first()
if blog_form.validate_on_submit():
blogs = Blogs(title=blog_form.title.data,description=blog_form.description.data)
db.session.add(blogs)
db.session.commit()
return redirect(url_for('main.index'))
def delete_blog(id):
Blogs.delete_blog(id)
blogs=Blogs.query.all()
comments=Comments.query.filter_by(comment='comment').first()
title='Home-The Blogger'
message='The Blogger'
return render_template('index.html',message=message,title=title,users=users,blogs=blogs,comments=comments,blog_form=blog_form,delete_blog=delete_blog)
@main.route('/user/<uname>', methods=['GET','POST'])
@login_required
def profile(uname):
users=Users.query.filter_by(username=uname).first()
blogs=Blogs.query.filter_by(user_id=users.id)
if users is None:
abort(404)
    title=f'{users.username}-The Blogger'
return render_template('profile/profile.html',title=title,users=users,blogs=blogs)
@main.route('/comments/<uname>', methods=['GET','POST'])
@login_required
def comments(uname):
form=Comment()
if form.validate_on_submit():
comments = Comments(comment=form.comment.data)
db.session.add(comments)
db.session.commit()
return redirect(url_for('main.index'))
return render_template('comment.html',form=form)
| 35.765957
| 154
| 0.710291
|
8592dee179b06aec2418bf2dd832fded98294014
| 1,457
|
py
|
Python
|
synapse/__init__.py
|
ThiefMaster/synapse
|
f2af3e4fc550e7e93be1b0f425c3e9c484b96293
|
[
"Apache-2.0"
] | 1
|
2020-07-21T17:51:02.000Z
|
2020-07-21T17:51:02.000Z
|
synapse/__init__.py
|
mjvaldez/synapse
|
de119063f248981510e961e83f1515a3add19a21
|
[
"Apache-2.0"
] | null | null | null |
synapse/__init__.py
|
mjvaldez/synapse
|
de119063f248981510e961e83f1515a3add19a21
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018-9 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" This is a reference implementation of a Matrix homeserver.
"""
import os
import sys
# Check that we're not running on an unsupported Python version.
if sys.version_info < (3, 5):
print("Synapse requires Python 3.5 or above.")
sys.exit(1)
try:
from twisted.internet import protocol
from twisted.internet.protocol import Factory
from twisted.names.dns import DNSDatagramProtocol
protocol.Factory.noisy = False
Factory.noisy = False
DNSDatagramProtocol.noisy = False
except ImportError:
pass
__version__ = "1.17.0"
if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)):
# We import here so that we don't have to install a bunch of deps when
# running the packaging tox test.
from synapse.util.patch_inline_callbacks import do_patch
do_patch()
| 31
| 74
| 0.741249
|
d7e70dae3bbd033490d8a8a568df72bda8ff71ec
| 915
|
py
|
Python
|
src/Modules/Communication/twitter.py
|
bopopescu/PyHouse_1
|
6444ed0b4c38ab59b9e419e4d54d65d598e6a54e
|
[
"MIT"
] | 1
|
2016-09-21T19:30:21.000Z
|
2016-09-21T19:30:21.000Z
|
src/Modules/Communication/twitter.py
|
bopopescu/PyHouse_1
|
6444ed0b4c38ab59b9e419e4d54d65d598e6a54e
|
[
"MIT"
] | null | null | null |
src/Modules/Communication/twitter.py
|
bopopescu/PyHouse_1
|
6444ed0b4c38ab59b9e419e4d54d65d598e6a54e
|
[
"MIT"
] | 1
|
2020-07-23T11:13:36.000Z
|
2020-07-23T11:13:36.000Z
|
"""
-*- test-case-name: PyHouse.src.Modules.Communications.test.test_twitter -*-
@name: PyHouse/src/Modules/Communication/twitter.py
@author: D. Brian Kimmel
@contact: D.BrianKimmel@gmail.com
@copyright: (c) 2016-2016 by D. Brian Kimmel
@license: MIT License
@note: Created on May 27, 2016
@summary: Allow PyHouse to send tweets.
"""
# Import system type stuff
# Import PyMh files
from Modules.Computer import logging_pyh as Logger
LOG = Logger.getLogger('PyHouse.Twitter ')
class API(object):
def __init__(self, p_pyhouse_obj):
self.m_pyhouse_obj = p_pyhouse_obj
def LoadXml(self, p_pyhouse_obj):
# p_pyhouse_obj.Computer.Communication = Utility().read_xml(p_pyhouse_obj)
pass
def Start(self):
pass
def SaveXml(self, p_xml):
LOG.info("Saved XML.")
return p_xml
def Stop(self):
pass
# ## END DBK
| 20.795455
| 82
| 0.664481
|
51fdbb16027ebe39e3576f6b7700c73f59196969
| 1,217
|
py
|
Python
|
hard-gists/8444323/snippet.py
|
jjhenkel/dockerizeme
|
eaa4fe5366f6b9adf74399eab01c712cacaeb279
|
[
"Apache-2.0"
] | 21
|
2019-07-08T08:26:45.000Z
|
2022-01-24T23:53:25.000Z
|
hard-gists/8444323/snippet.py
|
jjhenkel/dockerizeme
|
eaa4fe5366f6b9adf74399eab01c712cacaeb279
|
[
"Apache-2.0"
] | 5
|
2019-06-15T14:47:47.000Z
|
2022-02-26T05:02:56.000Z
|
hard-gists/8444323/snippet.py
|
jjhenkel/dockerizeme
|
eaa4fe5366f6b9adf74399eab01c712cacaeb279
|
[
"Apache-2.0"
] | 17
|
2019-05-16T03:50:34.000Z
|
2021-01-14T14:35:12.000Z
|
#!/usr/bin/python
import random
from struct import pack
from struct import unpack
from scipy import linalg
def Str2matrix(s):
#convert string to 4x4 matrix
return [map(lambda x : ord(x), list(s[i:i+4])) for i in xrange(0, len(s), 4)]
def Matrix2str(m):
#convert matrix to string
return ''.join(map(lambda x : ''.join(map(lambda y : pack('!H', y), x)), m))
def mMatrix2str(m):
return ''.join(map(lambda x : ''.join(map(lambda y : pack('!B', y), x)), m))
def Generate(password):
#generate key matrix
random.seed(password)
return [[random.randint(0,64) for i in xrange(4)] for j in xrange(4)]
def Multiply(A,B):
#multiply two 4x4 matrix
C = [[0 for i in xrange(4)] for j in xrange(4)]
for i in xrange(4):
for j in xrange(4):
for k in xrange(4):
C[i][j] += (A[i][k] * B[k][j])
return C
def Encrypt(fname):
#encrypt file
key = Generate('')
data = open(fname, 'rb').read()
length = pack('!I', len(data))
while len(data) % 16 != 0:
data += '\x00'
out = open(fname + '.out', 'wb')
out.write(length)
for i in xrange(0, len(data), 16):
print Str2matrix(data[i:i+16])
cipher = Multiply(Str2matrix(data[i:i+16]), key)
out.write(Matrix2str(cipher))
out.close()
Encrypt('sample.wmv')
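# Hedged decryption sketch (not part of the original gist): each cipher block is
# plaintext_matrix * key, so the plaintext is cipher * inv(key). scipy.linalg is
# imported above but unused, which suggests this counterpart; the key matrix from
# Generate('') is assumed to be invertible and well-conditioned enough for exact
# recovery after rounding.
def Decrypt(fname):
    key_inv = linalg.inv(Generate(''))
    data = open(fname, 'rb').read()
    length = unpack('!I', data[:4])[0]      # original (unpadded) plaintext length
    plain = ''
    for i in xrange(4, len(data), 32):      # each block: 16 big-endian unsigned shorts
        values = unpack('!16H', data[i:i+32])
        cipher = [list(values[j:j+4]) for j in xrange(0, 16, 4)]
        block = Multiply(cipher, key_inv)   # cipher * key^-1 recovers the ords
        plain += ''.join(chr(int(round(v))) for v in sum(block, []))
    out = open(fname + '.dec', 'wb')
    out.write(plain[:length])               # drop the zero padding
    out.close()
#Decrypt('sample.wmv.out')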
| 24.34
| 78
| 0.639277
|
1925c6e4e4ea7d707f5cbe53202c790ff206a4bf
| 4,426
|
py
|
Python
|
tools/metrics/histograms/update_extension_functions.py
|
nagineni/chromium-crosswalk
|
5725642f1c67d0f97e8613ec1c3e8107ab53fdf8
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 231
|
2015-01-08T09:04:44.000Z
|
2021-12-30T03:03:10.000Z
|
tools/metrics/histograms/update_extension_functions.py
|
j4ckfrost/android_external_chromium_org
|
a1a3dad8b08d1fcf6b6b36c267158ed63217c780
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1
|
2017-02-14T21:55:58.000Z
|
2017-02-14T21:55:58.000Z
|
tools/metrics/histograms/update_extension_functions.py
|
j4ckfrost/android_external_chromium_org
|
a1a3dad8b08d1fcf6b6b36c267158ed63217c780
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 268
|
2015-01-21T05:53:28.000Z
|
2022-03-25T22:09:01.000Z
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Updates ExtensionFunctions enum in histograms.xml file with values read from
extension_function_histogram_value.h.
If the file was pretty-printed, the updated version is pretty-printed too.
"""
import logging
import re
import sys
from xml.dom import minidom
from diffutil import PromptUserToAcceptDiff
from pretty_print import PrettyPrintNode
HISTOGRAMS_PATH = 'histograms.xml'
ENUM_NAME = 'ExtensionFunctions'
EXTENSION_FUNCTIONS_HISTOGRAM_VALUE_PATH = \
'../../../chrome/browser/extensions/extension_function_histogram_value.h'
ENUM_START_MARKER = "^enum HistogramValue {"
ENUM_END_MARKER = "^ENUM_BOUNDARY"
class UserError(Exception):
def __init__(self, message):
Exception.__init__(self, message)
@property
def message(self):
return self.args[0]
def ExtractRegexGroup(line, regex):
m = re.match(regex, line)
if m:
return m.group(1)
else:
return None
def ReadHistogramValues(filename):
"""Returns a list of pairs (label, value) corresponding to HistogramValue.
Reads the extension_functions_histogram_value.h file, locates the
HistogramValue enum definition and returns a pair for each entry.
"""
# Read the file as a list of lines
with open(filename) as f:
content = f.readlines()
# Locate the enum definition and collect all entries in it
inside_enum = False # We haven't found the enum definition yet
result = []
for line in content:
line = line.strip()
if inside_enum:
# Exit condition: we reached last enum value
if re.match(ENUM_END_MARKER, line):
inside_enum = False
else:
# Inside enum: generate new xml entry
label = ExtractRegexGroup(line.strip(), "^([\w]+)")
if label:
result.append((label, enum_value))
enum_value += 1
else:
if re.match(ENUM_START_MARKER, line):
inside_enum = True
enum_value = 0 # Start at 'UNKNOWN'
return result
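# Illustrative example of ReadHistogramValues (the header snippet below is
# hypothetical, not copied from the real file):
#
#   enum HistogramValue {
#     UNKNOWN,
#     WEBNAVIGATION_GETALLFRAMES,
#   ENUM_BOUNDARY
#
# would yield [('UNKNOWN', 0), ('WEBNAVIGATION_GETALLFRAMES', 1)].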
def UpdateHistogramDefinitions(histogram_values, document):
"""Sets the children of <enum name="ExtensionFunctions" ...> node in
|document| to values generated from policy ids contained in
|policy_templates|.
Args:
histogram_values: A list of pairs (label, value) defining each extension
function
document: A minidom.Document object representing parsed histogram
definitions XML file.
"""
# Find ExtensionFunctions enum.
for enum_node in document.getElementsByTagName('enum'):
if enum_node.attributes['name'].value == ENUM_NAME:
extension_functions_enum_node = enum_node
break
else:
    raise UserError('No ExtensionFunctions enum node found')
# Remove existing values.
while extension_functions_enum_node.hasChildNodes():
extension_functions_enum_node.removeChild(
extension_functions_enum_node.lastChild)
# Add a "Generated from (...)" comment
comment = ' Generated from {0} '.format(
EXTENSION_FUNCTIONS_HISTOGRAM_VALUE_PATH)
extension_functions_enum_node.appendChild(document.createComment(comment))
# Add values generated from policy templates.
for (label, value) in histogram_values:
node = document.createElement('int')
node.attributes['value'] = str(value)
node.attributes['label'] = label
extension_functions_enum_node.appendChild(node)
def Log(message):
logging.info(message)
def main():
if len(sys.argv) > 1:
print >>sys.stderr, 'No arguments expected!'
sys.stderr.write(__doc__)
sys.exit(1)
Log('Reading histogram enum definition from "%s".'
% (EXTENSION_FUNCTIONS_HISTOGRAM_VALUE_PATH))
histogram_values = ReadHistogramValues(
EXTENSION_FUNCTIONS_HISTOGRAM_VALUE_PATH)
Log('Reading existing histograms from "%s".' % (HISTOGRAMS_PATH))
with open(HISTOGRAMS_PATH, 'rb') as f:
histograms_doc = minidom.parse(f)
f.seek(0)
xml = f.read()
Log('Comparing histograms enum with new enum definition.')
UpdateHistogramDefinitions(histogram_values, histograms_doc)
Log('Writing out new histograms file.')
new_xml = PrettyPrintNode(histograms_doc)
if PromptUserToAcceptDiff(xml, new_xml, 'Is the updated version acceptable?'):
with open(HISTOGRAMS_PATH, 'wb') as f:
f.write(new_xml)
Log('Done.')
if __name__ == '__main__':
main()
| 29.704698
| 80
| 0.719611
|
5e9f46e1a451c459ceb21393911588ca14527061
| 56,406
|
py
|
Python
|
test/dialect/oracle/test_compiler.py
|
bmacphee/sqlalchemy
|
c4fb058cae423e04e7131005c55c8826abda68e2
|
[
"MIT"
] | null | null | null |
test/dialect/oracle/test_compiler.py
|
bmacphee/sqlalchemy
|
c4fb058cae423e04e7131005c55c8826abda68e2
|
[
"MIT"
] | null | null | null |
test/dialect/oracle/test_compiler.py
|
bmacphee/sqlalchemy
|
c4fb058cae423e04e7131005c55c8826abda68e2
|
[
"MIT"
] | null | null | null |
# coding: utf-8
from sqlalchemy import and_
from sqlalchemy import bindparam
from sqlalchemy import Computed
from sqlalchemy import exc
from sqlalchemy import except_
from sqlalchemy import ForeignKey
from sqlalchemy import func
from sqlalchemy import Identity
from sqlalchemy import Index
from sqlalchemy import Integer
from sqlalchemy import literal
from sqlalchemy import literal_column
from sqlalchemy import MetaData
from sqlalchemy import or_
from sqlalchemy import outerjoin
from sqlalchemy import schema
from sqlalchemy import select
from sqlalchemy import Sequence
from sqlalchemy import sql
from sqlalchemy import String
from sqlalchemy import testing
from sqlalchemy import text
from sqlalchemy import type_coerce
from sqlalchemy import TypeDecorator
from sqlalchemy import union
from sqlalchemy.dialects.oracle import base as oracle
from sqlalchemy.dialects.oracle import cx_oracle
from sqlalchemy.engine import default
from sqlalchemy.sql import column
from sqlalchemy.sql import ddl
from sqlalchemy.sql import quoted_name
from sqlalchemy.sql import table
from sqlalchemy.sql.selectable import LABEL_STYLE_TABLENAME_PLUS_COL
from sqlalchemy.testing import assert_raises_message
from sqlalchemy.testing import AssertsCompiledSQL
from sqlalchemy.testing import eq_
from sqlalchemy.testing import fixtures
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = "oracle"
def test_true_false(self):
self.assert_compile(sql.false(), "0")
self.assert_compile(sql.true(), "1")
def test_owner(self):
meta = MetaData()
parent = Table(
"parent",
meta,
Column("id", Integer, primary_key=True),
Column("name", String(50)),
schema="ed",
)
child = Table(
"child",
meta,
Column("id", Integer, primary_key=True),
Column("parent_id", Integer, ForeignKey("ed.parent.id")),
schema="ed",
)
self.assert_compile(
parent.join(child),
"ed.parent JOIN ed.child ON ed.parent.id = " "ed.child.parent_id",
)
def test_subquery(self):
t = table("sometable", column("col1"), column("col2"))
s = select(t).subquery()
s = select(s.c.col1, s.c.col2)
self.assert_compile(
s,
"SELECT anon_1.col1, anon_1.col2 FROM (SELECT "
"sometable.col1 AS col1, sometable.col2 "
"AS col2 FROM sometable) anon_1",
)
def test_bindparam_quote(self):
"""test that bound parameters take on quoting for reserved words,
column names quote flag enabled."""
# note: this is only in cx_oracle at the moment. not sure
# what other hypothetical oracle dialects might need
self.assert_compile(bindparam("option"), ':"option"')
self.assert_compile(bindparam("plain"), ":plain")
t = Table("s", MetaData(), Column("plain", Integer, quote=True))
self.assert_compile(
t.insert().values(plain=5),
'INSERT INTO s ("plain") VALUES (:"plain")',
)
self.assert_compile(
t.update().values(plain=5), 'UPDATE s SET "plain"=:"plain"'
)
def test_bindparam_quote_works_on_expanding(self):
self.assert_compile(
bindparam("uid", expanding=True),
"([POSTCOMPILE_uid])",
dialect=cx_oracle.dialect(),
)
def test_cte(self):
part = table(
"part", column("part"), column("sub_part"), column("quantity")
)
included_parts = (
select(part.c.sub_part, part.c.part, part.c.quantity)
.where(part.c.part == "p1")
.cte(name="included_parts", recursive=True)
.suffix_with(
"search depth first by part set ord1",
"cycle part set y_cycle to 1 default 0",
dialect="oracle",
)
)
incl_alias = included_parts.alias("pr1")
parts_alias = part.alias("p")
included_parts = included_parts.union_all(
select(
parts_alias.c.sub_part,
parts_alias.c.part,
parts_alias.c.quantity,
).where(parts_alias.c.part == incl_alias.c.sub_part)
)
q = select(
included_parts.c.sub_part,
func.sum(included_parts.c.quantity).label("total_quantity"),
).group_by(included_parts.c.sub_part)
self.assert_compile(
q,
"WITH included_parts(sub_part, part, quantity) AS "
"(SELECT part.sub_part AS sub_part, part.part AS part, "
"part.quantity AS quantity FROM part WHERE part.part = :part_1 "
"UNION ALL SELECT p.sub_part AS sub_part, p.part AS part, "
"p.quantity AS quantity FROM part p, included_parts pr1 "
"WHERE p.part = pr1.sub_part) "
"search depth first by part set ord1 cycle part set "
"y_cycle to 1 default 0 "
"SELECT included_parts.sub_part, sum(included_parts.quantity) "
"AS total_quantity FROM included_parts "
"GROUP BY included_parts.sub_part",
)
def test_limit_one(self):
t = table("sometable", column("col1"), column("col2"))
s = select(t)
c = s.compile(dialect=oracle.OracleDialect())
assert t.c.col1 in set(c._create_result_map()["col1"][1])
s = select(t).limit(10).offset(20)
self.assert_compile(
s,
"SELECT anon_1.col1, anon_1.col2 FROM "
"(SELECT anon_2.col1 AS col1, "
"anon_2.col2 AS col2, ROWNUM AS ora_rn FROM (SELECT "
"sometable.col1 AS col1, sometable.col2 AS "
"col2 FROM sometable) anon_2 WHERE ROWNUM <= "
"[POSTCOMPILE_param_1] + [POSTCOMPILE_param_2]) anon_1 "
"WHERE ora_rn > "
"[POSTCOMPILE_param_2]",
checkparams={"param_1": 10, "param_2": 20},
)
c = s.compile(dialect=oracle.OracleDialect())
eq_(len(c._result_columns), 2)
assert t.c.col1 in set(c._create_result_map()["col1"][1])
def test_limit_one_firstrows(self):
t = table("sometable", column("col1"), column("col2"))
s = select(t)
s = select(t).limit(10).offset(20)
self.assert_compile(
s,
"SELECT anon_1.col1, anon_1.col2 FROM "
"(SELECT /*+ FIRST_ROWS([POSTCOMPILE_param_1]) */ "
"anon_2.col1 AS col1, "
"anon_2.col2 AS col2, ROWNUM AS ora_rn FROM (SELECT "
"sometable.col1 AS col1, sometable.col2 AS "
"col2 FROM sometable) anon_2 WHERE ROWNUM <= "
"[POSTCOMPILE_param_1] + [POSTCOMPILE_param_2]) anon_1 "
"WHERE ora_rn > "
"[POSTCOMPILE_param_2]",
checkparams={"param_1": 10, "param_2": 20},
dialect=oracle.OracleDialect(optimize_limits=True),
)
def test_limit_two(self):
t = table("sometable", column("col1"), column("col2"))
s = select(t).limit(10).offset(20).subquery()
s2 = select(s.c.col1, s.c.col2)
self.assert_compile(
s2,
"SELECT anon_1.col1, anon_1.col2 FROM "
"(SELECT anon_2.col1 AS col1, "
"anon_2.col2 AS col2 "
"FROM (SELECT anon_3.col1 AS col1, anon_3.col2 AS col2, "
"ROWNUM AS ora_rn "
"FROM (SELECT sometable.col1 AS col1, "
"sometable.col2 AS col2 FROM sometable) anon_3 "
"WHERE ROWNUM <= [POSTCOMPILE_param_1] + [POSTCOMPILE_param_2]) "
"anon_2 "
"WHERE ora_rn > [POSTCOMPILE_param_2]) anon_1",
checkparams={"param_1": 10, "param_2": 20},
)
self.assert_compile(
s2,
"SELECT anon_1.col1, anon_1.col2 FROM "
"(SELECT anon_2.col1 AS col1, "
"anon_2.col2 AS col2 "
"FROM (SELECT anon_3.col1 AS col1, anon_3.col2 AS col2, "
"ROWNUM AS ora_rn "
"FROM (SELECT sometable.col1 AS col1, "
"sometable.col2 AS col2 FROM sometable) anon_3 "
"WHERE ROWNUM <= [POSTCOMPILE_param_1] + [POSTCOMPILE_param_2]) "
"anon_2 "
"WHERE ora_rn > [POSTCOMPILE_param_2]) anon_1",
)
c = s2.compile(dialect=oracle.OracleDialect())
eq_(len(c._result_columns), 2)
assert s.c.col1 in set(c._create_result_map()["col1"][1])
def test_limit_three(self):
t = table("sometable", column("col1"), column("col2"))
s = select(t).limit(10).offset(20).order_by(t.c.col2)
self.assert_compile(
s,
"SELECT anon_1.col1, anon_1.col2 FROM "
"(SELECT anon_2.col1 AS col1, "
"anon_2.col2 AS col2, ROWNUM AS ora_rn FROM (SELECT "
"sometable.col1 AS col1, sometable.col2 AS "
"col2 FROM sometable ORDER BY "
"sometable.col2) anon_2 WHERE ROWNUM <= "
"[POSTCOMPILE_param_1] + [POSTCOMPILE_param_2]) anon_1 "
"WHERE ora_rn > [POSTCOMPILE_param_2]",
checkparams={"param_1": 10, "param_2": 20},
)
c = s.compile(dialect=oracle.OracleDialect())
eq_(len(c._result_columns), 2)
assert t.c.col1 in set(c._create_result_map()["col1"][1])
def test_limit_four(self):
t = table("sometable", column("col1"), column("col2"))
s = select(t).with_for_update().limit(10).order_by(t.c.col2)
self.assert_compile(
s,
"SELECT anon_1.col1, anon_1.col2 FROM (SELECT "
"sometable.col1 AS col1, sometable.col2 AS "
"col2 FROM sometable ORDER BY "
"sometable.col2) anon_1 WHERE ROWNUM <= [POSTCOMPILE_param_1] "
"FOR UPDATE",
checkparams={"param_1": 10},
)
def test_limit_four_firstrows(self):
t = table("sometable", column("col1"), column("col2"))
s = select(t).with_for_update().limit(10).order_by(t.c.col2)
self.assert_compile(
s,
"SELECT /*+ FIRST_ROWS([POSTCOMPILE_param_1]) */ "
"anon_1.col1, anon_1.col2 FROM (SELECT "
"sometable.col1 AS col1, sometable.col2 AS "
"col2 FROM sometable ORDER BY "
"sometable.col2) anon_1 WHERE ROWNUM <= [POSTCOMPILE_param_1] "
"FOR UPDATE",
checkparams={"param_1": 10},
dialect=oracle.OracleDialect(optimize_limits=True),
)
def test_limit_five(self):
t = table("sometable", column("col1"), column("col2"))
s = select(t).with_for_update().limit(10).offset(20).order_by(t.c.col2)
self.assert_compile(
s,
"SELECT anon_1.col1, anon_1.col2 FROM "
"(SELECT anon_2.col1 AS col1, "
"anon_2.col2 AS col2, ROWNUM AS ora_rn FROM (SELECT "
"sometable.col1 AS col1, sometable.col2 AS "
"col2 FROM sometable ORDER BY "
"sometable.col2) anon_2 WHERE ROWNUM <= "
"[POSTCOMPILE_param_1] + [POSTCOMPILE_param_2]) anon_1 "
"WHERE ora_rn > [POSTCOMPILE_param_2] FOR "
"UPDATE",
checkparams={"param_1": 10, "param_2": 20},
)
def test_limit_six(self):
t = table("sometable", column("col1"), column("col2"))
s = (
select(t)
.limit(10)
.offset(literal(10) + literal(20))
.order_by(t.c.col2)
)
self.assert_compile(
s,
"SELECT anon_1.col1, anon_1.col2 FROM (SELECT anon_2.col1 AS "
"col1, anon_2.col2 AS col2, ROWNUM AS ora_rn FROM "
"(SELECT sometable.col1 AS col1, sometable.col2 AS col2 "
"FROM sometable ORDER BY sometable.col2) anon_2 WHERE "
"ROWNUM <= [POSTCOMPILE_param_1] + :param_2 + :param_3) anon_1 "
"WHERE ora_rn > :param_2 + :param_3",
checkparams={"param_1": 10, "param_2": 10, "param_3": 20},
)
def test_limit_special_quoting(self):
"""Oracle-specific test for #4730.
Even though this issue is generic, test the originally reported Oracle
use case.
"""
col = literal_column("SUM(ABC)").label("SUM(ABC)")
tbl = table("my_table")
query = select(col).select_from(tbl).order_by(col).limit(100)
self.assert_compile(
query,
'SELECT anon_1."SUM(ABC)" FROM '
'(SELECT SUM(ABC) AS "SUM(ABC)" '
"FROM my_table ORDER BY SUM(ABC)) anon_1 "
"WHERE ROWNUM <= [POSTCOMPILE_param_1]",
)
col = literal_column("SUM(ABC)").label(quoted_name("SUM(ABC)", True))
tbl = table("my_table")
query = select(col).select_from(tbl).order_by(col).limit(100)
self.assert_compile(
query,
'SELECT anon_1."SUM(ABC)" FROM '
'(SELECT SUM(ABC) AS "SUM(ABC)" '
"FROM my_table ORDER BY SUM(ABC)) anon_1 "
"WHERE ROWNUM <= [POSTCOMPILE_param_1]",
)
col = literal_column("SUM(ABC)").label("SUM(ABC)_")
tbl = table("my_table")
query = select(col).select_from(tbl).order_by(col).limit(100)
self.assert_compile(
query,
'SELECT anon_1."SUM(ABC)_" FROM '
'(SELECT SUM(ABC) AS "SUM(ABC)_" '
"FROM my_table ORDER BY SUM(ABC)) anon_1 "
"WHERE ROWNUM <= [POSTCOMPILE_param_1]",
)
col = literal_column("SUM(ABC)").label(quoted_name("SUM(ABC)_", True))
tbl = table("my_table")
query = select(col).select_from(tbl).order_by(col).limit(100)
self.assert_compile(
query,
'SELECT anon_1."SUM(ABC)_" FROM '
'(SELECT SUM(ABC) AS "SUM(ABC)_" '
"FROM my_table ORDER BY SUM(ABC)) anon_1 "
"WHERE ROWNUM <= [POSTCOMPILE_param_1]",
)
def test_for_update(self):
table1 = table(
"mytable", column("myid"), column("name"), column("description")
)
self.assert_compile(
table1.select(table1.c.myid == 7).with_for_update(),
"SELECT mytable.myid, mytable.name, mytable.description "
"FROM mytable WHERE mytable.myid = :myid_1 FOR UPDATE",
)
self.assert_compile(
table1.select(table1.c.myid == 7).with_for_update(
of=table1.c.myid
),
"SELECT mytable.myid, mytable.name, mytable.description "
"FROM mytable WHERE mytable.myid = :myid_1 "
"FOR UPDATE OF mytable.myid",
)
self.assert_compile(
table1.select(table1.c.myid == 7).with_for_update(nowait=True),
"SELECT mytable.myid, mytable.name, mytable.description "
"FROM mytable WHERE mytable.myid = :myid_1 FOR UPDATE NOWAIT",
)
self.assert_compile(
table1.select(table1.c.myid == 7).with_for_update(
nowait=True, of=table1.c.myid
),
"SELECT mytable.myid, mytable.name, mytable.description "
"FROM mytable WHERE mytable.myid = :myid_1 "
"FOR UPDATE OF mytable.myid NOWAIT",
)
self.assert_compile(
table1.select(table1.c.myid == 7).with_for_update(
nowait=True, of=[table1.c.myid, table1.c.name]
),
"SELECT mytable.myid, mytable.name, mytable.description "
"FROM mytable WHERE mytable.myid = :myid_1 FOR UPDATE OF "
"mytable.myid, mytable.name NOWAIT",
)
self.assert_compile(
table1.select(table1.c.myid == 7).with_for_update(
skip_locked=True, of=[table1.c.myid, table1.c.name]
),
"SELECT mytable.myid, mytable.name, mytable.description "
"FROM mytable WHERE mytable.myid = :myid_1 FOR UPDATE OF "
"mytable.myid, mytable.name SKIP LOCKED",
)
# key_share has no effect
self.assert_compile(
table1.select(table1.c.myid == 7).with_for_update(key_share=True),
"SELECT mytable.myid, mytable.name, mytable.description "
"FROM mytable WHERE mytable.myid = :myid_1 FOR UPDATE",
)
# read has no effect
self.assert_compile(
table1.select(table1.c.myid == 7).with_for_update(
read=True, key_share=True
),
"SELECT mytable.myid, mytable.name, mytable.description "
"FROM mytable WHERE mytable.myid = :myid_1 FOR UPDATE",
)
ta = table1.alias()
self.assert_compile(
ta.select(ta.c.myid == 7).with_for_update(
of=[ta.c.myid, ta.c.name]
),
"SELECT mytable_1.myid, mytable_1.name, mytable_1.description "
"FROM mytable mytable_1 "
"WHERE mytable_1.myid = :myid_1 FOR UPDATE OF "
"mytable_1.myid, mytable_1.name",
)
# ensure of=text() for of works
self.assert_compile(
table1.select(table1.c.myid == 7).with_for_update(
read=True, of=text("table1")
),
"SELECT mytable.myid, mytable.name, mytable.description "
"FROM mytable WHERE mytable.myid = :myid_1 FOR UPDATE OF table1",
)
# ensure of=literal_column() for of works
self.assert_compile(
table1.select(table1.c.myid == 7).with_for_update(
read=True, of=literal_column("table1")
),
"SELECT mytable.myid, mytable.name, mytable.description "
"FROM mytable WHERE mytable.myid = :myid_1 FOR UPDATE OF table1",
)
def test_for_update_of_w_limit_adaption_col_present(self):
table1 = table("mytable", column("myid"), column("name"))
self.assert_compile(
select(table1.c.myid, table1.c.name)
.where(table1.c.myid == 7)
.with_for_update(nowait=True, of=table1.c.name)
.limit(10),
"SELECT anon_1.myid, anon_1.name FROM "
"(SELECT mytable.myid AS myid, mytable.name AS name "
"FROM mytable WHERE mytable.myid = :myid_1) anon_1 "
"WHERE ROWNUM <= [POSTCOMPILE_param_1] "
"FOR UPDATE OF anon_1.name NOWAIT",
checkparams={"param_1": 10, "myid_1": 7},
)
def test_for_update_of_w_limit_adaption_col_unpresent(self):
table1 = table("mytable", column("myid"), column("name"))
self.assert_compile(
select(table1.c.myid)
.where(table1.c.myid == 7)
.with_for_update(nowait=True, of=table1.c.name)
.limit(10),
"SELECT anon_1.myid FROM "
"(SELECT mytable.myid AS myid, mytable.name AS name "
"FROM mytable WHERE mytable.myid = :myid_1) anon_1 "
"WHERE ROWNUM <= [POSTCOMPILE_param_1] "
"FOR UPDATE OF anon_1.name NOWAIT",
)
def test_for_update_of_w_limit_offset_adaption_col_present(self):
table1 = table("mytable", column("myid"), column("name"))
self.assert_compile(
select(table1.c.myid, table1.c.name)
.where(table1.c.myid == 7)
.with_for_update(nowait=True, of=table1.c.name)
.limit(10)
.offset(50),
"SELECT anon_1.myid, anon_1.name FROM "
"(SELECT anon_2.myid AS myid, anon_2.name AS name, "
"ROWNUM AS ora_rn "
"FROM (SELECT mytable.myid AS myid, mytable.name AS name "
"FROM mytable WHERE mytable.myid = :myid_1) anon_2 "
"WHERE ROWNUM <= [POSTCOMPILE_param_1] + [POSTCOMPILE_param_2]) "
"anon_1 "
"WHERE ora_rn > [POSTCOMPILE_param_2] "
"FOR UPDATE OF anon_1.name NOWAIT",
checkparams={"param_1": 10, "param_2": 50, "myid_1": 7},
)
def test_for_update_of_w_limit_offset_adaption_col_unpresent(self):
table1 = table("mytable", column("myid"), column("name"))
self.assert_compile(
select(table1.c.myid)
.where(table1.c.myid == 7)
.with_for_update(nowait=True, of=table1.c.name)
.limit(10)
.offset(50),
"SELECT anon_1.myid FROM (SELECT anon_2.myid AS myid, "
"ROWNUM AS ora_rn, anon_2.name AS name "
"FROM (SELECT mytable.myid AS myid, mytable.name AS name "
"FROM mytable WHERE mytable.myid = :myid_1) anon_2 "
"WHERE "
"ROWNUM <= [POSTCOMPILE_param_1] + [POSTCOMPILE_param_2]) anon_1 "
"WHERE ora_rn > [POSTCOMPILE_param_2] "
"FOR UPDATE OF anon_1.name NOWAIT",
checkparams={"param_1": 10, "param_2": 50, "myid_1": 7},
)
def test_for_update_of_w_limit_offset_adaption_partial_col_unpresent(self):
table1 = table("mytable", column("myid"), column("foo"), column("bar"))
self.assert_compile(
select(table1.c.myid, table1.c.bar)
.where(table1.c.myid == 7)
.with_for_update(nowait=True, of=[table1.c.foo, table1.c.bar])
.limit(10)
.offset(50),
"SELECT anon_1.myid, anon_1.bar FROM (SELECT anon_2.myid AS myid, "
"anon_2.bar AS bar, ROWNUM AS ora_rn, "
"anon_2.foo AS foo FROM (SELECT mytable.myid AS myid, "
"mytable.bar AS bar, "
"mytable.foo AS foo FROM mytable "
"WHERE mytable.myid = :myid_1) anon_2 "
"WHERE ROWNUM <= [POSTCOMPILE_param_1] + [POSTCOMPILE_param_2]) "
"anon_1 "
"WHERE ora_rn > [POSTCOMPILE_param_2] "
"FOR UPDATE OF anon_1.foo, anon_1.bar NOWAIT",
checkparams={"param_1": 10, "param_2": 50, "myid_1": 7},
)
def test_limit_preserves_typing_information(self):
class MyType(TypeDecorator):
impl = Integer
stmt = select(type_coerce(column("x"), MyType).label("foo")).limit(1)
dialect = oracle.dialect()
compiled = stmt.compile(dialect=dialect)
assert isinstance(compiled._create_result_map()["foo"][-2], MyType)
def test_use_binds_for_limits_disabled_one(self):
t = table("sometable", column("col1"), column("col2"))
with testing.expect_deprecated(
"The ``use_binds_for_limits`` Oracle dialect parameter is "
"deprecated."
):
dialect = oracle.OracleDialect(use_binds_for_limits=False)
self.assert_compile(
select(t).limit(10),
"SELECT anon_1.col1, anon_1.col2 FROM "
"(SELECT sometable.col1 AS col1, "
"sometable.col2 AS col2 FROM sometable) anon_1 "
"WHERE ROWNUM <= [POSTCOMPILE_param_1]",
dialect=dialect,
)
def test_use_binds_for_limits_disabled_two(self):
t = table("sometable", column("col1"), column("col2"))
with testing.expect_deprecated(
"The ``use_binds_for_limits`` Oracle dialect parameter is "
"deprecated."
):
dialect = oracle.OracleDialect(use_binds_for_limits=False)
self.assert_compile(
select(t).offset(10),
"SELECT anon_1.col1, anon_1.col2 FROM (SELECT "
"anon_2.col1 AS col1, anon_2.col2 AS col2, ROWNUM AS ora_rn "
"FROM (SELECT sometable.col1 AS col1, sometable.col2 AS col2 "
"FROM sometable) anon_2) anon_1 "
"WHERE ora_rn > [POSTCOMPILE_param_1]",
dialect=dialect,
)
def test_use_binds_for_limits_disabled_three(self):
t = table("sometable", column("col1"), column("col2"))
with testing.expect_deprecated(
"The ``use_binds_for_limits`` Oracle dialect parameter is "
"deprecated."
):
dialect = oracle.OracleDialect(use_binds_for_limits=False)
self.assert_compile(
select(t).limit(10).offset(10),
"SELECT anon_1.col1, anon_1.col2 FROM (SELECT "
"anon_2.col1 AS col1, anon_2.col2 AS col2, ROWNUM AS ora_rn "
"FROM (SELECT sometable.col1 AS col1, sometable.col2 AS col2 "
"FROM sometable) anon_2 "
"WHERE ROWNUM <= [POSTCOMPILE_param_1] + "
"[POSTCOMPILE_param_2]) anon_1 "
"WHERE ora_rn > [POSTCOMPILE_param_2]",
dialect=dialect,
)
def test_use_binds_for_limits_enabled_one(self):
t = table("sometable", column("col1"), column("col2"))
with testing.expect_deprecated(
"The ``use_binds_for_limits`` Oracle dialect parameter is "
"deprecated."
):
dialect = oracle.OracleDialect(use_binds_for_limits=True)
self.assert_compile(
select(t).limit(10),
"SELECT anon_1.col1, anon_1.col2 FROM "
"(SELECT sometable.col1 AS col1, "
"sometable.col2 AS col2 FROM sometable) anon_1 WHERE ROWNUM "
"<= [POSTCOMPILE_param_1]",
dialect=dialect,
)
def test_use_binds_for_limits_enabled_two(self):
t = table("sometable", column("col1"), column("col2"))
with testing.expect_deprecated(
"The ``use_binds_for_limits`` Oracle dialect parameter is "
"deprecated."
):
dialect = oracle.OracleDialect(use_binds_for_limits=True)
self.assert_compile(
select(t).offset(10),
"SELECT anon_1.col1, anon_1.col2 FROM "
"(SELECT anon_2.col1 AS col1, anon_2.col2 AS col2, "
"ROWNUM AS ora_rn "
"FROM (SELECT sometable.col1 AS col1, sometable.col2 AS col2 "
"FROM sometable) anon_2) anon_1 "
"WHERE ora_rn > [POSTCOMPILE_param_1]",
dialect=dialect,
)
def test_use_binds_for_limits_enabled_three(self):
t = table("sometable", column("col1"), column("col2"))
with testing.expect_deprecated(
"The ``use_binds_for_limits`` Oracle dialect parameter is "
"deprecated."
):
dialect = oracle.OracleDialect(use_binds_for_limits=True)
self.assert_compile(
select(t).limit(10).offset(10),
"SELECT anon_1.col1, anon_1.col2 FROM "
"(SELECT anon_2.col1 AS col1, anon_2.col2 AS col2, "
"ROWNUM AS ora_rn "
"FROM (SELECT sometable.col1 AS col1, sometable.col2 AS col2 "
"FROM sometable) anon_2 "
"WHERE ROWNUM <= [POSTCOMPILE_param_1] + "
"[POSTCOMPILE_param_2]) anon_1 "
"WHERE ora_rn > [POSTCOMPILE_param_2]",
dialect=dialect,
checkparams={"param_1": 10, "param_2": 10},
)
def test_long_labels_legacy_ident_length(self):
dialect = default.DefaultDialect()
dialect.max_identifier_length = 30
ora_dialect = oracle.dialect(max_identifier_length=30)
m = MetaData()
a_table = Table(
"thirty_characters_table_xxxxxx",
m,
Column("id", Integer, primary_key=True),
)
other_table = Table(
"other_thirty_characters_table_",
m,
Column("id", Integer, primary_key=True),
Column(
"thirty_characters_table_id",
Integer,
ForeignKey("thirty_characters_table_xxxxxx.id"),
primary_key=True,
),
)
anon = a_table.alias()
self.assert_compile(
select(other_table, anon)
.select_from(other_table.outerjoin(anon))
.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL),
"SELECT other_thirty_characters_table_.id "
"AS other_thirty_characters__1, "
"other_thirty_characters_table_.thirty_char"
"acters_table_id AS other_thirty_characters"
"__2, thirty_characters_table__1.id AS "
"thirty_characters_table__3 FROM "
"other_thirty_characters_table_ LEFT OUTER "
"JOIN thirty_characters_table_xxxxxx AS "
"thirty_characters_table__1 ON "
"thirty_characters_table__1.id = "
"other_thirty_characters_table_.thirty_char"
"acters_table_id",
dialect=dialect,
)
self.assert_compile(
select(other_table, anon)
.select_from(other_table.outerjoin(anon))
.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL),
"SELECT other_thirty_characters_table_.id "
"AS other_thirty_characters__1, "
"other_thirty_characters_table_.thirty_char"
"acters_table_id AS other_thirty_characters"
"__2, thirty_characters_table__1.id AS "
"thirty_characters_table__3 FROM "
"other_thirty_characters_table_ LEFT OUTER "
"JOIN thirty_characters_table_xxxxxx "
"thirty_characters_table__1 ON "
"thirty_characters_table__1.id = "
"other_thirty_characters_table_.thirty_char"
"acters_table_id",
dialect=ora_dialect,
)
def _test_outer_join_fixture(self):
table1 = table(
"mytable",
column("myid", Integer),
column("name", String),
column("description", String),
)
table2 = table(
"myothertable",
column("otherid", Integer),
column("othername", String),
)
table3 = table(
"thirdtable",
column("userid", Integer),
column("otherstuff", String),
)
return table1, table2, table3
def test_outer_join_one(self):
table1, table2, table3 = self._test_outer_join_fixture()
query = (
select(table1, table2)
.where(
or_(
table1.c.name == "fred",
table1.c.myid == 10,
table2.c.othername != "jack",
text("EXISTS (select yay from foo where boo = lar)"),
)
)
.select_from(
outerjoin(table1, table2, table1.c.myid == table2.c.otherid)
)
)
self.assert_compile(
query,
"SELECT mytable.myid, mytable.name, "
"mytable.description, myothertable.otherid,"
" myothertable.othername FROM mytable, "
"myothertable WHERE (mytable.name = "
":name_1 OR mytable.myid = :myid_1 OR "
"myothertable.othername != :othername_1 OR "
"EXISTS (select yay from foo where boo = "
"lar)) AND mytable.myid = "
"myothertable.otherid(+)",
dialect=oracle.OracleDialect(use_ansi=False),
)
def test_outer_join_two(self):
table1, table2, table3 = self._test_outer_join_fixture()
query = table1.outerjoin(
table2, table1.c.myid == table2.c.otherid
).outerjoin(table3, table3.c.userid == table2.c.otherid)
self.assert_compile(
query.select(),
"SELECT mytable.myid, mytable.name, "
"mytable.description, myothertable.otherid,"
" myothertable.othername, "
"thirdtable.userid, thirdtable.otherstuff "
"FROM mytable LEFT OUTER JOIN myothertable "
"ON mytable.myid = myothertable.otherid "
"LEFT OUTER JOIN thirdtable ON "
"thirdtable.userid = myothertable.otherid",
)
def test_outer_join_three(self):
table1, table2, table3 = self._test_outer_join_fixture()
query = table1.outerjoin(
table2, table1.c.myid == table2.c.otherid
).outerjoin(table3, table3.c.userid == table2.c.otherid)
self.assert_compile(
query.select(),
"SELECT mytable.myid, mytable.name, "
"mytable.description, myothertable.otherid,"
" myothertable.othername, "
"thirdtable.userid, thirdtable.otherstuff "
"FROM mytable, myothertable, thirdtable "
"WHERE thirdtable.userid(+) = "
"myothertable.otherid AND mytable.myid = "
"myothertable.otherid(+)",
dialect=oracle.dialect(use_ansi=False),
)
def test_outer_join_four(self):
table1, table2, table3 = self._test_outer_join_fixture()
query = table1.join(table2, table1.c.myid == table2.c.otherid).join(
table3, table3.c.userid == table2.c.otherid
)
self.assert_compile(
query.select(),
"SELECT mytable.myid, mytable.name, "
"mytable.description, myothertable.otherid,"
" myothertable.othername, "
"thirdtable.userid, thirdtable.otherstuff "
"FROM mytable, myothertable, thirdtable "
"WHERE thirdtable.userid = "
"myothertable.otherid AND mytable.myid = "
"myothertable.otherid",
dialect=oracle.dialect(use_ansi=False),
)
def test_outer_join_five(self):
table1, table2, table3 = self._test_outer_join_fixture()
query = table1.join(
table2, table1.c.myid == table2.c.otherid
).outerjoin(table3, table3.c.userid == table2.c.otherid)
self.assert_compile(
query.select().order_by(table1.c.name).limit(10).offset(5),
"SELECT anon_1.myid, anon_1.name, anon_1.description, "
"anon_1.otherid, "
"anon_1.othername, anon_1.userid, anon_1.otherstuff FROM "
"(SELECT anon_2.myid AS myid, anon_2.name AS name, "
"anon_2.description AS description, anon_2.otherid AS otherid, "
"anon_2.othername AS othername, anon_2.userid AS userid, "
"anon_2.otherstuff AS otherstuff, ROWNUM AS "
"ora_rn FROM (SELECT mytable.myid AS myid, "
"mytable.name AS name, mytable.description "
"AS description, myothertable.otherid AS "
"otherid, myothertable.othername AS "
"othername, thirdtable.userid AS userid, "
"thirdtable.otherstuff AS otherstuff FROM "
"mytable, myothertable, thirdtable WHERE "
"thirdtable.userid(+) = "
"myothertable.otherid AND mytable.myid = "
"myothertable.otherid ORDER BY mytable.name) anon_2 "
"WHERE ROWNUM <= [POSTCOMPILE_param_1] + [POSTCOMPILE_param_2]) "
"anon_1 "
"WHERE ora_rn > [POSTCOMPILE_param_2]",
checkparams={"param_1": 10, "param_2": 5},
dialect=oracle.dialect(use_ansi=False),
)
def test_outer_join_six(self):
table1, table2, table3 = self._test_outer_join_fixture()
subq = (
select(table1)
.select_from(
table1.outerjoin(table2, table1.c.myid == table2.c.otherid)
)
.alias()
)
q = select(table3).select_from(
table3.outerjoin(subq, table3.c.userid == subq.c.myid)
)
self.assert_compile(
q,
"SELECT thirdtable.userid, "
"thirdtable.otherstuff FROM thirdtable "
"LEFT OUTER JOIN (SELECT mytable.myid AS "
"myid, mytable.name AS name, "
"mytable.description AS description FROM "
"mytable LEFT OUTER JOIN myothertable ON "
"mytable.myid = myothertable.otherid) "
"anon_1 ON thirdtable.userid = anon_1.myid",
dialect=oracle.dialect(use_ansi=True),
)
self.assert_compile(
q,
"SELECT thirdtable.userid, "
"thirdtable.otherstuff FROM thirdtable, "
"(SELECT mytable.myid AS myid, "
"mytable.name AS name, mytable.description "
"AS description FROM mytable, myothertable "
"WHERE mytable.myid = myothertable.otherid("
"+)) anon_1 WHERE thirdtable.userid = "
"anon_1.myid(+)",
dialect=oracle.dialect(use_ansi=False),
)
def test_outer_join_seven(self):
table1, table2, table3 = self._test_outer_join_fixture()
q = select(table1.c.name).where(table1.c.name == "foo")
self.assert_compile(
q,
"SELECT mytable.name FROM mytable WHERE " "mytable.name = :name_1",
dialect=oracle.dialect(use_ansi=False),
)
def test_outer_join_eight(self):
table1, table2, table3 = self._test_outer_join_fixture()
subq = (
select(table3.c.otherstuff)
.where(table3.c.otherstuff == table1.c.name)
.label("bar")
)
q = select(table1.c.name, subq)
self.assert_compile(
q,
"SELECT mytable.name, (SELECT "
"thirdtable.otherstuff FROM thirdtable "
"WHERE thirdtable.otherstuff = "
"mytable.name) AS bar FROM mytable",
dialect=oracle.dialect(use_ansi=False),
)
    def test_nonansi_plusses_everything_in_the_condition(self):
table1 = table(
"mytable",
column("myid", Integer),
column("name", String),
column("description", String),
)
table2 = table(
"myothertable",
column("otherid", Integer),
column("othername", String),
)
stmt = select(table1).select_from(
table1.outerjoin(
table2,
and_(
table1.c.myid == table2.c.otherid,
table2.c.othername > 5,
table1.c.name == "foo",
),
)
)
self.assert_compile(
stmt,
"SELECT mytable.myid, mytable.name, mytable.description "
"FROM mytable, myothertable WHERE mytable.myid = "
"myothertable.otherid(+) AND myothertable.othername(+) > "
":othername_1 AND mytable.name = :name_1",
dialect=oracle.dialect(use_ansi=False),
)
stmt = select(table1).select_from(
table1.outerjoin(
table2,
and_(
table1.c.myid == table2.c.otherid,
table2.c.othername == None,
table1.c.name == None,
),
)
)
self.assert_compile(
stmt,
"SELECT mytable.myid, mytable.name, mytable.description "
"FROM mytable, myothertable WHERE mytable.myid = "
"myothertable.otherid(+) AND myothertable.othername(+) IS NULL "
"AND mytable.name IS NULL",
dialect=oracle.dialect(use_ansi=False),
)
def test_nonansi_nested_right_join(self):
a = table("a", column("a"))
b = table("b", column("b"))
c = table("c", column("c"))
j = a.join(b.join(c, b.c.b == c.c.c), a.c.a == b.c.b)
self.assert_compile(
select(j),
"SELECT a.a, b.b, c.c FROM a, b, c "
"WHERE a.a = b.b AND b.b = c.c",
dialect=oracle.OracleDialect(use_ansi=False),
)
j = a.outerjoin(b.join(c, b.c.b == c.c.c), a.c.a == b.c.b)
self.assert_compile(
select(j),
"SELECT a.a, b.b, c.c FROM a, b, c "
"WHERE a.a = b.b(+) AND b.b = c.c",
dialect=oracle.OracleDialect(use_ansi=False),
)
j = a.join(b.outerjoin(c, b.c.b == c.c.c), a.c.a == b.c.b)
self.assert_compile(
select(j),
"SELECT a.a, b.b, c.c FROM a, b, c "
"WHERE a.a = b.b AND b.b = c.c(+)",
dialect=oracle.OracleDialect(use_ansi=False),
)
def test_alias_outer_join(self):
address_types = table("address_types", column("id"), column("name"))
addresses = table(
"addresses",
column("id"),
column("user_id"),
column("address_type_id"),
column("email_address"),
)
at_alias = address_types.alias()
s = (
select(at_alias, addresses)
.select_from(
addresses.outerjoin(
at_alias, addresses.c.address_type_id == at_alias.c.id
)
)
.where(addresses.c.user_id == 7)
.order_by(addresses.c.id, address_types.c.id)
)
self.assert_compile(
s,
"SELECT address_types_1.id, "
"address_types_1.name, addresses.id AS id_1, "
"addresses.user_id, addresses.address_type_"
"id, addresses.email_address FROM "
"addresses LEFT OUTER JOIN address_types "
"address_types_1 ON addresses.address_type_"
"id = address_types_1.id WHERE "
"addresses.user_id = :user_id_1 ORDER BY "
"addresses.id, address_types.id",
)
def test_returning_insert(self):
t1 = table("t1", column("c1"), column("c2"), column("c3"))
self.assert_compile(
t1.insert().values(c1=1).returning(t1.c.c2, t1.c.c3),
"INSERT INTO t1 (c1) VALUES (:c1) RETURNING "
"t1.c2, t1.c3 INTO :ret_0, :ret_1",
)
def test_returning_insert_functional(self):
t1 = table(
"t1", column("c1"), column("c2", String()), column("c3", String())
)
fn = func.lower(t1.c.c2, type_=String())
stmt = t1.insert().values(c1=1).returning(fn, t1.c.c3)
compiled = stmt.compile(dialect=oracle.dialect())
eq_(
compiled._create_result_map(),
{
"c3": ("c3", (t1.c.c3, "c3", "c3"), t1.c.c3.type, 1),
"lower": ("lower", (fn, "lower", None), fn.type, 0),
},
)
self.assert_compile(
stmt,
"INSERT INTO t1 (c1) VALUES (:c1) RETURNING "
"lower(t1.c2), t1.c3 INTO :ret_0, :ret_1",
)
def test_returning_insert_labeled(self):
t1 = table("t1", column("c1"), column("c2"), column("c3"))
self.assert_compile(
t1.insert()
.values(c1=1)
.returning(t1.c.c2.label("c2_l"), t1.c.c3.label("c3_l")),
"INSERT INTO t1 (c1) VALUES (:c1) RETURNING "
"t1.c2, t1.c3 INTO :ret_0, :ret_1",
)
def test_returning_insert_computed(self):
m = MetaData()
t1 = Table(
"t1",
m,
Column("id", Integer, primary_key=True),
Column("foo", Integer),
Column("bar", Integer, Computed("foo + 42")),
)
self.assert_compile(
t1.insert().values(id=1, foo=5).returning(t1.c.bar),
"INSERT INTO t1 (id, foo) VALUES (:id, :foo) "
"RETURNING t1.bar INTO :ret_0",
)
def test_returning_update_computed_warning(self):
m = MetaData()
t1 = Table(
"t1",
m,
Column("id", Integer, primary_key=True),
Column("foo", Integer),
Column("bar", Integer, Computed("foo + 42")),
)
with testing.expect_warnings(
"Computed columns don't work with Oracle UPDATE"
):
self.assert_compile(
t1.update().values(id=1, foo=5).returning(t1.c.bar),
"UPDATE t1 SET id=:id, foo=:foo RETURNING t1.bar INTO :ret_0",
)
def test_compound(self):
t1 = table("t1", column("c1"), column("c2"), column("c3"))
t2 = table("t2", column("c1"), column("c2"), column("c3"))
self.assert_compile(
union(t1.select(), t2.select()),
"SELECT t1.c1, t1.c2, t1.c3 FROM t1 UNION "
"SELECT t2.c1, t2.c2, t2.c3 FROM t2",
)
self.assert_compile(
except_(t1.select(), t2.select()),
"SELECT t1.c1, t1.c2, t1.c3 FROM t1 MINUS "
"SELECT t2.c1, t2.c2, t2.c3 FROM t2",
)
def test_no_paren_fns(self):
for fn, expected in [
(func.uid(), "uid"),
(func.UID(), "UID"),
(func.sysdate(), "sysdate"),
(func.row_number(), "row_number()"),
(func.rank(), "rank()"),
(func.now(), "CURRENT_TIMESTAMP"),
(func.current_timestamp(), "CURRENT_TIMESTAMP"),
(func.user(), "USER"),
]:
self.assert_compile(fn, expected)
def test_create_index_alt_schema(self):
m = MetaData()
t1 = Table("foo", m, Column("x", Integer), schema="alt_schema")
self.assert_compile(
schema.CreateIndex(Index("bar", t1.c.x)),
"CREATE INDEX alt_schema.bar ON alt_schema.foo (x)",
)
def test_create_index_expr(self):
m = MetaData()
t1 = Table("foo", m, Column("x", Integer))
self.assert_compile(
schema.CreateIndex(Index("bar", t1.c.x > 5)),
"CREATE INDEX bar ON foo (x > 5)",
)
def test_table_options(self):
m = MetaData()
t = Table(
"foo",
m,
Column("x", Integer),
prefixes=["GLOBAL TEMPORARY"],
oracle_on_commit="PRESERVE ROWS",
)
self.assert_compile(
schema.CreateTable(t),
"CREATE GLOBAL TEMPORARY TABLE "
"foo (x INTEGER) ON COMMIT PRESERVE ROWS",
)
def test_create_table_compress(self):
m = MetaData()
tbl1 = Table(
"testtbl1", m, Column("data", Integer), oracle_compress=True
)
tbl2 = Table(
"testtbl2", m, Column("data", Integer), oracle_compress="OLTP"
)
self.assert_compile(
schema.CreateTable(tbl1),
"CREATE TABLE testtbl1 (data INTEGER) COMPRESS",
)
self.assert_compile(
schema.CreateTable(tbl2),
"CREATE TABLE testtbl2 (data INTEGER) " "COMPRESS FOR OLTP",
)
def test_create_index_bitmap_compress(self):
m = MetaData()
tbl = Table("testtbl", m, Column("data", Integer))
idx1 = Index("idx1", tbl.c.data, oracle_compress=True)
idx2 = Index("idx2", tbl.c.data, oracle_compress=1)
idx3 = Index("idx3", tbl.c.data, oracle_bitmap=True)
self.assert_compile(
schema.CreateIndex(idx1),
"CREATE INDEX idx1 ON testtbl (data) COMPRESS",
)
self.assert_compile(
schema.CreateIndex(idx2),
"CREATE INDEX idx2 ON testtbl (data) COMPRESS 1",
)
self.assert_compile(
schema.CreateIndex(idx3),
"CREATE BITMAP INDEX idx3 ON testtbl (data)",
)
@testing.combinations(
("no_persisted", "", "ignore"),
("persisted_none", "", None),
("persisted_false", " VIRTUAL", False),
id_="iaa",
)
def test_column_computed(self, text, persisted):
m = MetaData()
kwargs = {"persisted": persisted} if persisted != "ignore" else {}
t = Table(
"t",
m,
Column("x", Integer),
Column("y", Integer, Computed("x + 2", **kwargs)),
)
self.assert_compile(
schema.CreateTable(t),
"CREATE TABLE t (x INTEGER, y INTEGER GENERATED "
"ALWAYS AS (x + 2)%s)" % text,
)
def test_column_computed_persisted_true(self):
m = MetaData()
t = Table(
"t",
m,
Column("x", Integer),
Column("y", Integer, Computed("x + 2", persisted=True)),
)
assert_raises_message(
exc.CompileError,
r".*Oracle computed columns do not support 'stored' ",
schema.CreateTable(t).compile,
dialect=oracle.dialect(),
)
def test_column_identity(self):
# all other tests are in test_identity_column.py
m = MetaData()
t = Table(
"t",
m,
Column(
"y",
Integer,
Identity(
always=True,
start=4,
increment=7,
nominvalue=True,
nomaxvalue=True,
cycle=False,
order=False,
),
),
)
self.assert_compile(
schema.CreateTable(t),
"CREATE TABLE t (y INTEGER GENERATED ALWAYS AS IDENTITY "
"(INCREMENT BY 7 START WITH 4 NOMINVALUE NOMAXVALUE "
"NOORDER NOCYCLE))",
)
def test_column_identity_no_generated(self):
m = MetaData()
t = Table("t", m, Column("y", Integer, Identity(always=None)))
self.assert_compile(
schema.CreateTable(t),
"CREATE TABLE t (y INTEGER GENERATED AS IDENTITY)",
)
@testing.combinations(
(True, True, "ALWAYS ON NULL"), # this would error when executed
(False, None, "BY DEFAULT"),
(False, False, "BY DEFAULT"),
(False, True, "BY DEFAULT ON NULL"),
)
def test_column_identity_on_null(self, always, on_null, text):
m = MetaData()
t = Table(
"t", m, Column("y", Integer, Identity(always, on_null=on_null))
)
self.assert_compile(
schema.CreateTable(t),
"CREATE TABLE t (y INTEGER GENERATED %s AS IDENTITY)" % text,
)
class SequenceTest(fixtures.TestBase, AssertsCompiledSQL):
def test_basic(self):
seq = Sequence("my_seq_no_schema")
dialect = oracle.OracleDialect()
assert (
dialect.identifier_preparer.format_sequence(seq)
== "my_seq_no_schema"
)
seq = Sequence("my_seq", schema="some_schema")
assert (
dialect.identifier_preparer.format_sequence(seq)
== "some_schema.my_seq"
)
seq = Sequence("My_Seq", schema="Some_Schema")
assert (
dialect.identifier_preparer.format_sequence(seq)
== '"Some_Schema"."My_Seq"'
)
def test_compile(self):
self.assert_compile(
ddl.CreateSequence(
Sequence("my_seq", nomaxvalue=True, nominvalue=True)
),
"CREATE SEQUENCE my_seq START WITH 1 NOMINVALUE NOMAXVALUE",
dialect=oracle.OracleDialect(),
)
class RegexpTest(fixtures.TestBase, testing.AssertsCompiledSQL):
__dialect__ = "oracle"
def setup_test(self):
self.table = table(
"mytable", column("myid", Integer), column("name", String)
)
def test_regexp_match(self):
self.assert_compile(
self.table.c.myid.regexp_match("pattern"),
"REGEXP_LIKE(mytable.myid, :myid_1)",
checkparams={"myid_1": "pattern"},
)
def test_regexp_match_column(self):
self.assert_compile(
self.table.c.myid.regexp_match(self.table.c.name),
"REGEXP_LIKE(mytable.myid, mytable.name)",
checkparams={},
)
def test_regexp_match_str(self):
self.assert_compile(
literal("string").regexp_match(self.table.c.name),
"REGEXP_LIKE(:param_1, mytable.name)",
checkparams={"param_1": "string"},
)
def test_regexp_match_flags(self):
self.assert_compile(
self.table.c.myid.regexp_match("pattern", flags="ig"),
"REGEXP_LIKE(mytable.myid, :myid_1, :myid_2)",
checkparams={"myid_1": "pattern", "myid_2": "ig"},
)
def test_regexp_match_flags_col(self):
self.assert_compile(
self.table.c.myid.regexp_match("pattern", flags=self.table.c.name),
"REGEXP_LIKE(mytable.myid, :myid_1, mytable.name)",
checkparams={"myid_1": "pattern"},
)
def test_not_regexp_match(self):
self.assert_compile(
~self.table.c.myid.regexp_match("pattern"),
"NOT REGEXP_LIKE(mytable.myid, :myid_1)",
checkparams={"myid_1": "pattern"},
)
def test_not_regexp_match_column(self):
self.assert_compile(
~self.table.c.myid.regexp_match(self.table.c.name),
"NOT REGEXP_LIKE(mytable.myid, mytable.name)",
checkparams={},
)
def test_not_regexp_match_str(self):
self.assert_compile(
~literal("string").regexp_match(self.table.c.name),
"NOT REGEXP_LIKE(:param_1, mytable.name)",
checkparams={"param_1": "string"},
)
def test_not_regexp_match_flags_col(self):
self.assert_compile(
~self.table.c.myid.regexp_match(
"pattern", flags=self.table.c.name
),
"NOT REGEXP_LIKE(mytable.myid, :myid_1, mytable.name)",
checkparams={"myid_1": "pattern"},
)
def test_not_regexp_match_flags(self):
self.assert_compile(
~self.table.c.myid.regexp_match("pattern", flags="ig"),
"NOT REGEXP_LIKE(mytable.myid, :myid_1, :myid_2)",
checkparams={"myid_1": "pattern", "myid_2": "ig"},
)
def test_regexp_replace(self):
self.assert_compile(
self.table.c.myid.regexp_replace("pattern", "replacement"),
"REGEXP_REPLACE(mytable.myid, :myid_1, :myid_2)",
checkparams={"myid_1": "pattern", "myid_2": "replacement"},
)
def test_regexp_replace_column(self):
self.assert_compile(
self.table.c.myid.regexp_replace("pattern", self.table.c.name),
"REGEXP_REPLACE(mytable.myid, :myid_1, mytable.name)",
checkparams={"myid_1": "pattern"},
)
def test_regexp_replace_column2(self):
self.assert_compile(
self.table.c.myid.regexp_replace(self.table.c.name, "replacement"),
"REGEXP_REPLACE(mytable.myid, mytable.name, :myid_1)",
checkparams={"myid_1": "replacement"},
)
def test_regexp_replace_string(self):
self.assert_compile(
literal("string").regexp_replace("pattern", self.table.c.name),
"REGEXP_REPLACE(:param_1, :param_2, mytable.name)",
checkparams={"param_2": "pattern", "param_1": "string"},
)
def test_regexp_replace_flags(self):
self.assert_compile(
self.table.c.myid.regexp_replace(
"pattern", "replacement", flags="ig"
),
"REGEXP_REPLACE(mytable.myid, :myid_1, :myid_3, :myid_2)",
checkparams={
"myid_1": "pattern",
"myid_3": "replacement",
"myid_2": "ig",
},
)
def test_regexp_replace_flags_col(self):
self.assert_compile(
self.table.c.myid.regexp_replace(
"pattern", "replacement", flags=self.table.c.name
),
"REGEXP_REPLACE(mytable.myid, :myid_1, :myid_2, mytable.name)",
checkparams={"myid_1": "pattern", "myid_2": "replacement"},
)
class TableValuedFunctionTest(fixtures.TestBase, testing.AssertsCompiledSQL):
__dialect__ = "oracle"
def test_scalar_alias_column(self):
fn = func.scalar_strings(5)
stmt = select(fn.alias().column)
self.assert_compile(
stmt,
"SELECT COLUMN_VALUE anon_1 "
"FROM TABLE (scalar_strings(:scalar_strings_1)) anon_1",
)
def test_column_valued(self):
fn = func.scalar_strings(5)
stmt = select(fn.column_valued())
self.assert_compile(
stmt,
"SELECT COLUMN_VALUE anon_1 "
"FROM TABLE (scalar_strings(:scalar_strings_1)) anon_1",
)
def test_table_valued(self):
fn = func.three_pairs().table_valued("string1", "string2")
stmt = select(fn.c.string1, fn.c.string2)
self.assert_compile(
stmt,
"SELECT anon_1.string1, anon_1.string2 "
"FROM TABLE (three_pairs()) anon_1",
)
| 36.963303
| 79
| 0.563398
|
577124376a449f2ae8c2498615882cd6c135f508
| 6,156
|
py
|
Python
|
examples/rz_geometry/steadystate_em-static.py
|
radiasoft/mcool
|
67f853093f2ff062b4271c77f52c0b146324a046
|
[
"Apache-2.0"
] | 1
|
2017-11-03T18:03:08.000Z
|
2017-11-03T18:03:08.000Z
|
examples/rz_geometry/steadystate_em-static.py
|
radiasoft/mcool
|
67f853093f2ff062b4271c77f52c0b146324a046
|
[
"Apache-2.0"
] | 1
|
2016-04-18T18:36:10.000Z
|
2016-04-18T21:32:03.000Z
|
examples/rz_geometry/steadystate_em-static.py
|
radiasoft/mcool
|
67f853093f2ff062b4271c77f52c0b146324a046
|
[
"Apache-2.0"
] | null | null | null |
"""
Test beam self-fields with RZ solver geometry
"""
from __future__ import division
from warp_init_tools import *
import numpy as np
import os
import random
from scipy.optimize import newton
from sys import exit
import sys
import pickle
from datetime import datetime
sys.path.append('/Users/chall/research/github/rswarp/')
from rswarp.diagnostics import FieldDiagnostic
from rswarp.utilities.beam_distributions import createKV
diagDir = 'diags/xySlice/hdf5'
diagFDir = ['diags/fields/magnetic','diags/fields/electric']
def cleanupPrevious(outputDirectory = diagDir, fieldDirectory = diagFDir):
if os.path.exists(outputDirectory):
files = os.listdir(outputDirectory)
for file in files:
if file.endswith('.h5'):
os.remove(os.path.join(outputDirectory,file))
for directory in fieldDirectory:
if os.path.exists(directory):
files = os.listdir(directory)
for file in files:
if file.endswith('.h5'):
os.remove(os.path.join(directory,file))
cleanupPrevious()
#########################
### Initialize Plots ###
#########################
def setup():
pass
##########################################
### Create Beam and Set its Parameters ###
##########################################
SC = True # Controls field solve
ptcl_per_step = 80000 # number of particles to inject on each step
beam_beta = 0.56823 # v/c for 110 keV electrons
beam_current = 10e-6
beam_weight = 0.5 * beam_current / (echarge * beam_beta * clight) / ptcl_per_step
top.lrelativ = True
top.relativity = 1
beam = Species(type=Electron, name='Electron', weight=beam_weight)
if not SC:
beam.sw = 0.0 # Turn off SC
def generateDist():
ptclTrans = createKV(
npart=ptcl_per_step,
a=0.010,
b=0.010,
emitx=4. * 1.e-6,
emity=4. * 1.e-6
)
zrand = np.random.rand(ptcl_per_step,)
zvel = np.ones_like(zrand) * beam_beta * clight
return np.column_stack((ptclTrans, zrand, zvel))
def createmybeam():
ptclArray = generateDist()
beam.addparticles(x=ptclArray[:,0],y=ptclArray[:,2],z=ptclArray[:,4],
vx=ptclArray[:,1] * ptclArray[:,5],vy=ptclArray[:,3] * ptclArray[:,5],vz=ptclArray[:,5])
    derivqty()  # Sets additional derived parameters (such as beam.vbeam)
################################
### 3D Simulation Parameters ###
################################
#Set cells
w3d.nx = 64
w3d.ny = 64
w3d.nz = 32
w3d.bound0 = periodic
w3d.boundnz = periodic
w3d.boundxy = neumann
#Set boundaries
w3d.xmmin = -0.16
w3d.xmmax = 0.16
w3d.ymmin = -0.16
w3d.ymmax = 0.16
w3d.zmmin = 0.0
w3d.zmmax = 1.0
# Longitudinal absorbing boundaries off to allow beam to recirculate
#top.pbound0 = 0
#top.pboundnz = 0
top.pboundxy = 0
dz = w3d.zmmax - w3d.zmmin
top.dt = dz / (10000 * 0.5 * 3e8)
top.ibpush = 2 # set type of pusher to vXB push without tan corrections
## 0:off, 1:fast, 2:accurate
##########################
### Injection Controls ###
##########################
#--- Specify injection of the particles
top.inject = 6 # 2 means space-charge limited injection, 6 is user specified
top.npinject = ptcl_per_step * top.dt * clight * beam_beta / dz # Approximate number of particles injected each step
# or average number of particles in interval of a step
# will determine current if ibeam is set and beam.sw = 0
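# Rough worked example of the injection rate above (numbers taken only from the
# values set in this script): dz = 1.0 m, top.dt = dz / (10000 * 0.5 * 3e8) ~ 6.7e-13 s,
# and the axial speed is beam_beta * clight ~ 1.7e8 m/s, so
# npinject ~ 80000 * 6.7e-13 * 1.7e8 / 1.0 ~ 9 macroparticles injected per step.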
top.ainject = 0.0008 # Must be set even for user defined injection, doesn't seem to do anything
top.binject = 0.0008 # Must be set even for user defined injection, doesn't seem to do anything
# --- Other injection variables - not sure if these are important
w3d.l_inj_exact = True # if true, position and angle of injected particle are
# computed analytically rather than interpolated
w3d.l_inj_area = False # Not sure what this does
############################
### Particle Diagnostics ###
############################
diagP0 = ParticleDiagnostic( period=1, top=top, w3d=w3d,
species= { species.name : species for species in listofallspecies },
comm_world=comm_world, lparallel_output=False, write_dir = diagDir[:-4] )
installafterstep( diagP0.write )
#################################
### Generate and Run PIC Code ###
#################################
w3d.solvergeom = w3d.RZgeom
package("w3d") # package/generate Must be called after geometry is set
generate() #
if SC:
    solverB = MagnetostaticMG()
    solverB.mgtol = [0.01] * 3  # multigrid solver tolerance per field component
    registersolver(solverB)
solverE = MultiGrid2D()
registersolver(solverE)
installparticleloader(createmybeam) # for particleloader the call Must be between 1st and 2nd generate calls (or macroparticles double)
package("w3d") # package/generate must be called a second time, after solver set
generate() #
fieldperiod = 200
diagBF = FieldDiagnostic.MagnetostaticFields(solver=solverB, top=top, w3d=w3d, period=fieldperiod)
installafterstep(diagBF.write)
diagEF = FieldDiagnostic.ElectrostaticFields(solver=solverE, top=top, w3d=w3d, period=fieldperiod)
installafterstep(diagEF.write)
############
simulation_parameters = {
'space_charge' : SC,
'magnetostatic_solver' : SC, # Currently always running solver if SC on
'electrostatic_solver' : False, # Not being used right now
'solver_geometry' : w3d.solvergeom,
'grid_nodes' : (w3d.nx,w3d.ny,w3d.nz),
'z_boundary_condition' : (w3d.bound0, w3d.boundnz),
'xy_boundary_condition' : w3d.boundxy,
'timestep' : top.dt,
'beam_current' : beam.ibeam,
    'total_particles_injected' : ptcl_per_step,
'run_start_time' : format(datetime.now())
}
pickle.dump(simulation_parameters, open("simulation_parameters.p", 'wb'))
#############
step(1)
diagP0.period = 100
step(1400)
| 27.72973
| 135
| 0.616147
|
2f36cf0b0400c6414f4290493575f0ceca28522e
| 1,699
|
py
|
Python
|
research/cv/faster_rcnn_dcn/src/lr_schedule.py
|
mindspore-ai/models
|
9127b128e2961fd698977e918861dadfad00a44c
|
[
"Apache-2.0"
] | 77
|
2021-10-15T08:32:37.000Z
|
2022-03-30T13:09:11.000Z
|
research/cv/faster_rcnn_dcn/src/lr_schedule.py
|
mindspore-ai/models
|
9127b128e2961fd698977e918861dadfad00a44c
|
[
"Apache-2.0"
] | 3
|
2021-10-30T14:44:57.000Z
|
2022-02-14T06:57:57.000Z
|
research/cv/faster_rcnn_dcn/src/lr_schedule.py
|
mindspore-ai/models
|
9127b128e2961fd698977e918861dadfad00a44c
|
[
"Apache-2.0"
] | 24
|
2021-10-15T08:32:45.000Z
|
2022-03-24T18:45:20.000Z
|
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""lr generator for FasterRcnn-DCN"""
import math
def linear_warmup_learning_rate(current_step, warmup_steps, base_lr, init_lr):
    """Linearly ramp the learning rate from init_lr up to base_lr over warmup_steps."""
    lr_inc = (float(base_lr) - float(init_lr)) / float(warmup_steps)
    learning_rate = float(init_lr) + lr_inc * current_step
    return learning_rate
def a_cosine_learning_rate(current_step, base_lr, warmup_steps, decay_steps):
    """Decay the learning rate from base_lr along a half cosine after the warmup phase."""
    base = float(current_step - warmup_steps) / float(decay_steps)
    learning_rate = (1 + math.cos(base * math.pi)) / 2 * base_lr
    return learning_rate
def dynamic_lr(config, steps_per_epoch):
"""dynamic learning rate generator"""
base_lr = config.base_lr
total_steps = steps_per_epoch * (config.epoch_size + 1)
warmup_steps = int(config.warmup_step)
lr = []
for i in range(total_steps):
if i < warmup_steps:
lr.append(linear_warmup_learning_rate(i, warmup_steps, base_lr, base_lr * config.warmup_ratio))
else:
lr.append(a_cosine_learning_rate(i, base_lr, warmup_steps, total_steps))
return lr
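# Minimal usage sketch (not part of the original module): _Cfg below is a
# hypothetical stand-in for the FasterRcnn-DCN config object; dynamic_lr only
# reads base_lr, epoch_size, warmup_step and warmup_ratio from it.
if __name__ == "__main__":
    class _Cfg:
        base_lr = 0.02
        epoch_size = 12
        warmup_step = 500
        warmup_ratio = 1 / 3.0
    schedule = dynamic_lr(_Cfg(), steps_per_epoch=1000)
    # linear warmup for the first warmup_step values, half-cosine decay afterwards
    print(len(schedule), schedule[0], schedule[_Cfg.warmup_step - 1], schedule[-1])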
| 40.452381
| 107
| 0.699823
|
97dc7484600d7292c690e5b83f664bda56c80134
| 644
|
py
|
Python
|
eworkshop/services/views/sells_service.py
|
frankfern/eWorkshop-api
|
4c8a377e67e42c10ae1a762efca9af2c27a2a561
|
[
"MIT"
] | null | null | null |
eworkshop/services/views/sells_service.py
|
frankfern/eWorkshop-api
|
4c8a377e67e42c10ae1a762efca9af2c27a2a561
|
[
"MIT"
] | null | null | null |
eworkshop/services/views/sells_service.py
|
frankfern/eWorkshop-api
|
4c8a377e67e42c10ae1a762efca9af2c27a2a561
|
[
"MIT"
] | null | null | null |
from rest_framework import viewsets, mixins
from rest_framework.permissions import IsAuthenticated
from ..models import SellService
from ..serializers.sells_service import SellServiceSerializer
class SellServiceViewSet(mixins.CreateModelMixin,
mixins.RetrieveModelMixin,
mixins.UpdateModelMixin,
mixins.ListModelMixin,
viewsets.GenericViewSet):
serializer_class = SellServiceSerializer
queryset = SellService.objects.all()
ordering_fields = '__all__'
ordering = ['created']
filterset_fields = ['created', 'modified', ]
| 32.2
| 61
| 0.678571
|
87661de03b49e2c8c7717b7e28891c6d8820781f
| 346
|
py
|
Python
|
ProjectApplication/project_core/migrations/0129_person_position_meta.py
|
code-review-doctor/project-application
|
d85b40b69572efbcda24ce9c40803f76d8ffd192
|
[
"MIT"
] | 5
|
2020-07-29T10:00:11.000Z
|
2022-02-19T11:00:34.000Z
|
ProjectApplication/project_core/migrations/0129_person_position_meta.py
|
code-review-doctor/project-application
|
d85b40b69572efbcda24ce9c40803f76d8ffd192
|
[
"MIT"
] | 471
|
2019-09-20T14:37:28.000Z
|
2022-03-25T14:16:34.000Z
|
ProjectApplication/project_core/migrations/0129_person_position_meta.py
|
code-review-doctor/project-application
|
d85b40b69572efbcda24ce9c40803f76d8ffd192
|
[
"MIT"
] | 5
|
2020-03-15T12:42:47.000Z
|
2022-02-15T18:06:52.000Z
|
# Generated by Django 3.0.5 on 2020-07-29 09:31
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('project_core', '0128_add_supervisor_for_project'),
]
operations = [
migrations.AlterModelOptions(
name='personposition',
options={},
),
]
| 19.222222
| 60
| 0.612717
|
52aaf1a71f15e2230e4e86a7639c6c47a109a6b6
| 24,330
|
py
|
Python
|
src/snowflake/connector/auth.py
|
fermezz/snowflake-connector-python
|
bc9616ad568b23cb8a931d2d590041f6bac1cff9
|
[
"Apache-2.0"
] | null | null | null |
src/snowflake/connector/auth.py
|
fermezz/snowflake-connector-python
|
bc9616ad568b23cb8a931d2d590041f6bac1cff9
|
[
"Apache-2.0"
] | null | null | null |
src/snowflake/connector/auth.py
|
fermezz/snowflake-connector-python
|
bc9616ad568b23cb8a931d2d590041f6bac1cff9
|
[
"Apache-2.0"
] | 1
|
2021-03-25T14:00:15.000Z
|
2021-03-25T14:00:15.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2012-2021 Snowflake Computing Inc. All right reserved.
#
import codecs
import copy
import json
import logging
import tempfile
import time
import uuid
from datetime import datetime
from os import getenv, makedirs, mkdir, path, remove, removedirs, rmdir
from os.path import expanduser
from threading import Lock, Thread
from typing import Dict, Union
from .auth_keypair import AuthByKeyPair
from .auth_usrpwdmfa import AuthByUsrPwdMfa
from .compat import IS_LINUX, IS_MACOS, IS_WINDOWS, urlencode
from .constants import (
HTTP_HEADER_ACCEPT,
HTTP_HEADER_CONTENT_TYPE,
HTTP_HEADER_SERVICE_NAME,
HTTP_HEADER_USER_AGENT,
PARAMETER_CLIENT_REQUEST_MFA_TOKEN,
PARAMETER_CLIENT_STORE_TEMPORARY_CREDENTIAL,
)
from .description import (
COMPILER,
IMPLEMENTATION,
OPERATING_SYSTEM,
PLATFORM,
PYTHON_VERSION,
)
from .errorcode import ER_FAILED_TO_CONNECT_TO_DB
from .errors import (
BadGatewayError,
DatabaseError,
Error,
ForbiddenError,
ProgrammingError,
ServiceUnavailableError,
)
from .network import (
ACCEPT_TYPE_APPLICATION_SNOWFLAKE,
CONTENT_TYPE_APPLICATION_JSON,
ID_TOKEN_INVALID_LOGIN_REQUEST_GS_CODE,
PYTHON_CONNECTOR_USER_AGENT,
ReauthenticationRequest,
)
from .options import installed_keyring, keyring
from .sqlstate import SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED
from .version import VERSION
logger = logging.getLogger(__name__)
# Cache directory
CACHE_ROOT_DIR = (
getenv("SF_TEMPORARY_CREDENTIAL_CACHE_DIR")
or expanduser("~")
or tempfile.gettempdir()
)
if IS_WINDOWS:
CACHE_DIR = path.join(CACHE_ROOT_DIR, "AppData", "Local", "Snowflake", "Caches")
elif IS_MACOS:
CACHE_DIR = path.join(CACHE_ROOT_DIR, "Library", "Caches", "Snowflake")
else:
CACHE_DIR = path.join(CACHE_ROOT_DIR, ".cache", "snowflake")
if not path.exists(CACHE_DIR):
try:
makedirs(CACHE_DIR, mode=0o700)
except Exception as ex:
logger.debug("cannot create a cache directory: [%s], err=[%s]", CACHE_DIR, ex)
CACHE_DIR = None
logger.debug("cache directory: %s", CACHE_DIR)
# temporary credential cache
TEMPORARY_CREDENTIAL = {}
TEMPORARY_CREDENTIAL_LOCK = Lock()
# temporary credential cache file name
TEMPORARY_CREDENTIAL_FILE = "temporary_credential.json"
TEMPORARY_CREDENTIAL_FILE = (
path.join(CACHE_DIR, TEMPORARY_CREDENTIAL_FILE) if CACHE_DIR else ""
)
# temporary credential cache lock directory name
TEMPORARY_CREDENTIAL_FILE_LOCK = TEMPORARY_CREDENTIAL_FILE + ".lck"
# keyring
KEYRING_SERVICE_NAME = "net.snowflake.temporary_token"
KEYRING_USER = "temp_token"
KEYRING_DRIVER_NAME = "SNOWFLAKE-PYTHON-DRIVER"
ID_TOKEN = "ID_TOKEN"
MFA_TOKEN = "MFATOKEN"
class Auth(object):
"""Snowflake Authenticator."""
def __init__(self, rest):
self._rest = rest
@staticmethod
def base_auth_data(
user,
account,
application,
internal_application_name,
internal_application_version,
ocsp_mode,
login_timeout,
network_timeout=None,
):
return {
"data": {
"CLIENT_APP_ID": internal_application_name,
"CLIENT_APP_VERSION": internal_application_version,
"SVN_REVISION": VERSION[3],
"ACCOUNT_NAME": account,
"LOGIN_NAME": user,
"CLIENT_ENVIRONMENT": {
"APPLICATION": application,
"OS": OPERATING_SYSTEM,
"OS_VERSION": PLATFORM,
"PYTHON_VERSION": PYTHON_VERSION,
"PYTHON_RUNTIME": IMPLEMENTATION,
"PYTHON_COMPILER": COMPILER,
"OCSP_MODE": ocsp_mode.name,
"TRACING": logger.getEffectiveLevel(),
"LOGIN_TIMEOUT": login_timeout,
"NETWORK_TIMEOUT": network_timeout,
},
},
}
def authenticate(
self,
auth_instance,
account,
user,
database=None,
schema=None,
warehouse=None,
role=None,
passcode=None,
passcode_in_password=False,
mfa_callback=None,
password_callback=None,
session_parameters=None,
timeout=120,
) -> Dict[str, Union[str, int, bool]]:
logger.debug("authenticate")
if session_parameters is None:
session_parameters = {}
request_id = str(uuid.uuid4())
headers = {
HTTP_HEADER_CONTENT_TYPE: CONTENT_TYPE_APPLICATION_JSON,
HTTP_HEADER_ACCEPT: ACCEPT_TYPE_APPLICATION_SNOWFLAKE,
HTTP_HEADER_USER_AGENT: PYTHON_CONNECTOR_USER_AGENT,
}
if HTTP_HEADER_SERVICE_NAME in session_parameters:
headers[HTTP_HEADER_SERVICE_NAME] = session_parameters[
HTTP_HEADER_SERVICE_NAME
]
url = "/session/v1/login-request"
body_template = Auth.base_auth_data(
user,
account,
self._rest._connection.application,
self._rest._connection._internal_application_name,
self._rest._connection._internal_application_version,
self._rest._connection._ocsp_mode(),
self._rest._connection._login_timeout,
self._rest._connection._network_timeout,
)
body = copy.deepcopy(body_template)
# updating request body
logger.debug("assertion content: %s", auth_instance.assertion_content)
auth_instance.update_body(body)
logger.debug(
"account=%s, user=%s, database=%s, schema=%s, "
"warehouse=%s, role=%s, request_id=%s",
account,
user,
database,
schema,
warehouse,
role,
request_id,
)
url_parameters = {"request_id": request_id}
if database is not None:
url_parameters["databaseName"] = database
if schema is not None:
url_parameters["schemaName"] = schema
if warehouse is not None:
url_parameters["warehouse"] = warehouse
if role is not None:
url_parameters["roleName"] = role
url = url + "?" + urlencode(url_parameters)
# first auth request
if passcode_in_password:
body["data"]["EXT_AUTHN_DUO_METHOD"] = "passcode"
elif passcode:
body["data"]["EXT_AUTHN_DUO_METHOD"] = "passcode"
body["data"]["PASSCODE"] = passcode
if session_parameters:
body["data"]["SESSION_PARAMETERS"] = session_parameters
logger.debug(
"body['data']: %s",
{k: v for (k, v) in body["data"].items() if k != "PASSWORD"},
)
try:
ret = self._rest._post_request(
url,
headers,
json.dumps(body),
timeout=self._rest._connection.login_timeout,
socket_timeout=self._rest._connection.login_timeout,
)
except ForbiddenError as err:
# HTTP 403
raise err.__class__(
msg=(
"Failed to connect to DB. "
"Verify the account name is correct: {host}:{port}. "
"{message}"
).format(
host=self._rest._host, port=self._rest._port, message=str(err)
),
errno=ER_FAILED_TO_CONNECT_TO_DB,
sqlstate=SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED,
)
except (ServiceUnavailableError, BadGatewayError) as err:
# HTTP 502/504
raise err.__class__(
msg=(
"Failed to connect to DB. "
"Service is unavailable: {host}:{port}. "
"{message}"
).format(
host=self._rest._host, port=self._rest._port, message=str(err)
),
errno=ER_FAILED_TO_CONNECT_TO_DB,
sqlstate=SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED,
)
# waiting for MFA authentication
if ret["data"].get("nextAction") == "EXT_AUTHN_DUO_ALL":
body["inFlightCtx"] = ret["data"]["inFlightCtx"]
body["data"]["EXT_AUTHN_DUO_METHOD"] = "push"
self.ret = {"message": "Timeout", "data": {}}
def post_request_wrapper(self, url, headers, body):
# get the MFA response
self.ret = self._rest._post_request(
url, headers, body, timeout=self._rest._connection.login_timeout
)
# send new request to wait until MFA is approved
t = Thread(
target=post_request_wrapper, args=[self, url, headers, json.dumps(body)]
)
t.daemon = True
t.start()
if callable(mfa_callback):
c = mfa_callback()
while not self.ret or self.ret.get("message") == "Timeout":
next(c)
else:
t.join(timeout=timeout)
ret = self.ret
if ret and ret["data"].get("nextAction") == "EXT_AUTHN_SUCCESS":
body = copy.deepcopy(body_template)
body["inFlightCtx"] = ret["data"]["inFlightCtx"]
# final request to get tokens
ret = self._rest._post_request(
url,
headers,
json.dumps(body),
timeout=self._rest._connection.login_timeout,
socket_timeout=self._rest._connection.login_timeout,
)
elif not ret or not ret["data"].get("token"):
            # no token was returned.
Error.errorhandler_wrapper(
self._rest._connection,
None,
DatabaseError,
{
"msg": (
"Failed to connect to DB. MFA "
"authentication failed: {"
"host}:{port}. {message}"
).format(
host=self._rest._host,
port=self._rest._port,
message=ret["message"],
),
"errno": ER_FAILED_TO_CONNECT_TO_DB,
"sqlstate": SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED,
},
)
return session_parameters # required for unit test
elif ret["data"].get("nextAction") == "PWD_CHANGE":
if callable(password_callback):
body = copy.deepcopy(body_template)
body["inFlightCtx"] = ret["data"]["inFlightCtx"]
body["data"]["LOGIN_NAME"] = user
body["data"]["PASSWORD"] = (
auth_instance.password
if hasattr(auth_instance, "password")
else None
)
body["data"]["CHOSEN_NEW_PASSWORD"] = password_callback()
# New Password input
ret = self._rest._post_request(
url,
headers,
json.dumps(body),
timeout=self._rest._connection.login_timeout,
socket_timeout=self._rest._connection.login_timeout,
)
logger.debug("completed authentication")
if not ret["success"]:
errno = ret.get("code", ER_FAILED_TO_CONNECT_TO_DB)
if errno == ID_TOKEN_INVALID_LOGIN_REQUEST_GS_CODE:
# clear stored id_token if failed to connect because of id_token
# raise an exception for reauth without id_token
self._rest.id_token = None
delete_temporary_credential(self._rest._host, user, ID_TOKEN)
raise ReauthenticationRequest(
ProgrammingError(
msg=ret["message"],
errno=int(errno),
sqlstate=SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED,
)
)
if type(auth_instance) is AuthByKeyPair:
logger.debug(
"JWT Token authentication failed. "
"Token expires at: %s. "
"Current Time: %s",
str(auth_instance._jwt_token_exp),
str(datetime.utcnow()),
)
if type(auth_instance) is AuthByUsrPwdMfa:
delete_temporary_credential(self._rest._host, user, MFA_TOKEN)
Error.errorhandler_wrapper(
self._rest._connection,
None,
DatabaseError,
{
"msg": (
"Failed to connect to DB: {host}:{port}. " "{message}"
).format(
host=self._rest._host,
port=self._rest._port,
message=ret["message"],
),
"errno": ER_FAILED_TO_CONNECT_TO_DB,
"sqlstate": SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED,
},
)
else:
logger.debug(
"token = %s", "******" if ret["data"]["token"] is not None else "NULL"
)
logger.debug(
"master_token = %s",
"******" if ret["data"]["masterToken"] is not None else "NULL",
)
logger.debug(
"id_token = %s",
"******" if ret["data"].get("idToken") is not None else "NULL",
)
logger.debug(
"mfa_token = %s",
"******" if ret["data"].get("mfaToken") is not None else "NULL",
)
self._rest.update_tokens(
ret["data"]["token"],
ret["data"]["masterToken"],
master_validity_in_seconds=ret["data"].get("masterValidityInSeconds"),
id_token=ret["data"].get("idToken"),
mfa_token=ret["data"].get("mfaToken"),
)
self.write_temporary_credentials(
self._rest._host, user, session_parameters, ret
)
if "sessionId" in ret["data"]:
self._rest._connection._session_id = ret["data"]["sessionId"]
if "sessionInfo" in ret["data"]:
session_info = ret["data"]["sessionInfo"]
self._rest._connection._database = session_info.get("databaseName")
self._rest._connection._schema = session_info.get("schemaName")
self._rest._connection._warehouse = session_info.get("warehouseName")
self._rest._connection._role = session_info.get("roleName")
if "parameters" in ret["data"]:
session_parameters.update(
{p["name"]: p["value"] for p in ret["data"]["parameters"]}
)
self._rest._connection._update_parameters(session_parameters)
return session_parameters
def _read_temporary_credential(self, host, user, cred_type):
cred = None
if IS_MACOS or IS_WINDOWS:
if not installed_keyring:
logger.debug(
"Dependency 'keyring' is not installed, cannot cache id token. You might experience "
"multiple authentication pop ups while using ExternalBrowser Authenticator. To avoid "
"this please install keyring module using the following command : pip install "
"snowflake-connector-python[secure-local-storage]"
)
return
try:
cred = keyring.get_password(
build_temporary_credential_name(host, user, cred_type), user.upper()
)
except keyring.errors.KeyringError as ke:
logger.error(
"Could not retrieve {} from secure storage : {}".format(
cred_type, str(ke)
)
)
elif IS_LINUX:
read_temporary_credential_file()
cred = TEMPORARY_CREDENTIAL.get(host.upper(), {}).get(
build_temporary_credential_name(host, user, cred_type)
)
else:
logger.debug("OS not supported for Local Secure Storage")
return cred
def read_temporary_credentials(self, host, user, session_parameters):
if session_parameters.get(PARAMETER_CLIENT_STORE_TEMPORARY_CREDENTIAL, False):
self._rest.id_token = self._read_temporary_credential(host, user, ID_TOKEN)
if session_parameters.get(PARAMETER_CLIENT_REQUEST_MFA_TOKEN, False):
self._rest.mfa_token = self._read_temporary_credential(
host, user, MFA_TOKEN
)
def _write_temporary_credential(self, host, user, cred_type, cred):
if not cred:
logger.debug(
"no credential is given when try to store temporary credential"
)
return
if IS_MACOS or IS_WINDOWS:
if not installed_keyring:
logger.debug(
"Dependency 'keyring' is not installed, cannot cache id token. You might experience "
"multiple authentication pop ups while using ExternalBrowser Authenticator. To avoid "
"this please install keyring module using the following command : pip install "
"snowflake-connector-python[secure-local-storage]"
)
return
try:
keyring.set_password(
build_temporary_credential_name(host, user, cred_type),
user.upper(),
cred,
)
except keyring.errors.KeyringError as ke:
logger.error("Could not store id_token to keyring, %s", str(ke))
elif IS_LINUX:
write_temporary_credential_file(
host, build_temporary_credential_name(host, user, cred_type), cred
)
else:
logger.debug("OS not supported for Local Secure Storage")
def write_temporary_credentials(self, host, user, session_parameters, response):
if self._rest._connection.consent_cache_id_token and session_parameters.get(
PARAMETER_CLIENT_STORE_TEMPORARY_CREDENTIAL, False
):
self._write_temporary_credential(
host, user, ID_TOKEN, response["data"].get("idToken")
)
if session_parameters.get(PARAMETER_CLIENT_REQUEST_MFA_TOKEN, False):
self._write_temporary_credential(
host, user, MFA_TOKEN, response["data"].get("mfaToken")
)
return
def flush_temporary_credentials():
"""Flush temporary credentials in memory into disk. Need to hold TEMPORARY_CREDENTIAL_LOCK."""
global TEMPORARY_CREDENTIAL
global TEMPORARY_CREDENTIAL_FILE
for _ in range(10):
if lock_temporary_credential_file():
break
time.sleep(1)
else:
logger.debug(
"The lock file still persists after the maximum wait time."
"Will ignore it and write temporary credential file: %s",
TEMPORARY_CREDENTIAL_FILE,
)
try:
with open(
TEMPORARY_CREDENTIAL_FILE, "w", encoding="utf-8", errors="ignore"
) as f:
json.dump(TEMPORARY_CREDENTIAL, f)
except Exception as ex:
logger.debug(
"Failed to write a credential file: " "file=[%s], err=[%s]",
TEMPORARY_CREDENTIAL_FILE,
ex,
)
finally:
unlock_temporary_credential_file()
def write_temporary_credential_file(host, cred_name, cred):
"""Writes temporary credential file when OS is Linux."""
if not CACHE_DIR:
# no cache is enabled
return
global TEMPORARY_CREDENTIAL
global TEMPORARY_CREDENTIAL_LOCK
with TEMPORARY_CREDENTIAL_LOCK:
# update the cache
host_data = TEMPORARY_CREDENTIAL.get(host.upper(), {})
host_data[cred_name.upper()] = cred
TEMPORARY_CREDENTIAL[host.upper()] = host_data
flush_temporary_credentials()
def read_temporary_credential_file():
"""Reads temporary credential file when OS is Linux."""
if not CACHE_DIR:
# no cache is enabled
return
global TEMPORARY_CREDENTIAL
global TEMPORARY_CREDENTIAL_LOCK
global TEMPORARY_CREDENTIAL_FILE
with TEMPORARY_CREDENTIAL_LOCK:
for _ in range(10):
if lock_temporary_credential_file():
break
time.sleep(1)
else:
logger.debug(
"The lock file still persists. Will ignore and "
"write the temporary credential file: %s",
TEMPORARY_CREDENTIAL_FILE,
)
try:
with codecs.open(
TEMPORARY_CREDENTIAL_FILE, "r", encoding="utf-8", errors="ignore"
) as f:
TEMPORARY_CREDENTIAL = json.load(f)
return TEMPORARY_CREDENTIAL
except Exception as ex:
logger.debug(
"Failed to read a credential file. The file may not"
"exists: file=[%s], err=[%s]",
TEMPORARY_CREDENTIAL_FILE,
ex,
)
finally:
unlock_temporary_credential_file()
return None
def lock_temporary_credential_file():
global TEMPORARY_CREDENTIAL_FILE_LOCK
try:
mkdir(TEMPORARY_CREDENTIAL_FILE_LOCK)
return True
except OSError:
logger.debug(
"Temporary cache file lock already exists. Other "
"process may be updating the temporary "
)
return False
def unlock_temporary_credential_file():
global TEMPORARY_CREDENTIAL_FILE_LOCK
try:
rmdir(TEMPORARY_CREDENTIAL_FILE_LOCK)
return True
except OSError:
logger.debug("Temporary cache file lock no longer exists.")
return False
def delete_temporary_credential(host, user, cred_type):
if (IS_MACOS or IS_WINDOWS) and installed_keyring:
try:
keyring.delete_password(
build_temporary_credential_name(host, user, cred_type), user.upper()
)
except Exception as ex:
logger.error("Failed to delete credential in the keyring: err=[%s]", ex)
elif IS_LINUX:
temporary_credential_file_delete_password(host, user, cred_type)
def temporary_credential_file_delete_password(host, user, cred_type):
"""Remove credential from temporary credential file when OS is Linux."""
if not CACHE_DIR:
# no cache is enabled
return
global TEMPORARY_CREDENTIAL
global TEMPORARY_CREDENTIAL_LOCK
with TEMPORARY_CREDENTIAL_LOCK:
# update the cache
host_data = TEMPORARY_CREDENTIAL.get(host.upper(), {})
host_data.pop(build_temporary_credential_name(host, user, cred_type), None)
if not host_data:
TEMPORARY_CREDENTIAL.pop(host.upper(), None)
else:
TEMPORARY_CREDENTIAL[host.upper()] = host_data
flush_temporary_credentials()
def delete_temporary_credential_file():
"""Deletes temporary credential file and its lock file."""
global TEMPORARY_CREDENTIAL_FILE
try:
remove(TEMPORARY_CREDENTIAL_FILE)
except Exception as ex:
logger.debug(
"Failed to delete a credential file: " "file=[%s], err=[%s]",
TEMPORARY_CREDENTIAL_FILE,
ex,
)
try:
removedirs(TEMPORARY_CREDENTIAL_FILE_LOCK)
except Exception as ex:
logger.debug("Failed to delete credential lock file: err=[%s]", ex)
def build_temporary_credential_name(host, user, cred_type):
return "{host}:{user}:{driver}:{cred}".format(
host=host.upper(), user=user.upper(), driver=KEYRING_DRIVER_NAME, cred=cred_type
)
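# Illustrative sketch (hypothetical values; KEYRING_DRIVER_NAME and the ID_TOKEN
# constant are defined elsewhere in this module): only host and user are
# upper-cased, the credential type is appended verbatim, e.g.
#
#     build_temporary_credential_name("myacct.snowflakecomputing.com", "alice", ID_TOKEN)
#     # -> "MYACCT.SNOWFLAKECOMPUTING.COM:ALICE:<KEYRING_DRIVER_NAME>:<ID_TOKEN>"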
| 36.476762
| 106
| 0.573942
|
f6bf78736b51e03f33441bb36bfa767b00c3eff1
| 1,295
|
py
|
Python
|
modules/commands/unmonitor.py
|
FuelRats/FIDO
|
bc2e58a931e77c6e688ae5bdfe110c5e6be082e6
|
[
"BSD-3-Clause"
] | 1
|
2020-01-21T13:32:53.000Z
|
2020-01-21T13:32:53.000Z
|
modules/commands/unmonitor.py
|
FuelRats/FIDO
|
bc2e58a931e77c6e688ae5bdfe110c5e6be082e6
|
[
"BSD-3-Clause"
] | null | null | null |
modules/commands/unmonitor.py
|
FuelRats/FIDO
|
bc2e58a931e77c6e688ae5bdfe110c5e6be082e6
|
[
"BSD-3-Clause"
] | 3
|
2019-12-03T21:08:52.000Z
|
2020-10-04T18:08:04.000Z
|
from typing import List
from config import IRC
import fido
from modules.access import require_permission, Levels
from models import SessionManager
from models import monitor
import logging
log = logging.getLogger(__name__)
@require_permission(level=Levels.OP, message='DENIED!')
async def invoke(bot: fido, channel: str, sender: str, args: List[str]):
"""
Handler for the !unmonitor command
:param channel: Channel the command is invoked in
:param sender: Sender of the IRC command
:param bot: bot instance
:param args: Arguments passed to the command by the user
:return: The reply message
"""
sessionmanager = SessionManager()
session = sessionmanager.session
if len(args) == 0:
return "Usage: " + IRC.commandPrefix + "unmonitor <username>"
nick = args[0]
res = session.query(monitor.Monitor).filter(monitor.Monitor.nickname == nick).one_or_none()
if res:
await bot.message(channel, f"Stopped monitoring for {nick}.")
try:
session.query(monitor.Monitor).filter(monitor.Monitor.nickname == nick).delete()
session.commit()
log.info(f"{sender} stopped monitoring for {nick}")
        except Exception:
            log.exception("Failed to delete monitor row!")
session.rollback()
| 32.375
| 95
| 0.680309
|
54a39cae8d4d528e10ebdd2aea125c89d481da69
| 9,708
|
py
|
Python
|
lib/rucio/core/monitor.py
|
fno2010/rucio
|
47e93cfbe5887071c70de4ba815c1bbdddfac2ce
|
[
"Apache-2.0"
] | null | null | null |
lib/rucio/core/monitor.py
|
fno2010/rucio
|
47e93cfbe5887071c70de4ba815c1bbdddfac2ce
|
[
"Apache-2.0"
] | null | null | null |
lib/rucio/core/monitor.py
|
fno2010/rucio
|
47e93cfbe5887071c70de4ba815c1bbdddfac2ce
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright CERN since 2013
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Graphite counters
"""
from __future__ import division
import atexit
import logging
import os
import string
import time
from abc import abstractmethod
from datetime import datetime, timedelta
from pathlib import Path
from retrying import retry
from threading import Lock
from prometheus_client import start_http_server, Counter, Gauge, Histogram, REGISTRY, CollectorRegistry, generate_latest, values, multiprocess
from statsd import StatsClient
from rucio.common.config import config_get, config_get_bool, config_get_int
PROMETHEUS_MULTIPROC_DIR = os.environ.get('PROMETHEUS_MULTIPROC_DIR', os.environ.get('prometheus_multiproc_dir', None))
def cleanup_prometheus_files_at_exit():
if PROMETHEUS_MULTIPROC_DIR:
multiprocess.mark_process_dead(os.getpid())
class MultiprocessMutexValue(values.MultiProcessValue()):
"""
    MultiProcessValue protected by a mutex.
    Rucio is usually deployed using the Apache MPM module, which means that it uses both multiple
    subprocesses and multiple threads per subprocess.
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._lock = Lock()
def inc(self, amount):
with self._lock:
return super().inc(amount)
def set(self, value):
with self._lock:
return super().set(value)
def get(self):
with self._lock:
return super().get()
if PROMETHEUS_MULTIPROC_DIR:
os.makedirs(PROMETHEUS_MULTIPROC_DIR, exist_ok=True)
values.ValueClass = MultiprocessMutexValue
atexit.register(cleanup_prometheus_files_at_exit)
SERVER = config_get('monitor', 'carbon_server', raise_exception=False, default='localhost')
PORT = config_get('monitor', 'carbon_port', raise_exception=False, default=8125)
SCOPE = config_get('monitor', 'user_scope', raise_exception=False, default='rucio')
CLIENT = StatsClient(host=SERVER, port=PORT, prefix=SCOPE)
ENABLE_METRICS = config_get_bool('monitor', 'enable_metrics', raise_exception=False, default=False)
if ENABLE_METRICS:
METRICS_PORT = config_get_int('monitor', 'metrics_port', raise_exception=False, default=8080)
start_http_server(METRICS_PORT, registry=REGISTRY)
COUNTERS = {}
GAUGES = {}
TIMINGS = {}
def _cleanup_old_prometheus_files(path, file_pattern, cleanup_delay, logger):
"""cleanup behind processes which didn't finish gracefully."""
oldest_accepted_mtime = datetime.now() - timedelta(seconds=cleanup_delay)
for file in Path(path).glob(file_pattern):
if not file.is_file():
continue
file_mtime = datetime.fromtimestamp(file.stat().st_mtime)
if file_mtime < oldest_accepted_mtime:
logger(logging.INFO, 'Cleaning up prometheus db file %s', file)
try:
os.remove(file)
except FileNotFoundError:
# Probably file already removed by another concurrent process
pass
def cleanup_old_prometheus_files(logger=logging.log):
path = PROMETHEUS_MULTIPROC_DIR
if path:
_cleanup_old_prometheus_files(path, file_pattern='gauge_live*.db', cleanup_delay=timedelta(hours=1).total_seconds(), logger=logger)
_cleanup_old_prometheus_files(path, file_pattern='*.db', cleanup_delay=timedelta(days=7).total_seconds(), logger=logger)
@retry(retry_on_exception=lambda _: True,
wait_fixed=500,
stop_max_attempt_number=2)
def generate_prometheus_metrics():
cleanup_old_prometheus_files()
registry = CollectorRegistry()
multiprocess.MultiProcessCollector(registry)
return generate_latest(registry)
class MultiMetric:
"""
    Thin wrapper class that records both prometheus and statsd metrics.
    Inspired by the prometheus metric behavior: uses labels to parametrize metrics.
    In the statsd case, metrics are formatted using str.format(**labels); the prometheus
    ones use metric.labels(**labels) calls.
If the prometheus metric string is not provided, it is derived from the statsd one.
"""
def __init__(self, statsd, prom=None, documentation=None, labelnames=(), registry=None):
"""
:param statsd: a string, eventually with keyword placeholders for the str.format(**labels) call
:param prom: a string or a prometheus metric object
"""
self._registry = registry or REGISTRY
self._documentation = documentation or ''
self._statsd = statsd
if not prom:
# automatically generate a prometheus metric name
#
# remove '.{label}' from the string for each `label`
stats_without_labels = ''.join(tup[0].rstrip('.') for tup in string.Formatter().parse(self._statsd))
prom = 'rucio_{}'.format(stats_without_labels).replace('.', '_')
if isinstance(prom, str):
self._prom = self.init_prometheus_metric(prom, self._documentation, labelnames=labelnames)
else:
self._prom = prom
self._labelnames = labelnames
@abstractmethod
def init_prometheus_metric(self, name, documentation, labelnames=()):
pass
def labels(self, **labelkwargs):
return self.__class__(
prom=self._prom.labels(**labelkwargs),
statsd=self._statsd.format(**labelkwargs),
documentation=self._documentation,
labelnames=self._labelnames,
registry=self._registry,
)
class MultiCounter(MultiMetric):
def inc(self, delta=1):
self._prom.inc(delta)
CLIENT.incr(self._statsd, delta)
def init_prometheus_metric(self, name, documentation, labelnames=()):
return Counter(name, documentation, labelnames=labelnames, registry=self._registry)
class MultiGauge(MultiMetric):
def set(self, value):
self._prom.set(value)
CLIENT.gauge(self._statsd, value)
def init_prometheus_metric(self, name, documentation, labelnames=()):
return Gauge(name, documentation, labelnames=labelnames, registry=self._registry)
class MultiTiming(MultiMetric):
def observe(self, value):
self._prom.observe(value)
CLIENT.timing(self._statsd, value)
def init_prometheus_metric(self, name, documentation, labelnames=()):
return Histogram(name, documentation, labelnames=labelnames, registry=self._registry)
def record_counter(name, delta=1, labels=None):
"""
Log one or more counters by arbitrary amounts
:param name: The counter to be updated.
:param delta: The increment for the counter, by default increment by 1.
:param labels: labels used to parametrize the metric
"""
counter = COUNTERS.get(name)
if not counter:
COUNTERS[name] = counter = MultiCounter(statsd=name, labelnames=labels.keys() if labels else ())
delta = abs(delta)
if labels:
counter.labels(**labels).inc(delta)
else:
counter.inc(delta)
def record_gauge(name, value, labels=None):
"""
Log gauge information for a single stat
:param name: The name of the stat to be updated.
:param value: The value to log.
:param labels: labels used to parametrize the metric
"""
gauge = GAUGES.get(name)
if not gauge:
GAUGES[name] = gauge = MultiGauge(statsd=name, labelnames=labels.keys() if labels else ())
if labels:
gauge.labels(**labels).set(value)
else:
gauge.set(value)
def record_timer(name, time, labels=None):
"""
    Log timing information for a single stat (in milliseconds)
:param name: The name of the stat to be updated.
:param time: The time to log.
:param labels: labels used to parametrize the metric
"""
timing = TIMINGS.get(name)
if not timing:
TIMINGS[name] = timing = MultiTiming(statsd=name, labelnames=labels.keys() if labels else ())
if labels:
timing.labels(**labels).observe(time)
else:
timing.observe(time)
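# Illustrative usage sketch (metric names here are made up, not real Rucio metrics):
# each record_* helper lazily creates one Multi* wrapper per statsd name and then
# emits to both backends; labels fill the statsd name via str.format() and select
# the prometheus child via .labels().
#
#     record_counter('transfers.{activity}.submitted', labels={'activity': 'user'})
#     record_gauge('queues.{queue}.length', 42, labels={'queue': 'default'})
#     record_timer('api.request_time', 12.5)  # milliseconds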
class record_timer_block(object):
"""
A context manager for timing a block of code.
:param stats: The name of the stat or list of stats that should be updated.
Each stat can be a simple string or a tuple (string, divisor)
Usage:
with monitor.record_timer_block('test.context_timer'):
stuff1()
stuff2()
with monitor.record_timer_block(['test.context_timer', ('test.context_timer_normalised', 10)]):
stuff1()
stuff2()
"""
def __init__(self, stats, labels=None):
if not isinstance(stats, list):
stats = [stats]
self.stats = stats
self.labels = labels
def __enter__(self):
self.start = time.time()
return self
def __exit__(self, typ, value, tb):
dt = time.time() - self.start
ms = int(round(1000 * dt)) # Convert to ms.
for s in self.stats:
if isinstance(s, str):
record_timer(s, ms, labels=self.labels)
elif isinstance(s, tuple):
if s[1] != 0:
ms = ms / s[1]
record_timer(s[0], ms, labels=self.labels)
| 32.577181
| 142
| 0.68047
|
40ae4c9c131db48b0ad73f7fa9e6c637d62398a7
| 121
|
py
|
Python
|
wsgi.py
|
alberand/issmap
|
13a848df10497db764807305e87816f15b9bca50
|
[
"MIT"
] | null | null | null |
wsgi.py
|
alberand/issmap
|
13a848df10497db764807305e87816f15b9bca50
|
[
"MIT"
] | 6
|
2018-08-16T06:14:39.000Z
|
2020-08-02T16:56:10.000Z
|
wsgi.py
|
alberand/issmap
|
13a848df10497db764807305e87816f15b9bca50
|
[
"MIT"
] | null | null | null |
import os
from issmap import app
application = app
if __name__ == '__main__':
app.run(host=os.environ['ISS_HOST'])
| 15.125
| 40
| 0.710744
|
76d7d8f021579257dedd5cd67eff536874cf80dd
| 47
|
py
|
Python
|
opensourcetest/httpmodel/Common/StringOption/__init__.py
|
chineseluo/opensourcetest
|
b0d222c8b29ff8f70a740ac2b1588a437d41b761
|
[
"Apache-2.0"
] | 69
|
2020-10-20T14:25:49.000Z
|
2022-02-18T02:50:20.000Z
|
opensourcetest/uimodel/TestCases/__init__.py
|
aoozoo/opensourcetest
|
6eaff706c9397847834ef3eef7ad57d5b7f5c5a3
|
[
"Apache-2.0"
] | 6
|
2020-11-23T06:56:09.000Z
|
2022-03-16T04:33:53.000Z
|
opensourcetest/uimodel/TestCases/__init__.py
|
aoozoo/opensourcetest
|
6eaff706c9397847834ef3eef7ad57d5b7f5c5a3
|
[
"Apache-2.0"
] | 8
|
2021-02-01T03:23:20.000Z
|
2022-02-18T02:50:47.000Z
|
#!/user/bin/env python
# -*- coding: utf-8 -*-
| 15.666667
| 23
| 0.553191
|
c6c5ce05827ad52f44fbbda1790752740b94f228
| 7,825
|
py
|
Python
|
cirq/protocols/qid_shape_protocol.py
|
matpompili/Cirq
|
b9ce387a7fc1f571b3d6e903c46543c3578677cb
|
[
"Apache-2.0"
] | 3
|
2020-09-26T03:56:28.000Z
|
2020-09-27T13:21:04.000Z
|
cirq/protocols/qid_shape_protocol.py
|
matpompili/Cirq
|
b9ce387a7fc1f571b3d6e903c46543c3578677cb
|
[
"Apache-2.0"
] | 1
|
2020-08-11T15:45:17.000Z
|
2020-08-11T15:45:17.000Z
|
cirq/protocols/qid_shape_protocol.py
|
matpompili/Cirq
|
b9ce387a7fc1f571b3d6e903c46543c3578677cb
|
[
"Apache-2.0"
] | 1
|
2020-03-12T07:06:14.000Z
|
2020-03-12T07:06:14.000Z
|
# Copyright 2019 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING, Any, Sequence, Tuple, TypeVar, Union
from typing_extensions import Protocol
from cirq import ops
from cirq._doc import document
from cirq.type_workarounds import NotImplementedType
if TYPE_CHECKING:
import cirq
# This is a special indicator value used by the methods to determine whether or
# not the caller provided a 'default' argument. It must be of type
# Tuple[int, ...] to ensure the method has the correct type signature in that
# case. It is checked for using `is`, so it won't have a false positive if the
# user provides a different (0,) value.
RaiseTypeErrorIfNotProvided = (0,) # type: Any
# Equal integers outside the range [-5, 256] aren't identically equal with `is`.
RaiseTypeErrorIfNotProvidedInt = -2**512 # type: Any
TDefault = TypeVar('TDefault')
class SupportsExplicitQidShape(Protocol):
"""A unitary, channel, mixture or other object that operates on a known
    number of qubits/qudits/qids, each with a specific number of quantum levels."""
@document
def _qid_shape_(self) -> Union[Tuple[int, ...], NotImplementedType]:
"""A tuple specifying the number of quantum levels of each qid this
object operates on, e.g. (2, 2, 2) for a three-qubit gate.
This method is used by the global `cirq.qid_shape` method (and by
`cirq.num_qubits` if `_num_qubits_` is not defined). If this
method is not present, or returns NotImplemented, it is assumed that the
receiving object operates on qubits. (The ability to return
NotImplemented is useful when a class cannot know if it has a shape
until runtime.)
The order of values in the tuple is always implicit with respect to the
object being called. For example, for gates the tuple must be ordered
with respect to the list of qubits that the gate is applied to. For
operations, the tuple is ordered to match the list returned by its
`qubits` attribute.
Returns:
The qid shape of this value, or NotImplemented if the shape is
unknown.
"""
class SupportsExplicitNumQubits(Protocol):
"""A unitary, channel, mixture or other object that operates on a known
number of qubits."""
@document
def _num_qubits_(self) -> Union[int, NotImplementedType]:
"""The number of qubits, qudits, or qids this object operates on.
This method is used by the global `cirq.num_qubits` method (and by
        `cirq.qid_shape` if `_qid_shape_` is not defined). If this
method is not present, or returns NotImplemented, it will fallback
to using the length of `_qid_shape_`.
Returns:
An integer specifying the number of qubits, qudits or qids.
"""
def qid_shape(val: Any, default: TDefault = RaiseTypeErrorIfNotProvided
) -> Union[Tuple[int, ...], TDefault]:
"""Returns a tuple describing the number of quantum levels of each
qubit/qudit/qid `val` operates on.
Args:
val: The value to get the shape of.
default: Determines the fallback behavior when `val` doesn't have
a shape. If `default` is not set, a TypeError is raised. If
default is set to a value, that value is returned.
Returns:
If `val` has a `_qid_shape_` method and its result is not
NotImplemented, that result is returned. Otherwise, if `val` has a
`_num_qubits_` method, the shape with `num_qubits` qubits is returned
e.g. `(2,)*num_qubits`. If neither method returns a value other than
NotImplemented and a default value was specified, the default value is
returned.
Raises:
TypeError: `val` doesn't have either a `_qid_shape_` or a `_num_qubits_`
method (or they returned NotImplemented) and also no default value
was specified.
"""
getter = getattr(val, '_qid_shape_', None)
result = NotImplemented if getter is None else getter()
if result is not NotImplemented:
return result
# Check if val is a list of qids
if isinstance(val, Sequence) and all(isinstance(q, ops.Qid) for q in val):
return tuple(q.dimension for q in val)
# Fallback to _num_qubits_
num_getter = getattr(val, '_num_qubits_', None)
num_qubits = NotImplemented if num_getter is None else num_getter()
if num_qubits is not NotImplemented:
return (2,) * num_qubits
if default is not RaiseTypeErrorIfNotProvided:
return default
if getter is not None:
raise TypeError("object of type '{}' does have a _qid_shape_ method, "
"but it returned NotImplemented.".format(type(val)))
if num_getter is not None:
raise TypeError("object of type '{}' does have a _num_qubits_ method, "
"but it returned NotImplemented.".format(type(val)))
raise TypeError("object of type '{}' has no _num_qubits_ or _qid_shape_ "
"methods.".format(type(val)))
def num_qubits(val: Any, default: TDefault = RaiseTypeErrorIfNotProvidedInt
) -> Union[int, TDefault]:
"""Returns the number of qubits, qudits, or qids `val` operates on.
Args:
val: The value to get the number of qubits from.
default: Determines the fallback behavior when `val` doesn't have
a number of qubits. If `default` is not set, a TypeError is raised.
If default is set to a value, that value is returned.
Returns:
If `val` has a `_num_qubits_` method and its result is not
NotImplemented, that result is returned. Otherwise, if `val` has a
`_qid_shape_` method, the number of qubits is computed from the length
of the shape and returned e.g. `len(shape)`. If neither method returns a
value other than NotImplemented and a default value was specified, the
default value is returned.
Raises:
TypeError: `val` doesn't have either a `_num_qubits_` or a `_qid_shape_`
method (or they returned NotImplemented) and also no default value
was specified.
"""
num_getter = getattr(val, '_num_qubits_', None)
num_qubits = NotImplemented if num_getter is None else num_getter()
if num_qubits is not NotImplemented:
return num_qubits
# Fallback to _qid_shape_
getter = getattr(val, '_qid_shape_', None)
shape = NotImplemented if getter is None else getter()
if shape is not NotImplemented:
return len(shape)
# Check if val is a list of qids
if isinstance(val, Sequence) and all(isinstance(q, ops.Qid) for q in val):
return len(val)
if default is not RaiseTypeErrorIfNotProvidedInt:
return default
if num_getter is not None:
raise TypeError("object of type '{}' does have a _num_qubits_ method, "
"but it returned NotImplemented.".format(type(val)))
if getter is not None:
raise TypeError("object of type '{}' does have a _qid_shape_ method, "
"but it returned NotImplemented.".format(type(val)))
raise TypeError("object of type '{}' has no _num_qubits_ or _qid_shape_ "
"methods.".format(type(val)))
| 42.297297
| 80
| 0.678211
|
ef622f78caedf50dc394cd57b429f75d4c64207d
| 5,526
|
py
|
Python
|
src/spouts/iter_files_mails.py
|
fedelemantuano/spamscope
|
ffbfc53b9a3503ef3041cee94c6726c8b899118d
|
[
"Apache-2.0"
] | 252
|
2016-08-29T06:40:31.000Z
|
2022-03-28T10:02:12.000Z
|
src/spouts/iter_files_mails.py
|
fedelemantuano/spamscope
|
ffbfc53b9a3503ef3041cee94c6726c8b899118d
|
[
"Apache-2.0"
] | 19
|
2016-08-30T21:58:50.000Z
|
2021-05-24T10:30:55.000Z
|
src/spouts/iter_files_mails.py
|
fedelemantuano/spamscope
|
ffbfc53b9a3503ef3041cee94c6726c8b899118d
|
[
"Apache-2.0"
] | 59
|
2016-10-18T06:02:10.000Z
|
2022-02-12T21:27:25.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Copyright 2019 Fedele Mantuano (https://www.linkedin.com/in/fmantuano/)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import, print_function, unicode_literals
from datetime import date
import glob
import os
import shutil
import six
from modules import (
AbstractSpout,
is_file_older_than,
MAIL_PATH_OUTLOOK,
MAIL_PATH,
MailItem,
)
class IterFilesMailSpout(AbstractSpout):
outputs = [
'raw_mail',
'mail_server',
'mailbox',
'priority',
'trust',
'mail_type',
'headers'
]
def initialize(self, stormconf, context):
super(IterFilesMailSpout, self).initialize(stormconf, context)
self._check_conf()
self.mails = self.iter_mails()
def _check_conf(self):
self._fail_seconds = int(self.conf.get("fail.after.seconds", 60))
self._what = self.conf["post_processing"].get("what", "remove").lower()
self._where = self.conf["post_processing"].get("where", "/tmp/moved")
if not os.path.exists(self._where):
os.makedirs(self._where)
self._where_failed = self.conf["post_processing"].get(
"where.failed", "/tmp/failed")
if not os.path.exists(self._where_failed):
os.makedirs(self._where_failed)
def iter_mails(self):
for k, v in self.conf["mailboxes"].items():
path = v["path_mails"]
pattern = v["files_pattern"]
mail_type = MAIL_PATH
if v.get("outlook", False):
mail_type = MAIL_PATH_OUTLOOK
for mail in glob.iglob(os.path.join(path, pattern)):
if mail.endswith(".processing"):
try:
self._fail_old_mails(mail)
except OSError:
# mail already deleted
pass
else:
yield MailItem(
filename=mail,
mail_server=v["mail_server"],
mailbox=k,
priority=None,
trust=v["trust_string"],
mail_type=mail_type,
headers=v.get("headers", []))
def next_tuple(self):
try:
# get the next mail
mail = next(self.mails)
mail_string = mail.filename.split("/")[-1]
self.log("EMITTED - {!r}".format(mail_string))
processing = mail.filename + ".processing"
try:
shutil.move(mail.filename, processing)
except IOError:
self.log("ALREADY EMITTED - {!r}".format(mail_string))
else:
self.emit([
processing, # 0
mail.mail_server, # 1
mail.mailbox, # 2
mail.priority, # 3
mail.trust, # 4
mail.mail_type, # 5
mail.headers], # 6
tup_id=mail.filename)
except StopIteration:
# Reload general spout conf
self._conf_loader()
# Load new mails
self.mails = self.iter_mails()
def ack(self, tup_id):
"""Acknowledge tup_id, that is the path_mail. """
mail_string = tup_id.split("/")[-1]
self.log("ACKED - {!r}".format(mail_string))
processing = tup_id + ".processing"
if self._what == "remove":
try:
os.remove(processing)
except Exception:
self.log("Failed to remove {!r} mail".format(processing))
else:
try:
now = six.text_type(date.today())
mail_path = os.path.join(self._where, now)
if not os.path.exists(mail_path):
os.makedirs(mail_path)
# this chmod is useful to work under
# nginx directory listing
os.chmod(processing, 0o775)
mail = os.path.join(mail_path, mail_string)
shutil.move(processing, mail)
except shutil.Error:
os.remove(processing)
def fail(self, tup_id):
self._move_fail(tup_id)
def _move_fail(self, src):
mail_string = src.split("/")[-1]
mail = os.path.join(self._where_failed, mail_string)
processing = src + ".processing"
try:
os.chmod(processing, 0o775)
shutil.move(processing, mail)
finally:
self.log("FAILED - {!r}".format(mail_string))
def _fail_old_mails(self, process_mail):
mail = process_mail.replace(".processing", "")
mail_string = mail.split("/")[-1]
if is_file_older_than(process_mail, self._fail_seconds):
self.log("Mail {!r} older than {} seconds".format(
mail_string, self._fail_seconds))
self._move_fail(mail)
| 32.892857
| 79
| 0.55067
|
87ec9f8ceb6a80693ab1d8713cafde297390ab7b
| 13,695
|
py
|
Python
|
sdk/lusid_asyncio/models/order_graph_placement.py
|
finbourne/lusid-sdk-python-asyncio-preview
|
290f93590ab5485661216c8622d3de9f7af0ed60
|
[
"MIT"
] | null | null | null |
sdk/lusid_asyncio/models/order_graph_placement.py
|
finbourne/lusid-sdk-python-asyncio-preview
|
290f93590ab5485661216c8622d3de9f7af0ed60
|
[
"MIT"
] | null | null | null |
sdk/lusid_asyncio/models/order_graph_placement.py
|
finbourne/lusid-sdk-python-asyncio-preview
|
290f93590ab5485661216c8622d3de9f7af0ed60
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
LUSID API
FINBOURNE Technology # noqa: E501
The version of the OpenAPI document: 0.11.3923
Contact: info@finbourne.com
Generated by: https://openapi-generator.tech
"""
try:
from inspect import getfullargspec
except ImportError:
from inspect import getargspec as getfullargspec
import pprint
import re # noqa: F401
import six
from lusid_asyncio.configuration import Configuration
class OrderGraphPlacement(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
required_map (dict): The key is attribute name
and the value is whether it is 'required' or 'optional'.
"""
openapi_types = {
'placement': 'Placement',
'block_id': 'ResourceId',
'order_ids': 'list[ResourceId]',
'allocation_ids': 'list[ResourceId]',
'execution_ids': 'list[ResourceId]',
'placed': 'OrderGraphSynopsis',
'executed': 'OrderGraphSynopsis',
'allocated': 'OrderGraphSynopsis',
'derived_state': 'str'
}
attribute_map = {
'placement': 'placement',
'block_id': 'blockId',
'order_ids': 'orderIds',
'allocation_ids': 'allocationIds',
'execution_ids': 'executionIds',
'placed': 'placed',
'executed': 'executed',
'allocated': 'allocated',
'derived_state': 'derivedState'
}
required_map = {
'placement': 'required',
'block_id': 'required',
'order_ids': 'required',
'allocation_ids': 'required',
'execution_ids': 'required',
'placed': 'required',
'executed': 'required',
'allocated': 'required',
'derived_state': 'required'
}
def __init__(self, placement=None, block_id=None, order_ids=None, allocation_ids=None, execution_ids=None, placed=None, executed=None, allocated=None, derived_state=None, local_vars_configuration=None): # noqa: E501
"""OrderGraphPlacement - a model defined in OpenAPI"
:param placement: (required)
:type placement: lusid_asyncio.Placement
:param block_id: (required)
:type block_id: lusid_asyncio.ResourceId
:param order_ids: Identifiers for all orders in the block. (required)
:type order_ids: list[lusid_asyncio.ResourceId]
:param allocation_ids: Identifiers for all allocations relating to this placement. (required)
:type allocation_ids: list[lusid_asyncio.ResourceId]
:param execution_ids: Identifiers of all executions against this placement. (required)
:type execution_ids: list[lusid_asyncio.ResourceId]
:param placed: (required)
:type placed: lusid_asyncio.OrderGraphSynopsis
:param executed: (required)
:type executed: lusid_asyncio.OrderGraphSynopsis
:param allocated: (required)
:type allocated: lusid_asyncio.OrderGraphSynopsis
:param derived_state: A simple description of the overall state of a placement. (required)
:type derived_state: str
""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration.get_default_copy()
self.local_vars_configuration = local_vars_configuration
self._placement = None
self._block_id = None
self._order_ids = None
self._allocation_ids = None
self._execution_ids = None
self._placed = None
self._executed = None
self._allocated = None
self._derived_state = None
self.discriminator = None
self.placement = placement
self.block_id = block_id
self.order_ids = order_ids
self.allocation_ids = allocation_ids
self.execution_ids = execution_ids
self.placed = placed
self.executed = executed
self.allocated = allocated
self.derived_state = derived_state
@property
def placement(self):
"""Gets the placement of this OrderGraphPlacement. # noqa: E501
:return: The placement of this OrderGraphPlacement. # noqa: E501
:rtype: lusid_asyncio.Placement
"""
return self._placement
@placement.setter
def placement(self, placement):
"""Sets the placement of this OrderGraphPlacement.
:param placement: The placement of this OrderGraphPlacement. # noqa: E501
:type placement: lusid_asyncio.Placement
"""
if self.local_vars_configuration.client_side_validation and placement is None: # noqa: E501
raise ValueError("Invalid value for `placement`, must not be `None`") # noqa: E501
self._placement = placement
@property
def block_id(self):
"""Gets the block_id of this OrderGraphPlacement. # noqa: E501
:return: The block_id of this OrderGraphPlacement. # noqa: E501
:rtype: lusid_asyncio.ResourceId
"""
return self._block_id
@block_id.setter
def block_id(self, block_id):
"""Sets the block_id of this OrderGraphPlacement.
:param block_id: The block_id of this OrderGraphPlacement. # noqa: E501
:type block_id: lusid_asyncio.ResourceId
"""
if self.local_vars_configuration.client_side_validation and block_id is None: # noqa: E501
raise ValueError("Invalid value for `block_id`, must not be `None`") # noqa: E501
self._block_id = block_id
@property
def order_ids(self):
"""Gets the order_ids of this OrderGraphPlacement. # noqa: E501
Identifiers for all orders in the block. # noqa: E501
:return: The order_ids of this OrderGraphPlacement. # noqa: E501
:rtype: list[lusid_asyncio.ResourceId]
"""
return self._order_ids
@order_ids.setter
def order_ids(self, order_ids):
"""Sets the order_ids of this OrderGraphPlacement.
Identifiers for all orders in the block. # noqa: E501
:param order_ids: The order_ids of this OrderGraphPlacement. # noqa: E501
:type order_ids: list[lusid_asyncio.ResourceId]
"""
if self.local_vars_configuration.client_side_validation and order_ids is None: # noqa: E501
raise ValueError("Invalid value for `order_ids`, must not be `None`") # noqa: E501
self._order_ids = order_ids
@property
def allocation_ids(self):
"""Gets the allocation_ids of this OrderGraphPlacement. # noqa: E501
Identifiers for all allocations relating to this placement. # noqa: E501
:return: The allocation_ids of this OrderGraphPlacement. # noqa: E501
:rtype: list[lusid_asyncio.ResourceId]
"""
return self._allocation_ids
@allocation_ids.setter
def allocation_ids(self, allocation_ids):
"""Sets the allocation_ids of this OrderGraphPlacement.
Identifiers for all allocations relating to this placement. # noqa: E501
:param allocation_ids: The allocation_ids of this OrderGraphPlacement. # noqa: E501
:type allocation_ids: list[lusid_asyncio.ResourceId]
"""
if self.local_vars_configuration.client_side_validation and allocation_ids is None: # noqa: E501
raise ValueError("Invalid value for `allocation_ids`, must not be `None`") # noqa: E501
self._allocation_ids = allocation_ids
@property
def execution_ids(self):
"""Gets the execution_ids of this OrderGraphPlacement. # noqa: E501
Identifiers of all executions against this placement. # noqa: E501
:return: The execution_ids of this OrderGraphPlacement. # noqa: E501
:rtype: list[lusid_asyncio.ResourceId]
"""
return self._execution_ids
@execution_ids.setter
def execution_ids(self, execution_ids):
"""Sets the execution_ids of this OrderGraphPlacement.
Identifiers of all executions against this placement. # noqa: E501
:param execution_ids: The execution_ids of this OrderGraphPlacement. # noqa: E501
:type execution_ids: list[lusid_asyncio.ResourceId]
"""
if self.local_vars_configuration.client_side_validation and execution_ids is None: # noqa: E501
raise ValueError("Invalid value for `execution_ids`, must not be `None`") # noqa: E501
self._execution_ids = execution_ids
@property
def placed(self):
"""Gets the placed of this OrderGraphPlacement. # noqa: E501
:return: The placed of this OrderGraphPlacement. # noqa: E501
:rtype: lusid_asyncio.OrderGraphSynopsis
"""
return self._placed
@placed.setter
def placed(self, placed):
"""Sets the placed of this OrderGraphPlacement.
:param placed: The placed of this OrderGraphPlacement. # noqa: E501
:type placed: lusid_asyncio.OrderGraphSynopsis
"""
if self.local_vars_configuration.client_side_validation and placed is None: # noqa: E501
raise ValueError("Invalid value for `placed`, must not be `None`") # noqa: E501
self._placed = placed
@property
def executed(self):
"""Gets the executed of this OrderGraphPlacement. # noqa: E501
:return: The executed of this OrderGraphPlacement. # noqa: E501
:rtype: lusid_asyncio.OrderGraphSynopsis
"""
return self._executed
@executed.setter
def executed(self, executed):
"""Sets the executed of this OrderGraphPlacement.
:param executed: The executed of this OrderGraphPlacement. # noqa: E501
:type executed: lusid_asyncio.OrderGraphSynopsis
"""
if self.local_vars_configuration.client_side_validation and executed is None: # noqa: E501
raise ValueError("Invalid value for `executed`, must not be `None`") # noqa: E501
self._executed = executed
@property
def allocated(self):
"""Gets the allocated of this OrderGraphPlacement. # noqa: E501
:return: The allocated of this OrderGraphPlacement. # noqa: E501
:rtype: lusid_asyncio.OrderGraphSynopsis
"""
return self._allocated
@allocated.setter
def allocated(self, allocated):
"""Sets the allocated of this OrderGraphPlacement.
:param allocated: The allocated of this OrderGraphPlacement. # noqa: E501
:type allocated: lusid_asyncio.OrderGraphSynopsis
"""
if self.local_vars_configuration.client_side_validation and allocated is None: # noqa: E501
raise ValueError("Invalid value for `allocated`, must not be `None`") # noqa: E501
self._allocated = allocated
@property
def derived_state(self):
"""Gets the derived_state of this OrderGraphPlacement. # noqa: E501
A simple description of the overall state of a placement. # noqa: E501
:return: The derived_state of this OrderGraphPlacement. # noqa: E501
:rtype: str
"""
return self._derived_state
@derived_state.setter
def derived_state(self, derived_state):
"""Sets the derived_state of this OrderGraphPlacement.
A simple description of the overall state of a placement. # noqa: E501
:param derived_state: The derived_state of this OrderGraphPlacement. # noqa: E501
:type derived_state: str
"""
if self.local_vars_configuration.client_side_validation and derived_state is None: # noqa: E501
raise ValueError("Invalid value for `derived_state`, must not be `None`") # noqa: E501
self._derived_state = derived_state
def to_dict(self, serialize=False):
"""Returns the model properties as a dict"""
result = {}
def convert(x):
if hasattr(x, "to_dict"):
args = getfullargspec(x.to_dict).args
if len(args) == 1:
return x.to_dict()
else:
return x.to_dict(serialize)
else:
return x
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
attr = self.attribute_map.get(attr, attr) if serialize else attr
if isinstance(value, list):
result[attr] = list(map(
lambda x: convert(x),
value
))
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], convert(item[1])),
value.items()
))
else:
result[attr] = convert(value)
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, OrderGraphPlacement):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, OrderGraphPlacement):
return True
return self.to_dict() != other.to_dict()
| 34.847328
| 220
| 0.639503
|
2c21d665c4b9385ec51cc661066c90b23591595f
| 460
|
py
|
Python
|
main.py
|
rodrigues-aline/sort
|
9d743125db94098e45fc7417ac18015f2aa5f8bf
|
[
"MIT"
] | null | null | null |
main.py
|
rodrigues-aline/sort
|
9d743125db94098e45fc7417ac18015f2aa5f8bf
|
[
"MIT"
] | null | null | null |
main.py
|
rodrigues-aline/sort
|
9d743125db94098e45fc7417ac18015f2aa5f8bf
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from time import sleep
from context_sort import ContextSort
if __name__=="__main__":
"""
Author: Aline Rodrigues
Created: 20/10/2021
Run the context sort
"""
sort = ContextSort()
try:
        # sort.init_sort()
        # while not sort.status_thread:
        #     sleep(30)
sort.convert_data_to_csv()
except Exception as error:
        print(f'Error: {error}')
| 20.909091
| 39
| 0.556522
|
1ff15ca445c9defb7726b77917f52f77cd9dd4db
| 1,195
|
py
|
Python
|
generalization/experiments/config_utils.py
|
HanGuo97/federated
|
7e64bfe86bb606fad2ea7bc2a0f8ebdb565546f9
|
[
"BSD-3-Clause"
] | 330
|
2020-09-14T23:10:16.000Z
|
2022-03-30T19:49:19.000Z
|
generalization/experiments/config_utils.py
|
HanGuo97/federated
|
7e64bfe86bb606fad2ea7bc2a0f8ebdb565546f9
|
[
"BSD-3-Clause"
] | 52
|
2020-09-30T06:10:51.000Z
|
2022-03-31T19:25:16.000Z
|
generalization/experiments/config_utils.py
|
HanGuo97/federated
|
7e64bfe86bb606fad2ea7bc2a0f8ebdb565546f9
|
[
"BSD-3-Clause"
] | 119
|
2020-09-24T04:54:46.000Z
|
2022-03-31T21:46:57.000Z
|
# Copyright 2021, Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility function for defining hyperparameter grids."""
import itertools
from typing import List, Mapping, Sequence, Union
def hyper_grid(
grid_dict: Mapping[str, Sequence[Union[str, int, float]]]
) -> List[Mapping[str, Union[str, int, float]]]:
"""Converts a param-keyed dict of lists to a list of mapping.
Args:
grid_dict: A Mapping from string parameter names to lists of values.
Returns:
    A list of parameter sweeps based on the Cartesian product of
    all options in grid_dict.
"""
return [
dict(zip(grid_dict, val))
for val in itertools.product(*grid_dict.values())
]
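# Illustrative example (parameter names and values are made up): the helper expands
# the grid into one mapping per configuration via the Cartesian product.
#
#     hyper_grid({'lr': [0.1, 0.01], 'batch_size': [32, 64]})
#     # -> [{'lr': 0.1, 'batch_size': 32}, {'lr': 0.1, 'batch_size': 64},
#     #     {'lr': 0.01, 'batch_size': 32}, {'lr': 0.01, 'batch_size': 64}]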
| 32.297297
| 74
| 0.729707
|
4e42dbd526bdeae5e630472ad73ef9693992ba3d
| 1,375
|
py
|
Python
|
tests/bugs/issue_79/test_ozone_missing_signal.py
|
jmabry/pyaf
|
afbc15a851a2445a7824bf255af612dc429265af
|
[
"BSD-3-Clause"
] | null | null | null |
tests/bugs/issue_79/test_ozone_missing_signal.py
|
jmabry/pyaf
|
afbc15a851a2445a7824bf255af612dc429265af
|
[
"BSD-3-Clause"
] | 1
|
2019-11-30T23:39:38.000Z
|
2019-12-01T04:34:35.000Z
|
tests/bugs/issue_79/test_ozone_missing_signal.py
|
jmabry/pyaf
|
afbc15a851a2445a7824bf255af612dc429265af
|
[
"BSD-3-Clause"
] | null | null | null |
from __future__ import absolute_import
import pandas as pd
import numpy as np
import pyaf.ForecastEngine as autof
csvfile_link = "https://raw.githubusercontent.com/antoinecarme/pyaf/master/data/ozone-la_missing_signal.csv"
csvfile_link = "data/ozone-la_missing_signal.csv"
df = pd.read_csv(csvfile_link);
import datetime
df['Month'] = df['Month'].apply(lambda x : datetime.datetime.strptime(x, "%Y-%m"))
lEngine = autof.cForecastEngine()
lEngine
H = 12;
lTimeVar = 'Month'
lSignalVar = 'Ozone'
# lEngine.mOptions.enable_slow_mode();
# lEngine.mOptions.mDebugPerformance = True;
lEngine.train(df , lTimeVar , lSignalVar, H);
lEngine.getModelInfo();
print(lEngine.mSignalDecomposition.mTrPerfDetails.head());
lEngine.mSignalDecomposition.mBestModel.mTimeInfo.mResolution
lEngine.standardPlots("outputs/my_ozone_missing_signal");
dfapp_in = df.copy();
dfapp_in.tail()
#H = 12
dfapp_out = lEngine.forecast(dfapp_in, H);
#dfapp_out.to_csv("outputs/ozone_apply_out.csv")
dfapp_out.tail(2 * H)
print("Forecast Columns " , dfapp_out.columns);
Forecast_DF = dfapp_out[[lTimeVar , lSignalVar, lSignalVar + '_Forecast']]
print(Forecast_DF.info())
print("Forecasts\n" , Forecast_DF.tail(H));
print("\n\n<ModelInfo>")
print(lEngine.to_json());
print("</ModelInfo>\n\n")
print("\n\n<Forecast>")
print(Forecast_DF.tail(2*H).to_json(date_format='iso'))
print("</Forecast>\n\n")
| 26.960784
| 108
| 0.760727
|
878be1d66da4b53094c6a0d7291721285420cb6d
| 2,137
|
py
|
Python
|
GenerateGraph.py
|
MrPool702/TrabajoFinal
|
2aed36b928243cba7b8d1f1b17ecbef7ee02f8d6
|
[
"CC0-1.0"
] | 2
|
2021-10-02T20:01:55.000Z
|
2021-10-02T20:01:58.000Z
|
GenerateGraph.py
|
MrPool702/TrabajoFinal
|
2aed36b928243cba7b8d1f1b17ecbef7ee02f8d6
|
[
"CC0-1.0"
] | 34
|
2021-09-27T22:48:27.000Z
|
2021-11-27T19:33:17.000Z
|
GenerateGraph.py
|
MrPool702/TrabajoFinal
|
2aed36b928243cba7b8d1f1b17ecbef7ee02f8d6
|
[
"CC0-1.0"
] | null | null | null |
class Point:
def __init__(self,x,y,types,ide) -> None:
self.x = x
self.y = y
self.type = types
self.id = ide
def getID(self):
return self.id
def getX(self):
return self.x
def getY(self):
return self.y
def getType(self):
return self.type
def readNodes(filename):
with open(filename) as file:
lines = file.readlines()
lines.pop(0)
arr = []
for l in lines:
lista = l.strip().split(sep=",")
arr.append(Point(int(lista[1]),int(lista[2]),lista[3],int(lista[0])))
return arr
def readPuntosEntrega(filename):
arr=[[]for _ in range(14)]
e=[[]for _ in range(14)]
with open(filename) as file:
lines = file.readlines()
ide=0
for l in lines:
lista = l.strip().split(",")
arr[ide]=lista
ide+=1
ide=0
for i in range(len(arr)):
for j in range(len(arr[i])):
e[i].append(int(arr[i][j]))
return e
def readAlmacenes(filename):
arr=[]
with open(filename) as file:
lines = file.readlines()
for l in lines:
lista = l.strip().split(",")
arr=lista
for i in range(len(arr)):
arr[i]=int(arr[i])
return arr
def createGrah(n):
g=[[] for _ in range(len(n))]
for i in range(len(n)):
if n[i].getID()-1 >= 0 and n[i].getID() % 1000!=0 and n[i-1].getType() != "CaminoObstruido":
g[i].append(((n[i].getID()-1),10))
if n[i].getID()+1 < len(n) and n[i].getID() % 1000 != 999 and n[i+1].getType() != "CaminoObstruido":
g[i].append(((n[i].getID()+1),10))
if n[i].getID()+1000<len(n) and n[i+1000].getType() != "CaminoObstruido":
g[i].append(((n[i].getID()+1000),15))
if n[i].getID()-1000 >= 0 and n[i-1000].getType() != "CaminoObstruido":
g[i].append(((n[i].getID()-1000),15))
return g
'''
to obtain the graph:
nodos= readNodes("Puntos.csv")
grafo=createGrah(nodos)
'''
| 29.273973
| 109
| 0.500234
|
287b74d090343ead913378c65fdc37a44cf39cbf
| 876
|
py
|
Python
|
pwkit/environments/casa/cscript_setjy.py
|
pkgw/pwk
|
c5a2c25c9b42cb08bd9813e5b8c51c13163fa1f1
|
[
"MIT"
] | 18
|
2016-03-21T20:06:02.000Z
|
2022-02-12T19:00:40.000Z
|
pwkit/environments/casa/cscript_setjy.py
|
pkgw/pwk
|
c5a2c25c9b42cb08bd9813e5b8c51c13163fa1f1
|
[
"MIT"
] | 8
|
2016-08-06T19:12:00.000Z
|
2019-03-18T19:10:57.000Z
|
pwkit/environments/casa/cscript_setjy.py
|
pkgw/pwk
|
c5a2c25c9b42cb08bd9813e5b8c51c13163fa1f1
|
[
"MIT"
] | 8
|
2015-01-05T19:23:31.000Z
|
2020-04-02T21:01:38.000Z
|
# -*- mode: python; coding: utf-8 -*-
# Copyright 2016 Peter Williams <peter@newton.cx> and collaborators
# Licensed under the MIT License
"""This file is a casapy script. Do not use it as a module.
It is also not intended to be invoked directly through pkcasascript. See
`pwkit.environments.casa.tasks:setjy`.
We can invoke the C++ code directly for most setjy functionality, but the
current solar system model implementation is based on a big batch of Python
code in the CASA distribution. So when those are requested, we farm out to
that.
"""
def in_casapy (helper, **kwargs):
"""This function is run inside the weirdo casapy IPython environment! A
strange set of modules is available, and the
`pwkit.environments.casa.scripting` system sets up a very particular
environment to allow encapsulated scripting.
"""
helper.casans.setjy (**kwargs)
| 35.04
| 75
| 0.746575
|
dd7f6929523cbb265fb7dce560b671487d10d825
| 306
|
py
|
Python
|
naughty_string_validator/utils/file_utils.py
|
shashikumarraja/naughty-string-validator-python
|
8b0fad299aa64e99b3f8dda9f3dad7f7bef8b9af
|
[
"MIT"
] | 1
|
2021-11-25T03:43:56.000Z
|
2021-11-25T03:43:56.000Z
|
naughty_string_validator/utils/file_utils.py
|
shashikumarraja/naughty-string-validator-python
|
8b0fad299aa64e99b3f8dda9f3dad7f7bef8b9af
|
[
"MIT"
] | 16
|
2018-09-02T18:04:09.000Z
|
2020-05-26T05:59:34.000Z
|
naughty_string_validator/utils/file_utils.py
|
shashikumarraja/naughty-string-validator-python
|
8b0fad299aa64e99b3f8dda9f3dad7f7bef8b9af
|
[
"MIT"
] | 5
|
2018-09-04T08:03:31.000Z
|
2022-02-14T10:31:28.000Z
|
import json
import io
class FileUtils:
def __init__(self):
pass
def read_file(self, path):
try:
with io.open(path, 'r', encoding='utf8') as f:
content = json.loads(f.read())
return content
except IOError as e:
raise e
| 18
| 58
| 0.522876
|
be05e85923e01e8be65cfd648743e93314b2a780
| 10,285
|
py
|
Python
|
src/dddql_tictactoe_agent.py
|
InsaneMonster/TicTacToeRL
|
2c1a7da40f32d8ff8dbb5aaafcae5a4f16d20bf2
|
[
"BSD-3-Clause"
] | null | null | null |
src/dddql_tictactoe_agent.py
|
InsaneMonster/TicTacToeRL
|
2c1a7da40f32d8ff8dbb5aaafcae5a4f16d20bf2
|
[
"BSD-3-Clause"
] | null | null | null |
src/dddql_tictactoe_agent.py
|
InsaneMonster/TicTacToeRL
|
2c1a7da40f32d8ff8dbb5aaafcae5a4f16d20bf2
|
[
"BSD-3-Clause"
] | null | null | null |
#
# Copyright (C) 2019 Luca Pasqualini
# University of Siena - Artificial Intelligence Laboratory - SAILab
#
#
# TicTacToeRL project is licensed under a BSD 3-Clause.
#
# You should have received a copy of the license along with this
# work. If not, see <https://opensource.org/licenses/BSD-3-Clause>.
# Import packages
import logging
import numpy
import random
# Import usienarl package
from usienarl import Agent, ExplorationPolicy, SpaceType
from usienarl.td_models import DuelingDeepQLearning
# Import required src
from src.tictactoe_pass_through_interface import TicTacToePassThroughInterface
class DDDQLTicTacToeAgent(Agent):
"""
Dueling Double Deep Q-Learning agent for Tic Tac Toe environments.
"""
def __init__(self,
name: str,
model: DuelingDeepQLearning,
exploration_policy: ExplorationPolicy,
weight_copy_step_interval: int,
batch_size: int = 1,
warmup_random_action_probability: float = 1.0):
# Define agent attributes
self.warmup_random_action_probability: float = warmup_random_action_probability
# Define internal agent attributes
self._model: DuelingDeepQLearning = model
self._exploration_policy: ExplorationPolicy = exploration_policy
self._weight_copy_step_interval: int = weight_copy_step_interval
self._batch_size: int = batch_size
self._current_absolute_errors = None
self._current_loss = None
# Generate base agent
super(DDDQLTicTacToeAgent, self).__init__(name)
def _generate(self,
logger: logging.Logger,
observation_space_type: SpaceType, observation_space_shape,
agent_action_space_type: SpaceType, agent_action_space_shape) -> bool:
# Generate the exploration policy and check if it's successful, stop if not successful
if self._exploration_policy.generate(logger, agent_action_space_type, agent_action_space_shape):
# Generate the _model and return a flag stating if generation was successful
return self._model.generate(logger, self._scope + "/" + self._name,
observation_space_type, observation_space_shape,
agent_action_space_type, agent_action_space_shape)
return False
def initialize(self,
logger: logging.Logger,
session):
# Reset internal agent attributes
self._current_absolute_errors = None
self._current_loss = None
# Initialize the model
self._model.initialize(logger, session)
# Initialize the exploration policy
self._exploration_policy.initialize(logger, session)
# Run the weight copy operation to uniform main and target networks
self._model.copy_weight(session)
def act_warmup(self,
logger: logging.Logger,
session,
interface: TicTacToePassThroughInterface,
agent_observation_current):
# Act randomly or using best agent prediction depending on defined probability
if random.uniform(0, 1) < self.warmup_random_action_probability:
action = interface.get_random_agent_action(logger, session)
else:
# Return the best action predicted by the model with the current possible action mask
action = self._model.get_best_action(session, agent_observation_current, interface.get_action_mask(logger, session))
# Return the chosen action
return action
def act_train(self,
logger: logging.Logger,
session,
interface: TicTacToePassThroughInterface,
agent_observation_current):
# Get the best action predicted by the model and all relative action q-values
best_action, all_actions = self._model.get_best_action_and_all_action_values(session, agent_observation_current, interface.get_action_mask(logger, session))
# Act according to the exploration policy
action = self._exploration_policy.act(logger, session, interface, all_actions, best_action)
# Return the chosen action
return action
def act_inference(self,
logger: logging.Logger,
session,
interface: TicTacToePassThroughInterface,
agent_observation_current):
# Return the best action predicted by the model with the current possible action mask
return self._model.get_best_action(session, agent_observation_current, interface.get_action_mask(logger, session))
def complete_step_warmup(self,
logger: logging.Logger,
session,
interface: TicTacToePassThroughInterface,
agent_observation_current,
agent_action, reward: float,
agent_observation_next,
warmup_step_current: int,
warmup_episode_current: int,
warmup_episode_volley: int):
# Adjust the next observation if None (final step)
last_step: bool = False
if agent_observation_next is None:
last_step = True
if self._observation_space_type == SpaceType.discrete:
agent_observation_next = 0
else:
agent_observation_next = numpy.zeros(self._observation_space_shape, dtype=float)
# Save the current step in the buffer
self._model.buffer.store(agent_observation_current, agent_action, reward, agent_observation_next, last_step)
def complete_step_train(self,
logger: logging.Logger,
session,
interface: TicTacToePassThroughInterface,
agent_observation_current,
agent_action,
reward: float,
agent_observation_next,
train_step_current: int, train_step_absolute: int,
train_episode_current: int, train_episode_absolute: int,
train_episode_volley: int, train_episode_total: int):
# Adjust the next observation if None (final step)
last_step: bool = False
if agent_observation_next is None:
last_step = True
if self._observation_space_type == SpaceType.discrete:
agent_observation_next = 0
else:
agent_observation_next = numpy.zeros(self._observation_space_shape, dtype=float)
# After each weight step interval update the target network weights with the main network weights
if train_step_absolute % self._weight_copy_step_interval == 0:
self._model.copy_weight(session)
# Save the current step in the buffer
self._model.buffer.store(agent_observation_current, agent_action, reward, agent_observation_next, last_step)
# Update the model and save current loss and absolute errors
summary, self._current_loss, self._current_absolute_errors = self._model.update(session, self._model.buffer.get(self._batch_size))
# Update the buffer with the computed absolute error
self._model.buffer.update(self._current_absolute_errors)
# Update the summary at the absolute current step
self._summary_writer.add_summary(summary, train_step_absolute)
def complete_step_inference(self,
logger: logging.Logger,
session,
interface: TicTacToePassThroughInterface,
agent_observation_current,
agent_action,
reward: float,
agent_observation_next,
inference_step_current: int,
inference_episode_current: int,
inference_episode_volley: int):
pass
def complete_episode_warmup(self,
logger: logging.Logger,
session,
interface: TicTacToePassThroughInterface,
last_step_reward: float,
episode_total_reward: float,
warmup_episode_current: int,
warmup_episode_volley: int):
pass
def complete_episode_train(self,
logger: logging.Logger,
session,
interface: TicTacToePassThroughInterface,
last_step_reward: float,
episode_total_reward: float,
train_step_absolute: int,
train_episode_current: int, train_episode_absolute: int,
train_episode_volley: int, train_episode_total: int):
# Update the exploration policy
self._exploration_policy.update(logger, session)
def complete_episode_inference(self,
logger: logging.Logger,
session,
interface: TicTacToePassThroughInterface,
last_step_reward: float,
episode_total_reward: float,
inference_episode_current: int,
inference_episode_volley: int):
pass
@property
def trainable_variables(self):
# Return the trainable variables of the agent model in experiment/agent _scope
return self._model.trainable_variables
@property
def warmup_episodes(self) -> int:
# Return the amount of warmup episodes required by the model
return self._model.warmup_episodes
| 47.615741
| 164
| 0.601945
|
e601fc552bb332e867b17bbb67463edcef9146a7
| 10,364
|
py
|
Python
|
docker/sds-api/spine-directory-service/sds/request/tests/request_handler_test_base.py
|
NHSDigital/elecronic-prescriptions-service-api
|
41fb4b02cfdb674b2a787dd2d8dd698a8c7a59b0
|
[
"MIT"
] | 2
|
2020-08-18T09:23:09.000Z
|
2020-11-23T11:43:27.000Z
|
docker/sds-api/spine-directory-service/sds/request/tests/request_handler_test_base.py
|
NHSDigital/elecronic-prescriptions-service-api
|
41fb4b02cfdb674b2a787dd2d8dd698a8c7a59b0
|
[
"MIT"
] | 179
|
2020-07-01T08:53:50.000Z
|
2022-03-11T14:18:39.000Z
|
docker/sds-api/spine-directory-service/sds/request/tests/request_handler_test_base.py
|
NHSDigital/elecronic-prescriptions-service-api
|
41fb4b02cfdb674b2a787dd2d8dd698a8c7a59b0
|
[
"MIT"
] | 3
|
2020-07-22T14:00:41.000Z
|
2021-12-15T15:15:06.000Z
|
import json
import unittest.mock
import uuid
from abc import ABC
from typing import Optional
import tornado.testing
import tornado.web
from request import routing_reliability_handler, accredited_system_handler
from request.http_headers import HttpHeaders
from utilities import message_utilities
ORG_CODE = "org"
SPINE_CORE_ORG_CODE = "core_org"
SERVICE_ID = "service:interaction"
FORWARD_RELIABLE_SERVICE_ID = "urn:nhs:names:services:gp2gp:RCMR_IN010000UK05"
CORE_SPINE_FORWARD_RELIABLE_SERVICE_ID = "urn:nhs:names:services:tms:ReliableIntermediary"
PARTY_KEY = "some_party_key"
MANUFACTURING_ORG = "some_manufacturer"
FIXED_UUID = "f0f0e921-92ca-4a88-a550-2dbb36f703af"
DEVICE_PATH = "/device"
class RequestHandlerTestBase(ABC, tornado.testing.AsyncHTTPTestCase):
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.maxDiff = None
def get_app(self):
self.sds_client = unittest.mock.Mock()
return tornado.web.Application([
(r"/endpoint", routing_reliability_handler.RoutingReliabilityRequestHandler, {"sds_client": self.sds_client}),
(r"/device", accredited_system_handler.AccreditedSystemRequestHandler, {"sds_client": self.sds_client})
])
def _test_get(self, url, expected_json_file_path):
response = self.fetch(url, method="GET")
current, expected = self._get_current_and_expected_body(response, expected_json_file_path)
self.assertEqual(response.code, 200)
self.assertEqual(expected, current)
self.assertEqual(response.headers.get(HttpHeaders.CONTENT_TYPE, None), "application/fhir+json")
self.assertIsNotNone(response.headers.get(HttpHeaders.X_CORRELATION_ID, None))
def _test_correlation_id_is_set_as_response_header(self, url, invalid_url, mock_200, mock_500):
with self.subTest("X-Correlation-ID is set on 200 response"):
correlation_id = str(uuid.uuid4()).upper()
mock_200()
response = self.fetch(url, method="GET", headers={'X-Correlation-ID': correlation_id})
self.assertEqual(response.code, 200)
self.assertEqual(response.headers.get('X-Correlation-ID'), correlation_id)
with self.subTest("X-Correlation-ID is set on 500 response"):
correlation_id = str(uuid.uuid4()).upper()
mock_500()
response = self.fetch(url, method="GET", headers={'X-Correlation-ID': correlation_id})
self.assertEqual(response.code, 500)
self.assertEqual(response.headers.get('X-Correlation-ID'), correlation_id)
with self.subTest("X-Correlation-ID is set on 400 response"):
correlation_id = str(uuid.uuid4()).upper()
response = self.fetch(
invalid_url, method="GET", headers={'X-Correlation-ID': correlation_id})
self.assertEqual(response.code, 400)
self.assertEqual(response.headers.get('X-Correlation-ID'), correlation_id)
def _test_get_handles_different_accept_header(self, url, expected_json_file_path):
with self.subTest("Accept header is missing"):
response = self.fetch(url, method="GET")
current, expected = self._get_current_and_expected_body(response, expected_json_file_path)
self.assertEqual(response.code, 200)
self.assertEqual(expected, current)
self.assertEqual(response.headers.get(HttpHeaders.CONTENT_TYPE, None), "application/fhir+json")
with self.subTest("Accept header is case-insensitive application/fhir+json"):
headers = {'Accept': 'application/fhir+JSON'}
response = self.fetch(url, method="GET", headers=headers)
current, expected = self._get_current_and_expected_body(response, expected_json_file_path)
self.assertEqual(response.code, 200)
self.assertEqual(expected, current)
self.assertEqual(response.headers.get(HttpHeaders.CONTENT_TYPE, None), "application/fhir+json")
with self.subTest("Accept header can have multiple values and one must be valid"):
headers = {'Accept': ', , text/plain , application/fhir+JSON'}
response = self.fetch(url, method="GET", headers=headers)
current, expected = self._get_current_and_expected_body(response, expected_json_file_path)
self.assertEqual(response.code, 200)
self.assertEqual(expected, current)
self.assertEqual(response.headers.get(HttpHeaders.CONTENT_TYPE, None), "application/fhir+json")
with self.subTest("Accept header is invalid"):
headers = {'Accept': 'text/plain,application/xml'}
response = self.fetch(url, method="GET", headers=headers)
self.assertEqual(response.code, 406)
def _test_should_return_405_when_using_non_get(self, url: str):
for method in ["POST", "DELETE", "PUT", "OPTIONS"]:
with self.subTest(f"405 when using {method}"):
response = self.fetch(url, body="" if method in ["POST", "PUT"] else None, method=method)
self.assertEqual(response.code, 405)
self.assertEqual(response.headers.get("Allow"), "GET")
self._assert_405_operation_outcome(response.body.decode())
@staticmethod
def _build_endpoint_url(org_code: Optional[str] = ORG_CODE, service_id: Optional[str] = SERVICE_ID, party_key: Optional[str] = PARTY_KEY):
url = "/endpoint"
org_code = f"organization=https://fhir.nhs.uk/Id/ods-organization-code|{org_code}" if org_code is not None else None
service_id = f"identifier=https://fhir.nhs.uk/Id/nhsServiceInteractionId|{service_id}" if service_id is not None else None
party_key = f"identifier=https://fhir.nhs.uk/Id/nhsMhsPartyKey|{party_key}" if party_key is not None else None
query_params = "&".join(filter(lambda query_param: query_param, [org_code, service_id, party_key]))
url = f"{url}?{query_params}" if query_params else url
return url
@staticmethod
def _build_device_url(
org_code: Optional[str] = ORG_CODE,
service_id: Optional[str] = SERVICE_ID,
party_key: Optional[str] = PARTY_KEY,
manufacturing_organization: Optional[str] = MANUFACTURING_ORG):
path = DEVICE_PATH
org_code = f"organization=https://fhir.nhs.uk/Id/ods-organization-code|{org_code}" if org_code is not None else None
service_id = f"identifier=https://fhir.nhs.uk/Id/nhsServiceInteractionId|{service_id}" if service_id is not None else None
party_key = f"identifier=https://fhir.nhs.uk/Id/nhsMhsPartyKey|{party_key}" if party_key is not None else None
manufacturing_organization = f"manufacturing-organization=https://fhir.nhs.uk/Id/ods-organization-code|{manufacturing_organization}" if manufacturing_organization is not None else None
query_params = "&".join(filter(lambda query_param: query_param, [org_code, service_id, party_key, manufacturing_organization]))
path = f"{path}?{query_params}" if query_params else path
return path
def _get_current_and_expected_body(self, response, expected_file_path):
current = json.loads(message_utilities.replace_uuid(response.body.decode(), FIXED_UUID))
self.assertEqual(current["resourceType"], "Bundle", current)
current_entries = current["entry"]
current_id = current['id']
current_link_url = current['link'][0]['url']
expected = json.loads(open(expected_file_path, "r").read())
expected_entries = expected["entry"]
expected['id'] = current_id
expected['link'][0]['url'] = current_link_url
self.assertEqual(len(current_entries), len(expected_entries))
for i in range(0, len(current_entries)):
current_entry = current_entries[i]
current_entry_full_url = current_entry["fullUrl"]
current_resource_id = current_entry["resource"]["id"]
expected_entry = expected_entries[i]
expected_entry["fullUrl"] = current_entry_full_url
expected_entry["resource"]["id"] = current_resource_id
return current, expected
def _assert_400_operation_outcome(self, response_content, diagnostics):
operation_outcome = json.loads(response_content)
self.assertEqual(operation_outcome["resourceType"], "OperationOutcome")
issue = operation_outcome["issue"][0]
self.assertEqual(issue["severity"], "error")
self.assertEqual(issue["code"], "required")
self.assertEqual(issue["diagnostics"], diagnostics)
coding = issue["details"]["coding"][0]
self.assertEqual(coding["system"], 'https://fhir.nhs.uk/STU3/ValueSet/Spine-ErrorOrWarningCode-1')
self.assertEqual(coding["code"], 'BAD_REQUEST')
self.assertEqual(coding["display"], 'Bad request')
def _assert_405_operation_outcome(self, response_content):
operation_outcome = json.loads(response_content)
self.assertEqual(operation_outcome["resourceType"], "OperationOutcome")
issue = operation_outcome["issue"][0]
self.assertEqual(issue["severity"], "error")
self.assertEqual(issue["code"], "not-supported")
self.assertEqual(issue["diagnostics"], 'HTTP operation not supported')
coding = issue["details"]["coding"][0]
self.assertEqual(coding["system"], 'https://fhir.nhs.uk/STU3/ValueSet/Spine-ErrorOrWarningCode-1')
self.assertEqual(coding["code"], 'NOT_IMPLEMENTED')
self.assertEqual(coding["display"], 'Not implemented')
def _assert_500_operation_outcome(self, response_content):
operation_outcome = json.loads(response_content)
self.assertEqual(operation_outcome["resourceType"], "OperationOutcome")
issue = operation_outcome["issue"][0]
self.assertEqual(issue["severity"], "error")
self.assertEqual(issue["code"], "exception")
self.assertEqual(issue["diagnostics"], 'some error')
coding = issue["details"]["coding"][0]
self.assertEqual(coding["system"], 'https://fhir.nhs.uk/STU3/ValueSet/Spine-ErrorOrWarningCode-1')
self.assertEqual(coding["code"], 'INTERNAL_SERVER_ERROR')
self.assertEqual(coding["display"], 'Unexpected internal server error')
| 49.826923
| 192
| 0.690178
|
57ae140f0542b97b5e9d5be0ebce3d49c123602f
| 8,740
|
py
|
Python
|
frozen/uasyncio/__init__.py
|
joewez/FunWithMicroPython
|
11f36165376929bcfac15445034d1cd352734c44
|
[
"MIT"
] | null | null | null |
frozen/uasyncio/__init__.py
|
joewez/FunWithMicroPython
|
11f36165376929bcfac15445034d1cd352734c44
|
[
"MIT"
] | null | null | null |
frozen/uasyncio/__init__.py
|
joewez/FunWithMicroPython
|
11f36165376929bcfac15445034d1cd352734c44
|
[
"MIT"
] | null | null | null |
import uerrno
import uselect as select
import usocket as _socket
from uasyncio.core import *
DEBUG = 0
log = None
def set_debug(val):
global DEBUG, log
DEBUG = val
if val:
import logging
log = logging.getLogger("uasyncio")
class PollEventLoop(EventLoop):
def __init__(self, runq_len=16, waitq_len=16):
EventLoop.__init__(self, runq_len, waitq_len)
self.poller = select.poll()
self.objmap = {}
def add_reader(self, sock, cb, *args):
#if DEBUG and __debug__:
# log.debug("add_reader%s", (sock, cb, args))
if args:
self.poller.register(sock, select.POLLIN)
self.objmap[id(sock)] = (cb, args)
else:
self.poller.register(sock, select.POLLIN)
self.objmap[id(sock)] = cb
def remove_reader(self, sock):
#if DEBUG and __debug__:
# log.debug("remove_reader(%s)", sock)
self.poller.unregister(sock)
del self.objmap[id(sock)]
def add_writer(self, sock, cb, *args):
#if DEBUG and __debug__:
# log.debug("add_writer%s", (sock, cb, args))
if args:
self.poller.register(sock, select.POLLOUT)
self.objmap[id(sock)] = (cb, args)
else:
self.poller.register(sock, select.POLLOUT)
self.objmap[id(sock)] = cb
def remove_writer(self, sock):
#if DEBUG and __debug__:
# log.debug("remove_writer(%s)", sock)
try:
self.poller.unregister(sock)
self.objmap.pop(id(sock), None)
except OSError as e:
# StreamWriter.awrite() first tries to write to a socket,
# and if that succeeds, yield IOWrite may never be called
# for that socket, and it will never be added to poller. So,
# ignore such error.
if e.args[0] != uerrno.ENOENT:
raise
def wait(self, delay):
#if DEBUG and __debug__:
# log.debug("poll.wait(%d)", delay)
# We need one-shot behavior (second arg of 1 to .poll())
res = self.poller.ipoll(delay, 1)
#log.debug("poll result: %s", res)
# Remove "if res" workaround after
# https://github.com/micropython/micropython/issues/2716 fixed.
if res:
for sock, ev in res:
cb = self.objmap[id(sock)]
if ev & (select.POLLHUP | select.POLLERR):
# These events are returned even if not requested, and
# are sticky, i.e. will be returned again and again.
# If the caller doesn't do proper error handling and
# unregister this sock, we'll busy-loop on it, so we
# as well can unregister it now "just in case".
self.remove_reader(sock)
#if DEBUG and __debug__:
# log.debug("Calling IO callback: %r", cb)
if isinstance(cb, tuple):
cb[0](*cb[1])
else:
cb.pend_throw(None)
self.call_soon(cb)
class StreamReader:
def __init__(self, polls, ios=None):
if ios is None:
ios = polls
self.polls = polls
self.ios = ios
def read(self, n=-1):
while True:
yield IORead(self.polls)
res = self.ios.read(n)
if res is not None:
break
# This should not happen for real sockets, but can easily
# happen for stream wrappers (ssl, websockets, etc.)
#log.warn("Empty read")
if not res:
yield IOReadDone(self.polls)
return res
def readexactly(self, n):
buf = b""
while n:
yield IORead(self.polls)
res = self.ios.read(n)
assert res is not None
if not res:
yield IOReadDone(self.polls)
break
buf += res
n -= len(res)
return buf
def readline(self):
#if DEBUG and __debug__:
# log.debug("StreamReader.readline()")
buf = b""
while True:
yield IORead(self.polls)
res = self.ios.readline()
assert res is not None
if not res:
yield IOReadDone(self.polls)
break
buf += res
if buf[-1] == 0x0a:
break
#if DEBUG and __debug__:
# log.debug("StreamReader.readline(): %s", buf)
return buf
def aclose(self):
yield IOReadDone(self.polls)
self.ios.close()
def __repr__(self):
return "<StreamReader %r %r>" % (self.polls, self.ios)
class StreamWriter:
def __init__(self, s, extra):
self.s = s
self.extra = extra
def awrite(self, buf, off=0, sz=-1):
# This method is called awrite (async write) to not proliferate
# incompatibility with original asyncio. Unlike original asyncio
# whose .write() method is both not a coroutine and guaranteed
# to return immediately (which means it has to buffer all the
# data), this method is a coroutine.
if sz == -1:
sz = len(buf) - off
#if DEBUG and __debug__:
# log.debug("StreamWriter.awrite(): spooling %d bytes", sz)
while True:
res = self.s.write(buf, off, sz)
# If we spooled everything, return immediately
if res == sz:
#if DEBUG and __debug__:
# log.debug("StreamWriter.awrite(): completed spooling %d bytes", res)
return
if res is None:
res = 0
#if DEBUG and __debug__:
# log.debug("StreamWriter.awrite(): spooled partial %d bytes", res)
assert res < sz
off += res
sz -= res
yield IOWrite(self.s)
#assert s2.fileno() == self.s.fileno()
#if DEBUG and __debug__:
# log.debug("StreamWriter.awrite(): can write more")
# Write piecewise content from iterable (usually, a generator)
def awriteiter(self, iterable):
for buf in iterable:
yield from self.awrite(buf)
def aclose(self):
yield IOWriteDone(self.s)
self.s.close()
def get_extra_info(self, name, default=None):
return self.extra.get(name, default)
def __repr__(self):
return "<StreamWriter %r>" % self.s
def open_connection(host, port, ssl=False):
#if DEBUG and __debug__:
# log.debug("open_connection(%s, %s)", host, port)
ai = _socket.getaddrinfo(host, port, 0, _socket.SOCK_STREAM)
ai = ai[0]
s = _socket.socket(ai[0], ai[1], ai[2])
s.setblocking(False)
try:
s.connect(ai[-1])
except OSError as e:
if e.args[0] != uerrno.EINPROGRESS:
raise
#if DEBUG and __debug__:
# log.debug("open_connection: After connect")
yield IOWrite(s)
# if __debug__:
# assert s2.fileno() == s.fileno()
#if DEBUG and __debug__:
# log.debug("open_connection: After iowait: %s", s)
if ssl:
print("Warning: uasyncio SSL support is alpha")
import ussl
s.setblocking(True)
s2 = ussl.wrap_socket(s)
s.setblocking(False)
return StreamReader(s, s2), StreamWriter(s2, {})
return StreamReader(s), StreamWriter(s, {})
def start_server(client_coro, host, port, backlog=10):
#if DEBUG and __debug__:
# log.debug("start_server(%s, %s)", host, port)
print("HTTP Server started on ", host)
ai = _socket.getaddrinfo(host, port, 0, _socket.SOCK_STREAM)
ai = ai[0]
s = _socket.socket(ai[0], ai[1], ai[2])
s.setblocking(False)
s.setsockopt(_socket.SOL_SOCKET, _socket.SO_REUSEADDR, 1)
s.bind(ai[-1])
s.listen(backlog)
while True:
#if DEBUG and __debug__:
# log.debug("start_server: Before accept")
yield IORead(s)
#if DEBUG and __debug__:
# log.debug("start_server: After iowait")
s2, client_addr = s.accept()
s2.setblocking(False)
#if DEBUG and __debug__:
# log.debug("start_server: After accept: %s", s2)
extra = {"peername": client_addr}
yield client_coro(StreamReader(s2), StreamWriter(s2, extra))
import uasyncio.core
uasyncio.core._event_loop_class = PollEventLoop
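# A minimal echo-server sketch using the start_server()/StreamReader/StreamWriter API above,
# assuming the matching uasyncio.core module (which supplies get_event_loop()/create_task())
# and a MicroPython port that provides usocket/uselect; host and port are placeholders:
#
#   import uasyncio as asyncio
#
#   def echo(reader, writer):
#       line = yield from reader.readline()
#       yield from writer.awrite(b"you said: " + line)
#       yield from writer.aclose()
#
#   loop = asyncio.get_event_loop()
#   loop.create_task(asyncio.start_server(echo, "0.0.0.0", 8080))
#   loop.run_forever()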
| 33.615385
| 90
| 0.53833
|
de269554a2bcbe41b931f7b856c594e361798b6c
| 1,184
|
py
|
Python
|
src/template/TestCases.py
|
adityayedetore/LCRNN
|
7b6afaf6098fed584b90fe0196cfd26aa6a190c5
|
[
"MIT"
] | 41
|
2018-08-30T10:29:21.000Z
|
2022-01-19T08:49:33.000Z
|
src/template/TestCases.py
|
billptw/FastTrees
|
776cf513a78b6941308f3e9781deeffa7c45a492
|
[
"BSD-3-Clause"
] | 1
|
2021-12-09T06:14:45.000Z
|
2021-12-09T09:33:35.000Z
|
src/template/TestCases.py
|
billptw/FastTrees
|
776cf513a78b6941308f3e9781deeffa7c45a492
|
[
"BSD-3-Clause"
] | 17
|
2018-09-10T19:11:26.000Z
|
2021-12-23T06:32:24.000Z
|
class TestCase():
def __init__(self):
self.agrmt_cases = ['obj_rel_across_anim',
'obj_rel_within_anim',
'obj_rel_across_inanim',
'obj_rel_within_inanim',
'subj_rel',
'prep_anim',
'prep_inanim',
'obj_rel_no_comp_across_anim',
'obj_rel_no_comp_within_anim',
'obj_rel_no_comp_across_inanim',
'obj_rel_no_comp_within_inanim',
'simple_agrmt',
'sent_comp',
'vp_coord',
'long_vp_coord',
'reflexives_across',
'simple_reflexives',
'reflexive_sent_comp']
self.npi_cases = ['npi_across_anim',
'npi_across_inanim',
'simple_npi_anim',
'simple_npi_inanim']
self.all_cases = self.agrmt_cases+self.npi_cases
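# A minimal usage sketch: the class is just a container for the evaluation case names, so
# typical usage is to instantiate it and iterate over one of the case lists. How each case
# name is consumed downstream is an assumption, not shown in this file.
if __name__ == "__main__":
    cases = TestCase()
    print(len(cases.agrmt_cases), "agreement cases,", len(cases.npi_cases), "NPI cases")
    for name in cases.all_cases:
        print(name)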
| 42.285714
| 60
| 0.402027
|
2a2a766c5dd5617081098cfe36a49d2ec19d00d1
| 2,970
|
py
|
Python
|
examples/shorty/utils.py
|
adityavs/werkzeug
|
03bf010f239255049b62f41e37e2e53006ad2398
|
[
"BSD-3-Clause"
] | 1
|
2020-08-08T21:54:22.000Z
|
2020-08-08T21:54:22.000Z
|
examples/shorty/utils.py
|
adityavs/werkzeug
|
03bf010f239255049b62f41e37e2e53006ad2398
|
[
"BSD-3-Clause"
] | null | null | null |
examples/shorty/utils.py
|
adityavs/werkzeug
|
03bf010f239255049b62f41e37e2e53006ad2398
|
[
"BSD-3-Clause"
] | null | null | null |
from os import path
from random import randrange
from random import sample
from jinja2 import Environment
from jinja2 import FileSystemLoader
from sqlalchemy import MetaData
from sqlalchemy.orm import create_session
from sqlalchemy.orm import scoped_session
from werkzeug.local import Local
from werkzeug.local import LocalManager
from werkzeug.routing import Map
from werkzeug.routing import Rule
from werkzeug.urls import url_parse
from werkzeug.utils import cached_property
from werkzeug.wrappers import Response
TEMPLATE_PATH = path.join(path.dirname(__file__), "templates")
STATIC_PATH = path.join(path.dirname(__file__), "static")
ALLOWED_SCHEMES = frozenset(["http", "https", "ftp", "ftps"])
URL_CHARS = "abcdefghijkmpqrstuvwxyzABCDEFGHIJKLMNPQRST23456789"
local = Local()
local_manager = LocalManager([local])
application = local("application")
metadata = MetaData()
url_map = Map([Rule("/static/<file>", endpoint="static", build_only=True)])
session = scoped_session(
lambda: create_session(
application.database_engine, autocommit=False, autoflush=False
)
)
jinja_env = Environment(loader=FileSystemLoader(TEMPLATE_PATH))
def expose(rule, **kw):
def decorate(f):
kw["endpoint"] = f.__name__
url_map.add(Rule(rule, **kw))
return f
return decorate
def url_for(endpoint, _external=False, **values):
return local.url_adapter.build(endpoint, values, force_external=_external)
jinja_env.globals["url_for"] = url_for
def render_template(template, **context):
return Response(
jinja_env.get_template(template).render(**context), mimetype="text/html"
)
def validate_url(url):
return url_parse(url)[0] in ALLOWED_SCHEMES
def get_random_uid():
return "".join(sample(URL_CHARS, randrange(3, 9)))
class Pagination(object):
def __init__(self, query, per_page, page, endpoint):
self.query = query
self.per_page = per_page
self.page = page
self.endpoint = endpoint
@cached_property
def count(self):
return self.query.count()
@cached_property
def entries(self):
return (
self.query.offset((self.page - 1) * self.per_page)
.limit(self.per_page)
.all()
)
@property
def has_previous(self):
"""Return True if there are pages before the current one."""
return self.page > 1
@property
def has_next(self):
"""Return True if there are pages after the current one."""
return self.page < self.pages
@property
def previous(self):
"""Return the URL for the previous page."""
return url_for(self.endpoint, page=self.page - 1)
@property
def next(self):
"""Return the URL for the next page."""
return url_for(self.endpoint, page=self.page + 1)
@property
def pages(self):
"""Return the number of pages."""
return max(0, self.count - 1) // self.per_page + 1
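# A quick smoke test of the stand-alone helpers above (illustrative only; the URLs are
# arbitrary examples): validate_url() only checks that the scheme is allowed, and
# get_random_uid() draws between 3 and 8 characters from URL_CHARS.
if __name__ == "__main__":
    assert validate_url("https://example.com/some/path")
    assert not validate_url("javascript:alert(1)")
    print("random short id:", get_random_uid())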
| 26.517857
| 80
| 0.688552
|
7c9c73b49d75e9519a015dcaa64a36d23af5b1c3
| 8,226
|
py
|
Python
|
src/amplitude/plugin.py
|
bohan-amplitude/Amplitude-Python
|
8eb6e5242d77a84d6516dce96e8b8411e6fc1247
|
[
"MIT"
] | null | null | null |
src/amplitude/plugin.py
|
bohan-amplitude/Amplitude-Python
|
8eb6e5242d77a84d6516dce96e8b8411e6fc1247
|
[
"MIT"
] | null | null | null |
src/amplitude/plugin.py
|
bohan-amplitude/Amplitude-Python
|
8eb6e5242d77a84d6516dce96e8b8411e6fc1247
|
[
"MIT"
] | null | null | null |
"""Amplutide plugin module. Provide base class to implement customized plugin
Classes:
Plugin: Base class of all plugins.
EventPlugin: Base class to implement plugins that modify and enrich events.
DestinationPlugin: Base class to implement plugins that send events to customized destinations.
    AmplitudeDestinationPlugin: Default Amplitude destination plugin that sends events to Amplitude.
    ContextPlugin: A default plugin that adds library info to the event. Also sets the event's default timestamp and insert_id
if not set elsewhere.
Methods:
    verify_event(event): Perform basic validation before AmplitudeDestinationPlugin sends the event to storage.
"""
import abc
import uuid
from typing import Optional
from amplitude.event import BaseEvent, GroupIdentifyEvent, IdentifyEvent, RevenueEvent
from amplitude import constants
from amplitude.timeline import Timeline
from amplitude.exception import InvalidEventError
from amplitude import utils
from amplitude.worker import Workers
class Plugin(abc.ABC):
"""The abstract base class of plugins
Args:
plugin_type (constants.PluginType): The plugin type.
"""
def __init__(self, plugin_type: constants.PluginType):
"""The constructor of Plugin class"""
self.plugin_type: constants.PluginType = plugin_type
def setup(self, client):
"""Setup plugins with client instance parameter"""
pass
@abc.abstractmethod
def execute(self, event: BaseEvent):
"""Process event with plugin instance"""
pass
class EventPlugin(Plugin):
"""Plugins that modify and enrich events. Used as base class of event plugins.
Args:
plugin_type (constants.PluginType): The plugin type.
Methods:
setup(client): Setup plugin using Amplitude client instance.
        execute(event): Method to override to process an event. Return the modified event or None. Returning None
            stops the Amplitude client from sending the event, and the callback will not be triggered.
        track(event): Can be overridden to process a BaseEvent if the execute method is not overridden.
        revenue(event): Can be overridden to process a RevenueEvent if the execute method is not overridden.
        identify(event): Can be overridden to process an IdentifyEvent if the execute method is not overridden.
        group_identify(event): Can be overridden to process a GroupIdentifyEvent if the execute method is not overridden.
"""
def __init__(self, plugin_type: constants.PluginType):
super().__init__(plugin_type)
def execute(self, event: BaseEvent) -> Optional[BaseEvent]:
if isinstance(event, GroupIdentifyEvent):
return self.group_identify(event)
if isinstance(event, IdentifyEvent):
return self.identify(event)
if isinstance(event, RevenueEvent):
return self.revenue(event)
return self.track(event)
def group_identify(self, event: GroupIdentifyEvent) -> Optional[GroupIdentifyEvent]:
return event
def identify(self, event: IdentifyEvent) -> Optional[IdentifyEvent]:
return event
def revenue(self, event: RevenueEvent) -> Optional[RevenueEvent]:
return event
def track(self, event: BaseEvent) -> Optional[BaseEvent]:
return event
class DestinationPlugin(EventPlugin):
"""Plugins that send events to a destination like Amplitude.
Methods:
setup(client): Setup plugin using Amplitude client instance.
execute(event): Method to override to send out events.
add(plugin): Add additional processing ability by plugins to modify events before sending out.
remove(plugin): Remove a plugin instance from destination plugin.
shutdown(): Method to override to handle closure of client like flushing events,
closing threads and connections. Triggered by client.shutdown()
"""
def __init__(self):
super().__init__(constants.PluginType.DESTINATION)
self.timeline = Timeline()
def setup(self, client):
self.timeline.setup(client)
def add(self, plugin):
self.timeline.add(plugin)
return self
def remove(self, plugin):
self.timeline.remove(plugin)
return self
def execute(self, event: BaseEvent) -> None:
event = self.timeline.process(event)
super().execute(event)
def shutdown(self):
self.timeline.shutdown()
class AmplitudeDestinationPlugin(DestinationPlugin):
"""The Amplitude destination plugin. Added to client by default. Send events to Amplitude.
Methods:
        setup(client): Set up the plugin instance and the storage and workers instances of the destination plugin.
        execute(event): Process the event with plugins added to the destination plugin, then push the event to storage
            waiting to be sent.
        flush(): Flush all events in the storage instance.
        shutdown(): Shut down plugins and workers of the destination plugin.
"""
def __init__(self):
"""The constructor of AmplitudeDestinationPlugin class"""
super().__init__()
self.workers = Workers()
self.storage = None
self.configuration = None
def setup(self, client):
"""Setup plugin instance and storage and workers instance of the destination plugin.
Args:
client: The Amplitude client that holds the destination plugin.
"""
super().setup(client)
self.configuration = client.configuration
self.storage = client.configuration.get_storage()
self.workers.setup(client.configuration, self.storage)
self.storage.setup(client.configuration, self.workers)
def execute(self, event: BaseEvent) -> None:
"""Process event with plugins added to the destination plugin. Then pushed the event to storage
waiting to be sent.
Args:
event (BaseEvent): The event to be sent.
"""
event = self.timeline.process(event)
if not verify_event(event):
raise InvalidEventError("Invalid event.")
self.storage.push(event)
def flush(self):
"""Flush all event in storage instance."""
self.workers.flush()
def shutdown(self):
"""Shutdown plugins and works of the destination plugin."""
self.timeline.shutdown()
self.workers.stop()
class ContextPlugin(Plugin):
"""Amplitude Context plugin. Default added to client. Add library info to event.
Also set event default timestamp and insert_id if not set elsewhere.
Methods:
apply_context_data(event): Add SDK name and version to event.library.
execute(event): Set event default timestamp and insert_id if not set elsewhere.
Add SDK name and version to event.library.
"""
def __init__(self):
"""The constructor of ContextPlugin class"""
super().__init__(constants.PluginType.BEFORE)
self.context_string = f"{constants.SDK_LIBRARY}/{constants.SDK_VERSION}"
def apply_context_data(self, event: BaseEvent):
"""Add SDK name and version to event.library.
Args:
event (BaseEvent): The event to be processed.
"""
event.library = self.context_string
def execute(self, event: BaseEvent) -> BaseEvent:
"""Set event default timestamp and insert_id if not set elsewhere. Add SDK name and version to event.library.
Args:
event (BaseEvent): The event to be processed.
"""
if not event.time:
event.time = utils.current_milliseconds()
if not event.insert_id:
event.insert_id = str(uuid.uuid4())
self.apply_context_data(event)
return event
def verify_event(event):
"""Perform basic validation before AmplitudeDestinationPlugin send the event to storage.
Args:
event (BaseEvent): the event to be verified.
Returns:
        True if the event is valid, False otherwise.
"""
if isinstance(event, GroupIdentifyEvent):
return True
if (not isinstance(event, BaseEvent)) or \
(not event["event_type"]) or \
(not event["user_id"] and not event["device_id"]):
return False
return True
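# A minimal sketch of a custom enrichment plugin built on the classes above. Only
# EventPlugin, BaseEvent, Optional and constants.PluginType.BEFORE come from this module;
# the commented-out client wiring and the event_properties field are assumptions about the
# surrounding Amplitude SDK, not definitions from this file.
class AppVersionPlugin(EventPlugin):
    """Example plugin that stamps every tracked event with an app_version property."""
    def __init__(self, app_version: str):
        super().__init__(constants.PluginType.BEFORE)
        self.app_version = app_version
    def track(self, event: BaseEvent) -> Optional[BaseEvent]:
        # event_properties is assumed to be a dict-or-None field on BaseEvent, so create it lazily.
        event.event_properties = event.event_properties or {}
        event.event_properties["app_version"] = self.app_version
        return event
# client = Amplitude("API_KEY")            # hypothetical client construction
# client.add(AppVersionPlugin("1.2.3"))    # enrichment plugins run before events are sent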
| 35.921397
| 117
| 0.682835
|
d61d7668e99a14c1b21c58a0fde624d6f2a526e9
| 2,036
|
py
|
Python
|
controllers/line_detection.py
|
luispmb/orc
|
28a8c19f7884b5235e7d1b1cdda3584ab83e09c1
|
[
"MIT"
] | null | null | null |
controllers/line_detection.py
|
luispmb/orc
|
28a8c19f7884b5235e7d1b1cdda3584ab83e09c1
|
[
"MIT"
] | null | null | null |
controllers/line_detection.py
|
luispmb/orc
|
28a8c19f7884b5235e7d1b1cdda3584ab83e09c1
|
[
"MIT"
] | null | null | null |
#DEPENDENCIES
#######################################################################################################################
import sys
sys.path.insert(0, '..')
import os
import numpy as np
from functions_line_detection import line_preprocessing, line_preprocessing_advanced, line_detector
from line_breakdown import *
#inputs
parent_directory = os.path.abspath('')
root = os.path.abspath(os.path.join(parent_directory, os.pardir))
data_output_folder = os.path.join(root, 'test', 'data', 'output')
def line_detection(img_line, temp_object, path_lines, idl, key, mydoc_tesseract, path_quant, idw, idq, path_words, idt, path_letters):
# print('line image', key)
#preprocess it
try:
img_score,img_res=line_preprocessing(img_line, 31,29,19,17,30,0,0,4,3,'two')
#print('Base Image')
except:
img_score,img_res=line_preprocessing_advanced(img_line)
print('Base Image v2 / can be deleted')
#get all the lines per object and line pixels write them on a path
line_images, pixels, idl = line_detector(img_score,img_res,path_lines,idl)
# print('line detection pixels', pixels)
#1 text output per maximum hierarchy
text=[]
#recognition
for line in range (0,len(line_images)):
# import line breakdown
line_image = line_images[line]
pixels_line = pixels[line]
pixels_line = line_breakdown(line_image, mydoc_tesseract, path_quant, idw, path_words, idq, text, pixels_line, idt, path_letters)
temp_object['ocr_text'][key]=text
# print('key is', key)
temp_object['sub_coordinates'][key]=pixels
#mydoc_tesseract.save(path_lines+"OCR_output_teserract.docx")
mydoc_tesseract.save(os.path.join(path_lines, 'OCR_output_teserract.docx'))
# print('lines recognized for image')
#save the object
np.save(os.path.join(data_output_folder, 'object_output.npy'), temp_object)
return pixels_line, text
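# A hedged sketch of one call into line_detection(); every value below is a placeholder
# (the cropped line image, document object, docx handle, directories and id counters would
# normally come from the upstream object-detection stage of this pipeline):
#
#   pixels_line, text = line_detection(
#       img_line=cropped_line_image, temp_object={'ocr_text': {}, 'sub_coordinates': {}},
#       path_lines=os.path.join(data_output_folder, 'lines'), idl=0, key=0,
#       mydoc_tesseract=docx_document, path_quant=quant_dir, idw=0, idq=0,
#       path_words=words_dir, idt=0, path_letters=letters_dir)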
| 36.357143
| 138
| 0.641454
|
bc2325da34534e9fdc648d3fd547cf81082c2b0a
| 1,950
|
py
|
Python
|
kt/keypoint/visualization.py
|
tkianai/tk-cv
|
b8b264b59e119396440071c3aa6cf9978c2fddad
|
[
"MIT"
] | 2
|
2019-09-25T12:18:04.000Z
|
2020-04-25T05:30:56.000Z
|
kt/keypoint/visualization.py
|
tkianai/tk-cv
|
b8b264b59e119396440071c3aa6cf9978c2fddad
|
[
"MIT"
] | null | null | null |
kt/keypoint/visualization.py
|
tkianai/tk-cv
|
b8b264b59e119396440071c3aa6cf9978c2fddad
|
[
"MIT"
] | null | null | null |
"""This visualize the keypoints, including pose, face, hand and so on
"""
import os
import cv2
import numpy as np
def show_keypoints_over_image(
img,
points,
skeletons=None,
point_radius=3,
point_color=(0, 255, 0),
line_width=2,
line_color=None,
):
"""Plot keypoints on image
Arguments:
img {str | np img | pil img} -- input image
points {list of list} -- point set
Keyword Arguments:
skeletons {list of list} -- point collection (default: {None})
point_radius {int} -- radius of keypoint (default: {3})
point_color {tuple} -- keypoint color to show (default: {(0, 255, 0)})
line_width {int} -- line width(default: {2})
line_color {tuple} -- line color to show (default: {None})
Returns:
img -- numpy image[opencv]
"""
if isinstance(img, str):
img = cv2.imread(img)
    if not isinstance(img, np.ndarray):
        img = np.array(img)  # convert a PIL image (or other array-like) to a numpy array
# plot isolate keypoints
for point in points:
# point: [width(x), height(y), visible]
if point[2] > 0:
img = cv2.circle(img, (int(point[0]), int(point[1])), point_radius, point_color,-1)
# plot skeletons
if skeletons is not None:
for skeleton in skeletons:
for i in range(len(skeleton) - 1):
p1_v, p2_v = points[skeleton[i]][2], points[skeleton[i + 1]][2]
if p1_v > 0 and p2_v > 0:
if line_color is None:
_line_color = np.random.randint(256, size=3).tolist()
else:
_line_color = line_color
p1 = (int(points[skeleton[i]][0]), int(points[skeleton[i]][1]))
p2 = (int(points[skeleton[i + 1]][0]), int(points[skeleton[i + 1]][1]))
img = cv2.line(img, p1, p2, _line_color, line_width)
return img
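# A minimal, self-contained sketch of the expected input layout: each point is
# [x, y, visibility] and each skeleton is a list of point indices to connect. The blank
# canvas and the output file name below are illustrative assumptions only.
if __name__ == "__main__":
    canvas = np.zeros((200, 200, 3), dtype=np.uint8)
    demo_points = [[50, 50, 1], [100, 60, 1], [150, 120, 1], [80, 160, 0]]  # last point hidden
    demo_skeletons = [[0, 1, 2]]  # draw segments 0-1 and 1-2
    out = show_keypoints_over_image(canvas, demo_points, skeletons=demo_skeletons)
    cv2.imwrite("keypoints_demo.png", out)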
| 30
| 95
| 0.549231
|
e9469ab68f2f1e7851c2ce45333d79fdf5af79fe
| 206
|
py
|
Python
|
eg1.py
|
thuyatun/python-exercise
|
fd411ff815012b461934277d5e8e0591270a09e4
|
[
"MIT"
] | null | null | null |
eg1.py
|
thuyatun/python-exercise
|
fd411ff815012b461934277d5e8e0591270a09e4
|
[
"MIT"
] | null | null | null |
eg1.py
|
thuyatun/python-exercise
|
fd411ff815012b461934277d5e8e0591270a09e4
|
[
"MIT"
] | null | null | null |
class dog:
legs = 4
    def __init__(self, name, color):  # the constructor must be named __init__ (double underscores)
self.name = name
self.color = color
fido = dog("fido", "brown")
print(fido.legs)
print(dog.legs)
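# Both prints show the class attribute `legs`, which is shared by the class and every
# instance, so each line above prints 4. Assigning fido.legs = 3 afterwards would create an
# instance attribute that shadows the class attribute for fido only.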
| 18.727273
| 35
| 0.495146
|
bf48b9a689cfcb45352e5a2f2833dae725f10bbf
| 8,043
|
py
|
Python
|
ucf101/evaluate_ucf101_rgb_ALSTM.py
|
QUVA-Lab/VideoLSTM
|
fcacf9be37cff634652ff4e08c694de91627900f
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
ucf101/evaluate_ucf101_rgb_ALSTM.py
|
QUVA-Lab/VideoLSTM
|
fcacf9be37cff634652ff4e08c694de91627900f
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
ucf101/evaluate_ucf101_rgb_ALSTM.py
|
QUVA-Lab/VideoLSTM
|
fcacf9be37cff634652ff4e08c694de91627900f
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
__author__ = 'zhenyang'
import theano
import theano.tensor as TT
import sys
sys.path.append('../')
import sparnn
import sparnn.utils
from sparnn.utils import *
from sparnn.iterators import VideoDataIterator
from sparnn.layers import InterfaceLayer
from sparnn.layers import FeedForwardLayer
from sparnn.layers import CondLSTMLayer
from sparnn.layers import DropoutLayer
from sparnn.layers import PredictionLayer
from sparnn.layers import ElementwiseCostLayer
from sparnn.models import VideoModel
from sparnn.optimizers import SGD
from sparnn.optimizers import RMSProp
from sparnn.optimizers import AdaDelta
from sparnn.optimizers import Adam
import os
import random
import numpy
save_path = "./ucf101-experiment/ucf101-rgb-ALSTM/rms-lr-0.001-drop-0.5/"
log_path = save_path + "evaluate_ucf101_rgb_ALSTM.log"
if not os.path.exists(save_path):
os.makedirs(save_path)
sparnn.utils.quick_logging_config(log_path)
random.seed(1000)
numpy.random.seed(1000)
iterator_rng = sparnn.utils.quick_npy_rng(1337)
iterator_frame_rng = sparnn.utils.quick_npy_rng(1234)
seq_length = 30
#############################
iterator_param = {'dataset': 'ucf101',
'data_file': '/ssd/zhenyang/data/UCF101/features/rgb_vgg16_pool5',
'num_frames_file': '/ssd/zhenyang/data/UCF101/train_framenum.txt',
'labels_file': '/ssd/zhenyang/data/UCF101/train_labels.txt',
'vid_name_file': '/ssd/zhenyang/data/UCF101/train_filenames.txt',
'dataset_name': 'features', 'rng': iterator_rng, 'frame_rng': iterator_frame_rng,
'seq_length': seq_length, 'num_segments': 1, 'seq_fps': 30,
'minibatch_size': 128, 'train_sampling': True, 'reshape': True,
'use_mask': True, 'input_data_type': 'float32', 'output_data_type': 'int64', 'one_hot_label': True,
'is_output_multilabel': False,
'name': 'ucf101-train-video-iterator'}
train_iterator = VideoDataIterator(iterator_param)
train_iterator.begin(do_shuffle=True)
train_iterator.print_stat()
#
iterator_param = {'dataset': 'ucf101',
'data_file': '/ssd/zhenyang/data/UCF101/features/rgb_vgg16_pool5',
'num_frames_file': '/ssd/zhenyang/data/UCF101/test_framenum.txt',
'labels_file': '/ssd/zhenyang/data/UCF101/test_labels.txt',
'vid_name_file': '/ssd/zhenyang/data/UCF101/test_filenames.txt',
'dataset_name': 'features', 'rng': None, 'frame_rng': None,
'seq_length': seq_length, 'num_segments': 25, 'seq_fps': 30,
'minibatch_size': 20, 'train_sampling': False, 'reshape': True,
'use_mask': True, 'input_data_type': 'float32', 'output_data_type': 'int64', 'one_hot_label': True,
'is_output_multilabel': False,
'name': 'ucf101-valid-video-iterator'}
valid_iterator = VideoDataIterator(iterator_param)
valid_iterator.begin(do_shuffle=False)
valid_iterator.print_stat()
#
test_iterator = None
#############################
rng = sparnn.utils.quick_npy_rng()
theano_rng = sparnn.utils.quick_theano_rng(rng)
############################# interface layer
param = {"id": "ucf101-rgb-vgg16-pool5", "use_mask": True,
"input_ndim": 4, "output_ndim": 2,
"output_data_type": "int64"}
interface_layer = InterfaceLayer(param)
x = interface_layer.input
mask = interface_layer.mask
y = interface_layer.output
timesteps = x.shape[0]
minibatch_size = x.shape[1]
feature_dim = 512
hidden_dim = 512
out_dim = 1024
regions = 7*7
actions = 101
data_dim = (feature_dim, regions)
logger.info("Data Dim:" + str(data_dim))
# initial state/cell (Timestep, Minibatch, FeatureDim, Region)
input_mean = x.mean(0) ### input_mean is now (Minibatch, FeatureDim, Region)
input_mean = input_mean.mean(2) ### you want input_mean to be Minibatch x FeatureDim
#############################
middle_layers = []
#0# initalization layer for lstm state
param = {"id": 0, "rng": rng, "theano_rng": theano_rng,
"dim_in": (feature_dim,), "dim_out": (hidden_dim,),
"minibatch_size": minibatch_size,
"activation": "tanh",
"input": input_mean}
middle_layers.append(FeedForwardLayer(param))
#1# initalization layer for lstm memory
param = {"id": 1, "rng": rng, "theano_rng": theano_rng,
"dim_in": (feature_dim,), "dim_out": (hidden_dim,),
"minibatch_size": minibatch_size,
"activation": "tanh",
"input": input_mean}
middle_layers.append(FeedForwardLayer(param))
#2# conditional lstm layer (main layer)
param = {"id": 2, "rng": rng, "theano_rng": theano_rng,
"dim_in": data_dim, "dim_out": (hidden_dim,),
"minibatch_size": minibatch_size,
"input": x, "mask": mask,
"init_hidden_state": middle_layers[0].output,
"init_cell_state": middle_layers[1].output,
"temperature_inverse": 1.,
"n_steps": seq_length}
middle_layers.append(CondLSTMLayer(param))
#3# set up dropout 1
param = {"id": 3, "rng": rng, "theano_rng": theano_rng,
"dim_in": (hidden_dim,), "dim_out": (hidden_dim,),
"minibatch_size": minibatch_size,
"dropout_rate": 0.5,
"input": middle_layers[2].output}
middle_layers.append(DropoutLayer(param))
#4# output layer
param = {"id": 4, "rng": rng, "theano_rng": theano_rng,
"dim_in": (hidden_dim,), "dim_out": (out_dim,),
"minibatch_size": minibatch_size,
"activation": "tanh",
"input": middle_layers[3].output}
middle_layers.append(FeedForwardLayer(param))
#5# set up dropout 2
param = {"id": 5, "rng": rng, "theano_rng": theano_rng,
"dim_in": (out_dim,), "dim_out": (out_dim,),
"minibatch_size": minibatch_size,
"dropout_rate": 0.5,
"input": middle_layers[4].output}
middle_layers.append(DropoutLayer(param))
#6# classification layer (softmax outputs class probabilities)
param = {"id": 6, "rng": rng, "theano_rng": theano_rng,
"dim_in": (out_dim,), "dim_out": (actions,),
"minibatch_size": minibatch_size,
"activation": "softmax",
"input": middle_layers[5].output}
middle_layers.append(FeedForwardLayer(param))
#7# label prediction layer
#param = {"id": 7, "rng": rng, "theano_rng": theano_rng,
# "dim_in": (actions,), "dim_out": (1,),
# "minibatch_size": minibatch_size,
# "last_n": seq_length,
# "is_multilabel": False,
# "input": middle_layers[6].output}
#middle_layers.append(PredictionLayer(param))
############################# cost layer
param = {"id": "cost", "rng": rng, "theano_rng": theano_rng,
"dim_in": (actions,), "dim_out": (1,),
"minibatch_size": minibatch_size,
"cost_func": "CategoricalCrossEntropy",
#"regularization": "l2",
"param_layers": middle_layers,
#"penalty_rate": 0.00001,
"input": middle_layers[6].output,
"mask": mask,
"target": y}
cost_layer = ElementwiseCostLayer(param)
outputs = [{"name": "probability", "value": middle_layers[6].output}]
# error_layers = [cost_layer]
############################# model
param = {'interface_layer': interface_layer, 'middle_layers': middle_layers, 'cost_layer': cost_layer,
'outputs': outputs, 'errors': None, 'last_n': seq_length,
'name': "UCF101-VideoModel-RGB-ALSTM-RMS",
'problem_type': "classification"}
model = VideoModel(param)
model.print_stat()
############################# optimizer
param = {'id': '1', 'learning_rate': 0.001, 'decay_rate': 0.9, 'clip_threshold': None, 'verbose': False,
'max_epoch': 200, 'start_epoch': 0, 'valid_epoch': 20, 'max_epochs_no_best': 200,
'display_freq': 150, 'valid_freq': None, 'save_freq': None,
'autosave_mode': ['interval', 'best'], 'save_path': save_path, 'save_interval': 20}
optimizer = RMSProp(model, train_iterator, valid_iterator, test_iterator, param)
optimizer.train()
| 37.409302
| 117
| 0.647395
|
a42daebe592d289f6bfeb3011d1d4800cc6f718b
| 9,922
|
py
|
Python
|
src/asm.py
|
dalehumby/d8
|
1b8e09270ddeccdad2aaa932f8085bb761e84cf6
|
[
"MIT"
] | null | null | null |
src/asm.py
|
dalehumby/d8
|
1b8e09270ddeccdad2aaa932f8085bb761e84cf6
|
[
"MIT"
] | null | null | null |
src/asm.py
|
dalehumby/d8
|
1b8e09270ddeccdad2aaa932f8085bb761e84cf6
|
[
"MIT"
] | 1
|
2022-02-06T15:46:17.000Z
|
2022-02-06T15:46:17.000Z
|
#!/usr/local/bin/python3
"""
Assembler for the D8 CPU
Usage:
$ python asm.py file.asm
This generates
- file.d8
- file.hex
in the same folder as file.asm
.asm files need to conform to the grammar as defined in grammar.lark
Once assembled, the d8 file can be used by emulate.py to run the emulator, or
in GUI mode using gui.py
The hex file can be loaded in to the CPU simulator's RAM for execution.
"""
import argparse
import os
from lark import Lark, Transformer, UnexpectedCharacters, v_args
from lark.exceptions import VisitError
from d8 import Machine
def load_grammar(filename):
"""Load the Lark EBNF grammar file"""
with open(filename, "r") as f:
grammar = f.read()
return Lark(grammar, start="program", propagate_positions=True)
class SymbolTable:
"""Manage the symbol table."""
def __init__(self):
self.symbol_table = {}
def add(self, key, value):
"""Add a symbol to the table, checking for duplicates."""
if key in self.symbol_table:
raise KeyError(f'Symbol "{key}" already defined')
else:
self.symbol_table[key.lower()] = value
def get_all(self):
"""Get full symbol table."""
return self.symbol_table
def get(self, key):
"""
Get a specific key from the table.
NOTE: Don't catch the KeyError here, catch elsewhere where there is
        more context, e.g. line numbers
"""
return self.symbol_table[key.lower()]
@v_args(inline=True) # Affects the signatures of the methods
class EvalExpressions(Transformer):
"""
Using Lark's Transformer method we can write a transformer for each subtree
to evaluate our expressions. We also use Python's built in operators for
doing the maths, and write our own to transform eg hex in to an int.
Ref https://lark-parser.readthedocs.io/en/latest/visitors.html#transformer
"""
from operator import add, lshift, mul, neg, rshift, sub
from operator import truediv as div
def __init__(self, get_symbol):
self.get_symbol = get_symbol
def integer(self, value):
return int(value)
def hex(self, value):
return int(value, 16)
def char(self, value):
return ord(value)
def symbol(self, name):
return self.get_symbol(name)
def resolve_expression(self, tree):
"""Take a math expression and resolve it to an int value."""
try:
answer = self.EvalExpressions(self.get).transform(tree)
except VisitError as e:
print(f"Undefined symbol {e.orig_exc} on line {tree.meta.line}")
exit(-1)
try:
return int(answer.children[0])
except AttributeError:
# If answer is not a tree then it's a token
return int(answer)
class MemoryMap:
"""Map of the entire program space."""
def __init__(self):
"""Initialise empty memory."""
self.memory = {}
self.address = 0
def set_origin(self, address):
self.address = address
def set_reset(self, location):
"""Set the reset location."""
self.add_instruction(0, "bra", [location], location.meta.line)
def add_instruction(self, address, op, opr, line_number):
self.memory[address] = {
"type": "instruction",
"op": op.lower(),
"opr": opr,
"line_number": line_number,
}
self.address += 2
def add_variable(self, address, symbol, value, line_number):
self.memory[address] = {
"type": "variable",
"symbol": symbol,
"value": value,
"line_number": line_number,
}
self.address += len(value)
def get_all(self):
return self.memory
def items(self):
return self.memory.items()
def get_address(self):
return self.address
def resolve_directive(node, symbols, memory):
"""Resolve the assembler directives such as .reset .origin .define and .data"""
if node.data == "reset":
location = resolve_reset(node.children)
memory.set_reset(location)
elif node.data == "origin":
address = resolve_origin(node, symbols)
memory.set_origin(address)
elif node.data == "define":
resolve_define(node.children, symbols)
elif node.data == "string":
resolve_string(node.children, symbols, memory)
elif node.data == "byte":
resolve_byte(node.children, symbols, memory)
elif node.data == "array":
resolve_array(node.children, symbols, memory)
else:
raise NotImplementedError
def resolve_reset(tokens):
"""Resolve the reset to an location, but dont resolve the location to an address yet."""
tree = tokens[0]
if tree.data != "symbol":
raise Exception("Unknown reset type", tree.data)
return tree
def resolve_origin(node, symbols):
"""Resolve the address of the origin."""
return symbols.resolve_expression(node)
def resolve_define(tokens, symbols):
"""Add the symbol and value that has been defined to the symbol table."""
symbol = tokens[0]
value = symbols.resolve_expression(tokens[1])
symbols.add(symbol, value)
def resolve_string(tokens, symbols, memory):
symbol = tokens[0]
value = tokens[1]
address = memory.get_address()
symbols.add(symbol, address)
value = [ord(x) for x in value[1:-1]] # Find better way to strip ""
value.append(0)
memory.add_variable(address, symbol, value, symbol.line)
def resolve_byte(tokens, symbols, memory):
"""Declare the variable and initialise memory to 0x00."""
symbol = tokens[0]
byte_count = symbols.resolve_expression(tokens[1])
value = [0] * byte_count # init all bytes to 0
address = memory.get_address()
symbols.add(symbol, address)
memory.add_variable(address, symbol, value, symbol.line)
def resolve_array(tokens, symbols, memory):
"""Declare the variable and define each element in the array to initialise memory."""
symbol = tokens[0]
values = [symbols.resolve_expression(element) for element in tokens[1:]]
address = memory.get_address()
symbols.add(symbol, address)
memory.add_variable(address, symbol, values, symbol.line)
def resolve_label(token, symbols, memory):
"""Given a label, resolve it in to an address and save to symbol table."""
symbols.add(token, memory.get_address())
def resolve_instruction(instruction, memory):
"""Add an instruction to the memory map."""
memory.add_instruction(
memory.get_address(),
instruction.data,
instruction.children,
instruction.meta.line,
)
def build_symbols_memory(source_tree, symbols, memory):
"""Walks the parsed source tree, adding symbols and memory as you go."""
for line in source_tree.children:
for item in line.children:
if item.data == "comment":
pass
elif item.data == "directive":
resolve_directive(item.children[0], symbols, memory)
elif item.data == "label":
resolve_label(item.children[0], symbols, memory)
else:
resolve_instruction(item, memory)
def build_d8_file(source_filename, symbols, memory):
"""Write the d8 file with the machine instructions and debug info."""
outlines = []
machine = Machine(symbols)
# Write the header of the .d8 file
out = f"; Assembled {source_filename}\n; Symbols = {symbols.get_all()}\n;Adr | Val | Ln | Debug info"
outlines.append(out)
print(out)
# Now that we have the complete symbol table, do the second pass
for address, line in memory.items():
line_number = line["line_number"]
if line["type"] == "instruction":
opcode = line["op"]
operands = line["opr"]
m = machine.instruction(address, opcode, operands)
out = f"{address:04X} | {m:04X} | {line_number:2d} | {opcode} {operands} ({machine.string(m)})"
outlines.append(out)
print(out)
elif line["type"] == "variable":
hexstr = "".join(f"{v:02X}" for v in line["value"])
out = f'{address:04X} | {hexstr} | {line_number:2d} | var:{line["symbol"]}[{len(line["value"])}]'
outlines.append(out)
print(out)
else:
raise Exception(f"Unknown type {line['type']}")
# Write the .d8 file
outfile = os.path.splitext(source_filename)[0] + ".d8"
with open(outfile, "w") as f:
f.writelines(map(lambda s: s + "\n", outlines))
if __name__ == "__main__":
argparser = argparse.ArgumentParser(description="Assembler for the D8 CPU")
argparser.add_argument("source", help="Input file to assemble")
argparser.add_argument(
"--check", action="store_true", help="Only check the syntax, don't assemble"
)
args = argparser.parse_args()
source_filename = args.source
check_syntax = args.check
with open(source_filename, "r") as f:
raw_source = f.read()
asmparser = load_grammar("grammar.lark")
try:
source_tree = asmparser.parse(raw_source)
except UnexpectedCharacters as e:
print(e)
exit(-1)
if check_syntax:
exit()
print(source_tree.pretty(), "\n")
print(source_tree, "\n")
symbols = SymbolTable()
memory = MemoryMap()
# First pass, iterate over file building symbol table and memory map
build_symbols_memory(source_tree, symbols, memory)
print("Symbols:\n", symbols.get_all(), "\n")
print("Memory map:\n", memory.get_all(), "\n")
# Second pass, resolve all symbols in to values and write machine instructions to output file
build_d8_file(source_filename, symbols, memory)
# HEX file output
# TODO
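# A hedged sketch of the same two-pass flow above, driven as a library call sequence; the
# source file name below is a placeholder and not taken from any real D8 program:
#
#   parser = load_grammar("grammar.lark")
#   tree = parser.parse(open("prog.asm").read())
#   symbols, memory = SymbolTable(), MemoryMap()
#   build_symbols_memory(tree, symbols, memory)    # pass 1: build symbol table and memory map
#   build_d8_file("prog.asm", symbols, memory)     # pass 2: resolve symbols, emit prog.d8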
| 31.398734
| 109
| 0.63082
|
98347491ee00b66d2c2f8df69ddf663c1bffbd84
| 2,062
|
py
|
Python
|
youtube_dl/extractor/yourporn.py
|
hackarada/youtube-dl
|
2ba46715a41fe074eab2221170b2ac78fab93fad
|
[
"Unlicense"
] | 66,635
|
2019-03-10T21:34:18.000Z
|
2022-03-31T23:50:31.000Z
|
youtube_dl/extractor/yourporn.py
|
hackarada/youtube-dl
|
2ba46715a41fe074eab2221170b2ac78fab93fad
|
[
"Unlicense"
] | 10,936
|
2019-03-10T21:35:47.000Z
|
2022-03-31T23:46:52.000Z
|
youtube_dl/extractor/yourporn.py
|
hackarada/youtube-dl
|
2ba46715a41fe074eab2221170b2ac78fab93fad
|
[
"Unlicense"
] | 15,194
|
2019-03-10T21:09:27.000Z
|
2022-03-31T22:13:49.000Z
|
from __future__ import unicode_literals
from .common import InfoExtractor
from ..compat import compat_str
from ..utils import (
parse_duration,
urljoin,
)
class YourPornIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?sxyprn\.com/post/(?P<id>[^/?#&.]+)'
_TESTS = [{
'url': 'https://sxyprn.com/post/57ffcb2e1179b.html',
'md5': '6f8682b6464033d87acaa7a8ff0c092e',
'info_dict': {
'id': '57ffcb2e1179b',
'ext': 'mp4',
'title': 'md5:c9f43630bd968267672651ba905a7d35',
'thumbnail': r're:^https?://.*\.jpg$',
'duration': 165,
'age_limit': 18,
},
'params': {
'skip_download': True,
},
}, {
'url': 'https://sxyprn.com/post/57ffcb2e1179b.html',
'only_matching': True,
}]
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
parts = self._parse_json(
self._search_regex(
r'data-vnfo=(["\'])(?P<data>{.+?})\1', webpage, 'data info',
group='data'),
video_id)[video_id].split('/')
num = 0
for c in parts[6] + parts[7]:
if c.isnumeric():
num += int(c)
parts[5] = compat_str(int(parts[5]) - num)
parts[1] += '8'
video_url = urljoin(url, '/'.join(parts))
title = (self._search_regex(
r'<[^>]+\bclass=["\']PostEditTA[^>]+>([^<]+)', webpage, 'title',
default=None) or self._og_search_description(webpage)).strip()
thumbnail = self._og_search_thumbnail(webpage)
duration = parse_duration(self._search_regex(
r'duration\s*:\s*<[^>]+>([\d:]+)', webpage, 'duration',
default=None))
return {
'id': video_id,
'url': video_url,
'title': title,
'thumbnail': thumbnail,
'duration': duration,
'age_limit': 18,
'ext': 'mp4',
}
| 30.323529
| 76
| 0.511154
|
a0461b6a541d3e470541ee898e1088456beeef13
| 686
|
py
|
Python
|
arbitrage/public_markets/brokercny.py
|
abaoj/bitcoin-arbitrage
|
cf19f3a388b34b3587c8fc0363254a72236451d7
|
[
"Unlicense"
] | 126
|
2017-06-23T09:10:58.000Z
|
2021-12-21T19:06:35.000Z
|
arbitrage/public_markets/brokercny.py
|
lism/bitcoin-arbitrage
|
cf19f3a388b34b3587c8fc0363254a72236451d7
|
[
"Unlicense"
] | 1
|
2020-06-04T11:05:45.000Z
|
2020-06-04T11:05:45.000Z
|
arbitrage/public_markets/brokercny.py
|
lism/bitcoin-arbitrage
|
cf19f3a388b34b3587c8fc0363254a72236451d7
|
[
"Unlicense"
] | 65
|
2017-06-23T09:11:03.000Z
|
2021-11-20T04:11:20.000Z
|
# Copyright (C) 2017, JackYao <yaozihao@yaozihao.cn>
import urllib.request
import urllib.error
import urllib.parse
import json
from .market import Market
import lib.broker_api as exchange_api
class BrokerCNY(Market):
def __init__(self):
super().__init__('CNY')
self.update_rate = 1
exchange_api.init_broker()
def update_depth(self):
depth = {}
try:
ticker = exchange_api.exchange_get_ticker()
depth['asks'] = [[ticker.ask, 30]]
depth['bids'] = [[ticker.bid, 30]]
except Exception as e:
exchange_api.init_broker()
return
self.depth = self.format_depth(depth)
| 25.407407
| 55
| 0.623907
|
07c647ce69a7953844e2410038cdedb89e2ef82e
| 27,157
|
py
|
Python
|
Bio/AlignIO/StockholmIO.py
|
bioinf-mcb/biopython
|
1a1f4a7ee4e0efba517d3d607c56c27e72e399cc
|
[
"BSD-3-Clause"
] | 1
|
2018-04-16T20:05:30.000Z
|
2018-04-16T20:05:30.000Z
|
Bio/AlignIO/StockholmIO.py
|
cosign070128/biopython
|
2f02e34ba76306e9c27eec9e051809bec2cece9b
|
[
"BSD-3-Clause"
] | 9
|
2020-05-05T00:54:23.000Z
|
2020-06-09T17:10:45.000Z
|
Bio/AlignIO/StockholmIO.py
|
cosign070128/biopython
|
2f02e34ba76306e9c27eec9e051809bec2cece9b
|
[
"BSD-3-Clause"
] | 3
|
2020-06-29T13:07:46.000Z
|
2021-06-14T20:11:55.000Z
|
# Copyright 2006-2016 by Peter Cock. All rights reserved.
# Revisions copyright 2015 by Ben Woodcroft. All rights reserved.
#
# This file is part of the Biopython distribution and governed by your
# choice of the "Biopython License Agreement" or the "BSD 3-Clause License".
# Please see the LICENSE file that should have been included as part of this
# package.
"""Bio.AlignIO support for "stockholm" format (used in the PFAM database).
You are expected to use this module via the Bio.AlignIO functions (or the
Bio.SeqIO functions if you want to work directly with the gapped sequences).
For example, consider a Stockholm alignment file containing the following::
# STOCKHOLM 1.0
#=GC SS_cons .................<<<<<<<<...<<<<<<<........>>>>>>>..
AP001509.1 UUAAUCGAGCUCAACACUCUUCGUAUAUCCUC-UCAAUAUGG-GAUGAGGGU
#=GR AP001509.1 SS -----------------<<<<<<<<---..<<-<<-------->>->>..--
AE007476.1 AAAAUUGAAUAUCGUUUUACUUGUUUAU-GUCGUGAAU-UGG-CACGA-CGU
#=GR AE007476.1 SS -----------------<<<<<<<<-----<<.<<-------->>.>>----
#=GC SS_cons ......<<<<<<<.......>>>>>>>..>>>>>>>>...............
AP001509.1 CUCUAC-AGGUA-CCGUAAA-UACCUAGCUACGAAAAGAAUGCAGUUAAUGU
#=GR AP001509.1 SS -------<<<<<--------->>>>>--->>>>>>>>---------------
AE007476.1 UUCUACAAGGUG-CCGG-AA-CACCUAACAAUAAGUAAGUCAGCAGUGAGAU
#=GR AE007476.1 SS ------.<<<<<--------->>>>>.-->>>>>>>>---------------
//
This is a single multiple sequence alignment, so you would probably load this
using the Bio.AlignIO.read() function:
>>> from Bio import AlignIO
>>> align = AlignIO.read("Stockholm/simple.sth", "stockholm")
>>> print(align)
SingleLetterAlphabet() alignment with 2 rows and 104 columns
UUAAUCGAGCUCAACACUCUUCGUAUAUCCUC-UCAAUAUGG-G...UGU AP001509.1
AAAAUUGAAUAUCGUUUUACUUGUUUAU-GUCGUGAAU-UGG-C...GAU AE007476.1
>>> for record in align:
... print("%s %i" % (record.id, len(record)))
AP001509.1 104
AE007476.1 104
This example file is clearly using RNA, so you might want the alignment object
(and the SeqRecord objects it holds) to reflect this, rather than simply using
the default single letter alphabet as shown above. You can do this with an
optional argument to the Bio.AlignIO.read() function:
>>> from Bio import AlignIO
>>> from Bio.Alphabet import generic_rna
>>> align = AlignIO.read("Stockholm/simple.sth", "stockholm",
... alphabet=generic_rna)
>>> print(align)
RNAAlphabet() alignment with 2 rows and 104 columns
UUAAUCGAGCUCAACACUCUUCGUAUAUCCUC-UCAAUAUGG-G...UGU AP001509.1
AAAAUUGAAUAUCGUUUUACUUGUUUAU-GUCGUGAAU-UGG-C...GAU AE007476.1
In addition to the sequences themselves, this example alignment also includes
some GR lines for the secondary structure of the sequences. These are
strings, with one character for each letter in the associated sequence:
>>> for record in align:
... print(record.id)
... print(record.seq)
... print(record.letter_annotations['secondary_structure'])
AP001509.1
UUAAUCGAGCUCAACACUCUUCGUAUAUCCUC-UCAAUAUGG-GAUGAGGGUCUCUAC-AGGUA-CCGUAAA-UACCUAGCUACGAAAAGAAUGCAGUUAAUGU
-----------------<<<<<<<<---..<<-<<-------->>->>..---------<<<<<--------->>>>>--->>>>>>>>---------------
AE007476.1
AAAAUUGAAUAUCGUUUUACUUGUUUAU-GUCGUGAAU-UGG-CACGA-CGUUUCUACAAGGUG-CCGG-AA-CACCUAACAAUAAGUAAGUCAGCAGUGAGAU
-----------------<<<<<<<<-----<<.<<-------->>.>>----------.<<<<<--------->>>>>.-->>>>>>>>---------------
Any general annotation for each row is recorded in the SeqRecord's annotations
dictionary. Any per-column annotation for the entire alignment is in the
alignment's column annotations dictionary, such as the secondary structure
consensus in this example:
>>> sorted(align.column_annotations.keys())
['secondary_structure']
>>> align.column_annotations["secondary_structure"]
'.................<<<<<<<<...<<<<<<<........>>>>>>>........<<<<<<<.......>>>>>>>..>>>>>>>>...............'
You can output this alignment in many different file formats
using Bio.AlignIO.write(), or the MultipleSeqAlignment object's format method:
>>> print(align.format("fasta"))
>AP001509.1
UUAAUCGAGCUCAACACUCUUCGUAUAUCCUC-UCAAUAUGG-GAUGAGGGUCUCUAC-A
GGUA-CCGUAAA-UACCUAGCUACGAAAAGAAUGCAGUUAAUGU
>AE007476.1
AAAAUUGAAUAUCGUUUUACUUGUUUAU-GUCGUGAAU-UGG-CACGA-CGUUUCUACAA
GGUG-CCGG-AA-CACCUAACAAUAAGUAAGUCAGCAGUGAGAU
<BLANKLINE>
Most output formats won't be able to hold the annotation possible in a
Stockholm file:
>>> print(align.format("stockholm"))
# STOCKHOLM 1.0
#=GF SQ 2
AP001509.1 UUAAUCGAGCUCAACACUCUUCGUAUAUCCUC-UCAAUAUGG-GAUGAGGGUCUCUAC-AGGUA-CCGUAAA-UACCUAGCUACGAAAAGAAUGCAGUUAAUGU
#=GS AP001509.1 AC AP001509.1
#=GS AP001509.1 DE AP001509.1
#=GR AP001509.1 SS -----------------<<<<<<<<---..<<-<<-------->>->>..---------<<<<<--------->>>>>--->>>>>>>>---------------
AE007476.1 AAAAUUGAAUAUCGUUUUACUUGUUUAU-GUCGUGAAU-UGG-CACGA-CGUUUCUACAAGGUG-CCGG-AA-CACCUAACAAUAAGUAAGUCAGCAGUGAGAU
#=GS AE007476.1 AC AE007476.1
#=GS AE007476.1 DE AE007476.1
#=GR AE007476.1 SS -----------------<<<<<<<<-----<<.<<-------->>.>>----------.<<<<<--------->>>>>.-->>>>>>>>---------------
#=GC SS_cons .................<<<<<<<<...<<<<<<<........>>>>>>>........<<<<<<<.......>>>>>>>..>>>>>>>>...............
//
<BLANKLINE>
Note that when writing Stockholm files, AlignIO does not break long sequences
up and interleave them (as in the input file shown above). The standard
allows this simpler layout, and it is more likely to be understood by other
tools.
Finally, as an aside, it can sometimes be useful to use Bio.SeqIO.parse() to
iterate over the alignment rows as SeqRecord objects - rather than working
with Alignment objects. Again, if you want to, you can specify this is RNA:
>>> from Bio import SeqIO
>>> from Bio.Alphabet import generic_rna
>>> for record in SeqIO.parse("Stockholm/simple.sth", "stockholm",
... alphabet=generic_rna):
... print(record.id)
... print(record.seq)
... print(record.letter_annotations['secondary_structure'])
AP001509.1
UUAAUCGAGCUCAACACUCUUCGUAUAUCCUC-UCAAUAUGG-GAUGAGGGUCUCUAC-AGGUA-CCGUAAA-UACCUAGCUACGAAAAGAAUGCAGUUAAUGU
-----------------<<<<<<<<---..<<-<<-------->>->>..---------<<<<<--------->>>>>--->>>>>>>>---------------
AE007476.1
AAAAUUGAAUAUCGUUUUACUUGUUUAU-GUCGUGAAU-UGG-CACGA-CGUUUCUACAAGGUG-CCGG-AA-CACCUAACAAUAAGUAAGUCAGCAGUGAGAU
-----------------<<<<<<<<-----<<.<<-------->>.>>----------.<<<<<--------->>>>>.-->>>>>>>>---------------
Remember that if you slice a SeqRecord, the per-letter-annotations like the
secondary structure string here, are also sliced:
>>> sub_record = record[10:20]
>>> print(sub_record.seq)
AUCGUUUUAC
>>> print(sub_record.letter_annotations['secondary_structure'])
-------<<<
Likewise with the alignment object, as long as you are not dropping any rows,
slicing specific columns of an alignment will slice any per-column-annotations:
>>> align.column_annotations["secondary_structure"]
'.................<<<<<<<<...<<<<<<<........>>>>>>>........<<<<<<<.......>>>>>>>..>>>>>>>>...............'
>>> part_align = align[:,10:20]
>>> part_align.column_annotations["secondary_structure"]
'.......<<<'
You can also see this in the Stockholm output of this partial-alignment:
>>> print(part_align.format("stockholm"))
# STOCKHOLM 1.0
#=GF SQ 2
AP001509.1 UCAACACUCU
#=GS AP001509.1 AC AP001509.1
#=GS AP001509.1 DE AP001509.1
#=GR AP001509.1 SS -------<<<
AE007476.1 AUCGUUUUAC
#=GS AE007476.1 AC AE007476.1
#=GS AE007476.1 DE AE007476.1
#=GR AE007476.1 SS -------<<<
#=GC SS_cons .......<<<
//
<BLANKLINE>
"""
from collections import OrderedDict
from Bio.Seq import Seq
from Bio.SeqRecord import SeqRecord
from Bio.Align import MultipleSeqAlignment
from .Interfaces import AlignmentIterator, SequentialAlignmentWriter
class StockholmWriter(SequentialAlignmentWriter):
"""Stockholm/PFAM alignment writer."""
# These dictionaries should be kept in sync with those
# defined in the StockholmIterator class.
pfam_gr_mapping = {
"secondary_structure": "SS",
"surface_accessibility": "SA",
"transmembrane": "TM",
"posterior_probability": "PP",
"ligand_binding": "LI",
"active_site": "AS",
"intron": "IN",
}
# These GC mappings are in addition to *_cons in GR mapping:
pfam_gc_mapping = {"reference_annotation": "RF", "model_mask": "MM"}
# Following dictionary deliberately does not cover AC, DE or DR
pfam_gs_mapping = {"organism": "OS", "organism_classification": "OC", "look": "LO"}
def write_alignment(self, alignment):
"""Use this to write (another) single alignment to an open file.
Note that sequences and their annotation are recorded
together (rather than having a block of annotation followed
by a block of aligned sequences).
"""
count = len(alignment)
self._length_of_sequences = alignment.get_alignment_length()
self._ids_written = []
if count == 0:
raise ValueError("Must have at least one sequence")
if self._length_of_sequences == 0:
raise ValueError("Non-empty sequences are required")
self.handle.write("# STOCKHOLM 1.0\n")
self.handle.write("#=GF SQ %i\n" % count)
for record in alignment:
self._write_record(record)
# This shouldn't be None... but just in case,
if alignment.column_annotations:
for k, v in sorted(alignment.column_annotations.items()):
if k in self.pfam_gc_mapping:
self.handle.write("#=GC %s %s\n" % (self.pfam_gc_mapping[k], v))
elif k in self.pfam_gr_mapping:
self.handle.write(
"#=GC %s %s\n" % (self.pfam_gr_mapping[k] + "_cons", v)
)
else:
# It doesn't follow the PFAM standards, but should we record
# this data anyway?
pass
self.handle.write("//\n")
def _write_record(self, record):
"""Write a single SeqRecord to the file (PRIVATE)."""
if self._length_of_sequences != len(record.seq):
raise ValueError("Sequences must all be the same length")
# For the case of Stockholm to Stockholm, try to use record.name
seq_name = record.id
if record.name is not None:
if "accession" in record.annotations:
if record.id == record.annotations["accession"]:
seq_name = record.name
# In the Stockholm file format, spaces are not allowed in the id
seq_name = seq_name.replace(" ", "_")
if "start" in record.annotations and "end" in record.annotations:
suffix = "/%s-%s" % (
str(record.annotations["start"]),
str(record.annotations["end"]),
)
if seq_name[-len(suffix) :] != suffix:
seq_name = "%s/%s-%s" % (
seq_name,
str(record.annotations["start"]),
str(record.annotations["end"]),
)
if seq_name in self._ids_written:
raise ValueError("Duplicate record identifier: %s" % seq_name)
self._ids_written.append(seq_name)
self.handle.write("%s %s\n" % (seq_name, str(record.seq)))
# The recommended placement for GS lines (per sequence annotation)
# is above the alignment (as a header block) or just below the
# corresponding sequence.
#
# The recommended placement for GR lines (per sequence per column
# annotation such as secondary structure) is just below the
# corresponding sequence.
#
# We put both just below the corresponding sequence as this allows
# us to write the file using a single pass through the records.
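# For illustration, mirroring the Stockholm output shown in the module
# docstring above, each record is therefore written as a self-contained
# block like:
#
#   AP001509.1 UUAAUCGAGCUCAACACUCUUC...
#   #=GS AP001509.1 AC AP001509.1
#   #=GS AP001509.1 DE AP001509.1
#   #=GR AP001509.1 SS -----------------<<<<<<<<...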
# AC = Accession
if "accession" in record.annotations:
self.handle.write(
"#=GS %s AC %s\n"
% (seq_name, self.clean(record.annotations["accession"]))
)
elif record.id:
self.handle.write("#=GS %s AC %s\n" % (seq_name, self.clean(record.id)))
# DE = description
if record.description:
self.handle.write(
"#=GS %s DE %s\n" % (seq_name, self.clean(record.description))
)
# DE = database links
for xref in record.dbxrefs:
self.handle.write("#=GS %s DR %s\n" % (seq_name, self.clean(xref)))
# GS = other per sequence annotation
for key, value in record.annotations.items():
if key in self.pfam_gs_mapping:
data = self.clean(str(value))
if data:
self.handle.write(
"#=GS %s %s %s\n"
% (seq_name, self.clean(self.pfam_gs_mapping[key]), data)
)
else:
# It doesn't follow the PFAM standards, but should we record
# this data anyway?
pass
# GR = per row per column sequence annotation
for key, value in record.letter_annotations.items():
if key in self.pfam_gr_mapping and len(str(value)) == len(record.seq):
data = self.clean(str(value))
if data:
self.handle.write(
"#=GR %s %s %s\n"
% (seq_name, self.clean(self.pfam_gr_mapping[key]), data)
)
else:
# It doesn't follow the PFAM standards, but should we record
# this data anyway?
pass
class StockholmIterator(AlignmentIterator):
"""Loads a Stockholm file from PFAM into MultipleSeqAlignment objects.
The file may contain multiple concatenated alignments, which are loaded
and returned incrementally.
This parser will detect if the Stockholm file follows the PFAM
conventions for sequence specific meta-data (lines starting #=GS
and #=GR) and populates the SeqRecord fields accordingly.
Any annotation which does not follow the PFAM conventions is currently
ignored.
If an accession is provided for an entry in the meta data, IT WILL NOT
be used as the record.id (it will be recorded in the record's
annotations). This is because some files have (sub) sequences from
different parts of the same accession (differentiated by different
start-end positions).
Wrap-around alignments are not supported - each sequence must be on
a single line. However, interlaced sequences should work.
For more information on the file format, please see:
http://sonnhammer.sbc.su.se/Stockholm.html
https://en.wikipedia.org/wiki/Stockholm_format
http://bioperl.org/formats/alignment_formats/Stockholm_multiple_alignment_format.html
For consistency with BioPerl and EMBOSS we call this the "stockholm"
format.
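A minimal usage sketch, reusing the Stockholm/simple.sth file from the
doctests above rather than introducing new data (normally you would go
through Bio.AlignIO as shown there instead of using this class directly):
>>> from Bio import AlignIO
>>> align = AlignIO.read("Stockholm/simple.sth", "stockholm")
>>> print(len(align))
2
>>> print(align.get_alignment_length())
104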
"""
# These dictionaries should be kept in sync with those
# defined in the PfamStockholmWriter class.
pfam_gr_mapping = {
"SS": "secondary_structure",
"SA": "surface_accessibility",
"TM": "transmembrane",
"PP": "posterior_probability",
"LI": "ligand_binding",
"AS": "active_site",
"IN": "intron",
}
# These GC mappings are in addition to *_cons in GR mapping:
pfam_gc_mapping = {"RF": "reference_annotation", "MM": "model_mask"}
# Following dictionary deliberately does not cover AC, DE or DR
pfam_gs_mapping = {"OS": "organism", "OC": "organism_classification", "LO": "look"}
_header = None # for caching lines between __next__ calls
def __next__(self):
"""Parse the next alignment from the handle."""
handle = self.handle
if self._header is None:
line = handle.readline()
else:
# Header we saved from when we were parsing
# the previous alignment.
line = self._header
self._header = None
if not line:
# Empty file - just give up.
raise StopIteration
if line.strip() != "# STOCKHOLM 1.0":
raise ValueError("Did not find STOCKHOLM header")
# Note: If this file follows the PFAM conventions, there should be
# a line containing the number of sequences, e.g. "#=GF SQ 67"
# We do not check for this - perhaps we should, and verify that
# if present it agrees with our parsing.
seqs = {}
ids = OrderedDict() # Really only need an OrderedSet, but python lacks this
gs = {}
gr = {}
gf = {}
gc = {}
passed_end_alignment = False
while True:
line = handle.readline()
if not line:
break # end of file
line = line.strip() # remove trailing \n
if line == "# STOCKHOLM 1.0":
self._header = line
break
elif line == "//":
# The "//" line indicates the end of the alignment.
# There may still be more meta-data
passed_end_alignment = True
elif line == "":
# blank line, ignore
pass
elif line[0] != "#":
# Sequence
# Format: "<seqname> <sequence>"
assert not passed_end_alignment
parts = [x.strip() for x in line.split(" ", 1)]
if len(parts) != 2:
# This might be someone attempting to store a zero length sequence?
raise ValueError(
"Could not split line into identifier and sequence:\n" + line
)
seq_id, seq = parts
if seq_id not in ids:
ids[seq_id] = True
seqs.setdefault(seq_id, "")
seqs[seq_id] += seq.replace(".", "-")
elif len(line) >= 5:
# Comment line or meta-data
if line[:5] == "#=GF ":
# Generic per-File annotation, free text
# Format: #=GF <feature> <free text>
feature, text = line[5:].strip().split(None, 1)
# Each feature key could be used more than once,
# so store the entries as a list of strings.
if feature not in gf:
gf[feature] = [text]
else:
gf[feature].append(text)
elif line[:5] == "#=GC ":
# Generic per-Column annotation, exactly 1 char per column
# Format: "#=GC <feature> <exactly 1 char per column>"
feature, text = line[5:].strip().split(None, 2)
if feature not in gc:
gc[feature] = ""
gc[feature] += text.strip() # append to any previous entry
# Might be interleaved blocks, so can't check length yet
elif line[:5] == "#=GS ":
# Generic per-Sequence annotation, free text
# Format: "#=GS <seqname> <feature> <free text>"
seq_id, feature, text = line[5:].strip().split(None, 2)
# if seq_id not in ids:
# ids.append(seq_id)
if seq_id not in gs:
gs[seq_id] = {}
if feature not in gs[seq_id]:
gs[seq_id][feature] = [text]
else:
gs[seq_id][feature].append(text)
elif line[:5] == "#=GR ":
# Generic per-Sequence AND per-Column markup
# Format: "#=GR <seqname> <feature> <exactly 1 char per column>"
seq_id, feature, text = line[5:].strip().split(None, 2)
# if seq_id not in ids:
# ids.append(seq_id)
if seq_id not in gr:
gr[seq_id] = {}
if feature not in gr[seq_id]:
gr[seq_id][feature] = ""
gr[seq_id][feature] += text.strip() # append to any previous entry
# Might be interleaved blocks, so can't check length yet
# Next line...
assert len(seqs) <= len(ids)
# assert len(gs) <= len(ids)
# assert len(gr) <= len(ids)
self.ids = ids.keys()
self.sequences = seqs
self.seq_annotation = gs
self.seq_col_annotation = gr
if ids and seqs:
if (
self.records_per_alignment is not None
and self.records_per_alignment != len(ids)
):
raise ValueError(
"Found %i records in this alignment, told to expect %i"
% (len(ids), self.records_per_alignment)
)
alignment_length = len(list(seqs.values())[0])
records = [] # Alignment obj will put them all in a list anyway
for seq_id in ids:
seq = seqs[seq_id]
if alignment_length != len(seq):
raise ValueError(
"Sequences have different lengths, or repeated identifier"
)
name, start, end = self._identifier_split(seq_id)
record = SeqRecord(
Seq(seq, self.alphabet),
id=seq_id,
name=name,
description=seq_id,
annotations={"accession": name},
)
# Accession will be overridden by _populate_meta_data if an explicit
# accession is provided:
record.annotations["accession"] = name
if start is not None:
record.annotations["start"] = start
if end is not None:
record.annotations["end"] = end
self._populate_meta_data(seq_id, record)
records.append(record)
for k, v in gc.items():
if len(v) != alignment_length:
raise ValueError(
"%s length %i, expected %i" % (k, len(v), alignment_length)
)
alignment = MultipleSeqAlignment(records, self.alphabet)
for k, v in sorted(gc.items()):
if k in self.pfam_gc_mapping:
alignment.column_annotations[self.pfam_gc_mapping[k]] = v
elif k.endswith("_cons") and k[:-5] in self.pfam_gr_mapping:
alignment.column_annotations[self.pfam_gr_mapping[k[:-5]]] = v
else:
# Ignore it?
alignment.column_annotations["GC:" + k] = v
# TODO - Introduce an annotated alignment class?
# For now, store the annotation a new private property:
alignment._annotations = gr
return alignment
else:
raise StopIteration
def _identifier_split(self, identifier):
"""Return (name, start, end) string tuple from an identier (PRIVATE)."""
if "/" in identifier:
name, start_end = identifier.rsplit("/", 1)
if start_end.count("-") == 1:
try:
start, end = start_end.split("-")
return name, int(start), int(end)
except ValueError:
# Non-integers after final '/' - fall through
pass
return identifier, None, None
def _get_meta_data(self, identifier, meta_dict):
"""Take an itentifier and returns dict of all meta-data matching it (PRIVATE).
For example, given "Q9PN73_CAMJE/149-220" will return all matches to
this or "Q9PN73_CAMJE" which the identifier without its /start-end
suffix.
In the example below, the suffix is required to match the AC, but must
be removed to match the OS and OC meta-data::
# STOCKHOLM 1.0
#=GS Q9PN73_CAMJE/149-220 AC Q9PN73
...
Q9PN73_CAMJE/149-220 NKA...
...
#=GS Q9PN73_CAMJE OS Campylobacter jejuni
#=GS Q9PN73_CAMJE OC Bacteria
This function will return an empty dictionary if no data is found.
"""
name, start, end = self._identifier_split(identifier)
if name == identifier:
identifier_keys = [identifier]
else:
identifier_keys = [identifier, name]
answer = {}
for identifier_key in identifier_keys:
try:
for feature_key in meta_dict[identifier_key]:
answer[feature_key] = meta_dict[identifier_key][feature_key]
except KeyError:
pass
return answer
def _populate_meta_data(self, identifier, record):
"""Add meta-date to a SecRecord's annotations dictionary (PRIVATE).
This function applies the PFAM conventions.
"""
seq_data = self._get_meta_data(identifier, self.seq_annotation)
for feature in seq_data:
# Note this dictionary contains lists!
if feature == "AC": # ACcession number
assert len(seq_data[feature]) == 1
record.annotations["accession"] = seq_data[feature][0]
elif feature == "DE": # DEscription
record.description = "\n".join(seq_data[feature])
elif feature == "DR": # Database Reference
# Should we try and parse the strings?
record.dbxrefs = seq_data[feature]
elif feature in self.pfam_gs_mapping:
record.annotations[self.pfam_gs_mapping[feature]] = ", ".join(
seq_data[feature]
)
else:
# Ignore it?
record.annotations["GS:" + feature] = ", ".join(seq_data[feature])
# Now record the per-letter-annotations
seq_col_data = self._get_meta_data(identifier, self.seq_col_annotation)
for feature in seq_col_data:
# Note this dictionary contains strings!
if feature in self.pfam_gr_mapping:
record.letter_annotations[self.pfam_gr_mapping[feature]] = seq_col_data[
feature
]
else:
# Ignore it?
record.letter_annotations["GR:" + feature] = seq_col_data[feature]
if __name__ == "__main__":
from Bio._utils import run_doctest
run_doctest()
| 42.366615
| 127
| 0.562249
|
ea1b3873ea7c932432efa1ad369f8963f7400c5e
| 68,175
|
py
|
Python
|
mindhome_alpha/erpnext/stock/doctype/stock_entry/stock_entry.py
|
Mindhome/field_service
|
3aea428815147903eb9af1d0c1b4b9fc7faed057
|
[
"MIT"
] | 1
|
2021-04-29T14:55:29.000Z
|
2021-04-29T14:55:29.000Z
|
mindhome_alpha/erpnext/stock/doctype/stock_entry/stock_entry.py
|
Mindhome/field_service
|
3aea428815147903eb9af1d0c1b4b9fc7faed057
|
[
"MIT"
] | null | null | null |
mindhome_alpha/erpnext/stock/doctype/stock_entry/stock_entry.py
|
Mindhome/field_service
|
3aea428815147903eb9af1d0c1b4b9fc7faed057
|
[
"MIT"
] | 1
|
2021-04-29T14:39:01.000Z
|
2021-04-29T14:39:01.000Z
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe, erpnext
import frappe.defaults
from frappe import _
from frappe.utils import cstr, cint, flt, comma_or, getdate, nowdate, formatdate, format_time
from erpnext.stock.utils import get_incoming_rate
from erpnext.stock.stock_ledger import get_previous_sle, NegativeStockError, get_valuation_rate
from erpnext.stock.get_item_details import get_bin_details, get_default_cost_center, get_conversion_factor, get_reserved_qty_for_so
from erpnext.setup.doctype.item_group.item_group import get_item_group_defaults
from erpnext.setup.doctype.brand.brand import get_brand_defaults
from erpnext.stock.doctype.batch.batch import get_batch_no, set_batch_nos, get_batch_qty
from erpnext.stock.doctype.item.item import get_item_defaults
from erpnext.manufacturing.doctype.bom.bom import validate_bom_no, add_additional_cost
from erpnext.stock.utils import get_bin
from frappe.model.mapper import get_mapped_doc
from erpnext.stock.doctype.serial_no.serial_no import update_serial_nos_after_submit, get_serial_nos
from erpnext.stock.doctype.stock_reconciliation.stock_reconciliation import OpeningEntryAccountError
from erpnext.accounts.general_ledger import process_gl_map
from erpnext.controllers.taxes_and_totals import init_landed_taxes_and_totals
import json
from six import string_types, itervalues, iteritems
class IncorrectValuationRateError(frappe.ValidationError): pass
class DuplicateEntryForWorkOrderError(frappe.ValidationError): pass
class OperationsNotCompleteError(frappe.ValidationError): pass
class MaxSampleAlreadyRetainedError(frappe.ValidationError): pass
from erpnext.controllers.stock_controller import StockController
form_grid_templates = {
"items": "templates/form_grid/stock_entry_grid.html"
}
class StockEntry(StockController):
def get_feed(self):
return self.stock_entry_type
def onload(self):
for item in self.get("items"):
item.update(get_bin_details(item.item_code, item.s_warehouse))
def before_validate(self):
from erpnext.stock.doctype.putaway_rule.putaway_rule import apply_putaway_rule
apply_rule = self.apply_putaway_rule and (self.purpose in ["Material Transfer", "Material Receipt"])
if self.get("items") and apply_rule:
apply_putaway_rule(self.doctype, self.get("items"), self.company,
purpose=self.purpose)
def validate(self):
self.pro_doc = frappe._dict()
if self.work_order:
self.pro_doc = frappe.get_doc('Work Order', self.work_order)
self.validate_posting_time()
self.validate_purpose()
self.validate_item()
self.validate_customer_provided_item()
self.validate_qty()
self.set_transfer_qty()
self.validate_uom_is_integer("uom", "qty")
self.validate_uom_is_integer("stock_uom", "transfer_qty")
self.validate_warehouse()
self.validate_work_order()
self.validate_bom()
self.mark_finished_and_scrap_items()
self.validate_finished_goods()
self.validate_with_material_request()
self.validate_batch()
self.validate_inspection()
self.validate_fg_completed_qty()
self.validate_difference_account()
self.set_job_card_data()
self.set_purpose_for_stock_entry()
if not self.from_bom:
self.fg_completed_qty = 0.0
if self._action == 'submit':
self.make_batches('t_warehouse')
else:
set_batch_nos(self, 's_warehouse')
self.validate_serialized_batch()
self.set_actual_qty()
self.calculate_rate_and_amount()
self.validate_putaway_capacity()
def on_submit(self):
self.update_stock_ledger()
update_serial_nos_after_submit(self, "items")
self.update_work_order()
self.validate_purchase_order()
if self.purchase_order and self.purpose == "Send to Subcontractor":
self.update_purchase_order_supplied_items()
self.make_gl_entries()
self.repost_future_sle_and_gle()
self.update_cost_in_project()
self.validate_reserved_serial_no_consumption()
self.update_transferred_qty()
self.update_quality_inspection()
if self.work_order and self.purpose == "Manufacture":
self.update_so_in_serial_number()
if self.purpose == 'Material Transfer' and self.add_to_transit:
self.set_material_request_transfer_status('In Transit')
if self.purpose == 'Material Transfer' and self.outgoing_stock_entry:
self.set_material_request_transfer_status('Completed')
def on_cancel(self):
if self.purchase_order and self.purpose == "Send to Subcontractor":
self.update_purchase_order_supplied_items()
if self.work_order and self.purpose == "Material Consumption for Manufacture":
self.validate_work_order_status()
self.update_work_order()
self.update_stock_ledger()
self.ignore_linked_doctypes = ('GL Entry', 'Stock Ledger Entry', 'Repost Item Valuation')
self.make_gl_entries_on_cancel()
self.repost_future_sle_and_gle()
self.update_cost_in_project()
self.update_transferred_qty()
self.update_quality_inspection()
self.delete_auto_created_batches()
self.delete_linked_stock_entry()
if self.purpose == 'Material Transfer' and self.add_to_transit:
self.set_material_request_transfer_status('Not Started')
if self.purpose == 'Material Transfer' and self.outgoing_stock_entry:
self.set_material_request_transfer_status('In Transit')
def set_job_card_data(self):
if self.job_card and not self.work_order:
data = frappe.db.get_value('Job Card',
self.job_card, ['for_quantity', 'work_order', 'bom_no'], as_dict=1)
self.fg_completed_qty = data.for_quantity
self.work_order = data.work_order
self.from_bom = 1
self.bom_no = data.bom_no
def validate_work_order_status(self):
pro_doc = frappe.get_doc("Work Order", self.work_order)
if pro_doc.status == 'Completed':
frappe.throw(_("Cannot cancel transaction for Completed Work Order."))
def validate_purpose(self):
valid_purposes = ["Material Issue", "Material Receipt", "Material Transfer",
"Material Transfer for Manufacture", "Manufacture", "Repack", "Send to Subcontractor",
"Material Consumption for Manufacture"]
if self.purpose not in valid_purposes:
frappe.throw(_("Purpose must be one of {0}").format(comma_or(valid_purposes)))
if self.job_card and self.purpose not in ['Material Transfer for Manufacture', 'Repack']:
frappe.throw(_("For job card {0}, you can only make the 'Material Transfer for Manufacture' type stock entry")
.format(self.job_card))
def delete_linked_stock_entry(self):
if self.purpose == "Send to Warehouse":
for d in frappe.get_all("Stock Entry", filters={"docstatus": 0,
"outgoing_stock_entry": self.name, "purpose": "Receive at Warehouse"}):
frappe.delete_doc("Stock Entry", d.name)
def set_transfer_qty(self):
for item in self.get("items"):
if not flt(item.qty):
frappe.throw(_("Row {0}: Qty is mandatory").format(item.idx))
if not flt(item.conversion_factor):
frappe.throw(_("Row {0}: UOM Conversion Factor is mandatory").format(item.idx))
item.transfer_qty = flt(flt(item.qty) * flt(item.conversion_factor),
self.precision("transfer_qty", item))
def update_cost_in_project(self):
if (self.work_order and not frappe.db.get_value("Work Order",
self.work_order, "update_consumed_material_cost_in_project")):
return
if self.project:
amount = frappe.db.sql(""" select ifnull(sum(sed.amount), 0)
from
`tabStock Entry` se, `tabStock Entry Detail` sed
where
se.docstatus = 1 and se.project = %s and sed.parent = se.name
and (sed.t_warehouse is null or sed.t_warehouse = '')""", self.project, as_list=1)
amount = amount[0][0] if amount else 0
additional_costs = frappe.db.sql(""" select ifnull(sum(sed.base_amount), 0)
from
`tabStock Entry` se, `tabLanded Cost Taxes and Charges` sed
where
se.docstatus = 1 and se.project = %s and sed.parent = se.name
and se.purpose = 'Manufacture'""", self.project, as_list=1)
additional_cost_amt = additional_costs[0][0] if additional_costs else 0
amount += additional_cost_amt
frappe.db.set_value('Project', self.project, 'total_consumed_material_cost', amount)
def validate_item(self):
stock_items = self.get_stock_items()
serialized_items = self.get_serialized_items()
for item in self.get("items"):
if flt(item.qty) and flt(item.qty) < 0:
frappe.throw(_("Row {0}: The item {1}, quantity must be positive number")
.format(item.idx, frappe.bold(item.item_code)))
if item.item_code not in stock_items:
frappe.throw(_("{0} is not a stock Item").format(item.item_code))
item_details = self.get_item_details(frappe._dict(
{"item_code": item.item_code, "company": self.company,
"project": self.project, "uom": item.uom, 's_warehouse': item.s_warehouse}),
for_update=True)
for f in ("uom", "stock_uom", "description", "item_name", "expense_account",
"cost_center", "conversion_factor"):
if f == "stock_uom" or not item.get(f):
item.set(f, item_details.get(f))
if f == 'conversion_factor' and item.uom == item_details.get('stock_uom'):
item.set(f, item_details.get(f))
if not item.transfer_qty and item.qty:
item.transfer_qty = flt(flt(item.qty) * flt(item.conversion_factor),
self.precision("transfer_qty", item))
if (self.purpose in ("Material Transfer", "Material Transfer for Manufacture")
and not item.serial_no
and item.item_code in serialized_items):
frappe.throw(_("Row #{0}: Please specify Serial No for Item {1}").format(item.idx, item.item_code),
frappe.MandatoryError)
def validate_qty(self):
manufacture_purpose = ["Manufacture", "Material Consumption for Manufacture"]
if self.purpose in manufacture_purpose and self.work_order:
if not frappe.get_value('Work Order', self.work_order, 'skip_transfer'):
item_code = []
for item in self.items:
if cstr(item.t_warehouse) == '':
req_items = frappe.get_all('Work Order Item',
filters={'parent': self.work_order, 'item_code': item.item_code}, fields=["item_code"])
transferred_materials = frappe.db.sql("""
select
sum(qty) as qty
from `tabStock Entry` se,`tabStock Entry Detail` sed
where
se.name = sed.parent and se.docstatus=1 and
(se.purpose='Material Transfer for Manufacture' or se.purpose='Manufacture')
and sed.item_code=%s and se.work_order= %s and ifnull(sed.t_warehouse, '') != ''
""", (item.item_code, self.work_order), as_dict=1)
stock_qty = flt(item.qty)
trans_qty = flt(transferred_materials[0].qty)
if req_items:
if stock_qty > trans_qty:
item_code.append(item.item_code)
def validate_fg_completed_qty(self):
item_wise_qty = {}
if self.purpose == "Manufacture" and self.work_order:
for d in self.items:
if d.is_finished_item:
item_wise_qty.setdefault(d.item_code, []).append(d.qty)
for item_code, qty_list in iteritems(item_wise_qty):
total = flt(sum(qty_list), frappe.get_precision("Stock Entry Detail", "qty"))
if self.fg_completed_qty != total:
frappe.throw(_("The finished product {0} quantity {1} and For Quantity {2} cannot be different")
.format(frappe.bold(item_code), frappe.bold(total), frappe.bold(self.fg_completed_qty)))
def validate_difference_account(self):
if not cint(erpnext.is_perpetual_inventory_enabled(self.company)):
return
for d in self.get("items"):
if not d.expense_account:
frappe.throw(_("Please enter <b>Difference Account</b> or set default <b>Stock Adjustment Account</b> for company {0}")
.format(frappe.bold(self.company)))
elif self.is_opening == "Yes" and frappe.db.get_value("Account", d.expense_account, "report_type") == "Profit and Loss":
frappe.throw(_("Difference Account must be a Asset/Liability type account, since this Stock Entry is an Opening Entry"), OpeningEntryAccountError)
def validate_warehouse(self):
"""perform various (sometimes conditional) validations on warehouse"""
source_mandatory = ["Material Issue", "Material Transfer", "Send to Subcontractor", "Material Transfer for Manufacture",
"Material Consumption for Manufacture"]
target_mandatory = ["Material Receipt", "Material Transfer", "Send to Subcontractor",
"Material Transfer for Manufacture"]
validate_for_manufacture = any([d.bom_no for d in self.get("items")])
if self.purpose in source_mandatory and self.purpose not in target_mandatory:
self.to_warehouse = None
for d in self.get('items'):
d.t_warehouse = None
elif self.purpose in target_mandatory and self.purpose not in source_mandatory:
self.from_warehouse = None
for d in self.get('items'):
d.s_warehouse = None
for d in self.get('items'):
if not d.s_warehouse and not d.t_warehouse:
d.s_warehouse = self.from_warehouse
d.t_warehouse = self.to_warehouse
if not (d.s_warehouse or d.t_warehouse):
frappe.throw(_("Atleast one warehouse is mandatory"))
if self.purpose in source_mandatory and not d.s_warehouse:
if self.from_warehouse:
d.s_warehouse = self.from_warehouse
else:
frappe.throw(_("Source warehouse is mandatory for row {0}").format(d.idx))
if self.purpose in target_mandatory and not d.t_warehouse:
if self.to_warehouse:
d.t_warehouse = self.to_warehouse
else:
frappe.throw(_("Target warehouse is mandatory for row {0}").format(d.idx))
if self.purpose == "Manufacture":
if validate_for_manufacture:
if d.is_finished_item or d.is_scrap_item:
d.s_warehouse = None
if not d.t_warehouse:
frappe.throw(_("Target warehouse is mandatory for row {0}").format(d.idx))
else:
d.t_warehouse = None
if not d.s_warehouse:
frappe.throw(_("Source warehouse is mandatory for row {0}").format(d.idx))
if cstr(d.s_warehouse) == cstr(d.t_warehouse) and not self.purpose == "Material Transfer for Manufacture":
frappe.throw(_("Source and target warehouse cannot be same for row {0}").format(d.idx))
def validate_work_order(self):
if self.purpose in ("Manufacture", "Material Transfer for Manufacture", "Material Consumption for Manufacture"):
# check if work order is entered
if (self.purpose=="Manufacture" or self.purpose=="Material Consumption for Manufacture") \
and self.work_order:
if not self.fg_completed_qty:
frappe.throw(_("For Quantity (Manufactured Qty) is mandatory"))
self.check_if_operations_completed()
self.check_duplicate_entry_for_work_order()
elif self.purpose != "Material Transfer":
self.work_order = None
def check_if_operations_completed(self):
"""Check if Time Sheets are completed against before manufacturing to capture operating costs."""
prod_order = frappe.get_doc("Work Order", self.work_order)
allowance_percentage = flt(frappe.db.get_single_value("Manufacturing Settings",
"overproduction_percentage_for_work_order"))
for d in prod_order.get("operations"):
total_completed_qty = flt(self.fg_completed_qty) + flt(prod_order.produced_qty)
completed_qty = d.completed_qty + (allowance_percentage/100 * d.completed_qty)
if total_completed_qty > flt(completed_qty):
job_card = frappe.db.get_value('Job Card', {'operation_id': d.name}, 'name')
if not job_card:
frappe.throw(_("Work Order {0}: Job Card not found for the operation {1}")
.format(self.work_order, d.operation))
work_order_link = frappe.utils.get_link_to_form('Work Order', self.work_order)
job_card_link = frappe.utils.get_link_to_form('Job Card', job_card)
frappe.throw(_("Row #{0}: Operation {1} is not completed for {2} qty of finished goods in Work Order {3}. Please update operation status via Job Card {4}.")
.format(d.idx, frappe.bold(d.operation), frappe.bold(total_completed_qty), work_order_link, job_card_link), OperationsNotCompleteError)
def check_duplicate_entry_for_work_order(self):
other_ste = [t[0] for t in frappe.db.get_values("Stock Entry", {
"work_order": self.work_order,
"purpose": self.purpose,
"docstatus": ["!=", 2],
"name": ["!=", self.name]
}, "name")]
if other_ste:
production_item, qty = frappe.db.get_value("Work Order",
self.work_order, ["production_item", "qty"])
args = other_ste + [production_item]
fg_qty_already_entered = frappe.db.sql("""select sum(transfer_qty)
from `tabStock Entry Detail`
where parent in (%s)
and item_code = %s
and ifnull(s_warehouse,'')='' """ % (", ".join(["%s"] * len(other_ste)), "%s"), args)[0][0]
if fg_qty_already_entered and fg_qty_already_entered >= qty:
frappe.throw(_("Stock Entries already created for Work Order ")
+ self.work_order + ":" + ", ".join(other_ste), DuplicateEntryForWorkOrderError)
def set_actual_qty(self):
allow_negative_stock = cint(frappe.db.get_value("Stock Settings", None, "allow_negative_stock"))
for d in self.get('items'):
previous_sle = get_previous_sle({
"item_code": d.item_code,
"warehouse": d.s_warehouse or d.t_warehouse,
"posting_date": self.posting_date,
"posting_time": self.posting_time
})
# get actual stock at source warehouse
d.actual_qty = previous_sle.get("qty_after_transaction") or 0
# validate qty during submit
if d.docstatus==1 and d.s_warehouse and not allow_negative_stock and flt(d.actual_qty, d.precision("actual_qty")) < flt(d.transfer_qty, d.precision("actual_qty")):
frappe.throw(_("Row {0}: Quantity not available for {4} in warehouse {1} at posting time of the entry ({2} {3})").format(d.idx,
frappe.bold(d.s_warehouse), formatdate(self.posting_date),
format_time(self.posting_time), frappe.bold(d.item_code))
+ '<br><br>' + _("Available quantity is {0}, you need {1}").format(frappe.bold(d.actual_qty),
frappe.bold(d.transfer_qty)),
NegativeStockError, title=_('Insufficient Stock'))
def set_serial_nos(self, work_order):
previous_se = frappe.db.get_value("Stock Entry", {"work_order": work_order,
"purpose": "Material Transfer for Manufacture"}, "name")
for d in self.get('items'):
transferred_serial_no = frappe.db.get_value("Stock Entry Detail",{"parent": previous_se,
"item_code": d.item_code}, "serial_no")
if transferred_serial_no:
d.serial_no = transferred_serial_no
def get_stock_and_rate(self):
"""
Updates rate and availability of all the items.
Called from Update Rate and Availability button.
"""
self.set_work_order_details()
self.set_transfer_qty()
self.set_actual_qty()
self.calculate_rate_and_amount()
def calculate_rate_and_amount(self, reset_outgoing_rate=True, raise_error_if_no_rate=True):
self.set_basic_rate(reset_outgoing_rate, raise_error_if_no_rate)
init_landed_taxes_and_totals(self)
self.distribute_additional_costs()
self.update_valuation_rate()
self.set_total_incoming_outgoing_value()
self.set_total_amount()
def set_basic_rate(self, reset_outgoing_rate=True, raise_error_if_no_rate=True):
"""
Set rate for outgoing, scrapped and finished items
"""
# Set rate for outgoing items
outgoing_items_cost = self.set_rate_for_outgoing_items(reset_outgoing_rate)
finished_item_qty = sum([d.transfer_qty for d in self.items if d.is_finished_item])
# Set basic rate for incoming items
for d in self.get('items'):
if d.s_warehouse or d.set_basic_rate_manually: continue
if d.allow_zero_valuation_rate:
d.basic_rate = 0.0
elif d.is_finished_item:
if self.purpose == "Manufacture":
d.basic_rate = self.get_basic_rate_for_manufactured_item(finished_item_qty, outgoing_items_cost)
elif self.purpose == "Repack":
d.basic_rate = self.get_basic_rate_for_repacked_items(d.transfer_qty, outgoing_items_cost)
if not d.basic_rate and not d.allow_zero_valuation_rate:
d.basic_rate = get_valuation_rate(d.item_code, d.t_warehouse,
self.doctype, self.name, d.allow_zero_valuation_rate,
currency=erpnext.get_company_currency(self.company), company=self.company,
raise_error_if_no_rate=raise_error_if_no_rate)
d.basic_rate = flt(d.basic_rate, d.precision("basic_rate"))
d.basic_amount = flt(flt(d.transfer_qty) * flt(d.basic_rate), d.precision("basic_amount"))
def set_rate_for_outgoing_items(self, reset_outgoing_rate=True):
outgoing_items_cost = 0.0
for d in self.get('items'):
if d.s_warehouse:
if reset_outgoing_rate:
args = self.get_args_for_incoming_rate(d)
rate = get_incoming_rate(args)
if rate > 0:
d.basic_rate = rate
d.basic_amount = flt(flt(d.transfer_qty) * flt(d.basic_rate), d.precision("basic_amount"))
if not d.t_warehouse:
outgoing_items_cost += flt(d.basic_amount)
return outgoing_items_cost
def get_args_for_incoming_rate(self, item):
return frappe._dict({
"item_code": item.item_code,
"warehouse": item.s_warehouse or item.t_warehouse,
"posting_date": self.posting_date,
"posting_time": self.posting_time,
"qty": item.s_warehouse and -1*flt(item.transfer_qty) or flt(item.transfer_qty),
"serial_no": item.serial_no,
"voucher_type": self.doctype,
"voucher_no": self.name,
"company": self.company,
"allow_zero_valuation": item.allow_zero_valuation_rate,
})
def get_basic_rate_for_repacked_items(self, finished_item_qty, outgoing_items_cost):
finished_items = [d.item_code for d in self.get("items") if d.is_finished_item]
if len(finished_items) == 1:
return flt(outgoing_items_cost / finished_item_qty)
else:
unique_finished_items = set(finished_items)
if len(unique_finished_items) == 1:
total_fg_qty = sum([flt(d.transfer_qty) for d in self.items if d.is_finished_item])
return flt(outgoing_items_cost / total_fg_qty)
def get_basic_rate_for_manufactured_item(self, finished_item_qty, outgoing_items_cost=0):
scrap_items_cost = sum([flt(d.basic_amount) for d in self.get("items") if d.is_scrap_item])
# Get raw materials cost from BOM if multiple material consumption entries
if frappe.db.get_single_value("Manufacturing Settings", "material_consumption"):
bom_items = self.get_bom_raw_materials(finished_item_qty)
outgoing_items_cost = sum([flt(row.qty)*flt(row.rate) for row in bom_items.values()])
return flt((outgoing_items_cost - scrap_items_cost) / finished_item_qty)
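# Illustrative arithmetic with hypothetical values: outgoing_items_cost =
# 5000, scrap_items_cost = 200 and finished_item_qty = 10 give a basic
# rate of (5000 - 200) / 10 = 480 for the finished item.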
def distribute_additional_costs(self):
# If no incoming items, set additional costs blank
if not any([d.item_code for d in self.items if d.t_warehouse]):
self.additional_costs = []
self.total_additional_costs = sum([flt(t.base_amount) for t in self.get("additional_costs")])
if self.purpose in ("Repack", "Manufacture"):
incoming_items_cost = sum([flt(t.basic_amount) for t in self.get("items") if t.is_finished_item])
else:
incoming_items_cost = sum([flt(t.basic_amount) for t in self.get("items") if t.t_warehouse])
if incoming_items_cost:
for d in self.get("items"):
if (self.purpose in ("Repack", "Manufacture") and d.is_finished_item) or d.t_warehouse:
d.additional_cost = (flt(d.basic_amount) / incoming_items_cost) * self.total_additional_costs
else:
d.additional_cost = 0
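# Hypothetical example of the pro-rata split above: with
# total_additional_costs = 100 and two incoming rows whose basic_amount
# is 300 and 700, the rows receive additional_cost of 30 and 70.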
def update_valuation_rate(self):
for d in self.get("items"):
if d.transfer_qty:
d.amount = flt(flt(d.basic_amount) + flt(d.additional_cost), d.precision("amount"))
d.valuation_rate = flt(flt(d.basic_rate) + (flt(d.additional_cost) / flt(d.transfer_qty)),
d.precision("valuation_rate"))
def set_total_incoming_outgoing_value(self):
self.total_incoming_value = self.total_outgoing_value = 0.0
for d in self.get("items"):
if d.t_warehouse:
self.total_incoming_value += flt(d.amount)
if d.s_warehouse:
self.total_outgoing_value += flt(d.amount)
self.value_difference = self.total_incoming_value - self.total_outgoing_value
def set_total_amount(self):
self.total_amount = None
if self.purpose not in ['Manufacture', 'Repack']:
self.total_amount = sum([flt(item.amount) for item in self.get("items")])
def set_stock_entry_type(self):
if self.purpose:
self.stock_entry_type = frappe.get_cached_value('Stock Entry Type',
{'purpose': self.purpose}, 'name')
def set_purpose_for_stock_entry(self):
if self.stock_entry_type and not self.purpose:
self.purpose = frappe.get_cached_value('Stock Entry Type',
self.stock_entry_type, 'purpose')
def validate_purchase_order(self):
"""Throw exception if more raw material is transferred against Purchase Order than in
the raw materials supplied table"""
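# Worked example with hypothetical numbers: if the supplied items table
# requires 100 units and the "over_transfer_allowance" setting is 10
# (percent), then total_allowed = 100 + (100 * 10 / 100) = 110, and
# transferring more than 110 units raises the error below.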
backflush_raw_materials_based_on = frappe.db.get_single_value("Buying Settings",
"backflush_raw_materials_of_subcontract_based_on")
qty_allowance = flt(frappe.db.get_single_value("Buying Settings",
"over_transfer_allowance"))
if not (self.purpose == "Send to Subcontractor" and self.purchase_order): return
if (backflush_raw_materials_based_on == 'BOM'):
purchase_order = frappe.get_doc("Purchase Order", self.purchase_order)
for se_item in self.items:
item_code = se_item.original_item or se_item.item_code
precision = cint(frappe.db.get_default("float_precision")) or 3
required_qty = sum([flt(d.required_qty) for d in purchase_order.supplied_items \
if d.rm_item_code == item_code])
total_allowed = required_qty + (required_qty * (qty_allowance/100))
if not required_qty:
bom_no = frappe.db.get_value("Purchase Order Item",
{"parent": self.purchase_order, "item_code": se_item.subcontracted_item},
"bom")
if se_item.allow_alternative_item:
original_item_code = frappe.get_value("Item Alternative", {"alternative_item_code": item_code}, "item_code")
required_qty = sum([flt(d.required_qty) for d in purchase_order.supplied_items \
if d.rm_item_code == original_item_code])
total_allowed = required_qty + (required_qty * (qty_allowance/100))
if not required_qty:
frappe.throw(_("Item {0} not found in 'Raw Materials Supplied' table in Purchase Order {1}")
.format(se_item.item_code, self.purchase_order))
total_supplied = frappe.db.sql("""select sum(transfer_qty)
from `tabStock Entry Detail`, `tabStock Entry`
where `tabStock Entry`.purchase_order = %s
and `tabStock Entry`.docstatus = 1
and `tabStock Entry Detail`.item_code = %s
and `tabStock Entry Detail`.parent = `tabStock Entry`.name""",
(self.purchase_order, se_item.item_code))[0][0]
if flt(total_supplied, precision) > flt(total_allowed, precision):
frappe.throw(_("Row {0}# Item {1} cannot be transferred more than {2} against Purchase Order {3}")
.format(se_item.idx, se_item.item_code, total_allowed, self.purchase_order))
elif backflush_raw_materials_based_on == "Material Transferred for Subcontract":
for row in self.items:
if not row.subcontracted_item:
frappe.throw(_("Row {0}: Subcontracted Item is mandatory for the raw material {1}")
.format(row.idx, frappe.bold(row.item_code)))
elif not row.po_detail:
filters = {
"parent": self.purchase_order, "docstatus": 1,
"rm_item_code": row.item_code, "main_item_code": row.subcontracted_item
}
po_detail = frappe.db.get_value("Purchase Order Item Supplied", filters, "name")
if po_detail:
row.db_set("po_detail", po_detail)
def validate_bom(self):
for d in self.get('items'):
if d.bom_no and (d.t_warehouse != getattr(self, "pro_doc", frappe._dict()).scrap_warehouse):
item_code = d.original_item or d.item_code
validate_bom_no(item_code, d.bom_no)
def mark_finished_and_scrap_items(self):
if self.purpose in ("Repack", "Manufacture"):
if any([d.item_code for d in self.items if (d.is_finished_item and d.t_warehouse)]):
return
finished_item = self.get_finished_item()
for d in self.items:
if d.t_warehouse and not d.s_warehouse:
if self.purpose=="Repack" or d.item_code == finished_item:
d.is_finished_item = 1
else:
d.is_scrap_item = 1
else:
d.is_finished_item = 0
d.is_scrap_item = 0
def get_finished_item(self):
finished_item = None
if self.work_order:
finished_item = frappe.db.get_value("Work Order", self.work_order, "production_item")
elif self.bom_no:
finished_item = frappe.db.get_value("BOM", self.bom_no, "item")
return finished_item
def validate_finished_goods(self):
"""validation: finished good quantity should be same as manufacturing quantity"""
if not self.work_order: return
production_item, wo_qty = frappe.db.get_value("Work Order",
self.work_order, ["production_item", "qty"])
finished_items = []
for d in self.get('items'):
if d.is_finished_item:
if d.item_code != production_item:
frappe.throw(_("Finished Item {0} does not match with Work Order {1}")
.format(d.item_code, self.work_order))
elif flt(d.transfer_qty) > flt(self.fg_completed_qty):
frappe.throw(_("Quantity in row {0} ({1}) must be same as manufactured quantity {2}"). \
format(d.idx, d.transfer_qty, self.fg_completed_qty))
finished_items.append(d.item_code)
if len(set(finished_items)) > 1:
frappe.throw(_("Multiple items cannot be marked as finished item"))
if self.purpose == "Manufacture":
allowance_percentage = flt(frappe.db.get_single_value("Manufacturing Settings",
"overproduction_percentage_for_work_order"))
allowed_qty = wo_qty + (allowance_percentage/100 * wo_qty)
if self.fg_completed_qty > allowed_qty:
frappe.throw(_("For quantity {0} should not be greater than work order quantity {1}")
.format(flt(self.fg_completed_qty), wo_qty))
def update_stock_ledger(self):
sl_entries = []
finished_item_row = self.get_finished_item_row()
# make sl entries for source warehouse first
self.get_sle_for_source_warehouse(sl_entries, finished_item_row)
# SLE for target warehouse
self.get_sle_for_target_warehouse(sl_entries, finished_item_row)
# reverse sl entries if cancel
if self.docstatus == 2:
sl_entries.reverse()
self.make_sl_entries(sl_entries)
def get_finished_item_row(self):
finished_item_row = None
if self.purpose in ("Manufacture", "Repack"):
for d in self.get('items'):
if d.is_finished_item:
finished_item_row = d
return finished_item_row
def get_sle_for_source_warehouse(self, sl_entries, finished_item_row):
for d in self.get('items'):
if cstr(d.s_warehouse):
sle = self.get_sl_entries(d, {
"warehouse": cstr(d.s_warehouse),
"actual_qty": -flt(d.transfer_qty),
"incoming_rate": 0
})
if cstr(d.t_warehouse):
sle.dependant_sle_voucher_detail_no = d.name
elif finished_item_row and (finished_item_row.item_code != d.item_code or finished_item_row.t_warehouse != d.s_warehouse):
sle.dependant_sle_voucher_detail_no = finished_item_row.name
sl_entries.append(sle)
def get_sle_for_target_warehouse(self, sl_entries, finished_item_row):
for d in self.get('items'):
if cstr(d.t_warehouse):
sle = self.get_sl_entries(d, {
"warehouse": cstr(d.t_warehouse),
"actual_qty": flt(d.transfer_qty),
"incoming_rate": flt(d.valuation_rate)
})
if cstr(d.s_warehouse) or (finished_item_row and d.name == finished_item_row.name):
sle.recalculate_rate = 1
sl_entries.append(sle)
def get_gl_entries(self, warehouse_account):
gl_entries = super(StockEntry, self).get_gl_entries(warehouse_account)
total_basic_amount = sum([flt(t.basic_amount) for t in self.get("items") if t.t_warehouse])
divide_based_on = total_basic_amount
if self.get("additional_costs") and not total_basic_amount:
# if total_basic_amount is 0, distribute additional charges based on qty
divide_based_on = sum(item.qty for item in list(self.get("items")))
item_account_wise_additional_cost = {}
for t in self.get("additional_costs"):
for d in self.get("items"):
if d.t_warehouse:
item_account_wise_additional_cost.setdefault((d.item_code, d.name), {})
item_account_wise_additional_cost[(d.item_code, d.name)].setdefault(t.expense_account, {
"amount": 0.0,
"base_amount": 0.0
})
multiply_based_on = d.basic_amount if total_basic_amount else d.qty
item_account_wise_additional_cost[(d.item_code, d.name)][t.expense_account]["amount"] += \
flt(t.amount * multiply_based_on) / divide_based_on
item_account_wise_additional_cost[(d.item_code, d.name)][t.expense_account]["base_amount"] += \
flt(t.base_amount * multiply_based_on) / divide_based_on
if item_account_wise_additional_cost:
for d in self.get("items"):
for account, amount in iteritems(item_account_wise_additional_cost.get((d.item_code, d.name), {})):
if not amount: continue
gl_entries.append(self.get_gl_dict({
"account": account,
"against": d.expense_account,
"cost_center": d.cost_center,
"remarks": self.get("remarks") or _("Accounting Entry for Stock"),
"credit_in_account_currency": flt(amount["amount"]),
"credit": flt(amount["base_amount"])
}, item=d))
gl_entries.append(self.get_gl_dict({
"account": d.expense_account,
"against": account,
"cost_center": d.cost_center,
"remarks": self.get("remarks") or _("Accounting Entry for Stock"),
"credit": -1 * amount['base_amount'] # put it as negative credit instead of debit purposefully
}, item=d))
return process_gl_map(gl_entries)
def update_work_order(self):
def _validate_work_order(pro_doc):
if flt(pro_doc.docstatus) != 1:
frappe.throw(_("Work Order {0} must be submitted").format(self.work_order))
if pro_doc.status == 'Stopped':
frappe.throw(_("Transaction not allowed against stopped Work Order {0}").format(self.work_order))
if self.job_card:
job_doc = frappe.get_doc('Job Card', self.job_card)
job_doc.set_transferred_qty(update_status=True)
job_doc.set_transferred_qty_in_job_card(self)
if self.work_order:
pro_doc = frappe.get_doc("Work Order", self.work_order)
_validate_work_order(pro_doc)
pro_doc.run_method("update_status")
if self.fg_completed_qty:
pro_doc.run_method("update_work_order_qty")
if self.purpose == "Manufacture":
pro_doc.run_method("update_planned_qty")
if not pro_doc.operations:
pro_doc.set_actual_dates()
def get_item_details(self, args=None, for_update=False):
item = frappe.db.sql("""select i.name, i.stock_uom, i.description, i.image, i.item_name, i.item_group,
i.has_batch_no, i.sample_quantity, i.has_serial_no, i.allow_alternative_item,
id.expense_account, id.buying_cost_center
from `tabItem` i LEFT JOIN `tabItem Default` id ON i.name=id.parent and id.company=%s
where i.name=%s
and i.disabled=0
and (i.end_of_life is null or i.end_of_life='0000-00-00' or i.end_of_life > %s)""",
(self.company, args.get('item_code'), nowdate()), as_dict = 1)
if not item:
frappe.throw(_("Item {0} is not active or end of life has been reached").format(args.get("item_code")))
item = item[0]
item_group_defaults = get_item_group_defaults(item.name, self.company)
brand_defaults = get_brand_defaults(item.name, self.company)
ret = frappe._dict({
'uom' : item.stock_uom,
'stock_uom' : item.stock_uom,
'description' : item.description,
'image' : item.image,
'item_name' : item.item_name,
'cost_center' : get_default_cost_center(args, item, item_group_defaults, brand_defaults, self.company),
'qty' : args.get("qty"),
'transfer_qty' : args.get('qty'),
'conversion_factor' : 1,
'batch_no' : '',
'actual_qty' : 0,
'basic_rate' : 0,
'serial_no' : '',
'has_serial_no' : item.has_serial_no,
'has_batch_no' : item.has_batch_no,
'sample_quantity' : item.sample_quantity,
'expense_account' : item.expense_account
})
if self.purpose == 'Send to Subcontractor':
ret["allow_alternative_item"] = item.allow_alternative_item
# update uom
if args.get("uom") and for_update:
ret.update(get_uom_details(args.get('item_code'), args.get('uom'), args.get('qty')))
if self.purpose == 'Material Issue':
ret["expense_account"] = (item.get("expense_account") or
item_group_defaults.get("expense_account") or
frappe.get_cached_value('Company', self.company, "default_expense_account"))
for company_field, field in {'stock_adjustment_account': 'expense_account',
'cost_center': 'cost_center'}.items():
if not ret.get(field):
ret[field] = frappe.get_cached_value('Company', self.company, company_field)
args['posting_date'] = self.posting_date
args['posting_time'] = self.posting_time
stock_and_rate = get_warehouse_details(args) if args.get('warehouse') else {}
ret.update(stock_and_rate)
# automatically select batch for outgoing item
if (args.get('s_warehouse', None) and args.get('qty') and
ret.get('has_batch_no') and not args.get('batch_no')):
args.batch_no = get_batch_no(args['item_code'], args['s_warehouse'], args['qty'])
if self.purpose == "Send to Subcontractor" and self.get("purchase_order") and args.get('item_code'):
subcontract_items = frappe.get_all("Purchase Order Item Supplied",
{"parent": self.purchase_order, "rm_item_code": args.get('item_code')}, "main_item_code")
if subcontract_items and len(subcontract_items) == 1:
ret["subcontracted_item"] = subcontract_items[0].main_item_code
return ret
def set_items_for_stock_in(self):
self.items = []
if self.outgoing_stock_entry and self.purpose == 'Material Transfer':
doc = frappe.get_doc('Stock Entry', self.outgoing_stock_entry)
if doc.per_transferred == 100:
frappe.throw(_("Goods are already received against the outward entry {0}")
.format(doc.name))
for d in doc.items:
self.append('items', {
's_warehouse': d.t_warehouse,
'item_code': d.item_code,
'qty': d.qty,
'uom': d.uom,
'against_stock_entry': d.parent,
'ste_detail': d.name,
'stock_uom': d.stock_uom,
'conversion_factor': d.conversion_factor,
'serial_no': d.serial_no,
'batch_no': d.batch_no
})
def get_items(self):
self.set('items', [])
self.validate_work_order()
if not self.posting_date or not self.posting_time:
frappe.throw(_("Posting date and posting time is mandatory"))
self.set_work_order_details()
self.flags.backflush_based_on = frappe.db.get_single_value("Manufacturing Settings",
"backflush_raw_materials_based_on")
if self.bom_no:
backflush_based_on = frappe.db.get_single_value("Manufacturing Settings",
"backflush_raw_materials_based_on")
if self.purpose in ["Material Issue", "Material Transfer", "Manufacture", "Repack",
"Send to Subcontractor", "Material Transfer for Manufacture", "Material Consumption for Manufacture"]:
if self.work_order and self.purpose == "Material Transfer for Manufacture":
item_dict = self.get_pending_raw_materials(backflush_based_on)
if self.to_warehouse and self.pro_doc:
for item in itervalues(item_dict):
item["to_warehouse"] = self.pro_doc.wip_warehouse
self.add_to_stock_entry_detail(item_dict)
elif (self.work_order and (self.purpose == "Manufacture"
or self.purpose == "Material Consumption for Manufacture") and not self.pro_doc.skip_transfer
and self.flags.backflush_based_on == "Material Transferred for Manufacture"):
self.get_transfered_raw_materials()
elif (self.work_order and (self.purpose == "Manufacture" or
self.purpose == "Material Consumption for Manufacture") and self.flags.backflush_based_on== "BOM"
and frappe.db.get_single_value("Manufacturing Settings", "material_consumption")== 1):
self.get_unconsumed_raw_materials()
else:
if not self.fg_completed_qty:
frappe.throw(_("Manufacturing Quantity is mandatory"))
item_dict = self.get_bom_raw_materials(self.fg_completed_qty)
#Get PO Supplied Items Details
if self.purchase_order and self.purpose == "Send to Subcontractor":
#Get PO Supplied Items Details
item_wh = frappe._dict(frappe.db.sql("""
select rm_item_code, reserve_warehouse
from `tabPurchase Order` po, `tabPurchase Order Item Supplied` poitemsup
where po.name = poitemsup.parent
and po.name = %s""",self.purchase_order))
for item in itervalues(item_dict):
if self.pro_doc and cint(self.pro_doc.from_wip_warehouse):
item["from_warehouse"] = self.pro_doc.wip_warehouse
#Get Reserve Warehouse from PO
if self.purchase_order and self.purpose=="Send to Subcontractor":
item["from_warehouse"] = item_wh.get(item.item_code)
item["to_warehouse"] = self.to_warehouse if self.purpose=="Send to Subcontractor" else ""
self.add_to_stock_entry_detail(item_dict)
# fetch the serial_no of the first stock entry for the second stock entry
if self.work_order and self.purpose == "Manufacture":
self.set_serial_nos(self.work_order)
work_order = frappe.get_doc('Work Order', self.work_order)
add_additional_cost(self, work_order)
# add finished goods item
if self.purpose in ("Manufacture", "Repack"):
self.load_items_from_bom()
self.set_scrap_items()
self.set_actual_qty()
self.calculate_rate_and_amount(raise_error_if_no_rate=False)
def set_scrap_items(self):
if self.purpose != "Send to Subcontractor" and self.purpose in ["Manufacture", "Repack"]:
scrap_item_dict = self.get_bom_scrap_material(self.fg_completed_qty)
for item in itervalues(scrap_item_dict):
item.idx = ''
if self.pro_doc and self.pro_doc.scrap_warehouse:
item["to_warehouse"] = self.pro_doc.scrap_warehouse
self.add_to_stock_entry_detail(scrap_item_dict, bom_no=self.bom_no)
def set_work_order_details(self):
if not getattr(self, "pro_doc", None):
self.pro_doc = frappe._dict()
if self.work_order:
# common validations
if not self.pro_doc:
self.pro_doc = frappe.get_doc('Work Order', self.work_order)
if self.pro_doc:
self.bom_no = self.pro_doc.bom_no
else:
# invalid work order
self.work_order = None
def load_items_from_bom(self):
if self.work_order:
item_code = self.pro_doc.production_item
to_warehouse = self.pro_doc.fg_warehouse
else:
item_code = frappe.db.get_value("BOM", self.bom_no, "item")
to_warehouse = self.to_warehouse
item = get_item_defaults(item_code, self.company)
if not self.work_order and not to_warehouse:
# in case of BOM
to_warehouse = item.get("default_warehouse")
self.add_to_stock_entry_detail({
item.name: {
"to_warehouse": to_warehouse,
"from_warehouse": "",
"qty": self.fg_completed_qty,
"item_name": item.item_name,
"description": item.description,
"stock_uom": item.stock_uom,
"expense_account": item.get("expense_account"),
"cost_center": item.get("buying_cost_center"),
"is_finished_item": 1
}
}, bom_no = self.bom_no)
def get_bom_raw_materials(self, qty):
from erpnext.manufacturing.doctype.bom.bom import get_bom_items_as_dict
# item dict = { item_code: {qty, description, stock_uom} }
item_dict = get_bom_items_as_dict(self.bom_no, self.company, qty=qty,
fetch_exploded = self.use_multi_level_bom, fetch_qty_in_stock_uom=False)
used_alternative_items = get_used_alternative_items(work_order = self.work_order)
for item in itervalues(item_dict):
# if a source warehouse is present in the BOM, set from_warehouse to the BOM source_warehouse
if item["allow_alternative_item"]:
item["allow_alternative_item"] = frappe.db.get_value('Work Order',
self.work_order, "allow_alternative_item")
item.from_warehouse = self.from_warehouse or item.source_warehouse or item.default_warehouse
if item.item_code in used_alternative_items:
alternative_item_data = used_alternative_items.get(item.item_code)
item.item_code = alternative_item_data.item_code
item.item_name = alternative_item_data.item_name
item.stock_uom = alternative_item_data.stock_uom
item.uom = alternative_item_data.uom
item.conversion_factor = alternative_item_data.conversion_factor
item.description = alternative_item_data.description
return item_dict
def get_bom_scrap_material(self, qty):
from erpnext.manufacturing.doctype.bom.bom import get_bom_items_as_dict
# item dict = { item_code: {qty, description, stock_uom} }
item_dict = get_bom_items_as_dict(self.bom_no, self.company, qty=qty,
fetch_exploded = 0, fetch_scrap_items = 1)
for item in itervalues(item_dict):
item.from_warehouse = ""
item.is_scrap_item = 1
return item_dict
def get_unconsumed_raw_materials(self):
wo = frappe.get_doc("Work Order", self.work_order)
wo_items = frappe.get_all('Work Order Item',
filters={'parent': self.work_order},
fields=["item_code", "required_qty", "consumed_qty", "transferred_qty"]
)
work_order_qty = wo.material_transferred_for_manufacturing or wo.qty
for item in wo_items:
item_account_details = get_item_defaults(item.item_code, self.company)
# Take into account consumption, if there is any.
wo_item_qty = item.transferred_qty or item.required_qty
req_qty_each = (
(flt(wo_item_qty) - flt(item.consumed_qty)) /
(flt(work_order_qty) - flt(wo.produced_qty))
)
qty = req_qty_each * flt(self.fg_completed_qty)
if qty > 0:
self.add_to_stock_entry_detail({
item.item_code: {
"from_warehouse": wo.wip_warehouse,
"to_warehouse": "",
"qty": qty,
"item_name": item.item_name,
"description": item.description,
"stock_uom": item_account_details.stock_uom,
"expense_account": item_account_details.get("expense_account"),
"cost_center": item_account_details.get("buying_cost_center"),
}
})
def get_transfered_raw_materials(self):
transferred_materials = frappe.db.sql("""
select
item_name, original_item, item_code, sum(qty) as qty, sed.t_warehouse as warehouse,
description, stock_uom, expense_account, cost_center
from `tabStock Entry` se,`tabStock Entry Detail` sed
where
se.name = sed.parent and se.docstatus=1 and se.purpose='Material Transfer for Manufacture'
and se.work_order= %s and ifnull(sed.t_warehouse, '') != ''
group by sed.item_code, sed.t_warehouse
""", self.work_order, as_dict=1)
materials_already_backflushed = frappe.db.sql("""
select
item_code, sed.s_warehouse as warehouse, sum(qty) as qty
from
`tabStock Entry` se, `tabStock Entry Detail` sed
where
se.name = sed.parent and se.docstatus=1
and (se.purpose='Manufacture' or se.purpose='Material Consumption for Manufacture')
and se.work_order= %s and ifnull(sed.s_warehouse, '') != ''
group by sed.item_code, sed.s_warehouse
""", self.work_order, as_dict=1)
backflushed_materials = {}
for d in materials_already_backflushed:
backflushed_materials.setdefault(d.item_code,[]).append({d.warehouse: d.qty})
po_qty = frappe.db.sql("""select qty, produced_qty, material_transferred_for_manufacturing from
`tabWork Order` where name=%s""", self.work_order, as_dict=1)[0]
manufacturing_qty = flt(po_qty.qty)
produced_qty = flt(po_qty.produced_qty)
trans_qty = flt(po_qty.material_transferred_for_manufacturing)
for item in transferred_materials:
qty = item.qty
item_code = item.original_item or item.item_code
req_items = frappe.get_all('Work Order Item',
filters={'parent': self.work_order, 'item_code': item_code},
fields=["required_qty", "consumed_qty"]
)
req_qty = flt(req_items[0].required_qty) if req_items else flt(4)
req_qty_each = flt(req_qty / manufacturing_qty)
consumed_qty = flt(req_items[0].consumed_qty) if req_items else 0
if trans_qty and manufacturing_qty > (produced_qty + flt(self.fg_completed_qty)):
if qty >= req_qty:
qty = (req_qty/trans_qty) * flt(self.fg_completed_qty)
else:
qty = qty - consumed_qty
if self.purpose == 'Manufacture':
# If Material Consumption is already booked, only the remaining components needed to finish the product must be pulled
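# Worked example (illustrative numbers): req_qty_each = 2, produced_qty = 1,
# consumed_qty = 6, fg_completed_qty = 1 -> remaining_qty = 6 - 2 = 4,
# exhaust_qty = 2; since 4 > 2 and 4 / (2 * 1) >= 1, qty is set to 0
# (everything needed for this finished quantity was already consumed).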
if consumed_qty != 0:
remaining_qty = consumed_qty - (produced_qty * req_qty_each)
exhaust_qty = req_qty_each * produced_qty
if remaining_qty > exhaust_qty :
if (remaining_qty/(req_qty_each * flt(self.fg_completed_qty))) >= 1:
qty = 0
else:
qty = (req_qty_each * flt(self.fg_completed_qty)) - remaining_qty
else:
if self.flags.backflush_based_on == "Material Transferred for Manufacture":
qty = (item.qty/trans_qty) * flt(self.fg_completed_qty)
else:
qty = req_qty_each * flt(self.fg_completed_qty)
elif backflushed_materials.get(item.item_code):
for d in backflushed_materials.get(item.item_code):
if d.get(item.warehouse):
if (qty > req_qty):
qty = (qty/trans_qty) * flt(self.fg_completed_qty)
if consumed_qty and frappe.db.get_single_value("Manufacturing Settings",
"material_consumption"):
qty -= consumed_qty
if cint(frappe.get_cached_value('UOM', item.stock_uom, 'must_be_whole_number')):
qty = frappe.utils.ceil(qty)
if qty > 0:
self.add_to_stock_entry_detail({
item.item_code: {
"from_warehouse": item.warehouse,
"to_warehouse": "",
"qty": qty,
"item_name": item.item_name,
"description": item.description,
"stock_uom": item.stock_uom,
"expense_account": item.expense_account,
"cost_center": item.buying_cost_center,
"original_item": item.original_item
}
})
def get_pending_raw_materials(self, backflush_based_on=None):
"""
return the item quantity that is still pending to be issued or the quantity
desired to be transferred, whichever is less
"""
item_dict = self.get_pro_order_required_items(backflush_based_on)
max_qty = flt(self.pro_doc.qty)
allow_overproduction = False
overproduction_percentage = flt(frappe.db.get_single_value("Manufacturing Settings",
"overproduction_percentage_for_work_order"))
to_transfer_qty = flt(self.pro_doc.material_transferred_for_manufacturing) + flt(self.fg_completed_qty)
transfer_limit_qty = max_qty + ((max_qty * overproduction_percentage) / 100)
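# Example (illustrative numbers): max_qty = 100 and overproduction_percentage = 10
# give transfer_limit_qty = 100 + (100 * 10) / 100 = 110, so overproduction is
# allowed as long as the cumulative transferred quantity (including this entry)
# stays within 110.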
if transfer_limit_qty >= to_transfer_qty:
allow_overproduction = True
for item, item_details in iteritems(item_dict):
pending_to_issue = flt(item_details.required_qty) - flt(item_details.transferred_qty)
desire_to_transfer = flt(self.fg_completed_qty) * flt(item_details.required_qty) / max_qty
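# Example (illustrative numbers): required_qty = 10, transferred_qty = 4,
# fg_completed_qty = 3, max_qty = 10 -> pending_to_issue = 6 and
# desire_to_transfer = 3 * 10 / 10 = 3, so 3 is transferred below.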
if (desire_to_transfer <= pending_to_issue
or (desire_to_transfer > 0 and backflush_based_on == "Material Transferred for Manufacture")
or allow_overproduction):
item_dict[item]["qty"] = desire_to_transfer
elif pending_to_issue > 0:
item_dict[item]["qty"] = pending_to_issue
else:
item_dict[item]["qty"] = 0
# delete items with 0 qty
for item in list(item_dict.keys()):
if not item_dict[item]["qty"]:
del item_dict[item]
# inform the user when all items have already been transferred
if not len(item_dict):
frappe.msgprint(_("""All items have already been transferred for this Work Order."""))
return item_dict
def get_pro_order_required_items(self, backflush_based_on=None):
item_dict = frappe._dict()
pro_order = frappe.get_doc("Work Order", self.work_order)
if not frappe.db.get_value("Warehouse", pro_order.wip_warehouse, "is_group"):
wip_warehouse = pro_order.wip_warehouse
else:
wip_warehouse = None
for d in pro_order.get("required_items"):
if ( ((flt(d.required_qty) > flt(d.transferred_qty)) or
(backflush_based_on == "Material Transferred for Manufacture")) and
(d.include_item_in_manufacturing or self.purpose != "Material Transfer for Manufacture")):
item_row = d.as_dict()
if d.source_warehouse and not frappe.db.get_value("Warehouse", d.source_warehouse, "is_group"):
item_row["from_warehouse"] = d.source_warehouse
item_row["to_warehouse"] = wip_warehouse
if item_row["allow_alternative_item"]:
item_row["allow_alternative_item"] = pro_order.allow_alternative_item
item_dict.setdefault(d.item_code, item_row)
return item_dict
def add_to_stock_entry_detail(self, item_dict, bom_no=None):
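# item_dict is expected to map an item code (or row key) to a dict of values such as
# qty, from_warehouse, to_warehouse, uom, conversion_factor, description, etc.;
# see load_items_from_bom() above for the shape this method consumes.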
for d in item_dict:
stock_uom = item_dict[d].get("stock_uom") or frappe.db.get_value("Item", d, "stock_uom")
se_child = self.append('items')
se_child.s_warehouse = item_dict[d].get("from_warehouse")
se_child.t_warehouse = item_dict[d].get("to_warehouse")
se_child.item_code = item_dict[d].get('item_code') or cstr(d)
se_child.uom = item_dict[d]["uom"] if item_dict[d].get("uom") else stock_uom
se_child.stock_uom = stock_uom
se_child.qty = flt(item_dict[d]["qty"], se_child.precision("qty"))
se_child.allow_alternative_item = item_dict[d].get("allow_alternative_item", 0)
se_child.subcontracted_item = item_dict[d].get("main_item_code")
se_child.cost_center = (item_dict[d].get("cost_center") or
get_default_cost_center(item_dict[d], company = self.company))
se_child.is_finished_item = item_dict[d].get("is_finished_item", 0)
se_child.is_scrap_item = item_dict[d].get("is_scrap_item", 0)
for field in ["idx", "po_detail", "original_item",
"expense_account", "description", "item_name"]:
if item_dict[d].get(field):
se_child.set(field, item_dict[d].get(field))
if se_child.s_warehouse==None:
se_child.s_warehouse = self.from_warehouse
if se_child.t_warehouse==None:
se_child.t_warehouse = self.to_warehouse
# in stock uom
se_child.conversion_factor = flt(item_dict[d].get("conversion_factor")) or 1
se_child.transfer_qty = flt(item_dict[d]["qty"]*se_child.conversion_factor, se_child.precision("qty"))
# to be assigned for finished item
se_child.bom_no = bom_no
def validate_with_material_request(self):
for item in self.get("items"):
material_request = item.material_request or None
material_request_item = item.material_request_item or None
if self.purpose == 'Material Transfer' and self.outgoing_stock_entry:
parent_se = frappe.get_value("Stock Entry Detail", item.ste_detail, ['material_request','material_request_item'],as_dict=True)
if parent_se:
material_request = parent_se.material_request
material_request_item = parent_se.material_request_item
if material_request:
mreq_item = frappe.db.get_value("Material Request Item",
{"name": material_request_item, "parent": material_request},
["item_code", "warehouse", "idx"], as_dict=True)
if mreq_item.item_code != item.item_code:
frappe.throw(_("Item for row {0} does not match Material Request").format(item.idx),
frappe.MappingMismatchError)
elif self.purpose == "Material Transfer" and self.add_to_transit:
continue
def validate_batch(self):
if self.purpose in ["Material Transfer for Manufacture", "Manufacture", "Repack", "Send to Subcontractor"]:
for item in self.get("items"):
if item.batch_no:
disabled = frappe.db.get_value("Batch", item.batch_no, "disabled")
if disabled == 0:
expiry_date = frappe.db.get_value("Batch", item.batch_no, "expiry_date")
if expiry_date:
if getdate(self.posting_date) > getdate(expiry_date):
frappe.throw(_("Batch {0} of Item {1} has expired.")
.format(item.batch_no, item.item_code))
else:
frappe.throw(_("Batch {0} of Item {1} is disabled.")
.format(item.batch_no, item.item_code))
def update_purchase_order_supplied_items(self):
#Get PO Supplied Items Details
item_wh = frappe._dict(frappe.db.sql("""
select rm_item_code, reserve_warehouse
from `tabPurchase Order` po, `tabPurchase Order Item Supplied` poitemsup
where po.name = poitemsup.parent
and po.name = %s""", self.purchase_order))
#Update Supplied Qty in PO Supplied Items
frappe.db.sql("""UPDATE `tabPurchase Order Item Supplied` pos
SET
pos.supplied_qty = IFNULL((SELECT ifnull(sum(transfer_qty), 0)
FROM
`tabStock Entry Detail` sed, `tabStock Entry` se
WHERE
pos.name = sed.po_detail AND pos.rm_item_code = sed.item_code
AND pos.parent = se.purchase_order AND sed.docstatus = 1
AND se.name = sed.parent and se.purchase_order = %(po)s
), 0)
WHERE pos.docstatus = 1 and pos.parent = %(po)s""", {"po": self.purchase_order})
# Update the reserved subcontracted quantity in the bin, based on the Supplied Item details and their reserve warehouses
for d in self.get("items"):
item_code = d.get('original_item') or d.get('item_code')
reserve_warehouse = item_wh.get(item_code)
stock_bin = get_bin(item_code, reserve_warehouse)
stock_bin.update_reserved_qty_for_sub_contracting()
def update_so_in_serial_number(self):
so_name, item_code = frappe.db.get_value("Work Order", self.work_order, ["sales_order", "production_item"])
if so_name and item_code:
qty_to_reserve = get_reserved_qty_for_so(so_name, item_code)
if qty_to_reserve:
reserved_qty = frappe.db.sql("""select count(name) from `tabSerial No` where item_code=%s and
sales_order=%s""", (item_code, so_name))
if reserved_qty and reserved_qty[0][0]:
qty_to_reserve -= reserved_qty[0][0]
if qty_to_reserve > 0:
for item in self.items:
if item.item_code == item_code:
serial_nos = (item.serial_no).split("\n")
for serial_no in serial_nos:
if qty_to_reserve > 0:
frappe.db.set_value("Serial No", serial_no, "sales_order", so_name)
qty_to_reserve -= 1
def validate_reserved_serial_no_consumption(self):
for item in self.items:
if item.s_warehouse and not item.t_warehouse and item.serial_no:
for sr in get_serial_nos(item.serial_no):
sales_order = frappe.db.get_value("Serial No", sr, "sales_order")
if sales_order:
msg = (_("(Serial No: {0}) cannot be consumed as it's reserverd to fullfill Sales Order {1}.")
.format(sr, sales_order))
frappe.throw(_("Item {0} {1}").format(item.item_code, msg))
def update_transferred_qty(self):
if self.purpose == 'Material Transfer' and self.outgoing_stock_entry:
stock_entries = {}
stock_entries_child_list = []
for d in self.items:
if not (d.against_stock_entry and d.ste_detail):
continue
stock_entries_child_list.append(d.ste_detail)
transferred_qty = frappe.get_all("Stock Entry Detail", fields = ["sum(qty) as qty"],
filters = { 'against_stock_entry': d.against_stock_entry,
'ste_detail': d.ste_detail,'docstatus': 1})
stock_entries[(d.against_stock_entry, d.ste_detail)] = (transferred_qty[0].qty
if transferred_qty and transferred_qty[0] else 0.0) or 0.0
if not stock_entries: return None
cond = ''
for data, transferred_qty in stock_entries.items():
cond += """ WHEN (parent = %s and name = %s) THEN %s
""" %(frappe.db.escape(data[0]), frappe.db.escape(data[1]), transferred_qty)
if cond and stock_entries_child_list:
frappe.db.sql(""" UPDATE `tabStock Entry Detail`
SET
transferred_qty = CASE {cond} END
WHERE
name in ({ste_details}) """.format(cond=cond,
ste_details = ','.join(['%s'] * len(stock_entries_child_list))),
tuple(stock_entries_child_list))
args = {
'source_dt': 'Stock Entry Detail',
'target_field': 'transferred_qty',
'target_ref_field': 'qty',
'target_dt': 'Stock Entry Detail',
'join_field': 'ste_detail',
'target_parent_dt': 'Stock Entry',
'target_parent_field': 'per_transferred',
'source_field': 'qty',
'percent_join_field': 'against_stock_entry'
}
self._update_percent_field_in_targets(args, update_modified=True)
def update_quality_inspection(self):
if self.inspection_required:
reference_type = reference_name = ''
if self.docstatus == 1:
reference_name = self.name
reference_type = 'Stock Entry'
for d in self.items:
if d.quality_inspection:
frappe.db.set_value("Quality Inspection", d.quality_inspection, {
'reference_type': reference_type,
'reference_name': reference_name
})
def set_material_request_transfer_status(self, status):
material_requests = []
if self.outgoing_stock_entry:
parent_se = frappe.get_value("Stock Entry", self.outgoing_stock_entry, 'add_to_transit')
for item in self.items:
material_request = item.material_request or None
if self.purpose == "Material Transfer" and material_request not in material_requests:
if self.outgoing_stock_entry and parent_se:
material_request = frappe.get_value("Stock Entry Detail", item.ste_detail, 'material_request')
if material_request and material_request not in material_requests:
material_requests.append(material_request)
frappe.db.set_value('Material Request', material_request, 'transfer_status', status)
@frappe.whitelist()
def move_sample_to_retention_warehouse(company, items):
if isinstance(items, string_types):
items = json.loads(items)
retention_warehouse = frappe.db.get_single_value('Stock Settings', 'sample_retention_warehouse')
stock_entry = frappe.new_doc("Stock Entry")
stock_entry.company = company
stock_entry.purpose = "Material Transfer"
stock_entry.set_stock_entry_type()
for item in items:
if item.get('sample_quantity') and item.get('batch_no'):
sample_quantity = validate_sample_quantity(item.get('item_code'), item.get('sample_quantity'),
item.get('transfer_qty') or item.get('qty'), item.get('batch_no'))
if sample_quantity:
sample_serial_nos = ''
if item.get('serial_no'):
serial_nos = (item.get('serial_no')).split()
if serial_nos and len(serial_nos) > item.get('sample_quantity'):
serial_no_list = serial_nos[:-(len(serial_nos)-item.get('sample_quantity'))]
sample_serial_nos = '\n'.join(serial_no_list)
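# Example (illustrative numbers): with 5 serial numbers and sample_quantity = 2,
# the slice above keeps the first 2 serial numbers for the retention sample.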
stock_entry.append("items", {
"item_code": item.get('item_code'),
"s_warehouse": item.get('t_warehouse'),
"t_warehouse": retention_warehouse,
"qty": item.get('sample_quantity'),
"basic_rate": item.get('valuation_rate'),
'uom': item.get('uom'),
'stock_uom': item.get('stock_uom'),
"conversion_factor": 1.0,
"serial_no": sample_serial_nos,
'batch_no': item.get('batch_no')
})
if stock_entry.get('items'):
return stock_entry.as_dict()
@frappe.whitelist()
def make_stock_in_entry(source_name, target_doc=None):
def set_missing_values(source, target):
target.set_stock_entry_type()
def update_item(source_doc, target_doc, source_parent):
target_doc.t_warehouse = ''
if source_doc.material_request_item and source_doc.material_request :
add_to_transit = frappe.db.get_value('Stock Entry', source_name, 'add_to_transit')
if add_to_transit:
warehouse = frappe.get_value('Material Request Item', source_doc.material_request_item, 'warehouse')
target_doc.t_warehouse = warehouse
target_doc.s_warehouse = source_doc.t_warehouse
target_doc.qty = source_doc.qty - source_doc.transferred_qty
doclist = get_mapped_doc("Stock Entry", source_name, {
"Stock Entry": {
"doctype": "Stock Entry",
"field_map": {
"name": "outgoing_stock_entry"
},
"validation": {
"docstatus": ["=", 1]
}
},
"Stock Entry Detail": {
"doctype": "Stock Entry Detail",
"field_map": {
"name": "ste_detail",
"parent": "against_stock_entry",
"serial_no": "serial_no",
"batch_no": "batch_no"
},
"postprocess": update_item,
"condition": lambda doc: flt(doc.qty) - flt(doc.transferred_qty) > 0.01
},
}, target_doc, set_missing_values)
return doclist
@frappe.whitelist()
def get_work_order_details(work_order, company):
work_order = frappe.get_doc("Work Order", work_order)
pending_qty_to_produce = flt(work_order.qty) - flt(work_order.produced_qty)
return {
"from_bom": 1,
"bom_no": work_order.bom_no,
"use_multi_level_bom": work_order.use_multi_level_bom,
"wip_warehouse": work_order.wip_warehouse,
"fg_warehouse": work_order.fg_warehouse,
"fg_completed_qty": pending_qty_to_produce
}
def get_operating_cost_per_unit(work_order=None, bom_no=None):
operating_cost_per_unit = 0
if work_order:
if not bom_no:
bom_no = work_order.bom_no
for d in work_order.get("operations"):
if flt(d.completed_qty):
operating_cost_per_unit += flt(d.actual_operating_cost) / flt(d.completed_qty)
elif work_order.qty:
operating_cost_per_unit += flt(d.planned_operating_cost) / flt(work_order.qty)
# Get operating cost from BOM if not found in work_order.
if not operating_cost_per_unit and bom_no:
bom = frappe.db.get_value("BOM", bom_no, ["operating_cost", "quantity"], as_dict=1)
if bom.quantity:
operating_cost_per_unit = flt(bom.operating_cost) / flt(bom.quantity)
return operating_cost_per_unit
def get_used_alternative_items(purchase_order=None, work_order=None):
cond = ""
if purchase_order:
cond = "and ste.purpose = 'Send to Subcontractor' and ste.purchase_order = '{0}'".format(purchase_order)
elif work_order:
cond = "and ste.purpose = 'Material Transfer for Manufacture' and ste.work_order = '{0}'".format(work_order)
if not cond: return {}
used_alternative_items = {}
data = frappe.db.sql(""" select sted.original_item, sted.uom, sted.conversion_factor,
sted.item_code, sted.item_name, sted.conversion_factor,sted.stock_uom, sted.description
from
`tabStock Entry` ste, `tabStock Entry Detail` sted
where
sted.parent = ste.name and ste.docstatus = 1 and sted.original_item != sted.item_code
{0} """.format(cond), as_dict=1)
for d in data:
used_alternative_items[d.original_item] = d
return used_alternative_items
def get_valuation_rate_for_finished_good_entry(work_order):
work_order_qty = flt(frappe.get_cached_value("Work Order",
work_order, 'material_transferred_for_manufacturing'))
field = "(SUM(total_outgoing_value) / %s) as valuation_rate" % (work_order_qty)
stock_data = frappe.get_all("Stock Entry",
fields = field,
filters = {
"docstatus": 1,
"purpose": "Material Transfer for Manufacture",
"work_order": work_order
}
)
if stock_data:
return stock_data[0].valuation_rate
@frappe.whitelist()
def get_uom_details(item_code, uom, qty):
"""Returns dict `{"conversion_factor": [value], "transfer_qty": qty * [value]}`
:param args: dict with `item_code`, `uom` and `qty`"""
conversion_factor = get_conversion_factor(item_code, uom).get("conversion_factor")
if not conversion_factor:
frappe.msgprint(_("UOM coversion factor required for UOM: {0} in Item: {1}")
.format(uom, item_code))
ret = {'uom' : ''}
else:
ret = {
'conversion_factor' : flt(conversion_factor),
'transfer_qty' : flt(qty) * flt(conversion_factor)
}
return ret
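# Usage sketch (hypothetical item and UOM): get_uom_details("ITEM-0001", "Box", 5)
# would return {"conversion_factor": 12.0, "transfer_qty": 60.0} if one Box
# converts to 12 stock units for that item.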
@frappe.whitelist()
def get_expired_batch_items():
return frappe.db.sql("""select b.item, sum(sle.actual_qty) as qty, sle.batch_no, sle.warehouse, sle.stock_uom\
from `tabBatch` b, `tabStock Ledger Entry` sle
where b.expiry_date <= %s
and b.expiry_date is not NULL
and b.batch_id = sle.batch_no
group by sle.warehouse, sle.item_code, sle.batch_no""",(nowdate()), as_dict=1)
@frappe.whitelist()
def get_warehouse_details(args):
if isinstance(args, string_types):
args = json.loads(args)
args = frappe._dict(args)
ret = {}
if args.warehouse and args.item_code:
args.update({
"posting_date": args.posting_date,
"posting_time": args.posting_time,
})
ret = {
"actual_qty" : get_previous_sle(args).get("qty_after_transaction") or 0,
"basic_rate" : get_incoming_rate(args)
}
return ret
@frappe.whitelist()
def validate_sample_quantity(item_code, sample_quantity, qty, batch_no = None):
if cint(qty) < cint(sample_quantity):
frappe.throw(_("Sample quantity {0} cannot be more than received quantity {1}").format(sample_quantity, qty))
retention_warehouse = frappe.db.get_single_value('Stock Settings', 'sample_retention_warehouse')
retained_qty = 0
if batch_no:
retained_qty = get_batch_qty(batch_no, retention_warehouse, item_code)
max_retain_qty = frappe.get_value('Item', item_code, 'sample_quantity')
if retained_qty >= max_retain_qty:
frappe.msgprint(_("Maximum Samples - {0} have already been retained for Item {1} in Batch {2}.").
format(retained_qty, item_code, batch_no), alert=True)
sample_quantity = 0
qty_diff = max_retain_qty - retained_qty
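# Example (illustrative numbers): max_retain_qty = 5 and retained_qty = 3 give
# qty_diff = 2, so a requested sample_quantity of 4 is capped to 2 below.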
if cint(sample_quantity) > cint(qty_diff):
frappe.msgprint(_("Maximum Samples - {0} can be retained for Batch {1} and Item {2}.").
format(max_retain_qty, batch_no, item_code), alert=True)
sample_quantity = qty_diff
return sample_quantity
| 39.453125
| 166
| 0.729138
|
63e9569808a16bb20cfecdef8295a922bd868ed6
| 97
|
py
|
Python
|
data_structures/queue_/__init__.py
|
kirkirey/programming-for-linguists
|
d97c59738713fab725073e9c88c7321119a648fc
|
[
"Apache-2.0"
] | null | null | null |
data_structures/queue_/__init__.py
|
kirkirey/programming-for-linguists
|
d97c59738713fab725073e9c88c7321119a648fc
|
[
"Apache-2.0"
] | null | null | null |
data_structures/queue_/__init__.py
|
kirkirey/programming-for-linguists
|
d97c59738713fab725073e9c88c7321119a648fc
|
[
"Apache-2.0"
] | 4
|
2021-02-09T12:00:34.000Z
|
2021-05-21T18:59:38.000Z
|
"""
Programming for linguists
Queue module
"""
from data_structures.queue_.queue_ import Queue_
| 13.857143
| 48
| 0.793814
|
e59a465795b7e36a311ab9412c93ca3d0b507bb6
| 147
|
py
|
Python
|
organization/tests.py
|
adborden/WeVoteBase
|
7fd612aee1d3638c8a74cc81873ce0687f62cf33
|
[
"MIT"
] | null | null | null |
organization/tests.py
|
adborden/WeVoteBase
|
7fd612aee1d3638c8a74cc81873ce0687f62cf33
|
[
"MIT"
] | null | null | null |
organization/tests.py
|
adborden/WeVoteBase
|
7fd612aee1d3638c8a74cc81873ce0687f62cf33
|
[
"MIT"
] | 1
|
2020-03-04T00:22:39.000Z
|
2020-03-04T00:22:39.000Z
|
# organization/tests.py
# Brought to you by We Vote. Be good.
# -*- coding: UTF-8 -*-
from django.test import TestCase
# Create your tests here.
| 18.375
| 37
| 0.693878
|
75a179a5da29a1551f8341d7bd43bb0d603a8c45
| 3,242
|
py
|
Python
|
src/keras_models.py
|
pietrobarbiero/ghcore
|
f1d4dad55b44b26d416cb038650635a80d842f58
|
[
"Apache-2.0"
] | null | null | null |
src/keras_models.py
|
pietrobarbiero/ghcore
|
f1d4dad55b44b26d416cb038650635a80d842f58
|
[
"Apache-2.0"
] | null | null | null |
src/keras_models.py
|
pietrobarbiero/ghcore
|
f1d4dad55b44b26d416cb038650635a80d842f58
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Wed Nov 21 21:30:31 2018
@authors: Barbiero Pietro and Ciravegna Gabriele
"""
"""Train a convnet on the MNIST database with ResNets.
ResNets are a bit overkill for this problem, but this illustrates how to use
the Residual wrapper on ConvNets.
See: https://github.com/fchollet/keras/blob/master/examples/mnist_cnn.py
"""
from numpy import random
random.seed(42) # @UndefinedVariable
from keras.datasets import mnist
from keras.models import Model
from keras.layers import Input, Dense, Dropout, Activation, Flatten
from keras.layers import Conv2D, MaxPooling2D
from keras.utils import np_utils
from keras import backend as K
from resnet import Residual
import numpy as np
def keras_cnn_model(X_train, y_train, X_test, y_test, \
epochs=50, batch_size=10, verbose=0):
nb_classes = len( np.unique(y_train) )
img_rows, img_cols = int( np.sqrt(X_train.shape[1]) ), int( np.sqrt(X_train.shape[1]) )
pool_size = (2, 2)
kernel_size = (3, 3)
# (X_train, y_train), (X_test, y_test) = mnist.load_data()
if K.image_dim_ordering() == 'th':
X_train = X_train.reshape(X_train.shape[0], 1, img_rows, img_cols)
X_test = X_test.reshape(X_test.shape[0], 1, img_rows, img_cols)
input_shape = (1, img_rows, img_cols)
else:
X_train = X_train.reshape(X_train.shape[0], img_rows, img_cols, 1)
X_test = X_test.reshape(X_test.shape[0], img_rows, img_cols, 1)
input_shape = (img_rows, img_cols, 1)
X_train = X_train.astype('float32')
X_test = X_test.astype('float32')
X_train /= 255
X_test /= 255
# print('X_train shape:', X_train.shape)
# print(X_train.shape[0], 'train samples')
# print(X_test.shape[0], 'test samples')
# convert class vectors to binary class matrices
Y_train = np_utils.to_categorical(y_train, nb_classes)
Y_test = np_utils.to_categorical(y_test, nb_classes)
# Model
input_var = Input(shape=input_shape)
conv1 = Conv2D(64, kernel_size, padding='same', activation='relu')(input_var)
conv1 = MaxPooling2D(pool_size=pool_size)(conv1)
conv1 = Conv2D(32, kernel_size, padding='same', activation='relu')(conv1)
conv1 = MaxPooling2D(pool_size=pool_size)(conv1)
conv1 = Conv2D(8, kernel_size, padding='same', activation='relu')(conv1)
conv1 = MaxPooling2D(pool_size=pool_size)(conv1)
# resnet = conv1
# for _ in range(5):
# resnet = Residual(Convolution2D(8, kernel_size[0], kernel_size[1],
# border_mode='same'))(resnet)
# resnet = Activation('relu')(resnet)
# mxpool = MaxPooling2D(pool_size=pool_size)(resnet)
flat = Flatten()(conv1)
dense = Dropout(0.5)(flat)
# softmax = Dense(nb_classes, activation='relu')(dense)
# dense = Dropout(0.5)(dense)
softmax = Dense(nb_classes, activation='softmax')(dense)
model = Model(inputs=[input_var], outputs=[softmax])
model.compile(loss='categorical_crossentropy',
optimizer='adam',
metrics=['accuracy'])
model.fit(X_train, Y_train, batch_size=batch_size, epochs=epochs,
verbose=0, validation_data=(X_test, Y_test))
#model.save('mnist_model.h5')
loss, accuracy = model.evaluate(X_test, Y_test, verbose=0)
# print('Test loss:', loss)
# print('Test accuracy:', accuracy)
return model, accuracy
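# Usage sketch (not part of the original module; assumes flattened square
# grayscale images, e.g. MNIST 28x28 -> 784 feature columns):
#
# (X_train, y_train), (X_test, y_test) = mnist.load_data()
# X_train = X_train.reshape(len(X_train), -1)
# X_test = X_test.reshape(len(X_test), -1)
# model, accuracy = keras_cnn_model(X_train, y_train, X_test, y_test,
#                                   epochs=5, batch_size=128, verbose=0)
# print("Test accuracy:", accuracy)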
| 34.126316
| 88
| 0.708822
|
602de68499540ba188c620decb8737a7d1e2fbae
| 21,846
|
py
|
Python
|
tapiriik/services/Endomondo/endomondo.py
|
jjbrunton/tapiriik
|
e7669f0d97801545137ae020dc4ce1262def0620
|
[
"Apache-2.0"
] | 11
|
2019-08-05T15:38:25.000Z
|
2022-03-12T09:50:02.000Z
|
tapiriik/services/Endomondo/endomondo.py
|
jjbrunton/tapiriik
|
e7669f0d97801545137ae020dc4ce1262def0620
|
[
"Apache-2.0"
] | 31
|
2019-03-05T20:38:11.000Z
|
2022-03-21T09:41:23.000Z
|
tapiriik/services/Endomondo/endomondo.py
|
jjbrunton/tapiriik
|
e7669f0d97801545137ae020dc4ce1262def0620
|
[
"Apache-2.0"
] | 8
|
2019-03-05T08:20:07.000Z
|
2021-08-18T08:20:17.000Z
|
from tapiriik.settings import WEB_ROOT, ENDOMONDO_CLIENT_KEY, ENDOMONDO_CLIENT_SECRET, SECRET_KEY
from tapiriik.services.service_base import ServiceAuthenticationType, ServiceBase
from tapiriik.services.interchange import UploadedActivity, ActivityType, ActivityStatistic, ActivityStatisticUnit, Waypoint, WaypointType, Location, Lap
from tapiriik.services.api import APIException, APIExcludeActivity, UserException, UserExceptionType
from tapiriik.database import redis
from django.core.urlresolvers import reverse
from datetime import timedelta, datetime
import dateutil.parser
from requests_oauthlib import OAuth1Session
import logging
import pytz
import json
import os
import hashlib
logger = logging.getLogger(__name__)
class EndomondoService(ServiceBase):
ID = "endomondo"
DisplayName = "Endomondo"
DisplayAbbreviation = "EN"
AuthenticationType = ServiceAuthenticationType.OAuth
UserProfileURL = "https://www.endomondo.com/profile/{0}"
UserActivityURL = "https://www.endomondo.com/users/{0}/workouts/{1}"
PartialSyncRequiresTrigger = True
AuthenticationNoFrame = True
ConfigurationDefaults = {
"DeviceRegistered": False,
}
# The complete list:
# running,cycling transportation,cycling sport,mountain biking,skating,roller skiing,skiing cross country,skiing downhill,snowboarding,kayaking,kite surfing,rowing,sailing,windsurfing,fitness walking,golfing,hiking,orienteering,walking,riding,swimming,spinning,other,aerobics,badminton,baseball,basketball,boxing,stair climbing,cricket,cross training,dancing,fencing,american football,rugby,soccer,handball,hockey,pilates,polo,scuba diving,squash,table tennis,tennis,beach volley,volleyball,weight training,yoga,martial arts,gymnastics,step counter,crossfit,treadmill running,skateboarding,surfing,snowshoeing,wheelchair,climbing,treadmill walking,kick scooter,standup paddling,running trail,rowing indoor,floorball,ice skating,skiing touring,rope jumping,stretching,running canicross,paddle tennis,paragliding
_activityMappings = {
"running": ActivityType.Running,
"cycling transportation": ActivityType.Cycling,
"cycling sport": ActivityType.Cycling,
"mountain biking": ActivityType.MountainBiking,
"skating": ActivityType.Skating,
"skiing cross country": ActivityType.CrossCountrySkiing,
"skiing downhill": ActivityType.DownhillSkiing,
"snowboarding": ActivityType.Snowboarding,
"rowing": ActivityType.Rowing,
"fitness walking": ActivityType.Walking,
"hiking": ActivityType.Hiking,
"orienteering": ActivityType.Running,
"walking": ActivityType.Walking,
"swimming": ActivityType.Swimming,
"spinning": ActivityType.Cycling, # indoor cycling
"other": ActivityType.Other,
"cross training": ActivityType.Elliptical, # elliptical training
"weight training": ActivityType.StrengthTraining,
"treadmill running": ActivityType.Running,
"snowshoeing": ActivityType.Walking,
"wheelchair": ActivityType.Wheelchair,
"climbing": ActivityType.Climbing,
"roller skiing": ActivityType.RollerSkiing,
"treadmill walking": ActivityType.Walking,
"running trail": ActivityType.Running,
"rowing indoor": ActivityType.Rowing,
"running canicross": ActivityType.Running,
"stand up paddling": ActivityType.StandUpPaddling,
}
_reverseActivityMappings = {
"running": ActivityType.Running,
"cycling sport": ActivityType.Cycling,
"mountain biking": ActivityType.MountainBiking,
"skating": ActivityType.Skating,
"skiing cross country": ActivityType.CrossCountrySkiing,
"skiing downhill": ActivityType.DownhillSkiing,
"snowboarding": ActivityType.Snowboarding,
"rowing": ActivityType.Rowing,
"walking": ActivityType.Walking,
"hiking": ActivityType.Hiking,
"swimming": ActivityType.Swimming,
"other": ActivityType.Other,
"wheelchair": ActivityType.Wheelchair,
"climbing" : ActivityType.Climbing,
"roller skiing": ActivityType.RollerSkiing,
"stand up paddling": ActivityType.StandUpPaddling,
}
_activitiesThatDontRoundTrip = {
ActivityType.Cycling,
ActivityType.Running,
ActivityType.Walking
}
SupportedActivities = list(_activityMappings.values())
ReceivesNonGPSActivitiesWithOtherSensorData = False
def WebInit(self):
self.UserAuthorizationURL = reverse("oauth_redirect", kwargs={"service": "endomondo"})
def _rateLimitBailout(self, response):
if response.status_code == 503 and "user_refused" in response.text:
raise APIException("Endomondo user token rate limit reached", user_exception=UserException(UserExceptionType.RateLimited))
def _oauthSession(self, connection=None, **params):
if connection:
params["resource_owner_key"] = connection.Authorization["Token"]
params["resource_owner_secret"] = connection.Authorization["Secret"]
return OAuth1Session(ENDOMONDO_CLIENT_KEY, client_secret=ENDOMONDO_CLIENT_SECRET, **params)
def GenerateUserAuthorizationURL(self, session, level=None):
oauthSession = self._oauthSession(callback_uri=WEB_ROOT + reverse("oauth_return", kwargs={"service": "endomondo"}))
tokens = oauthSession.fetch_request_token("https://api.endomondo.com/oauth/request_token")
redis_token_key = 'endomondo:oauth:%s' % tokens["oauth_token"]
redis.setex(redis_token_key, tokens["oauth_token_secret"], timedelta(hours=24))
return oauthSession.authorization_url("https://www.endomondo.com/oauth/authorize")
def RetrieveAuthorizationToken(self, req, level):
redis_token_key = "endomondo:oauth:%s" % req.GET["oauth_token"]
secret = redis.get(redis_token_key)
assert secret
redis.delete(redis_token_key)
oauthSession = self._oauthSession(resource_owner_secret=secret)
oauthSession.parse_authorization_response(req.get_full_path())
tokens = oauthSession.fetch_access_token("https://api.endomondo.com/oauth/access_token")
userInfo = oauthSession.get("https://api.endomondo.com/api/1/user")
userInfo = userInfo.json()
return (userInfo["id"], {"Token": tokens["oauth_token"], "Secret": tokens["oauth_token_secret"]})
def RevokeAuthorization(self, serviceRecord):
pass
def _parseDate(self, date):
return datetime.strptime(date, "%Y-%m-%d %H:%M:%S UTC").replace(tzinfo=pytz.utc)
def _formatDate(self, date):
return datetime.strftime(date.astimezone(pytz.utc), "%Y-%m-%d %H:%M:%S UTC")
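# Example: datetime(2015, 4, 27, 18, 0, tzinfo=pytz.utc) round-trips through
# _formatDate/_parseDate as the string "2015-04-27 18:00:00 UTC".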
def DownloadActivityList(self, serviceRecord, exhaustive=False):
oauthSession = self._oauthSession(serviceRecord)
activities = []
exclusions = []
page_url = "https://api.endomondo.com/api/1/workouts"
while True:
resp = oauthSession.get(page_url)
try:
respList = resp.json()["data"]
except ValueError:
self._rateLimitBailout(resp)
raise APIException("Error decoding activity list resp %s %s" % (resp.status_code, resp.text))
for actInfo in respList:
activity = UploadedActivity()
activity.StartTime = self._parseDate(actInfo["start_time"])
logger.debug("Activity s/t %s" % activity.StartTime)
if "is_tracking" in actInfo and actInfo["is_tracking"]:
exclusions.append(APIExcludeActivity("Not complete", activity_id=actInfo["id"], permanent=False, user_exception=UserException(UserExceptionType.LiveTracking)))
continue
if "end_time" in actInfo:
activity.EndTime = self._parseDate(actInfo["end_time"])
if actInfo["sport"] in self._activityMappings:
activity.Type = self._activityMappings[actInfo["sport"]]
# "duration" is timer time
if "duration_total" in actInfo:
activity.Stats.TimerTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=float(actInfo["duration_total"]))
if "distance_total" in actInfo:
activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers, value=float(actInfo["distance_total"]))
if "calories_total" in actInfo:
activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=float(actInfo["calories_total"]))
activity.Stats.Elevation = ActivityStatistic(ActivityStatisticUnit.Meters)
if "altitude_max" in actInfo:
activity.Stats.Elevation.Max = float(actInfo["altitude_max"])
if "altitude_min" in actInfo:
activity.Stats.Elevation.Min = float(actInfo["altitude_min"])
if "total_ascent" in actInfo:
activity.Stats.Elevation.Gain = float(actInfo["total_ascent"])
if "total_descent" in actInfo:
activity.Stats.Elevation.Loss = float(actInfo["total_descent"])
activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.KilometersPerHour)
if "speed_max" in actInfo:
activity.Stats.Speed.Max = float(actInfo["speed_max"])
if "heart_rate_avg" in actInfo:
activity.Stats.HR = ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, avg=float(actInfo["heart_rate_avg"]))
if "heart_rate_max" in actInfo:
activity.Stats.HR.update(ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, max=float(actInfo["heart_rate_max"])))
if "cadence_avg" in actInfo:
activity.Stats.Cadence = ActivityStatistic(ActivityStatisticUnit.RevolutionsPerMinute, avg=int(actInfo["cadence_avg"]))
if "cadence_max" in actInfo:
activity.Stats.Cadence.update(ActivityStatistic(ActivityStatisticUnit.RevolutionsPerMinute, max=int(actInfo["cadence_max"])))
if "power_avg" in actInfo:
activity.Stats.Power = ActivityStatistic(ActivityStatisticUnit.Watts, avg=int(actInfo["power_avg"]))
if "power_max" in actInfo:
activity.Stats.Power.update(ActivityStatistic(ActivityStatisticUnit.Watts, max=int(actInfo["power_max"])))
if "title" in actInfo:
activity.Name = actInfo["title"]
activity.ServiceData = {"WorkoutID": int(actInfo["id"]), "Sport": actInfo["sport"]}
activity.CalculateUID()
activities.append(activity)
paging = resp.json()["paging"]
if "next" not in paging or not paging["next"] or not exhaustive:
break
else:
page_url = paging["next"]
return activities, exclusions
def SubscribeToPartialSyncTrigger(self, serviceRecord):
resp = self._oauthSession(serviceRecord).put("https://api.endomondo.com/api/1/subscriptions/workout/%s" % serviceRecord.ExternalID)
try:
assert resp.status_code in [200, 201] # Created, or already existed
except:
raise APIException("Could not unsubscribe - received unknown result %s - %s" % (resp.status_code, resp.text))
serviceRecord.SetPartialSyncTriggerSubscriptionState(True)
def UnsubscribeFromPartialSyncTrigger(self, serviceRecord):
resp = self._oauthSession(serviceRecord).delete("https://api.endomondo.com/api/1/subscriptions/workout/%s" % serviceRecord.ExternalID)
try:
assert resp.status_code in [204, 500] # Docs say otherwise, but no-subscription-found is 500
except:
raise APIException("Could not unsubscribe - received unknown result %s - %s" % (resp.status_code, resp.text))
serviceRecord.SetPartialSyncTriggerSubscriptionState(False)
def ExternalIDsForPartialSyncTrigger(self, req):
data = json.loads(req.body.decode("UTF-8"))
delta_external_ids = [int(x["id"]) for x in data["data"]]
return delta_external_ids
def DownloadActivity(self, serviceRecord, activity):
resp = self._oauthSession(serviceRecord).get("https://api.endomondo.com/api/1/workouts/%d" % activity.ServiceData["WorkoutID"], params={"fields": "points"})
try:
resp = resp.json()
except ValueError:
self._rateLimitBailout(resp)
res_txt = resp.text
raise APIException("Parse failure in Endomondo activity download: %s" % resp.status_code)
lap = Lap(stats=activity.Stats, startTime=activity.StartTime, endTime=activity.EndTime)
activity.Laps = [lap]
activity.GPS = False
old_location = None
in_pause = False
for pt in resp["points"]:
wp = Waypoint()
if "time" not in pt:
# Manually-entered activities with a course attached to them have date-less waypoints
# It'd be nice to transfer those courses, but it's a concept few other sites support AFAIK
# So, ignore the points entirely
continue
wp.Timestamp = self._parseDate(pt["time"])
if ("lat" in pt and "lng" in pt) or "alt" in pt:
wp.Location = Location()
if "lat" in pt and "lng" in pt:
wp.Location.Latitude = pt["lat"]
wp.Location.Longitude = pt["lng"]
activity.GPS = True
if "alt" in pt:
wp.Location.Altitude = pt["alt"]
if wp.Location == old_location:
# We have seen the point with the same coordinates
# before. This causes other services (e.g Strava) to
# interpret this as if we were standing for a while,
# which causes us having wrong activity time when
# importing. We mark the point as paused in hopes this
# fixes the issue.
in_pause = True
wp.Type = WaypointType.Pause
elif in_pause:
in_pause = False
wp.Type = WaypointType.Resume
old_location = wp.Location
if "hr" in pt:
wp.HR = pt["hr"]
if "cad" in pt:
wp.Cadence = pt["cad"]
if "pow" in pt:
wp.Power = pt["pow"]
lap.Waypoints.append(wp)
activity.Stationary = len(lap.Waypoints) == 0
return activity
def _deviceId(self, serviceRecord):
csp = hashlib.new("md5")
csp.update(str(serviceRecord.ExternalID).encode("utf-8"))
csp.update(SECRET_KEY.encode("utf-8"))
return "tap-" + csp.hexdigest()
def _getSport(self, activity):
# This is an activity type that doesn't round trip
if (activity.Type in self._activitiesThatDontRoundTrip and
# We have the original sport
"Sport" in activity.ServiceData and
# We know what this sport is
activity.ServiceData["Sport"] in self._activityMappings and
# The type didn't change (if we changed from Walking to Cycling, we'd want to let the new value through)
activity.Type == self._activityMappings[activity.ServiceData["Sport"]]):
return activity.ServiceData["Sport"]
else:
return [k for k,v in self._reverseActivityMappings.items() if v == activity.Type][0]
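# Example: a Cycling activity originally downloaded as "cycling transportation"
# keeps that sport on re-upload; a Cycling activity coming from another service
# falls through to the reverse mapping and is uploaded as "cycling sport".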
def UploadActivity(self, serviceRecord, activity):
session = self._oauthSession(serviceRecord)
device_id = self._deviceId(serviceRecord)
if not serviceRecord.GetConfiguration()["DeviceRegistered"]:
device_info = {
"name": "tapiriik",
"vendor": "tapiriik",
"model": "tapiriik",
"os": "tapiriik",
"os_version": "1",
"app_variant": "tapiriik",
"app_version": "1"
}
device_add_resp = session.post("https://api.endomondo.com/api/1/device/%s" % device_id, data=json.dumps(device_info))
if device_add_resp.status_code != 200:
self._rateLimitBailout(device_add_resp)
raise APIException("Could not add device %s %s" % (device_add_resp.status_code, device_add_resp.text))
serviceRecord.SetConfiguration({"DeviceRegistered": True})
activity_id = "tap-" + activity.UID + "-" + str(os.getpid())
sport = self._getSport(activity)
upload_data = {
"device_id": device_id,
"sport": sport,
"start_time": self._formatDate(activity.StartTime),
"end_time": self._formatDate(activity.EndTime),
"points": []
}
if activity.Name:
upload_data["title"] = activity.Name
if activity.Notes:
upload_data["notes"] = activity.Notes
if activity.Stats.Distance.Value is not None:
upload_data["distance_total"] = activity.Stats.Distance.asUnits(ActivityStatisticUnit.Kilometers).Value
if activity.Stats.TimerTime.Value is not None:
upload_data["duration_total"] = activity.Stats.TimerTime.asUnits(ActivityStatisticUnit.Seconds).Value
elif activity.Stats.MovingTime.Value is not None:
upload_data["duration_total"] = activity.Stats.MovingTime.asUnits(ActivityStatisticUnit.Seconds).Value
else:
upload_data["duration_total"] = (activity.EndTime - activity.StartTime).total_seconds()
if activity.Stats.Energy.Value is not None:
upload_data["calories_total"] = activity.Stats.Energy.asUnits(ActivityStatisticUnit.Kilocalories).Value
elev_stats = activity.Stats.Elevation.asUnits(ActivityStatisticUnit.Meters)
if elev_stats.Max is not None:
upload_data["altitude_max"] = elev_stats.Max
if elev_stats.Min is not None:
upload_data["altitude_min"] = elev_stats.Min
if elev_stats.Gain is not None:
upload_data["total_ascent"] = elev_stats.Gain
if elev_stats.Loss is not None:
upload_data["total_descent"] = elev_stats.Loss
speed_stats = activity.Stats.Speed.asUnits(ActivityStatisticUnit.KilometersPerHour)
if speed_stats.Max is not None:
upload_data["speed_max"] = speed_stats.Max
hr_stats = activity.Stats.HR.asUnits(ActivityStatisticUnit.BeatsPerMinute)
if hr_stats.Average is not None:
upload_data["heart_rate_avg"] = hr_stats.Average
if hr_stats.Max is not None:
upload_data["heart_rate_max"] = hr_stats.Max
if activity.Stats.Cadence.Average is not None:
upload_data["cadence_avg"] = activity.Stats.Cadence.asUnits(ActivityStatisticUnit.RevolutionsPerMinute).Average
elif activity.Stats.RunCadence.Average is not None:
upload_data["cadence_avg"] = activity.Stats.RunCadence.asUnits(ActivityStatisticUnit.StepsPerMinute).Average
if activity.Stats.Cadence.Max is not None:
upload_data["cadence_max"] = activity.Stats.Cadence.asUnits(ActivityStatisticUnit.RevolutionsPerMinute).Max
elif activity.Stats.RunCadence.Max is not None:
upload_data["cadence_max"] = activity.Stats.RunCadence.asUnits(ActivityStatisticUnit.StepsPerMinute).Max
if activity.Stats.Power.Average is not None:
upload_data["power_avg"] = activity.Stats.Power.asUnits(ActivityStatisticUnit.Watts).Average
if activity.Stats.Power.Max is not None:
upload_data["power_max"] = activity.Stats.Power.asUnits(ActivityStatisticUnit.Watts).Max
for wp in activity.GetFlatWaypoints():
pt = {
"time": self._formatDate(wp.Timestamp),
}
if wp.Location:
if wp.Location.Latitude is not None and wp.Location.Longitude is not None:
pt["lat"] = wp.Location.Latitude
pt["lng"] = wp.Location.Longitude
if wp.Location.Altitude is not None:
pt["alt"] = wp.Location.Altitude
if wp.HR is not None:
pt["hr"] = round(wp.HR)
if wp.Cadence is not None:
pt["cad"] = round(wp.Cadence)
elif wp.RunCadence is not None:
pt["cad"] = round(wp.RunCadence)
if wp.Power is not None:
pt["pow"] = round(wp.Power)
if wp.Type == WaypointType.Pause:
pt["inst"] = "pause"
elif wp.Type == WaypointType.Resume:
pt["inst"] = "resume"
upload_data["points"].append(pt)
if len(upload_data["points"]):
upload_data["points"][0]["inst"] = "start"
upload_data["points"][-1]["inst"] = "stop"
upload_resp = session.post("https://api.endomondo.com/api/1/workouts/%s" % activity_id, data=json.dumps(upload_data))
if upload_resp.status_code != 200:
self._rateLimitBailout(upload_resp)
raise APIException("Could not upload activity %s %s" % (upload_resp.status_code, upload_resp.text))
return upload_resp.json()["id"]
def DeleteCachedData(self, serviceRecord):
pass
def DeleteActivity(self, serviceRecord, uploadId):
session = self._oauthSession(serviceRecord)
del_res = session.delete("https://api.endomondo.com/api/1/workouts/%s" % uploadId)
del_res.raise_for_status()
| 47.491304
| 814
| 0.648448
|
bcdd1f7194133e20658dad27344d1d9dc2fc25a3
| 45,925
|
py
|
Python
|
neptune/experiments.py
|
pitercl/neptune-client
|
1e3e105bdaad7f7ea50500646e3fd1ad298f0b4a
|
[
"Apache-2.0"
] | null | null | null |
neptune/experiments.py
|
pitercl/neptune-client
|
1e3e105bdaad7f7ea50500646e3fd1ad298f0b4a
|
[
"Apache-2.0"
] | null | null | null |
neptune/experiments.py
|
pitercl/neptune-client
|
1e3e105bdaad7f7ea50500646e3fd1ad298f0b4a
|
[
"Apache-2.0"
] | null | null | null |
#
# Copyright (c) 2019, Neptune Labs Sp. z o.o.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import base64
import logging
import os
import re
import time
import traceback
import pandas as pd
import requests
import six
from pandas.errors import EmptyDataError
from neptune.api_exceptions import ExperimentAlreadyFinished, ChannelDoesNotExist, PathInProjectNotFound
from neptune.exceptions import FileNotFound, InvalidChannelValue, NoChannelValue, NotADirectory
from neptune.internal.channels.channels import ChannelValue, ChannelType, ChannelNamespace
from neptune.internal.channels.channels_values_sender import ChannelsValuesSender
from neptune.internal.execution.execution_context import ExecutionContext
from neptune.internal.storage.storage_utils import upload_to_storage, UploadEntry, normalize_file_name
from neptune.internal.utils.image import get_image_content
from neptune.utils import align_channels_on_x, is_float, is_nan_or_inf
_logger = logging.getLogger(__name__)
# pylint: disable=too-many-lines
class Experiment(object):
"""A class for managing Neptune experiment.
Each time a user creates a new experiment, an instance of this class is created.
It lets you manage experiment, :meth:`~neptune.experiments.Experiment.log_metric`,
:meth:`~neptune.experiments.Experiment.log_text`,
:meth:`~neptune.experiments.Experiment.log_image`,
:meth:`~neptune.experiments.Experiment.set_property`,
and much more.
Args:
backend (:obj:`neptune.Backend`): A Backend object
project (:obj:`neptune.Project`): The project this experiment belongs to
_id (:obj:`str`): Experiment id
internal_id (:obj:`str`): internal UUID
Example:
Assuming that `project` is an instance of :class:`~neptune.projects.Project`.
.. code:: python3
experiment = project.create_experiment()
Warning:
User should never create instances of this class manually.
Always use: :meth:`~neptune.projects.Project.create_experiment`.
"""
IMAGE_SIZE_LIMIT_MB = 15
def __init__(self, backend, project, _id, internal_id):
self._backend = backend
self._project = project
self._id = _id
self._internal_id = internal_id
self._channels_values_sender = ChannelsValuesSender(self)
self._execution_context = ExecutionContext(backend, self)
@property
def id(self):
"""Experiment short id
| Combination of project key and unique experiment number.
| Format is ``<project_key>-<experiment_number>``, for example: ``MPI-142``.
Returns:
:obj:`str` - experiment short id
Examples:
Assuming that `experiment` is an instance of :class:`~neptune.experiments.Experiment`.
.. code:: python3
exp_id = experiment.id
"""
return self._id
@property
def name(self):
"""Experiment name
Returns:
:obj:`str` experiment name
Examples:
Assuming that `project` is an instance of :class:`~neptune.projects.Project`.
.. code:: python3
experiment = project.create_experiment('exp_name')
exp_name = experiment.name
"""
return self._backend.get_experiment(self._internal_id).name
@property
def state(self):
"""Current experiment state
Possible values: `'running'`, `'succeeded'`, `'failed'`, `'aborted'`.
Returns:
:obj:`str` - current experiment state
Examples:
Assuming that `experiment` is an instance of :class:`~neptune.experiments.Experiment`.
.. code:: python3
state_str = experiment.state
"""
return self._backend.get_experiment(self._internal_id).state
@property
def internal_id(self):
return self._internal_id
@property
def limits(self):
return {
'channels': {
'numeric': 1000,
'text': 100,
'image': 100
}
}
def get_system_properties(self):
"""Retrieve experiment properties.
| Experiment properties are for example: `owner`, `created`, `name`, `hostname`.
| List of experiment properties may change over time.
Returns:
:obj:`dict` - dictionary mapping a property name to value.
Examples:
Assuming that `experiment` is an instance of :class:`~neptune.experiments.Experiment`.
.. code:: python3
sys_properties = experiment.get_system_properties()
"""
experiment = self._backend.get_experiment(self._internal_id)
return {
'id': experiment.shortId,
'name': experiment.name,
'created': experiment.timeOfCreation,
'finished': experiment.timeOfCompletion,
'running_time': experiment.runningTime,
'owner': experiment.owner,
'storage_size': experiment.storageSize,
'channels_size': experiment.channelsSize,
'size': experiment.storageSize + experiment.channelsSize,
'tags': experiment.tags,
'notes': experiment.description,
'description': experiment.description,
'hostname': experiment.hostname
}
def get_tags(self):
"""Get tags associated with experiment.
Returns:
:obj:`list` of :obj:`str` with all tags for this experiment.
Example:
Assuming that `experiment` is an instance of :class:`~neptune.experiments.Experiment`.
.. code:: python3
experiment.get_tags()
"""
return self._backend.get_experiment(self._internal_id).tags
def append_tag(self, tag, *tags):
"""Append tag(s) to the current experiment.
Alias: :meth:`~neptune.experiments.Experiment.append_tags`.
Only ``[a-zA-Z0-9]`` and ``-`` (dash) characters are allowed in tags.
Args:
tag (single :obj:`str` or multiple :obj:`str` or :obj:`list` of :obj:`str`):
Tag(s) to add to the current experiment.
* If :obj:`str` is passed, singe tag is added.
* If multiple - comma separated - :obj:`str` are passed, all of them are added as tags.
* If :obj:`list` of :obj:`str` is passed, all elements of the :obj:`list` are added as tags.
Examples:
.. code:: python3
neptune.append_tag('new-tag') # single tag
neptune.append_tag('first-tag', 'second-tag', 'third-tag') # few str
neptune.append_tag(['first-tag', 'second-tag', 'third-tag']) # list of str
"""
if isinstance(tag, list):
tags_list = tag
else:
tags_list = [tag] + list(tags)
self._backend.update_tags(experiment=self,
tags_to_add=tags_list,
tags_to_delete=[])
def append_tags(self, tag, *tags):
"""Append tag(s) to the current experiment.
Alias for: :meth:`~neptune.experiments.Experiment.append_tag`
"""
self.append_tag(tag, *tags)
def remove_tag(self, tag):
"""Removes single tag from the experiment.
Args:
tag (:obj:`str`): Tag to be removed
Example:
Assuming that `experiment` is an instance of :class:`~neptune.experiments.Experiment`.
.. code:: python3
# assuming experiment has tags: `['tag-1', 'tag-2']`.
experiment.remove_tag('tag-1')
Note:
Removing a tag that is not assigned to this experiment is silently ignored.
"""
self._backend.update_tags(experiment=self,
tags_to_add=[],
tags_to_delete=[tag])
def get_channels(self):
"""Alias for :meth:`~neptune.experiments.Experiment.get_logs`
"""
return self.get_logs()
def get_logs(self):
"""Retrieve all log names along with their last values for this experiment.
Returns:
:obj:`dict` - A dictionary mapping log names to their last values.
Example:
Assuming that `experiment` is an instance of :class:`~neptune.experiments.Experiment`.
.. code:: python3
exp_logs = experiment.get_logs()
"""
experiment = self._backend.get_experiment(self.internal_id)
channels_last_values_by_name = dict((ch.channelName, ch) for ch in experiment.channelsLastValues)
channels = dict()
for ch in experiment.channels:
last_value = channels_last_values_by_name.get(ch.name, None)
if last_value is not None:
ch.x = last_value.x
ch.y = last_value.y
elif ch.lastX is not None:
ch.x = ch.lastX
ch.y = None
else:
ch.x = None
ch.y = None
channels[ch.name] = ch
return channels
def _get_system_channels(self):
channels = self._backend.get_system_channels(self)
return dict((ch.name, ch) for ch in channels)
def send_metric(self, channel_name, x, y=None, timestamp=None):
"""Log metrics (numeric values) in Neptune.
Alias for :meth:`~neptune.experiments.Experiment.log_metric`
"""
return self.log_metric(channel_name, x, y, timestamp)
def log_metric(self, log_name, x, y=None, timestamp=None):
"""Log metrics (numeric values) in Neptune
| If a log with provided ``log_name`` does not exist, it is created automatically.
| If log exists (determined by ``log_name``), then new value is appended to it.
Args:
log_name (:obj:`str`): The name of log, i.e. `mse`, `loss`, `accuracy`.
x (:obj:`double`): Depending on whether the ``y`` parameter is passed:
* ``y`` not passed: The value of the log (data-point).
* ``y`` passed: Index of log entry being appended. Must be strictly increasing.
y (:obj:`double`, optional, default is ``None``): The value of the log (data-point).
timestamp (:obj:`time`, optional, default is ``None``):
Timestamp to be associated with log entry. Must be Unix time.
If ``None`` is passed, `time.time() <https://docs.python.org/3.6/library/time.html#time.time>`_
(Python 3.6 example) is invoked to obtain timestamp.
Example:
Assuming that `experiment` is an instance of :class:`~neptune.experiments.Experiment` and
'accuracy' log does not exist:
.. code:: python3
# Both calls below have the same effect
# Common invocation, providing log name and value
experiment.log_metric('accuracy', 0.5)
experiment.log_metric('accuracy', 0.65)
experiment.log_metric('accuracy', 0.8)
# Providing both x and y params
experiment.log_metric('accuracy', 0, 0.5)
experiment.log_metric('accuracy', 1, 0.65)
experiment.log_metric('accuracy', 2, 0.8)
Note:
For efficiency, logs are uploaded in batches via a queue.
Hence, if you log a lot of data, you may experience slight delays in the Neptune web application.
Note:
Passing either ``x`` or ``y`` coordinate as NaN or +/-inf causes this log entry to be ignored.
Warning is printed to ``stdout``.
"""
x, y = self._get_valid_x_y(x, y)
if not is_float(y):
raise InvalidChannelValue(expected_type='float', actual_type=type(y).__name__)
if is_nan_or_inf(y):
_logger.warning(
'Invalid metric value: %s for channel %s. '
'Metrics with nan or +/-inf values will not be sent to server',
y,
log_name)
elif x is not None and is_nan_or_inf(x):
_logger.warning(
'Invalid metric x-coordinate: %s for channel %s. '
'Metrics with nan or +/-inf x-coordinates will not be sent to server',
x,
log_name)
else:
value = ChannelValue(x, dict(numeric_value=y), timestamp)
self._channels_values_sender.send(log_name, ChannelType.NUMERIC.value, value)
def send_text(self, channel_name, x, y=None, timestamp=None):
"""Log text data in Neptune.
Alias for :meth:`~neptune.experiments.Experiment.log_text`
"""
return self.log_text(channel_name, x, y, timestamp)
def log_text(self, log_name, x, y=None, timestamp=None):
"""Log text data in Neptune
| If a log with provided ``log_name`` does not exist, it is created automatically.
| If log exists (determined by ``log_name``), then new value is appended to it.
Args:
log_name (:obj:`str`): The name of log, i.e. `mse`, `my_text_data`, `timing_info`.
x (:obj:`double` or :obj:`str`): Depending on whether the ``y`` parameter is passed:
* ``y`` not passed: The value of the log (data-point). Must be ``str``.
* ``y`` passed: Index of log entry being appended. Must be strictly increasing.
y (:obj:`str`, optional, default is ``None``): The value of the log (data-point).
timestamp (:obj:`time`, optional, default is ``None``):
Timestamp to be associated with log entry. Must be Unix time.
If ``None`` is passed, `time.time() <https://docs.python.org/3.6/library/time.html#time.time>`_
(Python 3.6 example) is invoked to obtain timestamp.
Example:
Assuming that `experiment` is an instance of :class:`~neptune.experiments.Experiment`:
.. code:: python3
# common case, where log name and data are passed
neptune.log_text('my_text_data', str(data_item))
# log_name, x and timestamp are passed
neptune.log_text(log_name='logging_losses_as_text',
x=str(val_loss),
timestamp=1560430912)
Note:
For efficiency, logs are uploaded in batches via a queue.
Hence, if you log a lot of data, you may experience slight delays in Neptune web application.
Note:
Passing ``x`` coordinate as NaN or +/-inf causes this log entry to be ignored.
Warning is printed to ``stdout``.
"""
x, y = self._get_valid_x_y(x, y)
if x is not None and is_nan_or_inf(x):
x = None
if not isinstance(y, six.string_types):
raise InvalidChannelValue(expected_type='str', actual_type=type(y).__name__)
if x is not None and is_nan_or_inf(x):
_logger.warning(
'Invalid metric x-coordinate: %s for channel %s. '
'Metrics with nan or +/-inf x-coordinates will not be sent to server',
x,
log_name)
else:
value = ChannelValue(x, dict(text_value=y), timestamp)
self._channels_values_sender.send(log_name, ChannelType.TEXT.value, value)
def send_image(self, channel_name, x, y=None, name=None, description=None, timestamp=None):
"""Log image data in Neptune.
Alias for :meth:`~neptune.experiments.Experiment.log_image`
"""
return self.log_image(channel_name, x, y, name, description, timestamp)
def log_image(self, log_name, x, y=None, image_name=None, description=None, timestamp=None):
"""Log image data in Neptune
| If a log with provided ``log_name`` does not exist, it is created automatically.
| If log exists (determined by ``log_name``), then new value is appended to it.
Args:
log_name (:obj:`str`): The name of log, i.e. `bboxes`, `visualisations`, `sample_images`.
x (:obj:`double`): Depending on whether the ``y`` parameter is passed:
* ``y`` not passed: The value of the log (data-point). See ``y`` parameter.
* ``y`` passed: Index of log entry being appended. Must be strictly increasing.
y (multiple types supported, optional, default is ``None``):
The value of the log (data-point). Can be one of the following types:
* :obj:`PIL image`
`Pillow docs <https://pillow.readthedocs.io/en/latest/reference/Image.html#image-module>`_
* :obj:`matplotlib.figure.Figure`
`Matplotlib 3.1.1 docs <https://matplotlib.org/3.1.1/api/_as_gen/matplotlib.figure.Figure.html>`_
* :obj:`str` - path to image file
* 2-dimensional :obj:`numpy.array` - interpreted as grayscale image
* 3-dimensional :obj:`numpy.array` - behavior depends on last dimension
* if last dimension is 1 - interpreted as grayscale image
* if last dimension is 3 - interpreted as RGB image
* if last dimension is 4 - interpreted as RGBA image
image_name (:obj:`str`, optional, default is ``None``): Image name
description (:obj:`str`, optional, default is ``None``): Image description
timestamp (:obj:`time`, optional, default is ``None``):
Timestamp to be associated with log entry. Must be Unix time.
If ``None`` is passed, `time.time() <https://docs.python.org/3.6/library/time.html#time.time>`_
(Python 3.6 example) is invoked to obtain timestamp.
Example:
Assuming that `experiment` is an instance of :class:`~neptune.experiments.Experiment`:
.. code:: python3
# path to image file
experiment.log_image('bbox_images', 'pictures/image.png')
experiment.log_image('bbox_images', x=5, y='pictures/image.png')
experiment.log_image('bbox_images', 'pictures/image.png', image_name='difficult_case')
# PIL image
img = PIL.Image.new('RGB', (60, 30), color = 'red')
experiment.log_image('fig', img)
# 2d numpy array
array = numpy.random.rand(300, 200)*255
experiment.log_image('fig', array)
# 3d grayscale array
array = numpy.random.rand(300, 200, 1)*255
experiment.log_image('fig', array)
# 3d RGB array
array = numpy.random.rand(300, 200, 3)*255
experiment.log_image('fig', array)
# 3d RGBA array
array = numpy.random.rand(300, 200, 4)*255
experiment.log_image('fig', array)
# matplotlib figure example 1
from matplotlib import pyplot
pyplot.plot([1, 2, 3, 4])
pyplot.ylabel('some numbers')
experiment.log_image('plots', pyplot.gcf())
# matplotlib figure example 2
from matplotlib import pyplot
import numpy
numpy.random.seed(19680801)
data = numpy.random.randn(2, 100)
figure, axs = pyplot.subplots(2, 2, figsize=(5, 5))
axs[0, 0].hist(data[0])
axs[1, 0].scatter(data[0], data[1])
axs[0, 1].plot(data[0], data[1])
axs[1, 1].hist2d(data[0], data[1])
experiment.log_image('diagrams', figure)
Note:
For efficiency, logs are uploaded in batches via a queue.
Hence, if you log a lot of data, you may experience slight delays in Neptune web application.
Note:
Passing ``x`` coordinate as NaN or +/-inf causes this log entry to be ignored.
Warning is printed to ``stdout``.
Warning:
Only images up to 15MB are supported. Larger files will not be logged to Neptune.
"""
x, y = self._get_valid_x_y(x, y)
if x is not None and is_nan_or_inf(x):
x = None
image_content = get_image_content(y)
if len(image_content) > self.IMAGE_SIZE_LIMIT_MB * 1024 * 1024:
_logger.warning('Your image is larger than %dMB. Neptune supports logging images smaller than %dMB. '
'Resize or increase compression of this image',
self.IMAGE_SIZE_LIMIT_MB,
self.IMAGE_SIZE_LIMIT_MB)
image_content = None
input_image = dict(
name=image_name,
description=description
)
if image_content:
input_image['data'] = base64.b64encode(image_content).decode('utf-8')
if x is not None and is_nan_or_inf(x):
_logger.warning(
'Invalid metric x-coordinate: %s for channel %s. '
'Metrics with nan or +/-inf x-coordinates will not be sent to server',
x,
log_name)
else:
value = ChannelValue(x, dict(image_value=input_image), timestamp)
self._channels_values_sender.send(log_name, ChannelType.IMAGE.value, value)
def send_artifact(self, artifact, destination=None):
"""Save an artifact (file) in experiment storage.
Alias for :meth:`~neptune.experiments.Experiment.log_artifact`
"""
return self.log_artifact(artifact, destination)
def log_artifact(self, artifact, destination=None):
"""Save an artifact (file) in experiment storage.
Args:
artifact (:obj:`str`): A path to the file in local filesystem.
destination (:obj:`str`, optional, default is ``None``):
A destination path.
If ``None`` is passed, an artifact file name will be used.
Raises:
`FileNotFound`: When ``artifact`` file was not found.
`StorageLimitReached`: When storage limit in the project has been reached.
Example:
Assuming that `experiment` is an instance of :class:`~neptune.experiments.Experiment`:
.. code:: python3
# simple use
experiment.log_artifact('images/wrong_prediction_1.png')
# save file in other directory
experiment.log_artifact('images/wrong_prediction_1.png', 'validation/images/wrong_prediction_1.png')
# save file under different name
experiment.log_artifact('images/wrong_prediction_1.png', 'images/my_image_1.png')
"""
if not os.path.exists(artifact):
raise FileNotFound(artifact)
target_name = os.path.basename(artifact) if destination is None else destination
upload_to_storage(upload_entries=[UploadEntry(os.path.abspath(artifact), normalize_file_name(target_name))],
upload_api_fun=self._backend.upload_experiment_output,
upload_tar_api_fun=self._backend.extract_experiment_output,
experiment=self)
def delete_artifacts(self, path):
"""Removes an artifact(s) (file/directory) from the experiment storage.
Args:
path (:obj:`list` or :obj:`str`): Path or list of paths to remove from the experiment's output
Raises:
`FileNotFound`: If a path in experiment artifacts does not exist.
Examples:
Assuming that `experiment` is an instance of :class:`~neptune.experiments.Experiment`.
.. code:: python3
experiment.delete_artifacts('forest_results.pkl')
experiment.delete_artifacts(['forest_results.pkl', 'directory'])
"""
if path is None:
raise ValueError("path argument must not be None")
paths = path
if not isinstance(path, list):
paths = [path]
for path in paths:
if path is None:
raise ValueError("path argument must not be None")
normalized_path = os.path.normpath(path)
if normalized_path.startswith(".."):
raise ValueError("path to delete must be within project's directory")
if normalized_path == "." or normalized_path == "/" or not normalized_path:
raise ValueError("Cannot delete whole artifacts directory")
try:
for path in paths:
self._backend.rm_data(experiment=self, path=path)
except PathInProjectNotFound:
raise FileNotFound(path)
def download_artifact(self, path, destination_dir=None):
"""Download an artifact (file) from the experiment storage.
Download a file indicated by ``path`` from the experiment artifacts and save it in ``destination_dir``.
Args:
path (:obj:`str`): Path to the file to be downloaded.
destination_dir (:obj:`str`):
The directory where the file will be downloaded.
If ``None`` is passed, the file will be downloaded to the current working directory.
Raises:
`NotADirectory`: When ``destination_dir`` is not a directory.
`FileNotFound`: If a path in experiment artifacts does not exist.
Examples:
Assuming that `experiment` is an instance of :class:`~neptune.experiments.Experiment`.
.. code:: python3
experiment.download_artifact('forest_results.pkl', '/home/user/files/')
"""
if not destination_dir:
destination_dir = os.getcwd()
project_storage_path = "/{exp_id}/output/{file}".format(exp_id=self.id, file=path)
destination_path = os.path.join(destination_dir, os.path.basename(path))
if not os.path.exists(destination_dir):
os.makedirs(destination_dir)
elif not os.path.isdir(destination_dir):
raise NotADirectory(destination_dir)
try:
self._backend.download_data(self._project, project_storage_path, destination_path)
except PathInProjectNotFound:
raise FileNotFound(path)
def download_sources(self, path=None, destination_dir=None):
"""Download a directory or a single file from experiment's sources as a ZIP archive.
Download a subdirectory (or file) ``path`` from the experiment sources and save it in ``destination_dir``
as a ZIP archive. The name of the archive will be the name of the downloaded directory (or file) with a '.zip' extension.
Args:
path (:obj:`str`):
Path of a directory or file in experiment sources to be downloaded.
If ``None`` is passed, all source files will be downloaded.
destination_dir (:obj:`str`): The directory where the archive will be downloaded.
If ``None`` is passed, the archive will be downloaded to the current working directory.
Raises:
`NotADirectory`: When ``destination_dir`` is not a directory.
`FileNotFound`: If a path in experiment sources does not exist.
Examples:
Assuming that `experiment` is an instance of :class:`~neptune.experiments.Experiment`.
.. code:: python3
# Download all experiment sources to current working directory
experiment.download_sources()
# Download a single directory
experiment.download_sources('src/my-module')
# Download all experiment sources to user-defined directory
experiment.download_sources(destination_dir='/tmp/sources/')
# Download a single directory to user-defined directory
experiment.download_sources('src/my-module', 'sources/')
"""
if not path:
path = ""
if not destination_dir:
destination_dir = os.getcwd()
if not os.path.exists(destination_dir):
os.makedirs(destination_dir)
elif not os.path.isdir(destination_dir):
raise NotADirectory(destination_dir)
download_request = self._backend.prepare_source_download_reuqest(self, path)
self._download_from_request(download_request, destination_dir, path)
def download_artifacts(self, path=None, destination_dir=None):
"""Download a directory or a single file from experiment's artifacts as a ZIP archive.
Download a subdirectory (or file) ``path`` from the experiment artifacts and save it in ``destination_dir``
as a ZIP archive. The name of the archive will be the name of the downloaded directory (or file) with a '.zip' extension.
Args:
path (:obj:`str`):
Path of a directory or file in experiment artifacts to be downloaded.
If ``None`` is passed, all artifacts will be downloaded.
destination_dir (:obj:`str`): The directory where the archive will be downloaded.
If ``None`` is passed, the archive will be downloaded to the current working directory.
Raises:
`NotADirectory`: When ``destination_dir`` is not a directory.
`FileNotFound`: If a path in experiment artifacts does not exist.
Examples:
Assuming that `experiment` is an instance of :class:`~neptune.experiments.Experiment`.
.. code:: python3
# Download all experiment artifacts to current working directory
experiment.download_artifacts()
# Download a single directory
experiment.download_artifacts('data/images')
# Download all experiment artifacts to user-defined directory
experiment.download_artifacts(destination_dir='/tmp/artifacts/')
# Download a single directory to user-defined directory
experiment.download_artifacts('data/images', 'artifacts/')
"""
if not path:
path = ""
if not destination_dir:
destination_dir = os.getcwd()
if not os.path.exists(destination_dir):
os.makedirs(destination_dir)
elif not os.path.isdir(destination_dir):
raise NotADirectory(destination_dir)
download_request = self._backend.prepare_output_download_reuqest(self, path)
self._download_from_request(download_request, destination_dir, path)
def _download_from_request(self, download_request, destination_dir, path):
sleep_time = 1
max_sleep_time = 16
while not hasattr(download_request, "downloadUrl"):
time.sleep(sleep_time)
sleep_time = min(sleep_time * 2, max_sleep_time)
download_request = self._backend.get_download_request(download_request.id)
# We do not use Backend here because `downloadUrl` can be any URL (not only a Neptune API endpoint)
response = requests.get(
url=download_request.downloadUrl,
headers={"Accept": "application/zip"},
stream=True
)
with response:
filename = None
if 'content-disposition' in response.headers:
content_disposition = response.headers['content-disposition']
filenames = re.findall("filename=(.+)", content_disposition)
if filenames:
filename = filenames[0]
if not filename:
filename = os.path.basename(path.rstrip("/")) + ".zip"
destination_path = os.path.join(destination_dir, filename)
with open(destination_path, "wb") as f:
for chunk in response.iter_content(chunk_size=10 * 1024 * 1024):
if chunk:
f.write(chunk)
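# Note on the method above (illustrative comment, not part of the original
# module): it polls the backend with exponential backoff (1s, 2s, 4s, capped
# at 16s) until the prepared request exposes a `downloadUrl`, then streams the
# archive to disk in 10 MB chunks. A minimal standalone sketch of the same
# backoff pattern, with hypothetical `is_ready` / `refresh` helpers shown only
# for illustration:
#
#   sleep_time, max_sleep_time = 1, 16
#   while not is_ready(request):
#       time.sleep(sleep_time)
#       sleep_time = min(sleep_time * 2, max_sleep_time)
#       request = refresh(request)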
def reset_log(self, log_name):
"""Resets the log.
Removes all data from the log and enables it to be reused from scratch.
Args:
log_name (:obj:`str`): The name of log to reset.
Raises:
`ChannelDoesNotExist`: When the log with name ``log_name`` does not exist on the server.
Example:
Assuming that `experiment` is an instance of :class:`~neptune.experiments.Experiment`.
.. code:: python3
experiment.reset_log('my_metric')
Note:
Check Neptune web application to see that reset charts have no data.
"""
channel = self._find_channel(log_name, ChannelNamespace.USER)
if channel is None:
raise ChannelDoesNotExist(self.id, log_name)
self._backend.reset_channel(channel.id)
def get_parameters(self):
"""Retrieve parameters for this experiment.
Returns:
:obj:`dict` - dictionary mapping a parameter name to value.
Examples:
Assuming that `experiment` is an instance of :class:`~neptune.experiments.Experiment`.
.. code:: python3
exp_params = experiment.get_parameters()
"""
experiment = self._backend.get_experiment(self.internal_id)
return dict((p.name, self._convert_parameter_value(p.value, p.parameterType)) for p in experiment.parameters)
def get_properties(self):
"""Retrieve User-defined properties for this experiment.
Returns:
:obj:`dict` - dictionary mapping a property key to value.
Examples:
Assuming that `experiment` is an instance of :class:`~neptune.experiments.Experiment`.
.. code:: python3
exp_properties = experiment.get_properties()
"""
experiment = self._backend.get_experiment(self.internal_id)
return dict((p.key, p.value) for p in experiment.properties)
def set_property(self, key, value):
"""Set `key-value` pair as an experiment property.
If property with given ``key`` does not exist, it adds a new one.
Args:
key (:obj:`str`): Property key.
value (:obj:`obj`): New value of a property.
Examples:
Assuming that `experiment` is an instance of :class:`~neptune.experiments.Experiment`:
.. code:: python3
experiment.set_property('model', 'LightGBM')
experiment.set_property('magic-number', 7)
"""
properties = {p.key: p.value for p in self._backend.get_experiment(self.internal_id).properties}
properties[key] = str(value)
return self._backend.update_experiment(
experiment=self,
properties=properties
)
def remove_property(self, key):
"""Removes a property with given key.
Args:
key (single :obj:`str`):
Key of property to remove.
Examples:
Assuming that `experiment` is an instance of :class:`~neptune.experiments.Experiment`:
.. code:: python3
experiment.remove_property('host')
"""
properties = {p.key: p.value for p in self._backend.get_experiment(self.internal_id).properties}
del properties[key]
return self._backend.update_experiment(
experiment=self,
properties=properties
)
def get_hardware_utilization(self):
"""Retrieve GPU, CPU and memory utilization data.
Get hardware utilization metrics for entire experiment as a single
`pandas.DataFrame <https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.html>`_
object. The returned DataFrame has the following columns (assuming a single GPU with index 0):
* `x_ram` - time (in milliseconds) from the experiment start,
* `y_ram` - memory usage in GB,
* `x_cpu` - time (in milliseconds) from the experiment start,
* `y_cpu` - CPU utilization percentage (0-100),
* `x_gpu_util_0` - time (in milliseconds) from the experiment start,
* `y_gpu_util_0` - GPU utilization percentage (0-100),
* `x_gpu_mem_0` - time (in milliseconds) from the experiment start,
* `y_gpu_mem_0` - GPU memory usage in GB.
| If more GPUs are available they have their separate columns with appropriate indices (0, 1, 2, ...),
for example: `x_gpu_util_1`, `y_gpu_util_1`.
| The returned DataFrame may contain ``NaN`` s if one of the metrics has more values than others.
Returns:
:obj:`pandas.DataFrame` - DataFrame containing the hardware utilization metrics.
Examples:
The following values denote that after 3 seconds, the experiment used 16.7 GB of RAM
* `x_ram` = 3000
* `y_ram` = 16.7
Assuming that `experiment` is an instance of :class:`~neptune.experiments.Experiment`:
.. code:: python3
hardware_df = experiment.get_hardware_utilization()
"""
metrics_csv = self._backend.get_metrics_csv(self)
try:
return pd.read_csv(metrics_csv)
except EmptyDataError:
return pd.DataFrame()
def get_numeric_channels_values(self, *channel_names):
"""Retrieve values of specified metrics (numeric logs).
The returned
`pandas.DataFrame <https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.html>`_
contains 1 additional column `x` along with the requested metrics.
Args:
*channel_names (one or more :obj:`str`): comma-separated metric names.
Returns:
:obj:`pandas.DataFrame` - DataFrame containing values for the requested metrics.
| The returned DataFrame may contain ``NaN`` s if one of the metrics has more values than others.
Example:
Invoking ``get_numeric_channels_values('loss', 'auc')`` returns DataFrame with columns
`x`, `loss`, `auc`.
Assuming that `experiment` is an instance of :class:`~neptune.experiments.Experiment`:
.. code:: python3
batch_channels = experiment.get_numeric_channels_values('batch-1-loss', 'batch-2-metric')
epoch_channels = experiment.get_numeric_channels_values('epoch-1-loss', 'epoch-2-metric')
Note:
It's a good idea to get metrics with a common temporal pattern (like iteration or batch/epoch number).
Thanks to this, each row of the returned DataFrame has metrics from the same moment in the experiment.
For example, combine epoch metrics to one DataFrame and batch metrics to the other.
"""
channels_data = {}
channels_by_name = self.get_channels()
for channel_name in channel_names:
channel_id = channels_by_name[channel_name].id
try:
channels_data[channel_name] = pd.read_csv(
self._backend.get_channel_points_csv(self, channel_id),
header=None,
names=['x_{}'.format(channel_name), 'y_{}'.format(channel_name)],
dtype=float
)
except EmptyDataError:
channels_data[channel_name] = pd.DataFrame(
columns=['x_{}'.format(channel_name), 'y_{}'.format(channel_name)],
dtype=float
)
return align_channels_on_x(pd.concat(channels_data.values(), axis=1, sort=False))
def _start(self,
upload_source_entries=None,
abort_callback=None,
logger=None,
upload_stdout=True,
upload_stderr=True,
send_hardware_metrics=True,
run_monitoring_thread=True,
handle_uncaught_exceptions=True):
upload_to_storage(upload_entries=upload_source_entries,
upload_api_fun=self._backend.upload_experiment_source,
upload_tar_api_fun=self._backend.extract_experiment_source,
experiment=self)
self._execution_context.start(
abort_callback=abort_callback,
logger=logger,
upload_stdout=upload_stdout,
upload_stderr=upload_stderr,
send_hardware_metrics=send_hardware_metrics,
run_monitoring_thread=run_monitoring_thread,
handle_uncaught_exceptions=handle_uncaught_exceptions
)
def stop(self, exc_tb=None):
"""Marks experiment as finished (succeeded or failed).
Args:
exc_tb (:obj:`str`, optional, default is ``None``): Additional traceback information
to be stored in experiment details in case of failure (stacktrace, etc).
If this argument is ``None`` the experiment will be marked as succeeded.
Otherwise, experiment will be marked as failed.
Examples:
Assuming that `experiment` is an instance of :class:`~neptune.experiments.Experiment`:
.. code:: python3
# Marks experiment as succeeded
experiment.stop()
# Assuming 'ex' is some exception,
# it marks experiment as failed with exception info in experiment details.
experiment.stop(str(ex))
"""
self._channels_values_sender.join()
try:
if exc_tb is None:
self._backend.mark_succeeded(self)
else:
self._backend.mark_failed(self, exc_tb)
except ExperimentAlreadyFinished:
pass
self._execution_context.stop()
# pylint: disable=protected-access
self._project._remove_stopped_experiment(self)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
if exc_tb is None:
self.stop()
else:
self.stop("\n".join(traceback.format_tb(exc_tb)) + "\n" + repr(exc_val))
def __str__(self):
return 'Experiment({})'.format(self.id)
def __repr__(self):
return str(self)
def __eq__(self, o):
# pylint: disable=protected-access
return self._id == o._id and self._internal_id == o._internal_id and self._project == o._project
def __ne__(self, o):
return not self.__eq__(o)
@staticmethod
def _convert_parameter_value(value, parameter_type):
if parameter_type == 'double':
return float(value)
else:
return value
@staticmethod
def _get_valid_x_y(x, y):
"""
The goal of this function is to allow the user to call experiment.log_* with any of:
- single parameter treated as y value
- both parameters (named/unnamed)
- single named y parameter
If intended X-coordinate is provided, it is validated to be a float value
"""
if x is None and y is None:
raise NoChannelValue()
if x is None and y is not None:
return None, y
if x is not None and y is None:
return None, x
if x is not None and y is not None:
if not is_float(x):
raise InvalidChannelValue(expected_type='float', actual_type=type(x).__name__)
return x, y
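# Illustrative mapping of how the coordinate resolution above behaves
# (sketch, not part of the original module):
#
#   _get_valid_x_y(0.5, None)  -> (None, 0.5)   # single positional value treated as y
#   _get_valid_x_y(None, 0.5)  -> (None, 0.5)   # named y only
#   _get_valid_x_y(3, 0.5)     -> (3, 0.5)      # explicit x and y
#   _get_valid_x_y(None, None) -> raises NoChannelValue
#   _get_valid_x_y('a', 0.5)   -> raises InvalidChannelValue (x must be a float)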
def _send_channels_values(self, channels_with_values):
self._backend.send_channels_values(self, channels_with_values)
def _get_channels(self, channels_names_with_types):
existing_channels = self.get_channels()
channels_by_name = {}
for (channel_name, channel_type) in channels_names_with_types:
channel = existing_channels.get(channel_name, None)
if channel is None:
channel = self._create_channel(channel_name, channel_type)
channels_by_name[channel.name] = channel
return channels_by_name
def _get_channel(self, channel_name, channel_type, channel_namespace=ChannelNamespace.USER):
channel = self._find_channel(channel_name, channel_namespace)
if channel is None:
channel = self._create_channel(channel_name, channel_type, channel_namespace)
return channel
def _find_channel(self, channel_name, channel_namespace):
if channel_namespace == ChannelNamespace.USER:
return self.get_channels().get(channel_name, None)
elif channel_namespace == ChannelNamespace.SYSTEM:
return self._get_system_channels().get(channel_name, None)
else:
raise RuntimeError("Unknown channel namespace {}".format(channel_namespace))
def _create_channel(self, channel_name, channel_type, channel_namespace=ChannelNamespace.USER):
if channel_namespace == ChannelNamespace.USER:
return self._backend.create_channel(self, channel_name, channel_type)
elif channel_namespace == ChannelNamespace.SYSTEM:
return self._backend.create_system_channel(self, channel_name, channel_type)
else:
raise RuntimeError("Unknown channel namespace {}".format(channel_namespace))
| 39.79636
| 120
| 0.607795
|
19cc84f7bdff709ce60e418b0bb642225b767b69
| 6,408
|
py
|
Python
|
megnet/utils/descriptor.py
|
dgaines2/megnet
|
b2fd0903c743237646a1f5a9cfafc9614da182ed
|
[
"BSD-3-Clause"
] | 1
|
2021-07-29T13:44:07.000Z
|
2021-07-29T13:44:07.000Z
|
megnet/utils/descriptor.py
|
dgaines2/megnet
|
b2fd0903c743237646a1f5a9cfafc9614da182ed
|
[
"BSD-3-Clause"
] | 47
|
2021-08-16T13:24:24.000Z
|
2022-03-30T13:19:20.000Z
|
megnet/utils/descriptor.py
|
a-ws-m/megnet
|
25893c6b2b5d842d7662e6baf0c4f87bee94c22f
|
[
"BSD-3-Clause"
] | null | null | null |
"""
This module implements atom/bond/structure-wise descriptor calculated from
pretrained megnet model
"""
import os
from typing import Union, Dict
import numpy as np
from tensorflow.keras.models import Model
from megnet.models import MEGNetModel, GraphModel
from megnet.utils.typing import StructureOrMolecule
DEFAULT_MODEL = os.path.join(os.path.dirname(__file__), "../../mvl_models/mp-2019.4.1/formation_energy.hdf5")
class MEGNetDescriptor:
"""
MEGNet descriptors. This class takes a trained model and
then computes the intermediate outputs as structure features
"""
def __init__(self, model_name: Union[str, GraphModel, MEGNetModel] = DEFAULT_MODEL, use_cache: bool = True):
"""
Args:
model_name (str or MEGNetModel): trained model. If it is
str, then only models in mvl_models are used.
use_cache (bool): whether to use cache for structure
graph calculations
"""
if isinstance(model_name, str):
model = MEGNetModel.from_file(model_name)
elif isinstance(model_name, GraphModel):
model = model_name
else:
raise ValueError("model_name only support str " "or GraphModel object")
layers = model.layers
important_prefix = ["meg", "set", "concatenate"]
all_names = [i.name for i in layers if any(i.name.startswith(j) for j in important_prefix)]
if any(i.startswith("megnet") for i in all_names):
self.version = "v2"
else:
self.version = "v1"
valid_outputs = [i.output for i in layers if any(i.name.startswith(j) for j in important_prefix)]
outputs = []
valid_names = []
for i, j in zip(all_names, valid_outputs):
if isinstance(j, list):
for k, l in enumerate(j):
valid_names.append(i + "_%d" % k)
outputs.append(l)
else:
valid_names.append(i)
outputs.append(j)
full_model = Model(inputs=model.inputs, outputs=outputs)
model.model = full_model
self.model = model
self.valid_names = valid_names
self._cache: Dict[str, float] = {}
self.use_cache = use_cache
def _predict_structure(self, structure: StructureOrMolecule) -> np.ndarray:
graph = self.model.graph_converter.convert(structure)
inp = self.model.graph_converter.graph_to_input(graph)
return self.model.predict(inp)
def _predict_feature(self, structure: StructureOrMolecule) -> np.ndarray:
if not self.use_cache:
return self._predict_structure(structure)
s = str(structure)
if s in self._cache:
return self._cache[s]
result = self._predict_structure(structure)
self._cache[s] = result
return result
def _get_features(self, structure: StructureOrMolecule, prefix: str, level: int, index: int = None) -> np.ndarray:
name = prefix
if level is not None:
name = prefix + "_%d" % level
if index is not None:
name += "_%d" % index
if name not in self.valid_names:
raise ValueError("%s not in original megnet model" % name)
ind = self.valid_names.index(name)
out_all = self._predict_feature(structure)
return out_all[ind][0]
def _get_updated_prefix_level(self, prefix: str, level: int):
mapping = {
"meg_net_layer": ["megnet", level - 1],
"set2_set": ["set2set_atom" if level == 1 else "set2set_bond", None],
"concatenate": ["concatenate", None],
}
if self.version == "v2":
return mapping[prefix][0], mapping[prefix][1] # type: ignore
return prefix, level
def get_atom_features(self, structure: StructureOrMolecule, level: int = 3) -> np.ndarray:
"""
Get megnet atom features from structure
Args:
structure: pymatgen structure or molecule
level: int, indicating the block number of megnet, starting
from 1
Returns:
nxm atomic feature matrix
"""
prefix, level = self._get_updated_prefix_level("meg_net_layer", level)
return self._get_features(structure, prefix=prefix, level=level, index=0)
def get_bond_features(self, structure: StructureOrMolecule, level: int = 3) -> np.ndarray:
"""
Get bond features at megnet block level
Args:
structure: pymatgen structure
level: int
Returns:
n_bond x m bond feature matrix
"""
prefix, level = self._get_updated_prefix_level("meg_net_layer", level)
return self._get_features(structure, prefix=prefix, level=level, index=1)
def get_global_features(self, structure: StructureOrMolecule, level: int = 2) -> np.ndarray:
"""
Get state features at megnet block level
Args:
structure: pymatgen structure or molecule
level: int
Returns:
1 x m_g global feature vector
"""
prefix, level = self._get_updated_prefix_level("meg_net_layer", level)
return self._get_features(structure, prefix=prefix, level=level, index=2)
def get_set2set(self, structure: StructureOrMolecule, ftype: str = "atom") -> np.ndarray:
"""
Get set2set output as features
Args:
structure (StructureOrMolecule): pymatgen structure
or molecule
ftype (str): atom or bond
Returns:
feature matrix, each row is a vector for an atom
or bond
"""
mapping = {"atom": 1, "bond": 2}
prefix, level = self._get_updated_prefix_level("set2_set", level=mapping[ftype])
return self._get_features(structure, prefix=prefix, level=level)
def get_structure_features(self, structure: StructureOrMolecule) -> np.ndarray:
"""
Get structure level feature vector
Args:
structure (StructureOrMolecule): pymatgen structure
or molecule
Returns:
one feature vector for the structure
"""
prefix, level = self._get_updated_prefix_level("concatenate", level=1)
return self._get_features(structure, prefix=prefix, level=level)
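# Minimal usage sketch (not part of the original module), assuming the default
# pretrained formation-energy model file is available on disk and `structure`
# is a pymatgen Structure:
#
#   from megnet.utils.descriptor import MEGNetDescriptor
#   desc = MEGNetDescriptor()                                  # loads DEFAULT_MODEL
#   atom_feats = desc.get_atom_features(structure, level=3)    # n_atoms x m matrix
#   struct_vec = desc.get_structure_features(structure)        # one vector per structure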
| 35.208791
| 118
| 0.618602
|
b68b9a1569d6f0883b58a1cdf73ef5c22d56a813
| 2,445
|
py
|
Python
|
ml_service/pipelines/run_train_pipeline.py
|
hmcdowelle/MLOps
|
927a412ae1b0d4412811a06df2cc4f97198ccd13
|
[
"MIT"
] | 791
|
2019-05-07T06:45:51.000Z
|
2022-03-31T01:52:05.000Z
|
ml_service/pipelines/run_train_pipeline.py
|
hmcdowelle/MLOps
|
927a412ae1b0d4412811a06df2cc4f97198ccd13
|
[
"MIT"
] | 254
|
2019-05-08T01:26:49.000Z
|
2022-03-25T01:51:30.000Z
|
ml_service/pipelines/run_train_pipeline.py
|
hmcdowelle/MLOps
|
927a412ae1b0d4412811a06df2cc4f97198ccd13
|
[
"MIT"
] | 902
|
2019-05-06T21:24:07.000Z
|
2022-03-31T06:35:12.000Z
|
from azureml.pipeline.core import PublishedPipeline
from azureml.core import Experiment, Workspace
import argparse
from ml_service.util.env_variables import Env
def main():
parser = argparse.ArgumentParser("register")
parser.add_argument(
"--output_pipeline_id_file",
type=str,
default="pipeline_id.txt",
help="Name of a file to write pipeline ID to"
)
parser.add_argument(
"--skip_train_execution",
action="store_true",
help=("Do not trigger the execution. "
"Use this in Azure DevOps when using a server job to trigger")
)
args = parser.parse_args()
e = Env()
aml_workspace = Workspace.get(
name=e.workspace_name,
subscription_id=e.subscription_id,
resource_group=e.resource_group
)
# Find the pipeline that was published by the specified build ID
pipelines = PublishedPipeline.list(aml_workspace)
matched_pipes = []
for p in pipelines:
if p.name == e.pipeline_name:
if p.version == e.build_id:
matched_pipes.append(p)
if(len(matched_pipes) > 1):
published_pipeline = None
raise Exception(f"Multiple active pipelines are published for build {e.build_id}.") # NOQA: E501
elif(len(matched_pipes) == 0):
published_pipeline = None
raise KeyError(f"Unable to find a published pipeline for this build {e.build_id}") # NOQA: E501
else:
published_pipeline = matched_pipes[0]
print("published pipeline id is", published_pipeline.id)
# Save the Pipeline ID for other AzDO jobs after script is complete
if args.output_pipeline_id_file is not None:
with open(args.output_pipeline_id_file, "w") as out_file:
out_file.write(published_pipeline.id)
if(args.skip_train_execution is False):
pipeline_parameters = {"model_name": e.model_name}
tags = {"BuildId": e.build_id}
if (e.build_uri is not None):
tags["BuildUri"] = e.build_uri
experiment = Experiment(
workspace=aml_workspace,
name=e.experiment_name)
run = experiment.submit(
published_pipeline,
tags=tags,
pipeline_parameters=pipeline_parameters)
print("Pipeline run initiated ", run.id)
if __name__ == "__main__":
main()
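# Example invocation (sketch based on the argparse options defined above; the
# file name is a placeholder):
#
#   python -m ml_service.pipelines.run_train_pipeline \
#       --output_pipeline_id_file pipeline_id.txt \
#       --skip_train_execution
#
# Workspace, pipeline name and build id are taken from
# ml_service.util.env_variables.Env (presumably populated from environment variables).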
| 33.040541
| 105
| 0.631902
|
c56a1d7d6cb4c82b112e1bb9855378209aceda20
| 302
|
py
|
Python
|
digest/management/commands/mark_all_cls_off.py
|
PURNA-ROCK/pythondigest
|
ba21758a25a47de19800b208c420f16d6688a16b
|
[
"MIT"
] | 124
|
2015-08-17T19:41:16.000Z
|
2022-01-12T00:25:52.000Z
|
digest/management/commands/mark_all_cls_off.py
|
PURNA-ROCK/pythondigest
|
ba21758a25a47de19800b208c420f16d6688a16b
|
[
"MIT"
] | 62
|
2015-08-17T02:13:20.000Z
|
2020-04-17T19:07:40.000Z
|
digest/management/commands/mark_all_cls_off.py
|
PURNA-ROCK/pythondigest
|
ba21758a25a47de19800b208c420f16d6688a16b
|
[
"MIT"
] | 73
|
2015-08-18T13:50:47.000Z
|
2021-09-27T14:09:47.000Z
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.management.base import BaseCommand
from digest.models import ItemClsCheck
class Command(BaseCommand):
help = 'Mark all ItemClsCheck records as unchecked (status=False)'
def handle(self, *args, **options):
ItemClsCheck.objects.all().update(status=False)
| 21.571429
| 55
| 0.725166
|
9e034e5c3bb7c3d714bcc33e8142d40f347187de
| 1,845
|
py
|
Python
|
FigureGeneration/makeFigure8.py
|
federatedcloud/Lake_Problem_DPS
|
07600c49ed543165ccdc642c1097b3bed87c28f0
|
[
"BSD-3-Clause"
] | null | null | null |
FigureGeneration/makeFigure8.py
|
federatedcloud/Lake_Problem_DPS
|
07600c49ed543165ccdc642c1097b3bed87c28f0
|
[
"BSD-3-Clause"
] | 3
|
2018-10-03T21:12:42.000Z
|
2019-07-08T21:32:43.000Z
|
FigureGeneration/makeFigure8.py
|
federatedcloud/Lake_Problem_DPS
|
07600c49ed543165ccdc642c1097b3bed87c28f0
|
[
"BSD-3-Clause"
] | 2
|
2020-06-29T17:30:42.000Z
|
2020-06-30T22:01:49.000Z
|
import numpy as np
import matplotlib.pyplot as plt
def makeFigure8():
IT = 100*np.loadtxt('./../Re-evaluation/ITrobustness.txt',delimiter=' ')
DPS = 100*np.loadtxt('./../Re-evaluation/DPSrobustness.txt',delimiter=' ')
titles = ['a) Economic Benefits > 0.2','b) Reliability > 95%','c) Economic Benefits > 0.2 & Reliability > 95%']
p1 = plt.Rectangle((0, 0), 1, 1, fc='#08519c', edgecolor='none') # DPS color
p2 = plt.Rectangle((0, 0), 1, 1, fc='#a50f15', edgecolor='none') # intertemporal color
multiplier = [0.06, 0.03, 0.0]
fig = plt.figure()
for i in range(len(titles)):
ax = fig.add_subplot(3,1,i+1)
ax.plot(range(np.shape(DPS)[0]+1),np.append(np.sort(DPS[:,i])[::-1],0),color='#08519c', linewidth=2)
ax.plot(range(np.shape(IT)[0]+1),np.append(np.sort(IT[:,i])[::-1],0),color='#a50f15', linewidth=2)
ax.fill_between(range(np.shape(DPS)[0]+1),np.append(np.sort(DPS[:,i])[::-1],0),color='#08519c')
ax.fill_between(range(np.shape(IT)[0]+1),np.append(np.sort(IT[:,i])[::-1],0),color='#a50f15')
ax.tick_params(axis='both',labelsize=14)
ax.set_xlim([0,np.shape(DPS)[0]+1])
ax.set_ylim([0,100])
ax.set_title(titles[i],fontsize=16,loc='left')
box = ax.get_position()
ax.set_position([box.x0,box.y0+box.height*multiplier[i], box.width, box.height*0.97])
if i == 2:
ax.set_xlabel('Solution # (sorted by rank)',fontsize=16)
fig.text(0.02, 0.5, 'Percent of Sampled SOWs in which Criteria are Met', va='center', rotation='vertical',fontsize=14)
plt.figlegend([p1,p2],['DPS','Intertemporal'], loc='upper center', ncol=2)
fig.set_size_inches([6.1625, 12.35])
fig.savefig('Figure8.pdf')
fig.clf()
return None
makeFigure8()
| 48.552632
| 123
| 0.592954
|
0a1874a2e1a5b3278153f8dba2acc3ee3c56bbbc
| 932
|
py
|
Python
|
tests/test_classify.py
|
Sunkist-Cherry/Spam-Filter
|
8246824e7b50b84be6697bb4cc2a6381ddcd0ca9
|
[
"MIT"
] | 433
|
2019-07-23T06:51:05.000Z
|
2022-03-29T03:43:49.000Z
|
tests/test_classify.py
|
Sunkist-Cherry/Spam-Filter
|
8246824e7b50b84be6697bb4cc2a6381ddcd0ca9
|
[
"MIT"
] | 7
|
2019-09-06T09:34:02.000Z
|
2022-01-19T07:16:04.000Z
|
tests/test_classify.py
|
Sunkist-Cherry/Spam-Filter
|
8246824e7b50b84be6697bb4cc2a6381ddcd0ca9
|
[
"MIT"
] | 34
|
2019-08-27T09:50:29.000Z
|
2022-03-25T01:55:35.000Z
|
import os
import unittest
import cherry
from unittest import mock
from cherry import classify
from sklearn.exceptions import NotFittedError
class ClassifyTest(unittest.TestCase):
def setUp(self):
pass
# __init__()
@mock.patch('cherry.classifyer.Classify._classify')
@mock.patch('cherry.classifyer.Classify._load_cache')
def test_init(self, mock_load, mock_classify):
mock_load.return_value = ('foo', 'bar')
cherry.classifyer.Classify(model='random', text=['random text'])
mock_load.assert_called_once_with('random')
mock_classify.assert_called_once_with(['random text'])
# _load_cache()
@mock.patch('cherry.classifyer.Classify._classify')
@mock.patch('cherry.classifyer.load_cache')
def test_load_cache(self, mock_load, mock_classify):
res = cherry.classifyer.Classify(model='foo', text=['random text'])
mock_load.assert_not_called()
| 31.066667
| 75
| 0.717811
|
9b54a0cadf04ec19da55832b20293b2d87891e46
| 2,512
|
py
|
Python
|
examples/adxbuyer/v201502/basic_operations/get_third_party_redirect_ads.py
|
coxmediagroup/googleads-python-lib
|
f85d5d8ab771e93b03b616ef65e2d3082aeef484
|
[
"Apache-2.0"
] | 1
|
2015-08-12T14:47:40.000Z
|
2015-08-12T14:47:40.000Z
|
examples/adxbuyer/v201502/basic_operations/get_third_party_redirect_ads.py
|
coxmediagroup/googleads-python-lib
|
f85d5d8ab771e93b03b616ef65e2d3082aeef484
|
[
"Apache-2.0"
] | 1
|
2020-07-24T15:10:10.000Z
|
2020-07-24T15:10:10.000Z
|
examples/adxbuyer/v201502/basic_operations/get_third_party_redirect_ads.py
|
coxmediagroup/googleads-python-lib
|
f85d5d8ab771e93b03b616ef65e2d3082aeef484
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example gets all third party ads for a given ad group.
To add a third party ad, run add_third_party_redirect_ad.py.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
Tags: AdGroupAdService.get
"""
__author__ = 'api.kwinter@gmail.com (Kevin Winter)'
from googleads import adwords
PAGE_SIZE = 500
AD_GROUP_ID = 'INSERT_AD_GROUP_ID_HERE'
def main(client, ad_group_id):
# Initialize appropriate service.
ad_group_ad_service = client.GetService('AdGroupAdService', version='v201502')
# Construct selector and get all ads for a given ad group.
offset = 0
selector = {
'fields': ['Id', 'AdGroupId', 'Status'],
'predicates': [
{
'field': 'AdGroupId',
'operator': 'EQUALS',
'values': [ad_group_id]
},
{
'field': 'AdType',
'operator': 'EQUALS',
'values': ['THIRD_PARTY_REDIRECT_AD']
}
],
'paging': {
'startIndex': str(offset),
'numberResults': str(PAGE_SIZE)
}
}
more_pages = True
while more_pages:
page = ad_group_ad_service.get(selector)
# Display results.
if 'entries' in page:
for ad in page['entries']:
print ('Ad with id \'%s\', status \'%s\', and of type \'%s\' was found.'
% (ad['ad']['id'], ad['status'], ad['ad']['Ad.Type']))
else:
print 'No ads were found.'
offset += PAGE_SIZE
selector['paging']['startIndex'] = str(offset)
more_pages = offset < int(page['totalNumEntries'])
if __name__ == '__main__':
# Initialize client object.
adwords_client = adwords.AdWordsClient.LoadFromStorage()
main(adwords_client, AD_GROUP_ID)
| 29.904762
| 80
| 0.658041
|
65b6363837947ff3ae28563ca801c189d67e7f9b
| 5,652
|
py
|
Python
|
deep_staple/preprocessing/tools/VS_Seg/preprocessing/TCIA_data_convert_into_convenient_folder_structure.py
|
multimodallearning/deep_staple
|
a27ed9e214bbac96aeab122b05b59b0222cce5c5
|
[
"MIT"
] | null | null | null |
deep_staple/preprocessing/tools/VS_Seg/preprocessing/TCIA_data_convert_into_convenient_folder_structure.py
|
multimodallearning/deep_staple
|
a27ed9e214bbac96aeab122b05b59b0222cce5c5
|
[
"MIT"
] | null | null | null |
deep_staple/preprocessing/tools/VS_Seg/preprocessing/TCIA_data_convert_into_convenient_folder_structure.py
|
multimodallearning/deep_staple
|
a27ed9e214bbac96aeab122b05b59b0222cce5c5
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# coding: utf-8
import os
from glob import glob
from natsort import natsorted
import pydicom
import shutil
import re
import argparse
parser = argparse.ArgumentParser(description='Create a new folder that contains the whole TCIA dataset in a more convenient folder structure')
parser.add_argument('--input', type=str, help='(string) path to TCIA dataset, in "Descriptive Directory Name" format, for example /home/user/.../manifest-1614264588831/Vestibular-Schwannoma-SEG')
parser.add_argument('--output', type=str, help='(string) path to output folder')
args = parser.parse_args()
input_path = args.input
output_path = args.output
if not os.path.isdir(output_path):
os.makedirs(output_path, exist_ok=True)
cases = natsorted(glob(os.path.join(input_path, '*')))
print(cases)
for case in cases:
folders = glob(case+'/*/*')
MRs = []
MRs_paths = []
RTSTRUCTs = []
RTSTRUCTs_paths = []
RTPLANs = []
RTPLANs_paths = []
RTDOSEs = []
RTDOSEs_paths = []
for folder in folders:
first_file = glob(folder+"/*")[0]
dd = pydicom.read_file(first_file)
if dd['Modality'].value == 'MR':
MRs.append(dd)
MRs_paths.append(first_file)
elif dd['Modality'].value == 'RTSTRUCT':
RTSTRUCTs.append(dd)
RTSTRUCTs_paths.append(first_file)
elif dd['Modality'].value == 'RTPLAN':
RTPLANs.append(dd)
RTPLANs_paths.append(first_file)
elif dd['Modality'].value == 'RTDOSE':
RTDOSEs.append(dd)
RTDOSEs_paths.append(first_file)
assert(len(MRs) == len(RTSTRUCTs) == len(RTPLANs) == len(RTDOSEs)), f"Did not find all required files."
found = [False, False, False, False, False, False, False, False]
file_paths = [None] * 8
# sort for T1 or T2
for MR, path in zip(MRs, MRs_paths):
if "t1_" in MR['SeriesDescription'].value:
MR_T1 = MR
found[0] = True
file_paths[0] = path
elif "t2_" in MR['SeriesDescription'].value:
MR_T2 = MR
found[1] = True
file_paths[1] = path
else:
raise Exception
# assign RTSTRUCTs
for RTSTRUCT, path in zip(RTSTRUCTs, RTSTRUCTs_paths):
refUID = RTSTRUCT['ReferencedFrameOfReferenceSequence'][0]['RTReferencedStudySequence'][0]['RTReferencedSeriesSequence'][0]['SeriesInstanceUID'].value
MR_T1_UID = MR_T1['SeriesInstanceUID'].value
MR_T2_UID = MR_T2['SeriesInstanceUID'].value
if refUID == MR_T1_UID:
RTSTRUCT_T1 = RTSTRUCT
found[2] = True
file_paths[2] = path
elif refUID == MR_T2_UID:
RTSTRUCT_T2 = RTSTRUCT
found[3] = True
file_paths[3] = path
# assign RTPLANs
for RTPLAN, path in zip(RTPLANs, RTPLANs_paths):
refUID = RTPLAN['ReferencedStructureSetSequence'][0]['ReferencedSOPInstanceUID'].value
RTSTRUCT_T1_UID = RTSTRUCT_T1['SOPInstanceUID'].value
RTSTRUCT_T2_UID = RTSTRUCT_T2['SOPInstanceUID'].value
if refUID == RTSTRUCT_T1_UID:
RTPLAN_T1 = RTPLAN
found[4] = True
file_paths[4] = path
elif refUID == RTSTRUCT_T2_UID:
RTPLAN_T2 = RTPLAN
found[5] = True
file_paths[5] = path
# assign RTDOSEs
for RTDOSE, path in zip(RTDOSEs, RTDOSEs_paths):
refUID = RTDOSE['ReferencedRTPlanSequence'][0]['ReferencedSOPInstanceUID'].value
RTPLAN_T1_UID = RTPLAN_T1['SOPInstanceUID'].value
RTPLAN_T2_UID = RTPLAN_T2['SOPInstanceUID'].value
if refUID == RTPLAN_T1_UID:
RTDOSE_T1 = RTDOSE
found[6] = True
file_paths[6] = path
elif refUID == RTPLAN_T2_UID:
RTDOSE_T2 = RTDOSE
found[7] = True
file_paths[7] = path
assert(all(found)), f"Not all required files found"
assert(all([p != None for p in file_paths]))
# write files into new folder structure
p = re.compile(r'VS-SEG-(\d+)')
case_idx = int(p.findall(case)[0])
print(case_idx)
new_T1_path = os.path.join(output_path, 'vs_gk_' + str(case_idx) +'_t1')
new_T2_path = os.path.join(output_path, 'vs_gk_' + str(case_idx) +'_t2')
if os.path.isdir(new_T1_path) and os.path.isdir(new_T2_path): continue
if not os.path.isdir(new_T1_path):
os.mkdir(new_T1_path)
if not os.path.isdir(new_T2_path):
os.mkdir(new_T2_path)
old_T1_folder = os.path.dirname(file_paths[0])
old_T2_folder = os.path.dirname(file_paths[1])
old_T1_files = natsorted(os.listdir(old_T1_folder))
old_T2_files = natsorted(os.listdir(old_T2_folder))
for file_idx, file in enumerate(old_T1_files):
new_file_path = os.path.join(new_T1_path, 'IMG'+ str(file_idx).zfill(10) +'.dcm')
shutil.copy(os.path.join(old_T1_folder, file), new_file_path)
for file_idx, file in enumerate(old_T2_files):
new_file_path = os.path.join(new_T2_path, 'IMG'+ str(file_idx).zfill(10) +'.dcm')
shutil.copy(os.path.join(old_T2_folder, file), new_file_path)
# copy RT files
shutil.copy(file_paths[2], os.path.join(new_T1_path, 'RTSS.dcm'))
shutil.copy(file_paths[3], os.path.join(new_T2_path, 'RTSS.dcm'))
shutil.copy(file_paths[4], os.path.join(new_T1_path, 'RTPLAN.dcm'))
shutil.copy(file_paths[5], os.path.join(new_T2_path, 'RTPLAN.dcm'))
shutil.copy(file_paths[6], os.path.join(new_T1_path, 'RTDOSE.dcm'))
shutil.copy(file_paths[7], os.path.join(new_T2_path, 'RTDOSE.dcm'))
print("Complete")
| 33.642857
| 195
| 0.637473
|
e01b867ecad0758ba56f6a7671654e5318fd07f5
| 754
|
py
|
Python
|
apps/inventory/admin.py
|
lsdlab/djshop_toturial
|
6d450225cc05e6a1ecd161de2b522e1af0b68cc0
|
[
"MIT"
] | null | null | null |
apps/inventory/admin.py
|
lsdlab/djshop_toturial
|
6d450225cc05e6a1ecd161de2b522e1af0b68cc0
|
[
"MIT"
] | 6
|
2020-06-07T15:18:58.000Z
|
2021-09-22T19:07:33.000Z
|
apps/inventory/admin.py
|
lsdlab/djshop_toturial
|
6d450225cc05e6a1ecd161de2b522e1af0b68cc0
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Stock, ReplenishLog
@admin.register(Stock)
class StockAdmin(admin.ModelAdmin):
list_display = (
'id',
'name',
'desc',
'nums',
'merchant',
'created_at',
'updated_at',
)
list_filter = ('created_at', 'updated_at')
search_fields = ('name',)
date_hierarchy = 'created_at'
@admin.register(ReplenishLog)
class ReplenishLogAdmin(admin.ModelAdmin):
list_display = (
'id',
'name',
'nums',
'note',
'user',
'merchant',
'created_at',
'updated_at',
)
list_filter = ('created_at', 'updated_at')
search_fields = ('name',)
date_hierarchy = 'created_at'
| 20.378378
| 46
| 0.566313
|
73b90773872f9e426fa66c8731a4cda42b3774fd
| 951
|
py
|
Python
|
GeeksforGeeks/Rotate by 90 degree.py
|
rayvantsahni/Competitive-Programming-Codes
|
39ba91b69ad8ce7dce554f7817c2f0d5545ef471
|
[
"MIT"
] | 1
|
2021-07-05T14:01:36.000Z
|
2021-07-05T14:01:36.000Z
|
GeeksforGeeks/Rotate by 90 degree.py
|
rayvantsahni/Competitive-Programming-and-Interview-Prep
|
39ba91b69ad8ce7dce554f7817c2f0d5545ef471
|
[
"MIT"
] | null | null | null |
GeeksforGeeks/Rotate by 90 degree.py
|
rayvantsahni/Competitive-Programming-and-Interview-Prep
|
39ba91b69ad8ce7dce554f7817c2f0d5545ef471
|
[
"MIT"
] | null | null | null |
#User function Template for python3
class Solution:
#Function to rotate matrix anticlockwise by 90 degrees.
def rotateby90(self,a, n):
# code here
for row in a:
row.reverse()
for i in range(n):
for j in range(i, n):
a[j][i], a[i][j] = a[i][j], a[j][i]
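# Worked example of the two steps above (illustrative comment, not part of
# the original solution): for n = 3 and
#   a = [[1, 2, 3],
#        [4, 5, 6],
#        [7, 8, 9]]
# reversing each row gives [[3, 2, 1], [6, 5, 4], [9, 8, 7]], and the in-place
# transpose then yields the 90-degree anticlockwise rotation
#   [[3, 6, 9],
#    [2, 5, 8],
#    [1, 4, 7]]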
#{
# Driver Code Starts
#Initial Template for Python 3
if __name__ == '__main__':
t = int (input ())
for _ in range (t):
n = int(input())
matrix = [[0 for j in range(n)] for i in range(n)]
line1 = [int(x) for x in input().strip().split()]
k=0
for i in range(n):
for j in range (n):
matrix[i][j]=line1[k]
k+=1
obj = Solution()
obj.rotateby90(matrix,n)
for i in range(n):
for j in range(n):
print(matrix[i][j],end=" ")
print()
# } Driver Code Ends
| 23.775
| 59
| 0.477392
|
8ac3ef2bc842bbda05dd93214a82e64e39fd52bc
| 468
|
py
|
Python
|
resolwe/flow/migrations/0026_data_scheduled.py
|
zagm/resolwe
|
da371a3ec0260a45ccab848704c6a339a0de79cc
|
[
"Apache-2.0"
] | null | null | null |
resolwe/flow/migrations/0026_data_scheduled.py
|
zagm/resolwe
|
da371a3ec0260a45ccab848704c6a339a0de79cc
|
[
"Apache-2.0"
] | null | null | null |
resolwe/flow/migrations/0026_data_scheduled.py
|
zagm/resolwe
|
da371a3ec0260a45ccab848704c6a339a0de79cc
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2019-02-07 02:58
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('flow', '0025_entity_type'),
]
operations = [
migrations.AddField(
model_name='data',
name='scheduled',
field=models.DateTimeField(blank=True, db_index=True, null=True),
),
]
| 22.285714
| 77
| 0.617521
|
7ef499d250916f8848465134f10fae51f3b9c691
| 5,965
|
py
|
Python
|
ImageCaptioning/tools/hdf5_manager.py
|
GT-AcerZhang/MyImageCaptioningModel
|
83ccda0fb2b542d5c6693270247435f68a242629
|
[
"Apache-2.0"
] | 2
|
2020-09-09T16:33:47.000Z
|
2021-02-27T17:58:52.000Z
|
ImageCaptioning/tools/hdf5_manager.py
|
GT-AcerZhang/MyImageCaptioningModel
|
83ccda0fb2b542d5c6693270247435f68a242629
|
[
"Apache-2.0"
] | 1
|
2020-06-12T12:11:59.000Z
|
2020-06-12T12:11:59.000Z
|
ImageCaptioning/tools/hdf5_manager.py
|
GT-AcerZhang/MyImageCaptioningModel
|
83ccda0fb2b542d5c6693270247435f68a242629
|
[
"Apache-2.0"
] | 1
|
2021-03-05T11:07:17.000Z
|
2021-03-05T11:07:17.000Z
|
import json
import numpy as np
import os
import re
import h5py
import config
_split_file_pattern = re.compile(r'.*?\.hdf5_[0-9]+$')
_db_name_filter = re.compile(r'(.*?)\.hdf5[_0-9]*$')
_db_index_filter = re.compile(r'.*?\.hdf5_([0-9]+)$')
_use_float16 = True
class Hdf5Manager:
def __init__(self):
self._db_files = []
self._db_lens = []
self._name2idx = None
def load_name2idx(self, file_path):
with open(file_path, 'r', encoding='utf-8') as f:
self._name2idx = json.load(f)
def load_database(self, db_path):
"""载入数据集
:param db_path: 数据集的目录(str)
"""
self.close()
if not isinstance(db_path, list):
db_path = [db_path]
dbs = []
for dbp in db_path:
files = os.listdir(dbp)
db = [name for name in files if _split_file_pattern.match(name) is not None]
if len(db) == 0:
raise Exception('No dataset found under {}'.format(db_path))
assert_name = _db_name_filter.findall(db[0])[0]
is_names_equal = map(lambda x: _db_name_filter.findall(x)[0] == assert_name, db)
if not all(is_names_equal):
raise Exception('Multiple datasets found under {}'.format(db_path))
dbs.extend(map(lambda x: os.path.join(dbp, x), db))
if len(dbs) > 1:
dbs.sort(key=lambda x: int(_db_index_filter.findall(x)[0]))
print('Loading dataset files:\n{}'.format('\n'.join(dbs)))
for path in dbs:
hdf5_file = h5py.File(path, mode='r')
self._db_files.append(hdf5_file)
self._db_lens.append(hdf5_file['data'].shape[0])
for i in range(1, len(self._db_lens)):
self._db_lens[i] += self._db_lens[i-1]
def _read(self, index):
for idx, file in enumerate(self._db_files):
if self._db_lens[idx] <= index:
continue
if idx != 0:
index -= self._db_lens[idx - 1]
return file['data'][index]
def read(self, name):
idx = self._name2idx[name]
return self._read(idx)
def close(self):
for f in self._db_files:
f.close()
self._db_files.clear()
self._db_lens.clear()
def __del__(self):
self.close()
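# Minimal usage sketch for Hdf5Manager (illustrative, not part of the original
# module); the file names below are placeholders:
#
#   manager = Hdf5Manager()
#   manager.load_name2idx('/data/name2idx.json')
#   manager.load_database('/data/hdf5_dir')     # directory containing the *.hdf5_N shards
#   feat = manager.read('some_image.jpg')       # array stored under that image name
#   manager.close()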
class DbBuilder:
def __init__(self, output_path, name, shape, max_size, db_length, dtype='float32'):
"""为3维数据创建数据集
:param max_size: 单个文件最多放多少张图片
:param db_length: 数据集总大小
"""
if isinstance(shape, list):
shape = tuple(shape)
# assert len(shape) == 3
self.output_path = output_path
self.name = name
self.shape = shape
self.max_length = max_size
self.db_length = db_length
self.dtype = dtype
self.file_index = 0
self.ptr_index = 0
self.cur_file = None
self.cur_db = None
def __enter__(self):
if self.max_length >= self.db_length:
p = os.path.join(self.output_path, self.name + '.hdf5')
else:
p = os.path.join(self.output_path, self.name + '.hdf5_{}'.format(self.file_index))
self.file_index += 1
self._create_new_file(p, min(self.max_length, self.db_length))
def _create_new_file(self, path, length):
if self.cur_file is not None:
self.cur_file.close()
self.cur_file = h5py.File(path, 'w')
# The gzip compression level does not affect decompression speed!
self.cur_db = self.cur_file.create_dataset(name='data',
shape=[length] + list(self.shape),
dtype=self.dtype,
chunks=tuple([1] + list(self.shape)),
compression='gzip',
compression_opts=config.build_dataset['compression_opts'])
def append(self, data):
assert np.shape(data) == self.shape
if self.ptr_index >= self.max_length:
p = os.path.join(self.output_path, self.name + '.hdf5_{}'.format(self.file_index))
self.file_index += 1
self.ptr_index = 0
self.db_length -= self.max_length
self._create_new_file(p, min(self.max_length, self.db_length))
self.cur_db[self.ptr_index] = data
self.ptr_index += 1
def __exit__(self, exc_type, exc_val, exc_tb):
if self.cur_file is not None:
self.cur_file.close()
def gen_hdf5():
import time
begin_time = time.time()
import reader
image_paths = config.build_dataset['ImagePaths']
output_path = config.build_dataset['OutputPath']
images = []
name2idx = {}
for image_path in image_paths:
names = os.listdir(image_path)
print(image_path, len(names))
names = set([name for name in names if name.endswith('.jpg')])
names = [(name, os.path.join(image_path, name)) for name in names]
images = images + names
builder = DbBuilder(output_path, 'aic_flk', shape=(3, 224, 224), max_size=30000, db_length=len(images),
dtype='float16' if _use_float16 else 'float32')
with builder:
for idx, (name, p) in enumerate(images):
name2idx[name] = idx
feat = reader.process_image(reader.read_image(p))
if _use_float16:
feat = feat.astype('float16')
builder.append(feat)
if idx % 10000 == 0:
print("生成hdf5文件 {}/{}".format(idx, len(images)))
json.dump(name2idx, open(os.path.join(output_path, 'name2idx.json'), 'w'))
end_time = time.time()
print("运行时间: {}s".format(end_time - begin_time))
if __name__ == '__main__':
gen_hdf5()
| 34.883041
| 110
| 0.543671
|
cc7aced628ea3b546a731726256ec6e038870717
| 5,680
|
py
|
Python
|
nova/tests/unit/test_test.py
|
nicholaskuechler/nova
|
ff412c3888b234eb123161cc4e6d0d0d69c0004e
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/unit/test_test.py
|
nicholaskuechler/nova
|
ff412c3888b234eb123161cc4e6d0d0d69c0004e
|
[
"Apache-2.0"
] | 5
|
2016-07-11T20:59:47.000Z
|
2020-07-28T09:56:35.000Z
|
nova/tests/unit/test_test.py
|
nicholaskuechler/nova
|
ff412c3888b234eb123161cc4e6d0d0d69c0004e
|
[
"Apache-2.0"
] | 3
|
2018-01-29T00:44:44.000Z
|
2020-07-24T01:19:20.000Z
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for the testing base code."""
from oslo_config import cfg
from oslo_log import log as logging
import oslo_messaging as messaging
import six
from nova import rpc
from nova import test
from nova.tests import fixtures
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
CONF.import_opt('use_local', 'nova.conductor.api', group='conductor')
class IsolationTestCase(test.TestCase):
"""Ensure that things are cleaned up after failed tests.
These tests don't really do much here, but if isolation fails a bunch
of other tests should fail.
"""
def test_service_isolation(self):
self.flags(use_local=True, group='conductor')
self.useFixture(fixtures.ServiceFixture('compute'))
def test_rpc_consumer_isolation(self):
class NeverCalled(object):
def __getattribute__(*args):
assert False, "I should never get called."
server = rpc.get_server(messaging.Target(topic='compute',
server=CONF.host),
endpoints=[NeverCalled()])
server.start()
class JsonTestCase(test.TestCase):
def test_json_equal(self):
expected = {
"employees": [
{"firstName": "Anna", "lastName": "Smith"},
{"firstName": "John", "lastName": "Doe"},
{"firstName": "Peter", "lastName": "Jones"}
],
"locations": set(['Boston', 'Mumbai', 'Beijing', 'Perth'])
}
observed = """{
"employees": [
{
"lastName": "Doe",
"firstName": "John"
},
{
"lastName": "Smith",
"firstName": "Anna"
},
{
"lastName": "Jones",
"firstName": "Peter"
}
],
"locations": [
"Perth",
"Boston",
"Mumbai",
"Beijing"
]
}"""
self.assertJsonEqual(expected, observed)
def test_json_equal_fail_on_length(self):
expected = {
'top': {
'l1': {
'l2': ['a', 'b', 'c']
}
}
}
observed = {
'top': {
'l1': {
'l2': ['c', 'a', 'b', 'd']
}
}
}
try:
self.assertJsonEqual(expected, observed)
except Exception as e:
# error reported is going to be a cryptic length failure
# on the level2 structure.
self.assertEqual(e.mismatch.describe(), "3 != 4")
self.assertIn(
"Matchee: {'top': {'l1': {'l2': ['c', 'a', 'b', 'd']}}}",
six.text_type(e))
self.assertIn(
"Matcher: {'top': {'l1': {'l2': ['a', 'b', 'c']}}}",
six.text_type(e))
else:
self.fail("This should have raised a mismatch exception")
def test_json_equal_fail_on_inner(self):
expected = {
'top': {
'l1': {
'l2': ['a', 'b', 'c']
}
}
}
observed = {
'top': {
'l1': {
'l2': ['c', 'a', 'd']
}
}
}
try:
self.assertJsonEqual(expected, observed)
except Exception as e:
# error reported is going to be a cryptic length failure
# on the level2 structure.
self.assertEqual(e.mismatch.describe(), "'b' != 'c'")
self.assertIn(
"Matchee: {'top': {'l1': {'l2': ['c', 'a', 'd']}}}",
six.text_type(e))
self.assertIn(
"Matcher: {'top': {'l1': {'l2': ['a', 'b', 'c']}}}",
six.text_type(e))
else:
self.fail("This should have raised a mismatch exception")
class BadLogTestCase(test.TestCase):
"""Make sure a mis-formatted debug log will get caught."""
def test_bad_debug_log(self):
self.assertRaises(KeyError,
            LOG.debug, "this is a misformatted %(log)s", {'nothing': 'nothing'})
class MatchTypeTestCase(test.TestCase):
def test_match_type_simple(self):
matcher = test.MatchType(dict)
self.assertEqual(matcher, {})
self.assertEqual(matcher, {"hello": "world"})
self.assertEqual(matcher, {"hello": ["world"]})
self.assertNotEqual(matcher, [])
self.assertNotEqual(matcher, [{"hello": "world"}])
self.assertNotEqual(matcher, 123)
self.assertNotEqual(matcher, "foo")
def test_match_type_object(self):
class Hello(object):
pass
class World(object):
pass
matcher = test.MatchType(Hello)
self.assertEqual(matcher, Hello())
self.assertNotEqual(matcher, World())
self.assertNotEqual(matcher, 123)
self.assertNotEqual(matcher, "foo")
| 30.537634
| 79
| 0.530458
|
1096c470d6778d02afbd59de1d2fa9b6aca73749
| 12,119
|
py
|
Python
|
train.py
|
izzykayu/fairseq-py
|
7e86e30cc5b97db30403b738de2dbbb55f06a92b
|
[
"BSD-3-Clause"
] | 3
|
2018-03-11T23:01:50.000Z
|
2021-07-17T05:58:03.000Z
|
train.py
|
sliedes/fairseq-py
|
b7993be19ce87f1d6e35aaa3d42aeceab04d6f94
|
[
"BSD-3-Clause"
] | null | null | null |
train.py
|
sliedes/fairseq-py
|
b7993be19ce87f1d6e35aaa3d42aeceab04d6f94
|
[
"BSD-3-Clause"
] | 1
|
2021-04-06T22:56:56.000Z
|
2021-04-06T22:56:56.000Z
|
#!/usr/bin/env python3
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the LICENSE file in
# the root directory of this source tree. An additional grant of patent rights
# can be found in the PATENTS file in the same directory.
#
import collections
import os
import torch
import math
from fairseq import data, options, utils
from fairseq.meters import AverageMeter, StopwatchMeter, TimeMeter
from fairseq.multiprocessing_trainer import MultiprocessingTrainer
def main():
parser = options.get_parser('Trainer')
dataset_args = options.add_dataset_args(parser)
dataset_args.add_argument('--max-tokens', default=6000, type=int, metavar='N',
help='maximum number of tokens in a batch')
dataset_args.add_argument('--max-sentences', type=int, metavar='N',
help='maximum number of sentences in a batch')
dataset_args.add_argument('--train-subset', default='train', metavar='SPLIT',
choices=['train', 'valid', 'test'],
help='data subset to use for training (train, valid, test)')
dataset_args.add_argument('--valid-subset', default='valid', metavar='SPLIT',
help='comma separated list of data subsets '
' to use for validation (train, valid, valid1,test, test1)')
dataset_args.add_argument('--max-sentences-valid', type=int, metavar='N',
help='maximum number of sentences in a validation batch')
options.add_optimization_args(parser)
options.add_checkpoint_args(parser)
options.add_model_args(parser)
args = utils.parse_args_and_arch(parser)
if args.no_progress_bar and args.log_format is None:
args.log_format = 'simple'
if args.max_sentences_valid is None:
args.max_sentences_valid = args.max_sentences
if not os.path.exists(args.save_dir):
os.makedirs(args.save_dir)
torch.manual_seed(args.seed)
# Load dataset
splits = ['train', 'valid']
if data.has_binary_files(args.data, splits):
dataset = data.load_dataset(args.data, splits, args.source_lang, args.target_lang)
else:
dataset = data.load_raw_text_dataset(args.data, splits, args.source_lang, args.target_lang)
if args.source_lang is None or args.target_lang is None:
# record inferred languages in args, so that it's saved in checkpoints
args.source_lang, args.target_lang = dataset.src, dataset.dst
if not torch.cuda.is_available():
raise NotImplementedError('Training on CPU is not supported')
args.num_gpus = torch.cuda.device_count()
print(args)
print('| [{}] dictionary: {} types'.format(dataset.src, len(dataset.src_dict)))
print('| [{}] dictionary: {} types'.format(dataset.dst, len(dataset.dst_dict)))
for split in splits:
print('| {} {} {} examples'.format(args.data, split, len(dataset.splits[split])))
print('| using {} GPUs (with max tokens per GPU = {} and max sentences per GPU = {})'.format(
args.num_gpus, args.max_tokens, args.max_sentences))
# Build model and criterion
model = utils.build_model(args, dataset.src_dict, dataset.dst_dict)
criterion = utils.build_criterion(args, dataset.src_dict, dataset.dst_dict)
print('| model {}, criterion {}'.format(args.arch, criterion.__class__.__name__))
print('| num. model params: {}'.format(sum(p.data.numel() for p in model.parameters())))
# The max number of positions can be different for train and valid
# e.g., RNNs may support more positions at test time than seen in training
max_positions_train = (
min(args.max_source_positions, model.max_encoder_positions()),
min(args.max_target_positions, model.max_decoder_positions())
)
max_positions_valid = (model.max_encoder_positions(), model.max_decoder_positions())
# Start multiprocessing
trainer = MultiprocessingTrainer(args, model, criterion)
# Load the latest checkpoint if one is available
checkpoint_path = os.path.join(args.save_dir, args.restore_file)
extra_state = trainer.load_checkpoint(checkpoint_path)
if extra_state is not None:
epoch = extra_state['epoch']
batch_offset = extra_state['batch_offset']
print('| loaded checkpoint {} (epoch {})'.format(checkpoint_path, epoch))
if batch_offset == 0:
epoch += 1
else:
epoch, batch_offset = 1, 0
# Train until the learning rate gets too small
val_loss = None
max_epoch = args.max_epoch or math.inf
lr = trainer.get_lr()
train_meter = StopwatchMeter()
train_meter.start()
while lr > args.min_lr and epoch <= max_epoch:
# train for one epoch
train(args, epoch, batch_offset, trainer, dataset, max_positions_train)
# evaluate on validate set
for k, subset in enumerate(args.valid_subset.split(',')):
val_loss = validate(args, epoch, trainer, dataset, max_positions_valid, subset)
if k == 0:
if not args.no_save:
# save checkpoint
save_checkpoint(trainer, args, epoch, 0, val_loss)
# only use first validation loss to update the learning schedule
lr = trainer.lr_step(val_loss, epoch)
epoch += 1
batch_offset = 0
train_meter.stop()
print('| done training in {:.1f} seconds'.format(train_meter.sum))
# Stop multiprocessing
trainer.stop()
def get_perplexity(loss):
try:
return round(math.pow(2, loss), 2)
except OverflowError:
return float('inf')
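# For instance, get_perplexity(1.0) == 2.0 and get_perplexity(10.0) == 1024.0;
# the 2**loss form assumes the criterion reports a base-2 cross-entropy.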
def train(args, epoch, batch_offset, trainer, dataset, max_positions):
"""Train the model for one epoch."""
seed = args.seed + epoch
torch.manual_seed(seed)
trainer.set_seed(seed)
itr = dataset.train_dataloader(
args.train_subset, num_workers=args.workers,
max_tokens=args.max_tokens, max_sentences=args.max_sentences,
max_positions=max_positions, seed=seed, epoch=epoch,
sample_without_replacement=args.sample_without_replacement,
sort_by_source_size=(epoch <= args.curriculum))
loss_meter = AverageMeter()
nll_loss_meter = AverageMeter()
bsz_meter = AverageMeter() # sentences per batch
wpb_meter = AverageMeter() # words per batch
wps_meter = TimeMeter() # words per second
clip_meter = AverageMeter() # % of updates clipped
extra_meters = collections.defaultdict(lambda: AverageMeter())
lr = trainer.get_lr()
with utils.build_progress_bar(args, itr, epoch) as t:
for i, sample in data.skip_group_enumerator(t, args.num_gpus, batch_offset):
loss_dict = trainer.train_step(sample)
loss = loss_dict['loss']
del loss_dict['loss'] # don't include in extra_meters or extra_postfix
ntokens = sum(s['ntokens'] for s in sample)
if 'nll_loss' in loss_dict:
nll_loss = loss_dict['nll_loss']
nll_loss_meter.update(nll_loss, ntokens)
nsentences = sum(s['net_input']['src_tokens'].size(0) for s in sample)
loss_meter.update(loss, nsentences if args.sentence_avg else ntokens)
bsz_meter.update(nsentences)
wpb_meter.update(ntokens)
wps_meter.update(ntokens)
clip_meter.update(1 if loss_dict['gnorm'] > args.clip_norm else 0)
extra_postfix = []
for k, v in loss_dict.items():
extra_meters[k].update(v)
extra_postfix.append((k, extra_meters[k].avg))
t.log(collections.OrderedDict([
('loss', loss_meter),
('wps', round(wps_meter.avg)),
('wpb', round(wpb_meter.avg)),
('bsz', round(bsz_meter.avg)),
('lr', lr),
('clip', '{:.0%}'.format(clip_meter.avg)),
] + extra_postfix))
if i == 0:
# ignore the first mini-batch in words-per-second calculation
wps_meter.reset()
if args.save_interval > 0 and (i + 1) % args.save_interval == 0:
save_checkpoint(trainer, args, epoch, i + 1)
t.print(collections.OrderedDict([
('train loss', round(loss_meter.avg, 2)),
('train ppl', get_perplexity(nll_loss_meter.avg
if nll_loss_meter.count > 0
else loss_meter.avg)),
('s/checkpoint', round(wps_meter.elapsed_time)),
('words/s', round(wps_meter.avg)),
('words/batch', round(wpb_meter.avg)),
('bsz', round(bsz_meter.avg)),
('lr', lr),
('clip', '{:3.0f}%'.format(clip_meter.avg * 100)),
] + [
(k, meter.avg)
for k, meter in extra_meters.items()
]))
def save_checkpoint(trainer, args, epoch, batch_offset, val_loss=None):
extra_state = {
'epoch': epoch,
'batch_offset': batch_offset,
'val_loss': val_loss,
}
if batch_offset == 0:
if not args.no_epoch_checkpoints:
epoch_filename = os.path.join(args.save_dir, 'checkpoint{}.pt'.format(epoch))
trainer.save_checkpoint(epoch_filename, extra_state)
assert val_loss is not None
if not hasattr(save_checkpoint, 'best') or val_loss < save_checkpoint.best:
save_checkpoint.best = val_loss
best_filename = os.path.join(args.save_dir, 'checkpoint_best.pt')
trainer.save_checkpoint(best_filename, extra_state)
elif not args.no_epoch_checkpoints:
epoch_filename = os.path.join(
args.save_dir, 'checkpoint{}_{}.pt'.format(epoch, batch_offset))
trainer.save_checkpoint(epoch_filename, extra_state)
last_filename = os.path.join(args.save_dir, 'checkpoint_last.pt')
trainer.save_checkpoint(last_filename, extra_state)
def validate(args, epoch, trainer, dataset, max_positions, subset):
"""Evaluate the model on the validation set and return the average loss."""
itr = dataset.eval_dataloader(
subset, max_tokens=args.max_tokens, max_sentences=args.max_sentences_valid,
max_positions=max_positions,
skip_invalid_size_inputs_valid_test=args.skip_invalid_size_inputs_valid_test,
descending=True, # largest batch first to warm the caching allocator
)
loss_meter = AverageMeter()
nll_loss_meter = AverageMeter()
extra_meters = collections.defaultdict(lambda: AverageMeter())
prefix = 'valid on \'{}\' subset'.format(subset)
with utils.build_progress_bar(args, itr, epoch, prefix) as t:
for _, sample in data.skip_group_enumerator(t, args.num_gpus):
loss_dict = trainer.valid_step(sample)
ntokens = sum(s['ntokens'] for s in sample)
loss = loss_dict['loss']
del loss_dict['loss'] # don't include in extra_meters or extra_postfix
if 'nll_loss' in loss_dict:
nll_loss = loss_dict['nll_loss']
nll_loss_meter.update(nll_loss, ntokens)
loss_meter.update(loss, ntokens)
extra_postfix = []
for k, v in loss_dict.items():
extra_meters[k].update(v)
extra_postfix.append((k, extra_meters[k].avg))
t.log(collections.OrderedDict([
('valid loss', round(loss_meter.avg, 2)),
] + extra_postfix))
t.print(collections.OrderedDict([
('valid loss', round(loss_meter.avg, 2)),
('valid ppl', get_perplexity(nll_loss_meter.avg
if nll_loss_meter.count > 0
else loss_meter.avg)),
] + [
(k, meter.avg)
for k, meter in extra_meters.items()
]))
    # return the average validation loss over the subset
return loss_meter.avg
if __name__ == '__main__':
main()
| 41.081356
| 99
| 0.63124
|
f3521936a16f8972d4aa3c9a46ed4b4d79d9e902
| 7,103
|
py
|
Python
|
predict.py
|
Artamus/fpointnet-tiny
|
61d210193c58ccfd9db5a3af99f12bf2b7093783
|
[
"MIT"
] | 1
|
2021-02-19T14:44:04.000Z
|
2021-02-19T14:44:04.000Z
|
predict.py
|
Artamus/fpointnet-tiny
|
61d210193c58ccfd9db5a3af99f12bf2b7093783
|
[
"MIT"
] | null | null | null |
predict.py
|
Artamus/fpointnet-tiny
|
61d210193c58ccfd9db5a3af99f12bf2b7093783
|
[
"MIT"
] | 1
|
2019-09-17T17:03:32.000Z
|
2019-09-17T17:03:32.000Z
|
import os
import argparse
import preprocessing
import time
import numpy as np
import tensorflow as tf
from fpointnet_tiny_functional import get_compiled_model
from scipy import stats
def read_raw_data(data_path, allowed_class, sample_limit=None):
data_filenames = sorted(os.listdir(data_path))
data_filenames = [filename for filename in data_filenames if filename.endswith('.npz')]
frustums_data = list()
kept_frustums = list()
num_samples = 0
for filename in data_filenames:
file_path = os.path.join(data_path, filename)
with np.load(file_path) as data:
class_name = data['class_name']
point_data = data['points']
if class_name != allowed_class:
continue
frustums_data.append(point_data)
kept_frustums.append(filename)
num_samples += 1
if sample_limit and num_samples >= sample_limit:
break
return frustums_data, kept_frustums
def sample_points(labelled_points, num_points, sample_at_least_once=False):
scene_points = np.array(labelled_points)
if sample_at_least_once:
if len(scene_points) > num_points:
mask = np.random.choice(len(scene_points), num_points, replace=False)
elif len(scene_points) == num_points:
mask = np.arange(len(scene_points))
np.random.shuffle(mask)
else:
mask = np.zeros(shape=num_points, dtype=np.int32)
mask[:len(scene_points)] = np.arange(len(scene_points), dtype=np.int32)
mask[len(scene_points):] = np.random.choice(len(scene_points), num_points - len(scene_points), replace=True)
np.random.shuffle(mask)
else:
mask = np.random.choice(len(scene_points), num_points, replace=True)
sampled_labelled_points = scene_points[mask]
return sampled_labelled_points, mask
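# Note: with sample_at_least_once=True every original point is guaranteed to
# appear in the mask at least once whenever the frustum has no more points than
# num_points; any shortfall is filled by resampling with replacement.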
def structure_data(scenes_labelled_points, num_points):
points = np.zeros(shape=(len(scenes_labelled_points), num_points, 3))
labels = np.zeros(shape=(len(scenes_labelled_points), num_points))
masks = np.zeros(shape=(len(scenes_labelled_points), num_points))
for i, labelled_points in enumerate(scenes_labelled_points):
sampled_labelled_points, mask = sample_points(labelled_points, num_points, True)
points[i] = sampled_labelled_points[:, :3]
labels[i] = sampled_labelled_points[:, 3]
masks[i] = mask
points = np.expand_dims(points, axis=2)
return points, labels, masks
def all_samples_softmax(x):
x_exp = np.exp(x)
probabilities = x_exp / x_exp.sum(axis=2)[:, :, None]
return np.argmax(probabilities, axis=2)
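# Assumed input shape: (num_frustums, num_points, num_classes). The softmax is
# taken over the class axis, so the returned labels of shape
# (num_frustums, num_points) equal an argmax over the raw logits as well.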
def match_predictions_points(frustums, predicted_labels, masks):
predicted_frustums = list()
for points, predictions, mask in zip(frustums, predicted_labels, masks):
points = np.array(points)
for point_index in range(len(points)):
points_matching_original = np.where(mask == point_index)[0]
if len(points_matching_original) == 0:
mode_label = 0
else:
mode_label = stats.mode(predictions[points_matching_original]).mode[0]
points[point_index, 3] = float(mode_label)
predicted_frustums.append(points)
return predicted_frustums
def save_predictions(output_dir, filenames, frustum_data):
for filename, data in zip(filenames, frustum_data):
output_file_path = os.path.join(output_dir, filename)
np.savez(output_file_path, points=data)
def calculate_accuracy(predictions, values):
return (predictions == values).mean()
def calculate_true_accuracy(predictions, values):
assert len(predictions) == len(values), 'Predictions and ground truth don\'t have the same length'
counts = np.zeros(shape=(len(predictions), 2))
for index in range(len(predictions)):
counts[index] = [(predictions[index][:, 3] == values[index][:, 3]).sum(), len(predictions[index])]
total_counts = counts.sum(axis=0)
return 1.0 * total_counts[0] / total_counts[1]
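# Hypothetical example: two frustums with 3/4 and 1/2 points correct give a
# point-weighted accuracy of (3 + 1) / (4 + 2) = 2/3, rather than the
# unweighted per-frustum mean of 0.625.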
def get_arguments():
parser = argparse.ArgumentParser(description='The program to predict from validation data.')
parser.add_argument(
'input', type=str,
help='Path to directory containing data to perform predictions on (XYZ points with label per point saved in the .npz format)'
)
parser.add_argument(
'output', type=str,
help='Directory to save output to, will be created if it does not exist'
)
parser.add_argument(
'--model', type=str, required=True,
help='Path to the model file or directory containing models (in case of >1 models they will be sorted alphabetically and last will be used)'
)
parser.add_argument(
'-np', '--num_points', type=int, default=512,
help='Number of points to sample from each frustum'
)
parser.add_argument(
'--class_name', default='person',
choices=['person', 'car'],
help='Class to use from the KITTI dataset'
)
parser.add_argument(
'--eval', action='store_true', default=False,
help='Perform evaluation of the predictions'
)
return parser.parse_args()
if __name__ == '__main__':
args = get_arguments()
input_dir = args.input
if not input_dir or not os.path.isdir(input_dir):
exit('Invalid input directory')
output_dir = args.output
os.makedirs(output_dir, exist_ok=True)
model_path = args.model
if os.path.isdir(model_path):
files = os.listdir(model_path)
files = sorted([filename for filename in files if filename.endswith('.h5') or filename.endswith('.hdf5')])
model_path = os.path.join(model_path, files[-1])
num_points = args.num_points
allowed_class = args.class_name
frustums_data, filenames = read_raw_data(input_dir, allowed_class)
processed_frustums_data = list()
for frustum in frustums_data:
processed_frustum = preprocessing.rotate_to_center(frustum)
processed_frustum = preprocessing.scale_standard(processed_frustum)
processed_frustums_data.append(processed_frustum)
data_x, data_y, masks = structure_data(processed_frustums_data, num_points)
model = get_compiled_model(num_points, 3e-4) # learning rate is just for reusing the model code
model.load_weights(model_path)
start_time = time.perf_counter()
prediction_logits = model.predict(data_x)
end_time = time.perf_counter()
print(f'Inference took {end_time - start_time} s')
predictions = all_samples_softmax(prediction_logits)
frustums_with_predicted_labels = match_predictions_points(frustums_data, predictions, masks)
save_predictions(output_dir, filenames, frustums_with_predicted_labels)
if not args.eval:
exit()
accuracy = calculate_accuracy(predictions, data_y)
print(f'Accuracy on structured points is {accuracy:.3f}')
true_accuracy = calculate_true_accuracy(frustums_with_predicted_labels, frustums_data)
print(f'Accuracy on raw points is {true_accuracy:.3f}')
| 32.884259
| 148
| 0.693651
|
04c17b1bdfc1d55d6054ac102d3abcb953c411e3
| 503
|
py
|
Python
|
Day1/hellopython.py
|
azeemchaudhrry/30DaysofPython
|
8aa80c81967d87e4bc70254a41517d0303ca0599
|
[
"MIT"
] | null | null | null |
Day1/hellopython.py
|
azeemchaudhrry/30DaysofPython
|
8aa80c81967d87e4bc70254a41517d0303ca0599
|
[
"MIT"
] | null | null | null |
Day1/hellopython.py
|
azeemchaudhrry/30DaysofPython
|
8aa80c81967d87e4bc70254a41517d0303ca0599
|
[
"MIT"
] | null | null | null |
# 30 days of python
# Day 1
print(2 + 3)
print(3 - 2)
print(-2 - 3)
print(2 * 3)
print(3 / 2)
print(3 % 2)
print(3 // 2)
print(3 ** 2)
print('Hafiz Muhammad')
print('Azeem')
print('Pakistan -> United Arab Emirates -> oo')
print('30 days python')
print(type(10))
print(type(9.8))
print(type(3.14))
print(type(4-4j))
print(type(['Azeem','Python','Dubai']))
print(type({0:'Zero', 1:'One', 2:'Two'}))
print(type((9.8,3.14,2.7)))
print(type('Hafiz Muhammad'))
print(type('Azeem'))
print(type('Pakistan'))
| 17.964286
| 47
| 0.624254
|
68c5f295645fea5bdbb143d324ce91fc6e9101b7
| 8,160
|
py
|
Python
|
caffe2/python/parallel_workers.py
|
KevinKecc/caffe2
|
a2b6c6e2f0686358a84277df65e9489fb7d9ddb2
|
[
"Apache-2.0"
] | 58
|
2019-01-03T02:20:41.000Z
|
2022-02-25T14:24:13.000Z
|
caffe2/python/parallel_workers.py
|
KevinKecc/caffe2
|
a2b6c6e2f0686358a84277df65e9489fb7d9ddb2
|
[
"Apache-2.0"
] | 27
|
2018-04-14T06:44:22.000Z
|
2018-08-01T18:02:39.000Z
|
caffe2/python/parallel_workers.py
|
KevinKecc/caffe2
|
a2b6c6e2f0686358a84277df65e9489fb7d9ddb2
|
[
"Apache-2.0"
] | 23
|
2018-04-13T10:47:31.000Z
|
2021-05-06T08:38:06.000Z
|
# Copyright (c) 2016-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
# @package parallel_workers
# Module caffe2.python.parallel_workers
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
'''
This module provides a python-land multithreaded mechanism for executing work.
Basic usage is as follows:
coordinator = parallel_workers.init_workers(
my_worker_fun,
worker_name="train"
)
...
coordinator.start()
First argument is the function to run in a loop on potentially multiple threads.
It has the call signature
worker_fun(worker_id)
Argument 'worker_name' is used to distinguish different workers,
such as workers processing train data or workers processing test data.
Optionally, one can define an "init function" that is called once before
threads start, and has call signature:
my_init_fun(worker_coordinator, global_coordinator)
Note that for data_parallel_models, init_workers will be called
for each GPU. Note that the 'coordinator' returned by the function is the
same each time.
'''
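# A minimal, hypothetical pair of functions matching the signatures described
# above (illustrative only; `my_queue` and `process` are assumed stand-ins):
#
#   def my_worker_fun(worker_id):
#       item = my_queue.get()
#       process(item)
#
#   def my_init_fun(worker_coordinator, global_coordinator):
#       my_queue.warm_up()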
import logging
import threading
import atexit
import time
import collections
import six
import traceback
from abc import ABCMeta, abstractmethod
log = logging.getLogger("parallel_workers")
log.setLevel(logging.INFO)
LOG_INT_SECS = 60
def init_workers(
worker_fun,
num_worker_threads=2,
worker_name="train",
init_fun=None,
external_loggers=None,
shutdown_fun=None,
):
global global_coordinator
metrics = Metrics(external_loggers)
# Create coordinator object
coordinator = WorkerCoordinator(
worker_name, init_fun, shutdown_fun=shutdown_fun)
# Launch fetch worker threads
worker_ids = [
global_coordinator.get_new_worker_id()
for i in range(num_worker_threads)
]
workers = [
threading.Thread(
target=run_worker,
name="parallel_workers worker id {}".format(worker_id),
args=[coordinator,
Worker(coordinator, worker_id, worker_fun, metrics)],
) for worker_id in worker_ids
]
coordinator._workers = workers
global_coordinator.add(coordinator)
return global_coordinator
class Metrics(object):
def __init__(self, external_loggers):
self._metrics = collections.defaultdict(lambda: 0)
self._external_loggers = external_loggers
def reset_metrics(self):
self._metrics = collections.defaultdict(lambda: 0)
def log_metrics(self):
if not self._external_loggers:
return
for logger in self._external_loggers:
try:
logger.log(self._metrics)
except Exception as e:
print("Failed to call ExternalLogger: {}".format(e))
def put_metric(self, key, value, count=True):
self._metrics[key] += value
if count:
count_key = '{}_count'.format(key)
self._metrics[count_key] += 1
@six.add_metaclass(ABCMeta)
class State(object):
@abstractmethod
def start(self):
pass
@abstractmethod
def stop(self):
pass
@abstractmethod
def cleanup(self):
pass
class WorkerCoordinator(object):
def __init__(self, worker_name, init_fun, state=None, shutdown_fun=None):
self._active = True
self._started = False
self._workers = []
self._worker_name = worker_name
self._init_fun = init_fun
self._state = state
self._shutdown_fun = shutdown_fun
def is_active(self):
return self._active
def init(self, global_coordinator):
if self._init_fun and not self._started:
self._init_fun(self, global_coordinator)
def _start(self):
if self._started:
return
self._active = True
self._started = True
if self._state:
self._state.start()
for w in self._workers:
w.daemon = True
w.start()
def _stop(self, reason=None):
self._active = False
if reason is not None:
log.error("Data input failed due to an error: {}".format(reason))
if self._shutdown_fun and self._started:
self._shutdown_fun()
if self._state:
self._state.stop()
self._started = False
def _wait_finish(self, cleanup=None):
print("Wait for workers to die: {}".format(self._worker_name))
for w in self._workers:
if w != threading.current_thread():
w.join(5.0) # don't wait forever, thread may be blocked in i/o
success = True
for w in self._workers:
if w.isAlive():
print("Worker {} failed to close while waiting".format(w))
success = False
# Release memory for the scratch blobs
if success and self._state:
self._state.cleanup()
print("All workers terminated: {}".format(success))
return success
class GlobalWorkerCoordinator(object):
def __init__(self):
self._coordinators = []
self._fetcher_id_seq = 0
self._worker_ids = []
self.register_shutdown_handler()
def add(self, coordinator):
self._coordinators.append(coordinator)
def get_new_worker_id(self):
worker_id = self._fetcher_id_seq
self._worker_ids.append(worker_id)
self._fetcher_id_seq += 1
return worker_id
def get_worker_ids(self):
return self._worker_ids
def start(self):
for c in self._coordinators:
c.init(self)
c._start()
def stop(self):
all_success = True
for c in self._coordinators:
c._stop()
for c in self._coordinators:
success = c._wait_finish()
all_success = all_success and success
self._coordinators = []
return all_success
def stop_coordinator(self, worker_name):
'''
Stop a specific coordinator
'''
for c in self._coordinators:
if c._worker_name == worker_name:
c._stop()
c._wait_finish()
self._coordinators = [
c for c in self._coordinators
if c._worker_name != worker_name
]
def register_shutdown_handler(self):
def cleanup():
self.stop()
atexit.register(cleanup)
class Worker(object):
def __init__(
self,
coordinator,
worker_id,
worker_fun=None,
metrics=None
):
self._coordinator = coordinator
self._worker_id = worker_id
self._worker_fun = worker_fun
self._metrics = metrics
def start(self):
self._start_time = time.time()
def run(self):
self._worker_fun(self._worker_id)
def handle_exception(self, e):
traceback.print_exc()
        logging.exception("Exception in worker: %s", e)
self._coordinator._stop("Exception in worker {}: {}".format(
self._worker_id, e
))
def finish(self):
self._metrics.put_metric(
'worker_time', time.time() - self._start_time)
self._metrics.log_metrics()
global_coordinator = GlobalWorkerCoordinator()
def run_worker(coordinator, worker):
while coordinator.is_active():
worker.start()
try:
worker.run()
except Exception as e:
worker.handle_exception(e)
finally:
worker.finish()
| 27.29097
| 80
| 0.636152
|
d92249e728b743bce9764f30eab79800d5ad0fae
| 1,941
|
py
|
Python
|
main.py
|
yunzhe99/Hadoop_Scheduling
|
938afd0fa21f5c6a845e4f0d0a44a3b0355cd63b
|
[
"MIT"
] | null | null | null |
main.py
|
yunzhe99/Hadoop_Scheduling
|
938afd0fa21f5c6a845e4f0d0a44a3b0355cd63b
|
[
"MIT"
] | null | null | null |
main.py
|
yunzhe99/Hadoop_Scheduling
|
938afd0fa21f5c6a845e4f0d0a44a3b0355cd63b
|
[
"MIT"
] | null | null | null |
from env import ResourceScheduler
from sample_fun import sol_init, sample
from tools import maxtf_job
import numpy as np
from SA import SAoptimizer
import joblib
rs = ResourceScheduler(taskType=1, caseID=2)
b=rs.max_k
block_size=rs.dataSize
s_list=rs.Sc
if rs.taskType==2:
s_t=rs.St
alpha=rs.alpha
block_location=rs.location
core_location=rs.core_location
job_schedule, job_order = sol_init(rs.numJob, len(rs.core_location), rs.dataSize, max(rs.jobBlock))
def init_f():
schedule, order, x=sample(job_schedule, job_order, len(rs.core_location), rs.numJob, max(rs.jobBlock), 1)
return x
def randf(x_old, job_schedule=job_schedule, job_order=job_order):
schedule, order, x_new=sample(job_schedule, job_order, rs.hostCore[0], rs.numJob, max(rs.jobBlock), 1)
job_schedule, job_order = schedule, order
return x_new
if __name__ == '__main__':
sa = SAoptimizer()
#print(maxtf_job(init_f()))
# b=rs.max_k
# block_size=rs.dataSize
# s_list=rs.Sc
# if rs.taskType==2:
# s_t=rs.St
# alpha=rs.alpha
# block_location=rs.location
# core_location=rs.core_location
x_best, y_best = sa.optimize(f = maxtf_job, ybound=(0, np.inf), initf=init_f, randf=randf,
t=1000, alpha=0.98, stop=0.01, iterPerT=10, l=1)
print(y_best)
print(x_best)
# joblib.dump((x_best, y_best))
# x, y = sa.optimize(f = maxtf_job, ybound=(0, np.inf), initf=init_f, randf=randf,
# t=1000, alpha=0.98, stop=0.01, iterPerT=1, l=1)
# print(y)
for i in range(1):
# job_schedule, job_order = sol_init(rs.numJob, len(rs.core_location), rs.dataSize, max(rs.jobBlock))
x, y = sa.optimize(f = maxtf_job, ybound=(0, np.inf), initf=init_f, randf=randf,
t=1000, alpha=0.98, stop=0.01, iterPerT=10, l=1)
print(y)
if y < y_best:
y_best=y
x_best=x
# print()
# print(y_best)
# print(x_best)
| 28.544118
| 109
| 0.66306
|
311abc1fc7388b2f9cd5fc18f98816c45bf4c455
| 1,120
|
py
|
Python
|
tests/iosxe/ogacl/configure/unconfigure_ipv6_object_group_service/test_api_unconfigure_ipv6_object_group_service.py
|
patrickboertje/genielibs
|
61c37aacf3dd0f499944555e4ff940f92f53dacb
|
[
"Apache-2.0"
] | 1
|
2022-01-16T10:00:24.000Z
|
2022-01-16T10:00:24.000Z
|
tests/iosxe/ogacl/configure/unconfigure_ipv6_object_group_service/test_api_unconfigure_ipv6_object_group_service.py
|
patrickboertje/genielibs
|
61c37aacf3dd0f499944555e4ff940f92f53dacb
|
[
"Apache-2.0"
] | null | null | null |
tests/iosxe/ogacl/configure/unconfigure_ipv6_object_group_service/test_api_unconfigure_ipv6_object_group_service.py
|
patrickboertje/genielibs
|
61c37aacf3dd0f499944555e4ff940f92f53dacb
|
[
"Apache-2.0"
] | null | null | null |
import unittest
from pyats.topology import loader
from genie.libs.sdk.apis.iosxe.ogacl.configure import unconfigure_ipv6_object_group_service
class TestUnconfigureIpv6ObjectGroupService(unittest.TestCase):
@classmethod
def setUpClass(self):
testbed = """
devices:
Intrepid-DUT-1:
connections:
defaults:
class: unicon.Unicon
a:
command: mock_device_cli --os iosxe --mock_data_dir mock_data --state connect
protocol: unknown
os: iosxe
platform: C9600
type: C9600
"""
self.testbed = loader.load(testbed)
self.device = self.testbed.devices['Intrepid-DUT-1']
self.device.connect(
learn_hostname=True,
init_config_commands=[],
init_exec_commands=[]
)
def test_unconfigure_ipv6_object_group_service(self):
result = unconfigure_ipv6_object_group_service(device=self.device, og_name='v6-serv-all')
expected_output = None
self.assertEqual(result, expected_output)
| 32
| 97
| 0.625893
|
0838311e26678251e7dd30ffbd30606878e06a84
| 2,920
|
py
|
Python
|
solace/settings.py
|
burhan/solace
|
40d2bc025ac3a78e67602f374c32355badafb4d2
|
[
"BSD-3-Clause"
] | null | null | null |
solace/settings.py
|
burhan/solace
|
40d2bc025ac3a78e67602f374c32355badafb4d2
|
[
"BSD-3-Clause"
] | null | null | null |
solace/settings.py
|
burhan/solace
|
40d2bc025ac3a78e67602f374c32355badafb4d2
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
solace.settings
~~~~~~~~~~~~~~~
This module just stores the solace settings.
:copyright: (c) 2010 by the Solace Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import with_statement
del with_statement
# propagate early. That way we can import "from solace import settings"
# when the settings is not yet set up. This is needed because during
# bootstrapping we're have carefully crafted circular dependencies between
# the settings and the internationalization support module.
import sys, solace
solace.settings = sys.modules['solace.settings']
del sys, solace
#: i18n support, leave in place for custom settings modules
from solace.i18n import lazy_gettext as _
def configure(**values):
    """Configuration shortcut."""
    d = globals()
    for key, value in values.iteritems():
        if key.startswith('_') or not key.isupper():
            raise TypeError('invalid configuration variable %r' % key)
        d[key] = value
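# Illustrative call (setting names assumed): configure(DATABASE_URI='sqlite://');
# any uppercase keyword not starting with an underscore is written into this
# module's globals.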
def revert_to_default():
"""Reverts the known settings to the defaults."""
from os.path import join, dirname
configure_from_file(join(dirname(__file__), 'default_settings.cfg'))
def autodiscover_settings():
"""Finds settings in the environment."""
import os
if 'SOLACE_SETTINGS_FILE' in os.environ:
configure_from_file(os.environ['SOLACE_SETTINGS_FILE'])
def configure_from_file(filename):
"""Configures from a file."""
d = globals()
ns = dict(d)
execfile(filename, ns)
for key, value in ns.iteritems():
if not key.startswith('_') and key.isupper():
d[key] = value
def describe_settings():
"""Describes the settings. Returns a list of
``(key, current_value, description)`` tuples.
"""
import re
from pprint import pformat
from os.path import join, dirname
assignment_re = re.compile(r'\s*([A-Z_][A-Z0-9_]*)\s*=')
# use items() here instead of iteritems so that if a different
# thread somehow fiddles with the globals, we don't break
items = dict((k, (pformat(v).decode('utf-8', 'replace'), u''))
for (k, v) in globals().items() if k.isupper())
with open(join(dirname(__file__), 'default_settings.cfg')) as f:
comment_buf = []
for line in f:
line = line.rstrip().decode('utf-8')
if line.startswith('#:'):
comment_buf.append(line[2:].lstrip())
else:
match = assignment_re.match(line)
if match is not None:
key = match.group(1)
tup = items.get(key)
if tup is not None and comment_buf:
items[key] = (tup[0], u'\n'.join(comment_buf))
del comment_buf[:]
return sorted([(k,) + v for k, v in items.items()])
revert_to_default()
autodiscover_settings()
| 32.087912
| 74
| 0.630137
|
322dda659558109f88d9fdc1b584c49ba748b072
| 722
|
py
|
Python
|
apps/medicamento/admin.py
|
alejandrobolivar/sist_inv_coesbicop
|
36a068f21adb28f1f711b540841786538dbf8411
|
[
"CC0-1.0"
] | null | null | null |
apps/medicamento/admin.py
|
alejandrobolivar/sist_inv_coesbicop
|
36a068f21adb28f1f711b540841786538dbf8411
|
[
"CC0-1.0"
] | null | null | null |
apps/medicamento/admin.py
|
alejandrobolivar/sist_inv_coesbicop
|
36a068f21adb28f1f711b540841786538dbf8411
|
[
"CC0-1.0"
] | null | null | null |
from django.contrib import admin
# Register your models here.
from apps.medicamento.models import Medicamento
#admin.site.register(Medicamento)
@admin.register(Medicamento)
class PostAdmin(admin.ModelAdmin):
list_display = ('cod_med', 'principio_activo_med', 'nombre_comercial_med', 'nombre_lab_med', 'grupo_med', 'subgrupo_med', 'fecha_vencimiento_med')
list_filter = ('cod_med', 'principio_activo_med', 'nombre_comercial_med', 'nombre_lab_med')
search_fields = ('cod_med', 'principio_activo_med')
prepopulated_fields = {'principio_activo_med': ('cod_med',)}
# raw_id_fields = ('nombre_comercial_med',)
date_hierarchy = 'fecha_vencimiento_med'
ordering = ('cod_med', 'nombre_comercial_med')
| 45.125
| 150
| 0.760388
|
c7398dfa47738ab48740491b48f0b4a71fa3332a
| 4,003
|
py
|
Python
|
adaptive/learner/sequence_learner.py
|
basnijholt-test/adaptive
|
80a91bb44bc800b95b552642e9776fc7fd4aecef
|
[
"BSD-3-Clause"
] | null | null | null |
adaptive/learner/sequence_learner.py
|
basnijholt-test/adaptive
|
80a91bb44bc800b95b552642e9776fc7fd4aecef
|
[
"BSD-3-Clause"
] | null | null | null |
adaptive/learner/sequence_learner.py
|
basnijholt-test/adaptive
|
80a91bb44bc800b95b552642e9776fc7fd4aecef
|
[
"BSD-3-Clause"
] | null | null | null |
from copy import copy
from sortedcontainers import SortedDict, SortedSet
from adaptive.learner.base_learner import BaseLearner
class _IgnoreFirstArgument:
"""Remove the first argument from the call signature.
The SequenceLearner's function receives a tuple ``(index, point)``
but the original function only takes ``point``.
This is the same as `lambda x: function(x[1])`, however, that is not
    picklable.
"""
def __init__(self, function):
self.function = function
def __call__(self, index_point, *args, **kwargs):
index, point = index_point
return self.function(point, *args, **kwargs)
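    # e.g. _IgnoreFirstArgument(f)((3, x)) is equivalent to f(x); the index 3 is
    # simply discarded.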
def __getstate__(self):
return self.function
def __setstate__(self, function):
self.__init__(function)
class SequenceLearner(BaseLearner):
r"""A learner that will learn a sequence. It simply returns
the points in the provided sequence when asked.
This is useful when your problem cannot be formulated in terms of
another adaptive learner, but you still want to use Adaptive's
routines to run, save, and plot.
Parameters
----------
function : callable
The function to learn. Must take a single element `sequence`.
sequence : sequence
The sequence to learn.
Attributes
----------
data : dict
The data as a mapping from "index of element in sequence" => value.
Notes
-----
From primitive tests, the `~adaptive.SequenceLearner` appears to have a
similar performance to `ipyparallel`\s ``load_balanced_view().map``. With
the added benefit of having results in the local kernel already.
"""
def __init__(self, function, sequence):
self._original_function = function
self.function = _IgnoreFirstArgument(function)
self._to_do_indices = SortedSet({i for i, _ in enumerate(sequence)})
self._ntotal = len(sequence)
self.sequence = copy(sequence)
self.data = SortedDict()
self.pending_points = set()
def ask(self, n, tell_pending=True):
indices = []
points = []
loss_improvements = []
for index in self._to_do_indices:
if len(points) >= n:
break
point = self.sequence[index]
indices.append(index)
points.append((index, point))
loss_improvements.append(1 / self._ntotal)
if tell_pending:
for i, p in zip(indices, points):
self.tell_pending((i, p))
return points, loss_improvements
def _get_data(self):
return self.data
def _set_data(self, data):
if data:
indices, values = zip(*data.items())
# the points aren't used by tell, so we can safely pass None
points = [(i, None) for i in indices]
self.tell_many(points, values)
def loss(self, real=True):
if not (self._to_do_indices or self.pending_points):
return 0
else:
npoints = self.npoints + (0 if real else len(self.pending_points))
return (self._ntotal - npoints) / self._ntotal
def remove_unfinished(self):
for i in self.pending_points:
self._to_do_indices.add(i)
self.pending_points = set()
def tell(self, point, value):
index, point = point
self.data[index] = value
self.pending_points.discard(index)
self._to_do_indices.discard(index)
def tell_pending(self, point):
index, point = point
self.pending_points.add(index)
self._to_do_indices.discard(index)
def done(self):
return not self._to_do_indices and not self.pending_points
def result(self):
"""Get the function values in the same order as ``sequence``."""
if not self.done():
raise Exception("Learner is not yet complete.")
return list(self.data.values())
@property
def npoints(self):
return len(self.data)
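# Hedged usage sketch (not part of the original module; the runner call below is
# an assumption):
#
#   learner = SequenceLearner(my_function, sequence=list(range(100)))
#   adaptive.runner.simple(learner, goal=lambda l: l.done())
#   values = learner.result()  # in the same order as `sequence`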
| 30.557252
| 78
| 0.629028
|
450723fc0136c243ab32fee2770ed050fc254f08
| 3,148
|
py
|
Python
|
py/src/ai/h2o/sparkling/ml/params/H2OGBMParams.py
|
salliewalecka/sparkling-water
|
497306fbc7f4f374fe367f1303289db13be4ec48
|
[
"Apache-2.0"
] | null | null | null |
py/src/ai/h2o/sparkling/ml/params/H2OGBMParams.py
|
salliewalecka/sparkling-water
|
497306fbc7f4f374fe367f1303289db13be4ec48
|
[
"Apache-2.0"
] | null | null | null |
py/src/ai/h2o/sparkling/ml/params/H2OGBMParams.py
|
salliewalecka/sparkling-water
|
497306fbc7f4f374fe367f1303289db13be4ec48
|
[
"Apache-2.0"
] | null | null | null |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pyspark.ml.param import *
from ai.h2o.sparkling.ml.params.H2OSharedTreeParams import H2OSharedTreeParams
from ai.h2o.sparkling.ml.params.H2OTypeConverters import H2OTypeConverters
from ai.h2o.sparkling.ml.params.HasMonotoneConstraints import HasMonotoneConstraints
from ai.h2o.sparkling.ml.params.HasQuantileAlpha import HasQuantileAlpha
class H2OGBMParams(H2OSharedTreeParams, HasMonotoneConstraints, HasQuantileAlpha):
##
# Param definitions
##
learnRate = Param(
Params._dummy(),
"learnRate",
"Learning rate (from 0.0 to 1.0)",
H2OTypeConverters.toFloat())
learnRateAnnealing = Param(
Params._dummy(),
"learnRateAnnealing",
"Scale the learning rate by this factor after each tree (e.g., 0.99 or 0.999)",
H2OTypeConverters.toFloat())
colSampleRate = Param(
Params._dummy(),
"colSampleRate",
"Column sample rate (from 0.0 to 1.0)",
H2OTypeConverters.toFloat())
maxAbsLeafnodePred = Param(
Params._dummy(),
"maxAbsLeafnodePred",
"Maximum absolute value of a leaf node prediction",
H2OTypeConverters.toFloat())
predNoiseBandwidth = Param(
Params._dummy(),
"predNoiseBandwidth",
"Bandwidth (sigma) of Gaussian multiplicative noise ~N(1,sigma) for tree node predictions",
H2OTypeConverters.toFloat())
##
# Getters
##
def getLearnRate(self):
return self.getOrDefault(self.learnRate)
def getLearnRateAnnealing(self):
return self.getOrDefault(self.learnRateAnnealing)
def getColSampleRate(self):
return self.getOrDefault(self.colSampleRate)
def getMaxAbsLeafnodePred(self):
return self.getOrDefault(self.maxAbsLeafnodePred)
def getPredNoiseBandwidth(self):
return self.getOrDefault(self.predNoiseBandwidth)
##
# Setters
##
def setLearnRate(self, value):
return self._set(learnRate=value)
def setLearnRateAnnealing(self, value):
return self._set(learnRateAnnealing=value)
def setColSampleRate(self, value):
return self._set(colSampleRate=value)
def setMaxAbsLeafnodePred(self, value):
return self._set(maxAbsLeafnodePred=value)
def setPredNoiseBandwidth(self, value):
return self._set(predNoiseBandwidth=value)
| 32.791667
| 99
| 0.711245
|
30c7ce6aa3f59f399672445360e7e1cc45a67021
| 2,136
|
py
|
Python
|
elifinokuzu/dictionary/models.py
|
lyk2018-python/elifin-okuzu
|
8eb54e47ba013e403b0f0c939587df5fe47c743a
|
[
"MIT"
] | 28
|
2018-07-22T16:55:26.000Z
|
2022-03-13T22:48:00.000Z
|
elifinokuzu/dictionary/models.py
|
iamnotagentleman/elifin-okuzu
|
8eb54e47ba013e403b0f0c939587df5fe47c743a
|
[
"MIT"
] | 49
|
2018-07-23T13:57:51.000Z
|
2022-01-13T08:38:08.000Z
|
elifinokuzu/dictionary/models.py
|
iamnotagentleman/elifin-okuzu
|
8eb54e47ba013e403b0f0c939587df5fe47c743a
|
[
"MIT"
] | 52
|
2018-07-22T13:14:25.000Z
|
2021-03-15T02:18:31.000Z
|
from django.db import models
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
LANGUAGE_CHOICES = (
('tr', _('Turkish')),
('ar', _('Arabic')),
('gr', _('Greek')),
('fr', _('French')),
('en', _('English')),
('de', _('German')),
('pl', _('Polish')),
('kur', _('Kurdish')),
('es', _('Spanish')),
#('lt', _('Latin')),
)
EDGE_TYPE_CHOICES = (
('derives_from', _('Derives from')),
('symbol_of', _('Symbol of')),
('compound_of', _('Compound of')),
)
class Node(models.Model):
"""
Node (düğüm)
    The most basic entity in the dictionary.
"""
user = models.ForeignKey(
settings.AUTH_USER_MODEL,
null=True,
on_delete=models.CASCADE,
)
name = models.CharField(max_length=255, unique=True)
language = models.CharField(
max_length=255,
choices=LANGUAGE_CHOICES
)
model_id = models.IntegerField()
def __str__(self):
return self.name
class Edge(models.Model):
"""
Holds the relationships between nodes.
"""
user = models.ForeignKey(
settings.AUTH_USER_MODEL,
null=True,
on_delete=models.CASCADE,
)
source = models.ForeignKey(
Node,
related_name='incoming',
on_delete=models.CASCADE,
)
destination = models.ForeignKey(
Node,
related_name='outgoing',
on_delete=models.CASCADE,
)
is_directed = models.BooleanField()
resource = models.CharField(
max_length=255,
blank=True,
null=True,
)
type_of_edge = models.CharField(
max_length=255,
choices=EDGE_TYPE_CHOICES
)
model_id = models.IntegerField()
def __str__(self):
if self.is_directed:
arrow = '---[%s]-->' % self.type_of_edge
else:
arrow = '<--[%s]-->' % self.type_of_edge
return '(%s:%s) %s (%s:%s)' % (
self.source.language,
self.source.name.lower(),
arrow,
self.destination.language,
self.destination.name.lower(),
)
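    # e.g. a directed edge renders as "(en:water) ---[derives_from]--> (tr:su)"
    # (illustrative node names).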
| 23.733333
| 56
| 0.560393
|
ab493d0856581dc4c87bcf8dcffe12309d6cca2f
| 1,842
|
py
|
Python
|
cli/tests/tests_env_vars/test_agent.py
|
polyaxon/cli
|
3543c0220a8a7c06fc9573cd2a740f8ae4930641
|
[
"Apache-2.0"
] | null | null | null |
cli/tests/tests_env_vars/test_agent.py
|
polyaxon/cli
|
3543c0220a8a7c06fc9573cd2a740f8ae4930641
|
[
"Apache-2.0"
] | 1
|
2022-01-24T11:26:47.000Z
|
2022-03-18T23:17:58.000Z
|
cli/tests/tests_env_vars/test_agent.py
|
polyaxon/cli
|
3543c0220a8a7c06fc9573cd2a740f8ae4930641
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
#
# Copyright 2018-2022 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from polyaxon.env_vars.getters import get_agent_info
from polyaxon.env_vars.keys import EV_KEYS_AGENT_INSTANCE
from polyaxon.exceptions import PolyaxonAgentError
from polyaxon.utils.test_utils import BaseTestCase
class TestAgentEnvVars(BaseTestCase):
def test_get_agent_info(self):
with self.assertRaises(PolyaxonAgentError):
get_agent_info(None)
with self.assertRaises(PolyaxonAgentError):
get_agent_info("foo")
with self.assertRaises(PolyaxonAgentError):
get_agent_info("foo.bar")
with self.assertRaises(PolyaxonAgentError):
get_agent_info("foo/bar")
with self.assertRaises(PolyaxonAgentError):
get_agent_info("foo/bar/moo")
with self.assertRaises(PolyaxonAgentError):
get_agent_info("foo.bar.moo")
assert get_agent_info("foo.agents.moo") == ("foo", "moo")
current = os.environ.get(EV_KEYS_AGENT_INSTANCE)
os.environ[EV_KEYS_AGENT_INSTANCE] = "foo.agents.moo"
assert get_agent_info("foo.agents.moo") == ("foo", "moo")
if current:
os.environ[EV_KEYS_AGENT_INSTANCE] = current
else:
del os.environ[EV_KEYS_AGENT_INSTANCE]
| 34.754717
| 74
| 0.709555
|
8442df3bcc44c7902260d84d1b88e68071e96e9a
| 14,239
|
py
|
Python
|
PGCAltas/utils/StatExpr/StatProcessor/FeaturesProcessor/processors.py
|
IzayoiRin/PGCAltas
|
44d1ac826c64f0eb67d129861895fae37221a4a2
|
[
"MIT"
] | null | null | null |
PGCAltas/utils/StatExpr/StatProcessor/FeaturesProcessor/processors.py
|
IzayoiRin/PGCAltas
|
44d1ac826c64f0eb67d129861895fae37221a4a2
|
[
"MIT"
] | 5
|
2020-02-12T03:23:40.000Z
|
2020-08-06T00:50:15.000Z
|
PGCAltas/utils/StatExpr/StatProcessor/FeaturesProcessor/processors.py
|
IzayoiRin/PGCAltas
|
44d1ac826c64f0eb67d129861895fae37221a4a2
|
[
"MIT"
] | null | null | null |
import copy
import pickle
import numpy as np
import sklearn.preprocessing as pp
import sklearn.impute as ipt
import sklearn.ensemble as esb
from sklearn.decomposition import PCA
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from sklearn.manifold import TSNE
from sklearn.decomposition import TruncatedSVD
# import sklearn.feature_selection as fs
from sklearn.metrics import accuracy_score
from PGCAltas.utils.statUniversal import train_test_split
from PGCAltas.utils.errors import MessProcessesError, FailInitialedError
class FeaturesProcessBase(object):
class Meta:
@classmethod
def route(cls, func, callfn=None):
def execute(self, method, *fargs, mparams=(), **fkwargs):
name = func.__name__.split('_', 1)[1].upper()
process = getattr(self, name, None)
assert process, 'No such process mapping'
method = process.get(method, None)
assert method, 'No such scale model'
margs, mkwargs = cls.resolute_params(mparams)
fit = method(*margs, **mkwargs)
if callfn:
fit = getattr(fit, callfn, None)
assert fit, 'The fit function is not callable'
return func(self, fit, *fargs, **fkwargs)
return execute
@staticmethod
def resolute_params(params):
ret = [tuple(), dict()]
for p in params:
if isinstance(p, (tuple, list)):
ret[0] = p
elif isinstance(p, dict):
ret[1] = p
return ret
def __init__(self):
self.dataset = None
self._dataset = None
self.labels = None
self._labels = None
def init_from_data(self, dataset, labels):
self.dataset = dataset
self._dataset = copy.deepcopy(self.dataset)
self.labels = labels
self._labels = copy.deepcopy(self.labels)
return self
@classmethod
def load(cls, buffer, **kwargs):
return pickle.load(buffer, **kwargs)
def dumps(self, buffer, **kwargs):
pickle.dump(self, buffer, **kwargs)
class GenericFeaturesProcess(FeaturesProcessBase):
meta = FeaturesProcessBase.Meta
selector = None
__initial = False
def __new__(cls, *args, **kwargs):
if not cls.__initial:
callfn = getattr(cls, 'callfn', None)
actions = [i for i in dir(cls) if i.startswith('fit')]
for action in actions:
func = getattr(cls, action)
setattr(cls, action, cls.meta.route(func, callfn))
cls.__initial = super().__new__(cls)
return cls.__initial
def __init__(self):
super().__init__()
def get_dataset(self, *args, **kwargs):
return self.dataset
def get_labels(self, *args, **kwargs):
return self.labels
def get_selector(self, *args, **kwargs):
return self.selector
class WholePreProcessMixin(object):
def fit_dimensionless(self, fit, *args, **kwargs):
self.dataset = fit(self.dataset, self.labels, *args, **kwargs)
return self
def fit_binarize(self, fit, *args, **kwargs):
self.dataset = fit(self.dataset, self.labels, *args, **kwargs)
return self
def fit_encode(self, fit, *args, **kwargs):
self.labels = fit(self.labels.reshape(-1, 1), *args, **kwargs).toarray()
return self
def fit_na(self, fit, *args, **kwargs):
self.dataset = fit(self.dataset, *args, **kwargs)
return self
def fit_funcitrans(self, fit, *args, **kwargs):
self.dataset = fit(self.dataset, *args, **kwargs)
return self
class FeaturesWholePreProcessor(GenericFeaturesProcess, WholePreProcessMixin):
DIMENSIONLESS = {
"STANDARDIZE": pp.StandardScaler,
"MINMAX": pp.MinMaxScaler,
"NORMALIZE": pp.Normalizer,
}
BINARIZE = {
"BINARY": pp.Binarizer,
}
ENCODE = {
"ONEHOT": pp.OneHotEncoder,
}
NA = {
# "mean", "median", "most_frequent", "constant"
"IMPUTE": ipt.SimpleImputer,
}
FUNCTRANS = {
"POLYNOMIAL": pp.PolynomialFeatures,
'CUSTOM': pp.FunctionTransformer,
}
class BasicPreProcessMixin(object):
def fit_dimensionless(self, fit, *args, **kwargs):
labels = self.get_labels()
self.dataset = fit(self.dataset, labels, *args, **kwargs)
return self
def fit_na(self, fit, *args, **kwargs):
self.dataset = fit(self.dataset, *args, **kwargs)
return self
def fit_encode(self, fit, *args, **kwargs):
self.labels = fit(self.labels.reshape(-1, 1)).toarray()
return self
class FeaturesBasicPreProcessor(GenericFeaturesProcess, BasicPreProcessMixin):
callfn = "fit_transform"
DIMENSIONLESS = {
"STANDARDIZE": pp.StandardScaler,
"MINMAX": pp.MinMaxScaler,
"NORMALIZE": pp.Normalizer
}
NA = {
# "mean", "median", "most_frequent", "constant"
"IMPUTE": ipt.SimpleImputer,
}
ENCODE = {
"ONEHOT": pp.OneHotEncoder,
}
def __call__(self, method, **kwargs):
if len(method) > 3:
raise MessProcessesError("Wrong processes queue")
self.kwargs = kwargs
# fit each labels as ONE HOT CODE
self.fit_encode(method[0], mparams=({'categories': self.kwargs.get('categories', None)}, ))
# replace NA with most frequent value then dimensionless
self.fit_na(method[1], mparams=({'strategy': self.kwargs.get('strategy', 'mean')}, )).\
fit_dimensionless(method[2])
class BasicScreenMixin(object):
def fit_ensemble(self, fit):
xtr, ytr = self.dataset, self.get_labels()
if self.kwargs.get('split', None):
xtr, xte, ytr, yte = self.train_or_test()
fit.fit(xtr, ytr)
acc = accuracy_score(yte, fit.predict(xte)) if self.kwargs.get('split') else 1.0
setattr(self, 'acc_', acc)
return fit
def cal_importance_rank(self):
fit = getattr(self, 'fit_', None)
if fit is None:
return
m, n = self.dataset.shape
        importances = fit.feature_importances_ * n  # type: np.ndarray
        self.asc_order = importances.argsort()
        self.importance_ = importances[self.asc_order]
class FeaturesBasicScreenProcessor(GenericFeaturesProcess, BasicScreenMixin):
ENSEMBLE = {
"RANDOM_FOREST": esb.RandomForestClassifier
}
spilter = {'test_size': 0.3,
'random_state': 0}
def train_or_test(self):
labels = self.get_labels()
return train_test_split(mode='L')(self.dataset, labels, **self.spilter)
def __call__(self, method, mparams=(), **kwargs):
# execute(self, method, *fargs, mparams=(), **fkwargs)
# func(self, fit, *fargs, **fkwargs)
self.kwargs = kwargs
self.fit_ = self.fit_ensemble(method, mparams=mparams)
self.cal_importance_rank()
return self
class BasicExtractMixin(object):
# def fit_reduce(self, fit, *fargs, **fkwargs):
# if fkwargs.get('supervised'):
# xtr, xte = self.dataset[self._trno, :], self.dataset[self._teno, :]
# labels = self.get_labels()
# ytr, yte = labels[self._trno], labels[self._teno]
# # fitting LDA model
# fit.fit(xtr, ytr)
# acc = accuracy_score(yte, fit.predict(xte))
# setattr(self, 'supervised_acc_', acc)
# else:
# fit.fit(self.dataset)
# self.dataset = fit.transform(self.dataset)
# return self
def fit_reduce(self, fit, *fargs, **fkwargs):
xtr, ytr = fargs
if fkwargs.get('supervised'):
# training set
fit.fit(xtr, ytr)
return fit
fit.fit(xtr)
xtr = fit.transform(xtr)
return [xtr, ytr]
class FeatureBasicExtractProcessor(GenericFeaturesProcess, BasicExtractMixin):
REDUCE = {
"PRINCIPAL_COMPONENTS": PCA,
"LINEAR_DISCRIMINANT": LinearDiscriminantAnalysis,
}
spilter = {
'test_size': 0.2,
'random_state': 0,
}
def __init__(self):
super(FeatureBasicExtractProcessor, self).__init__()
self._teno, self._trno = None, None
def init_from_data(self, dataset, labels, training=True):
# from testing set
if not training:
self.dataset, self.labels = dataset, labels
return self
# from training set
if isinstance(dataset, tuple) and isinstance(labels, tuple):
xtr, xte = dataset
ytr, yte = labels
elif not isinstance(dataset, tuple) and not isinstance(labels, tuple):
self.dataset, self.labels = dataset, labels
xtr, xte, ytr, yte = self.train_or_test()
else:
raise FailInitialedError('Wrong init-data format')
# record test and train's row numbers
self._teno, self._trno = range(ytr.shape[0], ytr.shape[0] + yte.shape[0]), range(ytr.shape[0])
# recombination [tr, te]
self.dataset = np.vstack([xtr, xte])
self.labels = np.hstack([ytr, yte])
return self
def train_or_test(self):
labels = self.get_labels()
return train_test_split(mode='L')(self.dataset, labels, **self.spilter)
# def layer_samples(self, labels):
# layers = np.unique(labels)
# ret = [np.argwhere(labels == l).reshape(-1) for l in layers]
# test, train = list(), list()
# for idxs in ret:
# n = idxs.shape[0]
# # layer with only one sample, copy itself
# if n == 1:
# idxs = np.hstack([idxs, idxs])
# n = 2
# # now, every layer with at least 2 samples, then cal testN
# testN = np.floor(n * self.spilter.get('test_size'))
# # if testN == 0, test set must have one sample, thus testN must be 1
# if testN == 0:
# testN += 1
# # now, every layer with at least 2 samples and its testN all larger than 0
# np.random.seed(self.spilter.get('random_state'))
# # random select from WHOLE SET idxs
# testingSet = np.random.choice(idxs, size=int(testN))
# test.append(testingSet)
# # the diff set between WHOLE SET idxs and SUB SET testingSet
# trainingSet = np.setdiff1d(idxs, testingSet)
# train.append(trainingSet)
#
# test, train = np.hstack(test), np.hstack(train)
# return self.dataset[train, :], self.dataset[test, :], labels[train], labels[test]
def __call__(self, method, mparams=(), **kwargs):
self.kwargs = kwargs
self.fit_reduce(method, mparams=mparams, supervised=True)
class FeatureFilterExtractProcessor(FeatureBasicExtractProcessor):
def __call__(self, *methods, **kwargs):
"""
init_from_data:
train --1:
self.dataset, self.labels ----> whole
self._teno, self._trno ----> rows
train --0:
self.dataset, self.labels ----> vd
self._teno, self._trno ----> None
__call__:
train --1:
fit_reduce(tr) ----> fit
train --0:
load_fit() ----> fit
fit ----> predict(te)
:param methods: [(filter, flt_params), (reducer, rdc_params)]
:param kwargs:
:return:
"""
self.kwargs = kwargs
if len(methods) != 2:
raise MessProcessesError("Wrong processes queue")
(flt, flt_params), (rdc, rdc_params) = methods
ori_features = self.dataset.shape[1]
flag = ori_features > flt_params['n_components'] > rdc_params['n_components'] > 0
if not flag:
raise MessProcessesError(
"Wrong processes params: features[%d], filter[%d], components[%d]" %
(ori_features, flt_params['n_components'], rdc_params['n_components'])
)
def _fitting(xtr, xte, ytr, yte):
tr_set = xtr, ytr
te_set = xte, yte
# PCA reduce training set, return PCA reduced data
tr_re = self.fit_reduce(flt, *tr_set, mparams=(flt_params,), supervised=False) # type: tuple
# LDA fitting training set, return fitted model
fit = self.fit_reduce(rdc, *tr_re, mparams=(rdc_params,), supervised=True)
# PCA reduce testing set and LDA predicting testing set, return PCA reduced data
te_re = _predict(fit, *te_set)
# LDA reduce whole set
self.dataset = fit.transform(np.vstack([tr_re[0], te_re[0]]))
return fit
def _predict(fit, *te_set):
"""PCA reduce testing set and LDA predicting testing set, return PCA reduced data"""
re = self.fit_reduce(flt, *te_set, mparams=(flt_params,), supervised=False) # type: tuple
ypr = fit.predict(re[0])
acc = accuracy_score(re[1], ypr)
setattr(self, 'supervised_acc_', acc)
return re
# training model, return fitted model
if kwargs.get('training', True):
xtr, xte = self.dataset[self._trno, :], self.dataset[self._teno, :]
labels = self.get_labels()
ytr, yte = labels[self._trno], labels[self._teno]
return _fitting(xtr, xte, ytr, yte)
# testing model, return None
else:
fit = kwargs.get('loaded_fit')
if fit:
re = _predict(fit, self.dataset, self.get_labels())
self.dataset = fit.transform(re[0])
class Viewer2DMixin(object):
def fit_estimate(self, fit, *fargs, **fkwargs):
self.dataset = fit.fit_transform(self.dataset)
return fit
class Feature2DViewerProcessor(GenericFeaturesProcess, Viewer2DMixin):
ESTIMATE = {
"T_STOCHASTIC": TSNE,
"SPARSE_SVD": TruncatedSVD
}
def __call__(self, method, mparams=(), **kwargs):
self.kwargs = kwargs
self.fit_ = self.fit_estimate(method, mparams=mparams)
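# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the original module: the filter-extract
# processor above boils down to "PCA as the filter, LDA as the supervised
# reducer, accuracy taken on the PCA-reduced test set". The same pipeline in
# plain scikit-learn (load_iris is only a stand-in dataset for the
# illustration; nothing here is executed on import):
def _pca_then_lda_sketch():
    from sklearn.datasets import load_iris
    from sklearn.decomposition import PCA
    from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
    from sklearn.metrics import accuracy_score
    from sklearn.model_selection import train_test_split as sk_split
    x, y = load_iris(return_X_y=True)
    xtr, xte, ytr, yte = sk_split(x, y, test_size=0.2, random_state=0)
    flt = PCA(n_components=3).fit(xtr)                       # "filter" step
    rdc = LinearDiscriminantAnalysis(n_components=2).fit(flt.transform(xtr), ytr)
    # accuracy on the PCA-reduced test set, i.e. what lands in supervised_acc_
    return accuracy_score(yte, rdc.predict(flt.transform(xte)))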
| 32.658257
| 105
| 0.59007
|
ad882d43321cb57134d71a5e4b1ea836e0e8c2c2
| 1,803
|
py
|
Python
|
generate_report.py
|
IsaacLuo/daily_trade_reporting_engine_practice
|
54947da71bcae23f6d5dfed9babc9a3f956c60de
|
[
"MIT"
] | null | null | null |
generate_report.py
|
IsaacLuo/daily_trade_reporting_engine_practice
|
54947da71bcae23f6d5dfed9babc9a3f956c60de
|
[
"MIT"
] | null | null | null |
generate_report.py
|
IsaacLuo/daily_trade_reporting_engine_practice
|
54947da71bcae23f6d5dfed9babc9a3f956c60de
|
[
"MIT"
] | null | null | null |
"""generate report shows:
Amount in USD settled incoming everyday
Amount in USD settled outgoing everyday
Ranking of entities based on incoming and outgoing amount.
"""
import sys
def generate_report(data):
"""
Args:
data: the sample_data dict
"""
#sort data by amount
for item in data:
item['amount'] = item['fx'] * item['units'] * item['unit_price']
data.sort(key=lambda item: item['amount'], reverse=True)
rank = 1
date_buckets = {'B': {}, 'S': {}}
for item in data:
item['rank'] = rank
        rank += 1
settlement_date = item['set_date']
buy_sell = item['buy_sell']
if settlement_date in date_buckets[buy_sell]:
date_buckets[buy_sell][settlement_date] += item['amount']
else:
date_buckets[buy_sell][settlement_date] = item['amount']
def print_report_csv(pipe=sys.stdout):
# show incoming everyday
print('incoming report', file=pipe)
for date in sorted(date_buckets['S'].keys()):
print(date, date_buckets['S'][date], sep=', ', file=pipe)
print(file=pipe)
# show outgoing everyday
print('outgoing report', file=pipe)
for date in sorted(date_buckets['B'].keys()):
print(date, date_buckets['B'][date], sep=', ', file=pipe)
print(file=pipe)
# show rank
print('Rank, Entity, Buy/Sell, AgreedFx, Currency, InstructionDate, '\
'SettlementDate, Units, Price per Unit', file=pipe)
for item in data:
print(item['rank'], item['entity'], item['buy_sell'], item['fx'],
item['currency'], item['ins_date'], item['set_date'], item['units'],
item['unit_price'], sep=', ', file=pipe)
print_report_csv()
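# Illustrative usage sketch, not part of the original script: the field names
# below are exactly the keys generate_report() reads; the entities and numbers
# are made up for the example.
if __name__ == '__main__':
    sample_data = [
        {'entity': 'foo', 'buy_sell': 'B', 'fx': 0.50, 'currency': 'SGP',
         'ins_date': '01 Jan 2016', 'set_date': '02 Jan 2016',
         'units': 200, 'unit_price': 100.25},
        {'entity': 'bar', 'buy_sell': 'S', 'fx': 0.22, 'currency': 'AED',
         'ins_date': '05 Jan 2016', 'set_date': '07 Jan 2016',
         'units': 450, 'unit_price': 150.5},
    ]
    generate_report(sample_data)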
| 35.352941
| 85
| 0.589018
|
bb8e93342d22ce0f95fa0c5b8d7c0eea58888822
| 10,412
|
py
|
Python
|
mkt/ratings/views.py
|
oremj/zamboni
|
a751dc6d22f7af947da327b0a091cbab0a999f49
|
[
"BSD-3-Clause"
] | null | null | null |
mkt/ratings/views.py
|
oremj/zamboni
|
a751dc6d22f7af947da327b0a091cbab0a999f49
|
[
"BSD-3-Clause"
] | null | null | null |
mkt/ratings/views.py
|
oremj/zamboni
|
a751dc6d22f7af947da327b0a091cbab0a999f49
|
[
"BSD-3-Clause"
] | null | null | null |
from django import http
from django.core.exceptions import PermissionDenied
from django.shortcuts import get_object_or_404, redirect
import commonware.log
import jingo
from tower import ugettext as _
from access import acl
import amo
import amo.log
from amo.urlresolvers import reverse
from addons.decorators import addon_view_factory, has_purchased_or_refunded
from addons.models import Addon
from amo.decorators import (json_view, login_required, post_required,
restricted_content)
from mkt.fragments.decorators import bust_fragments_on_post
from reviews.forms import ReviewReplyForm
from reviews.models import Review, ReviewFlag
from reviews.views import get_flags
from stats.models import ClientData, Contribution
from mkt.site import messages
from mkt.ratings.forms import ReviewForm
from mkt.webapps.models import Installed
log = commonware.log.getLogger('mkt.ratings')
addon_view = addon_view_factory(qs=Addon.objects.valid)
def _review_details(request, addon, form):
d = dict(addon_id=addon.id, user_id=request.user.id,
ip_address=request.META.get('REMOTE_ADDR', ''))
if addon.is_packaged:
d['version_id'] = addon.current_version.id
d.update(**form.cleaned_data)
return d
@addon_view
def review_list(request, addon, review_id=None, user_id=None, rating=None):
qs = Review.objects.valid().filter(addon=addon).order_by('-created')
# Mature regions show only reviews from within that region.
if not request.REGION.adolescent:
qs = qs.filter(client_data__region=request.REGION.id)
ctx = {'product': addon, 'score': rating, 'review_perms': {}}
if review_id is not None:
qs = qs.filter(pk=review_id)
ctx['page'] = 'detail'
# If this is a dev reply, find the first msg for context.
review = get_object_or_404(Review, pk=review_id)
if review.reply_to_id:
review_id = review.reply_to_id
ctx['reply'] = review
elif user_id is not None:
qs = qs.filter(user=user_id)
ctx['page'] = 'user'
if not qs:
raise http.Http404()
else:
ctx['page'] = 'list'
qs = qs.filter(is_latest=True)
ctx['ratings'] = ratings = amo.utils.paginate(request, qs, 20)
if not ctx.get('reply'):
ctx['replies'] = Review.get_replies(ratings.object_list)
if request.user.is_authenticated():
ctx['review_perms'] = {
'is_admin': acl.action_allowed(request, 'Addons', 'Edit'),
'is_editor': acl.check_reviewer(request),
'is_author': acl.check_addon_ownership(request, addon, viewer=True,
dev=True, support=True),
}
ctx['flags'] = get_flags(request, ratings.object_list)
ctx['has_review'] = addon.reviews.filter(user=request.user.id).exists()
return jingo.render(request, 'ratings/listing.html', ctx)
@addon_view
@json_view
@login_required(redirect=False)
@post_required
def edit(request, addon, review_id):
return http.HttpResponse()
@bust_fragments_on_post('/app/{app_slug}')
@addon_view
@login_required
@post_required
def reply(request, addon, review_id):
is_admin = acl.action_allowed(request, 'Addons', 'Edit')
is_author = acl.check_addon_ownership(request, addon, dev=True)
if not (is_admin or is_author):
raise PermissionDenied
review = get_object_or_404(Review.objects, pk=review_id, addon=addon)
form = ReviewReplyForm(request.POST or None)
if form.is_valid():
d = dict(reply_to=review, addon=addon,
defaults=dict(user=request.amo_user))
reply, new = Review.objects.get_or_create(**d)
for k, v in _review_details(request, addon, form).items():
setattr(reply, k, v)
reply.save()
action = 'New' if new else 'Edited'
if new:
amo.log(amo.LOG.ADD_REVIEW, addon, reply)
else:
amo.log(amo.LOG.EDIT_REVIEW, addon, reply)
log.debug('%s reply to %s: %s' % (action, review_id, reply.id))
messages.success(request,
_('Your reply was successfully added.') if new else
_('Your reply was successfully updated.'))
return redirect(addon.get_ratings_url('list'))
@bust_fragments_on_post('/app/{app_slug}')
@addon_view
@login_required
@restricted_content
@has_purchased_or_refunded
def add(request, addon):
if addon.has_author(request.user):
# Don't let app owners review their own apps.
raise PermissionDenied
# Get user agent of user submitting review. If there is an install with
# logged user agent that matches the current user agent, hook up that
    # install's client data with the rating. If there aren't any installs that
    # match, use the most recent install. This implies that the user must have
    # an install to submit a review, but it is not clear that logic is enforced,
    # so default client_data to None.
client_data = None
user_agent = request.META.get('HTTP_USER_AGENT', '')
install = (Installed.objects.filter(user=request.user, addon=addon)
.order_by('-created'))
install_w_user_agent = (install.filter(client_data__user_agent=user_agent)
.order_by('-created'))
has_review = False
try:
if install_w_user_agent:
client_data = install_w_user_agent[0].client_data
elif install:
client_data = install[0].client_data
except ClientData.DoesNotExist:
client_data = None
data = request.POST or None
# Try to get an existing review of the app by this user if we can.
filters = dict(addon=addon, user=request.user)
if addon.is_packaged:
filters['version'] = addon.current_version
try:
existing_review = Review.objects.valid().filter(**filters)[0]
except IndexError:
existing_review = None
# If the user is posting back, try to process the submission.
if data:
form = ReviewForm(data)
if form.is_valid():
cleaned = form.cleaned_data
if existing_review:
# If there's a review to overwrite, overwrite it.
if (cleaned['body'] != existing_review.body or
cleaned['rating'] != existing_review.rating):
existing_review.body = cleaned['body']
existing_review.rating = cleaned['rating']
ip = request.META.get('REMOTE_ADDR', '')
existing_review.ip_address = ip
if 'flag' in cleaned and cleaned['flag']:
existing_review.flag = True
existing_review.editorreview = True
rf = ReviewFlag(review=existing_review,
user_id=request.user.id,
flag=ReviewFlag.OTHER, note='URLs')
rf.save()
existing_review.save()
amo.log(amo.LOG.EDIT_REVIEW, addon, existing_review)
log.debug('[Review:%s] Edited by %s' % (existing_review.id,
request.user.id))
messages.success(request,
_('Your review was updated successfully!'))
# If there is a developer reply to the review, delete it. We do
# this per bug 777059.
try:
reply = existing_review.replies.all()[0]
except IndexError:
pass
else:
log.debug('[Review:%s] Deleted reply to %s' % (
reply.id, existing_review.id))
reply.delete()
else:
# If there isn't a review to overwrite, create a new review.
review = Review.objects.create(client_data=client_data,
**_review_details(
request, addon, form))
if 'flag' in cleaned and cleaned['flag']:
rf = ReviewFlag(review=review, user_id=request.user.id,
flag=ReviewFlag.OTHER, note='URLs')
rf.save()
amo.log(amo.LOG.ADD_REVIEW, addon, review)
log.debug('[Review:%s] Created by user %s ' %
(review.id, request.user.id))
messages.success(request,
_('Your review was successfully added!'))
return redirect(addon.get_ratings_url('list'))
# If the form isn't valid, we've set `form` so that it can be used when
# the template is rendered below.
elif existing_review:
# If the user isn't posting back but has an existing review, populate
# the form with their existing review and rating.
form = ReviewForm({'rating': existing_review.rating or 1,
'body': existing_review.body})
has_review = True
else:
# If the user isn't posting back and doesn't have an existing review,
# just show a blank version of the form.
form = ReviewForm()
# Get app's support url, either from support flow if contribution exists or
# author's support url.
support_email = str(addon.support_email) if addon.support_email else None
try:
contrib_id = (Contribution.objects
.filter(user=request.user, addon=addon,
type__in=(amo.CONTRIB_PURCHASE,
amo.CONTRIB_INAPP,
amo.CONTRIB_REFUND))
.order_by('-created')[0].id)
support_url = reverse('support', args=[contrib_id])
except IndexError:
support_url = addon.support_url
return jingo.render(request, 'ratings/add.html',
{'product': addon, 'form': form,
'support_url': support_url,
'has_review': has_review,
'support_email': support_email,
'page_parent': addon.get_detail_url() if
not existing_review else ''})
| 40.356589
| 79
| 0.596331
|
484164613fe2d5a49f8066285a4e743772357283
| 69
|
py
|
Python
|
miasma/__init__.py
|
bmcfee/miasma
|
4a98e7bb94006733f60d94db5b7ad43ff3f600c4
|
[
"BSD-3-Clause"
] | 1
|
2018-10-16T10:32:02.000Z
|
2018-10-16T10:32:02.000Z
|
miasma/__init__.py
|
bmcfee/miasma
|
4a98e7bb94006733f60d94db5b7ad43ff3f600c4
|
[
"BSD-3-Clause"
] | null | null | null |
miasma/__init__.py
|
bmcfee/miasma
|
4a98e7bb94006733f60d94db5b7ad43ff3f600c4
|
[
"BSD-3-Clause"
] | null | null | null |
# CREATED: 2/20/17 19:02 by Justin Salamon <justin.salamon@nyu.edu>
| 23
| 67
| 0.724638
|
927d4759106a14aa8dacc06c6a3f6eaa47c53b19
| 3,523
|
py
|
Python
|
helpers/date_time_helper.py
|
SpiralDevelopment/crypto-hft-data
|
205f01fd555eab4f636ffbb701dfcde53d27becc
|
[
"MIT"
] | 31
|
2020-07-20T14:11:39.000Z
|
2022-03-17T03:18:33.000Z
|
helpers/date_time_helper.py
|
SpiralDevelopment/crypto-hft-data
|
205f01fd555eab4f636ffbb701dfcde53d27becc
|
[
"MIT"
] | null | null | null |
helpers/date_time_helper.py
|
SpiralDevelopment/crypto-hft-data
|
205f01fd555eab4f636ffbb701dfcde53d27becc
|
[
"MIT"
] | 11
|
2020-07-20T14:11:52.000Z
|
2022-03-14T04:20:19.000Z
|
from datetime import datetime, timedelta
from dateutil import tz
import pytz
import calendar
import math
TIME_FORMAT = "%Y-%m-%dT%H:%M:%S"
SHORT_DATE_TIME_FORMAT_START = "%d %b, %H:%M"
SHORT_TIME_FORMAT_START = "%H:%M"
DATE_TIME_FORMAT = "%d %b,"
def str_to_date_time(date_time_str, from_time_format=TIME_FORMAT):
return datetime.strptime(date_time_str, from_time_format)
def date_time_to_str(date_time, time_format=TIME_FORMAT):
return datetime.strftime(date_time, time_format)
def from_utc_str_to_local_dtm(date_time, from_time_format=TIME_FORMAT):
utc_date_time = datetime.strptime(date_time, from_time_format)
return from_utc_dtm_to_local_dtm(utc_date_time)
def from_utc_dtm_to_local_dtm(utc_date_time):
from_zone = tz.tzutc()
to_zone = tz.tzlocal()
utc = utc_date_time.replace(tzinfo=from_zone)
central = utc.astimezone(to_zone)
return central
def from_local_dtm_to_utc_dtm(date_time, remove_tz=False):
local = pytz.timezone("Asia/Seoul")
local_dt = local.localize(date_time, is_dst=None)
utc_dt = local_dt.astimezone(pytz.utc)
if remove_tz:
utc_dt = utc_dt.replace(tzinfo=None)
return utc_dt
def from_utc_str_to_local_str(date_time, from_time_format=TIME_FORMAT, to_time_format=TIME_FORMAT):
return from_utc_str_to_local_dtm(date_time, from_time_format=from_time_format).strftime(to_time_format)
def from_utc_timestamp_to_local_dtm(date_time):
return datetime.fromtimestamp(int(date_time))
def from_utc_timestamp_to_local_string(date_time, to_time_format=TIME_FORMAT):
return from_utc_timestamp_to_local_dtm(date_time).strftime(to_time_format)
def localize_date_time(date_time):
time_zone = pytz.timezone('Asia/Seoul')
return time_zone.localize(date_time)
def set_utc_timezone(date_time):
return pytz.utc.localize(date_time)
def get_n_minutes_before(minutes, start_date_time=None):
if start_date_time is None:
start_date_time = datetime.now()
if minutes != 0:
return start_date_time - timedelta(minutes=minutes)
else:
return start_date_time
def from_utc_timestamp_to_utc_dtm(epoch_time):
return datetime.utcfromtimestamp(epoch_time)
def from_utc_dtm_to_utc_timestamp(utc_date_time):
return calendar.timegm(utc_date_time.utctimetuple())
def floor_datetime_n_minutes(tm, minutes):
return tm - timedelta(minutes=tm.minute % minutes,
seconds=tm.second,
microseconds=tm.microsecond)
def ceil_datetime_n_seconds(tm, seconds):
nsecs = tm.minute * 60 + tm.second + tm.microsecond * 1e-6
delta = math.ceil(nsecs / seconds) * seconds - nsecs
return tm + timedelta(seconds=delta)
def ceil_datetime_n_minutes(tm, minutes):
return ceil_datetime_n_seconds(tm, minutes * 60)
def floor_epoch_n_minutes(epoch, minutes):
return (epoch // (minutes * 60)) * minutes * 60
def ceil_epoch_n_minutes(epoch, minutes):
return floor_epoch_n_minutes(epoch, minutes) + minutes * 60
def get_date_by_epoch(timestamp):
epoch = datetime.utcfromtimestamp(0)
return (from_utc_timestamp_to_utc_dtm(timestamp).replace(hour=0, minute=0, second=0) - epoch).total_seconds()
def round_datetime_n_minutes(tm, minutes):
discard = timedelta(minutes=tm.minute % minutes,
seconds=tm.second,
microseconds=tm.microsecond)
tm -= discard
if discard >= timedelta(minutes=(minutes / 2)):
tm += timedelta(minutes=minutes)
return tm
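# Quick illustrative check, not part of the original helper module: floor,
# ceil and round a timestamp to a 5-minute boundary using the helpers above.
if __name__ == '__main__':
    now = datetime(2020, 7, 20, 14, 11, 39)
    print(floor_datetime_n_minutes(now, 5))   # 2020-07-20 14:10:00
    print(ceil_datetime_n_minutes(now, 5))    # 2020-07-20 14:15:00
    print(round_datetime_n_minutes(now, 5))   # 2020-07-20 14:10:00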
| 28.642276
| 113
| 0.741981
|
912ab6d5f0731325af070ed6324ec22fe819bb95
| 534
|
py
|
Python
|
src/libs/common.py
|
spunkmars/django-spms
|
95ac193891f93da07c3a26feeaf846e6030f3466
|
[
"BSD-3-Clause"
] | 23
|
2020-04-14T07:50:38.000Z
|
2022-01-27T09:07:19.000Z
|
src/libs/common.py
|
bjzhangyong/django-spms
|
95ac193891f93da07c3a26feeaf846e6030f3466
|
[
"BSD-3-Clause"
] | 8
|
2021-03-19T09:01:16.000Z
|
2022-02-10T12:28:55.000Z
|
src/libs/common.py
|
bjzhangyong/django-spms
|
95ac193891f93da07c3a26feeaf846e6030f3466
|
[
"BSD-3-Clause"
] | 6
|
2020-04-14T13:34:29.000Z
|
2022-01-25T04:05:16.000Z
|
# coding=utf-8
from hashlib import md5
import time
def create_uuid(*args):
id = md5()
timestamp = time.time()
m_str = None
if args:
        # build the plain string first; it is encoded once below before hashing
        m_str = '%s%f' % (''.join(args), timestamp)
else:
m_str = '%f' % timestamp
m_str = m_str.encode('utf-8')
id.update(m_str)
id = id.hexdigest()[8:-8].strip().lower()
return id
def get_device_uuid(m_sn=None):
m = md5()
    # md5.update() needs bytes, so encode the normalized serial number first
    m.update(m_sn.upper().encode('utf-8'))
uuid = m.hexdigest()[8:-8].lower()
return uuid
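# Illustrative usage, not part of the original module: both helpers return a
# 16-character lowercase hex id taken from the middle of an md5 digest.
if __name__ == '__main__':
    print(create_uuid('spms', 'demo'))   # time-salted, differs on every call
    print(get_device_uuid('ab:cd:ef'))   # deterministic for a given serial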
| 19.777778
| 68
| 0.544944
|
d160492804f1233f74a2ec0f1d2362777e27ced6
| 8,838
|
py
|
Python
|
train_extract_model.py
|
ys10/GCIClassify
|
a66b1a257ac26b10732a68228721023b99f67a8e
|
[
"MIT"
] | null | null | null |
train_extract_model.py
|
ys10/GCIClassify
|
a66b1a257ac26b10732a68228721023b99f67a8e
|
[
"MIT"
] | null | null | null |
train_extract_model.py
|
ys10/GCIClassify
|
a66b1a257ac26b10732a68228721023b99f67a8e
|
[
"MIT"
] | null | null | null |
# coding=utf-8
import argparse
import os
import tensorflow as tf
import tqdm
from feature_extraction.extract_model import ExtractModel
from feature_extraction.data_set import get_testing_set
from model_loader import load_model, save_model
from data_set_args import get_rab_set_args, get_ked_set_args,\
get_bdl_set_args, get_jmk_set_args, get_slt_set_args,\
get_mix2_set_args, get_mix3_set_args
def get_args():
parser = argparse.ArgumentParser(description="GlottalNet")
parser.add_argument("--save_path", type=str, default="./save/mix3/")
parser.add_argument("--log_path", type=str, default="./log/mix3/")
parser.add_argument("--training_epochs", type=int, default=100)
parser.add_argument("--training_batch_size", type=int, default=128)
parser.add_argument("--validation_batch_size", type=int, default=128)
parser.add_argument("--save_per_epochs", type=int, default=10)
parser.add_argument("--validation_per_epochs", type=int, default=1)
return parser.parse_args()
def main():
tf.logging.set_verbosity(tf.logging.INFO)
args = get_args()
data_set_args = get_mix3_set_args()
net = ExtractModel()
graph = tf.Graph()
with graph.as_default():
with tf.variable_scope("data"):
training_set = get_testing_set(key=data_set_args.training_set_name,
epochs=args.training_epochs, batch_size=args.training_batch_size)
validation_set = get_testing_set(key=data_set_args.validation_set_name,
epochs=args.training_epochs // args.validation_per_epochs,
batch_size=args.validation_batch_size)
iterator = training_set.make_one_shot_iterator()
next_element = iterator.get_next()
training_init_op = iterator.make_initializer(training_set)
validation_init_op = iterator.make_initializer(validation_set)
with tf.variable_scope("extract_model"):
tensor_dict = net.build(next_element, training=True)
"""training summary"""
loss_summary = tf.summary.scalar("loss", tensor_dict["loss"])
accuracy_summary = tf.summary.scalar("accuracy", tensor_dict["accuracy"])
recall_summary = tf.summary.scalar("recall", tensor_dict["recall"])
precision_summary = tf.summary.scalar("precision", tensor_dict["precision"])
f1_score_summary = tf.summary.scalar("f1_score", tensor_dict["f1_score"])
"""validation summary"""
validation_loss = tf.placeholder(tf.float32, shape=())
validation_accuracy = tf.placeholder(tf.float32, shape=())
validation_recall = tf.placeholder(tf.float32, shape=())
validation_precision = tf.placeholder(tf.float32, shape=())
validation_f1_score = tf.placeholder(tf.float32, shape=())
validation_loss_summary = tf.summary.scalar("loss", validation_loss)
validation_accuracy_summary = tf.summary.scalar("accuracy", validation_accuracy)
validation_recall_summary = tf.summary.scalar("recall", validation_recall)
validation_precision_summary = tf.summary.scalar("precision", validation_precision)
validation_f1_score_summary = tf.summary.scalar("f1_score", validation_f1_score)
"""training"""
global_step = tf.Variable(0, dtype=tf.int32, name="global_step")
opt = tf.train.AdamOptimizer(1e-3)
extra_update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
with tf.control_dependencies(extra_update_ops):
upd = opt.minimize(tensor_dict["loss"], global_step=global_step)
saver = tf.train.Saver(max_to_keep=50)
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
with tf.Session(graph=graph, config=config) as sess:
sess.run([tf.global_variables_initializer(), tf.local_variables_initializer()])
save_path = os.path.join(args.save_path, net.name)
if not load_model(saver, sess, save_path):
tf.logging.info("Run on an initialized graph.")
sess.run([tf.global_variables_initializer(), tf.local_variables_initializer()])
training_writer = tf.summary.FileWriter(os.path.join(args.log_path, "training"), sess.graph)
validation_writer = tf.summary.FileWriter(os.path.join(args.log_path, "validation"), sess.graph)
global_step_eval = sess.run(global_step)
training_steps = args.training_epochs * data_set_args.training_set_size // args.training_batch_size
save_steps = args.save_per_epochs * data_set_args.training_set_size // args.training_batch_size
validation_steps = args.validation_per_epochs * data_set_args.training_set_size // args.training_batch_size
pbar = tqdm.tqdm(total=training_steps)
pbar.update(global_step_eval)
sess.run(training_init_op)
while global_step_eval < training_steps:
"""validation"""
if global_step_eval % validation_steps == 0:
sess.run(validation_init_op)
total_loss = 0.0
total_accuracy = 0.0
total_recall = 0.0
total_precision = 0.0
                # use a separate name so the outer validation_steps that drives
                # the epoch-interval check above is not clobbered here
                validation_batches = data_set_args.validation_set_size // args.validation_batch_size
                for s in range(validation_batches):
                    tensor_dict_eval = sess.run(tensor_dict)
                    total_loss += tensor_dict_eval["loss"]
                    total_accuracy += tensor_dict_eval["accuracy"]
                    total_recall += tensor_dict_eval["recall"]
                    total_precision += tensor_dict_eval["precision"]
                total_loss /= validation_batches
                total_accuracy /= validation_batches
                total_recall /= validation_batches
                total_precision /= validation_batches
total_f1_score = 2 * total_recall * total_precision / (total_recall + total_precision)
feed_dict = {validation_loss: total_loss, validation_accuracy: total_accuracy, validation_recall: total_recall,
validation_precision: total_precision, validation_f1_score: total_f1_score}
validation_list = [validation_loss_summary, validation_accuracy_summary, validation_recall_summary,
validation_precision_summary, validation_f1_score_summary]
validation_loss_summary_eval, validation_accuracy_summary_eval, validation_recall_summary_eval,\
validation_precision_summary_eval, validation_f1_score_summary_eval = sess.run(validation_list,
feed_dict=feed_dict)
validation_writer.add_summary(validation_loss_summary_eval, global_step=global_step_eval)
validation_writer.add_summary(validation_accuracy_summary_eval, global_step=global_step_eval)
validation_writer.add_summary(validation_recall_summary_eval, global_step=global_step_eval)
validation_writer.add_summary(validation_precision_summary_eval, global_step=global_step_eval)
validation_writer.add_summary(validation_f1_score_summary_eval, global_step=global_step_eval)
tf.logging.info("Validation done.")
sess.run(training_init_op)
"""training"""
training_list = [loss_summary, accuracy_summary, recall_summary, precision_summary,
f1_score_summary, global_step, upd]
training_loss_summary_eval, training_accuracy_summary_eval, training_recall_summary_eval,\
training_precision_summary_eval, training_f1_score_summary_eval, global_step_eval,\
_ = sess.run(training_list)
training_writer.add_summary(training_loss_summary_eval, global_step=global_step_eval)
training_writer.add_summary(training_accuracy_summary_eval, global_step=global_step_eval)
training_writer.add_summary(training_recall_summary_eval, global_step=global_step_eval)
training_writer.add_summary(training_precision_summary_eval, global_step=global_step_eval)
training_writer.add_summary(training_f1_score_summary_eval, global_step=global_step_eval)
"""save model"""
if global_step_eval % save_steps == 0:
if not os.path.exists(args.save_path) or not os.path.isdir(args.save_path):
os.makedirs(args.save_path)
save_model(saver, sess, save_path, global_step_eval)
pbar.update(1)
tf.logging.info("Congratulations!")
if __name__ == "__main__":
main()
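# Typical invocation sketch (the paths are placeholders, not taken from the
# original repo); every knob below is one of the argparse flags defined in
# get_args() above:
#
#   python train_extract_model.py --save_path ./save/mix3/ --log_path ./log/mix3/ \
#       --training_epochs 100 --training_batch_size 128 --validation_per_epochs 1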
| 60.122449
| 127
| 0.676171
|
d82f426c4f0aa7f954a9454d6c282f2fbc18f5d9
| 568
|
py
|
Python
|
depth_prediction/pytorch/encoding/nn/__init__.py
|
ygjwd12345/VISTA-Net
|
7b85ec10540eed03c93abcee1b0b41c3094bd578
|
[
"MIT"
] | 13
|
2021-03-08T07:21:00.000Z
|
2021-12-13T07:52:52.000Z
|
depth_prediction/pytorch/encoding/nn/__init__.py
|
ygjwd12345/VISTA-Net
|
7b85ec10540eed03c93abcee1b0b41c3094bd578
|
[
"MIT"
] | null | null | null |
depth_prediction/pytorch/encoding/nn/__init__.py
|
ygjwd12345/VISTA-Net
|
7b85ec10540eed03c93abcee1b0b41c3094bd578
|
[
"MIT"
] | 2
|
2021-03-31T01:23:56.000Z
|
2021-04-13T15:57:26.000Z
|
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
## Created by: Hang Zhang
## ECE Department, Rutgers University
## Email: zhang.hang@rutgers.edu
## Copyright (c) 2017
##
## This source code is licensed under the MIT-style license found in the
## LICENSE file in the root directory of this source tree
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
"""Encoding NN Modules"""
from .syncbn import *
from .encoding import *
from .customize import *
from .AttentionGraphCondKernel import *
from .multihead import *
| 33.411765
| 75
| 0.547535
|
f2500ef16cbce4712be315a1a9d63401b9a716af
| 574
|
py
|
Python
|
kolibri/plugins/user_auth/views.py
|
MBKayro/kolibri
|
0a38a5fb665503cf8f848b2f65938e73bfaa5989
|
[
"MIT"
] | 545
|
2016-01-19T19:26:55.000Z
|
2022-03-20T00:13:04.000Z
|
kolibri/plugins/user_auth/views.py
|
MBKayro/kolibri
|
0a38a5fb665503cf8f848b2f65938e73bfaa5989
|
[
"MIT"
] | 8,329
|
2016-01-19T19:32:02.000Z
|
2022-03-31T21:23:12.000Z
|
kolibri/plugins/user_auth/views.py
|
MBKayro/kolibri
|
0a38a5fb665503cf8f848b2f65938e73bfaa5989
|
[
"MIT"
] | 493
|
2016-01-19T19:26:48.000Z
|
2022-03-28T14:35:05.000Z
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from django.views.generic.base import TemplateView
from kolibri.core.views import RootURLRedirectView
class UserAuthView(TemplateView):
template_name = "user_auth/user_auth.html"
def get(self, request):
"""
When authenticated, redirect to the appropriate view
"""
if request.user.is_authenticated():
return RootURLRedirectView.as_view()(request)
return super(UserAuthView, self).get(request)
| 28.7
| 60
| 0.740418
|
cc8affc88025d82c500917a5cc8139fad12db775
| 3,832
|
py
|
Python
|
tests/test_query.py
|
cosadiz69/pytube
|
501cc62f1e6f23b47abecc634767307d89e48165
|
[
"MIT-0"
] | null | null | null |
tests/test_query.py
|
cosadiz69/pytube
|
501cc62f1e6f23b47abecc634767307d89e48165
|
[
"MIT-0"
] | null | null | null |
tests/test_query.py
|
cosadiz69/pytube
|
501cc62f1e6f23b47abecc634767307d89e48165
|
[
"MIT-0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Unit tests for the :class:`StreamQuery <StreamQuery>` class."""
import pytest
def test_count(cipher_signature):
"""Ensure :meth:`~pytube.StreamQuery.count` returns an accurate amount."""
assert cipher_signature.streams.count() == 22
@pytest.mark.parametrize(
'test_input,expected', [
({'progressive': True}, ['22', '43', '18', '36', '17']),
({'resolution': '720p'}, ['22', '136', '247']),
({'res': '720p'}, ['22', '136', '247']),
({'fps': 30, 'resolution': '480p'}, ['135', '244']),
({'mime_type': 'audio/mp4'}, ['140']),
({'type': 'audio'}, ['140', '171', '249', '250', '251']),
({'subtype': '3gpp'}, ['36', '17']),
({'abr': '128kbps'}, ['43', '140', '171']),
({'bitrate': '128kbps'}, ['43', '140', '171']),
({'audio_codec': 'vorbis'}, ['43', '171']),
({'video_codec': 'vp9'}, ['248', '247', '244', '243', '242', '278']),
({'only_audio': True}, ['140', '171', '249', '250', '251']),
({'only_video': True, 'video_codec': 'avc1.4d4015'}, ['133']),
({'progressive': True}, ['22', '43', '18', '36', '17']),
({'adaptive': True, 'resolution': '1080p'}, ['137', '248']),
({'custom_filter_functions': [lambda s: s.itag == '22']}, ['22']),
],
)
def test_filters(test_input, expected, cipher_signature):
"""Ensure filters produce the expected results."""
result = [
s.itag for s
in cipher_signature.streams.filter(**test_input).all()
]
assert result == expected
@pytest.mark.parametrize('test_input', ['first', 'last'])
def test_empty(test_input, cipher_signature):
"""Ensure :meth:`~pytube.StreamQuery.last` and
:meth:`~pytube.StreamQuery.first` return None if the resultset is
empty.
"""
query = cipher_signature.streams.filter(video_codec='vp20')
fn = getattr(query, test_input)
assert fn() is None
def test_get_last(cipher_signature):
"""Ensure :meth:`~pytube.StreamQuery.last` returns the expected
:class:`Stream <Stream>`.
"""
assert cipher_signature.streams.last().itag == '251'
def test_get_first(cipher_signature):
"""Ensure :meth:`~pytube.StreamQuery.first` returns the expected
:class:`Stream <Stream>`.
"""
assert cipher_signature.streams.first().itag == '22'
def test_order_by(cipher_signature):
"""Ensure :meth:`~pytube.StreamQuery.order_by` sorts the list of
:class:`Stream <Stream>` instances in the expected order.
"""
itags = [
s.itag for s in cipher_signature.streams
.filter(progressive=True)
.order_by('itag')
.all()
]
assert itags == ['17', '18', '22', '36', '43']
def test_order_by_descending(cipher_signature):
"""Ensure :meth:`~pytube.StreamQuery.desc` sorts the list of
:class:`Stream <Stream>` instances in the reverse order.
"""
itags = [
s.itag for s in cipher_signature.streams
.filter(progressive=True)
.order_by('itag')
.desc()
.all()
]
assert itags == ['43', '36', '22', '18', '17']
def test_order_by_ascending(cipher_signature):
"""Ensure :meth:`~pytube.StreamQuery.desc` sorts the list of
:class:`Stream <Stream>` instances in ascending order.
"""
itags = [
s.itag for s in cipher_signature.streams
.filter(progressive=True)
.order_by('itag')
.asc()
.all()
]
assert itags == ['17', '18', '22', '36', '43']
def test_get_by_itag(cipher_signature):
"""Ensure :meth:`~pytube.StreamQuery.get_by_itag` returns the expected
:class:`Stream <Stream>`.
"""
assert cipher_signature.streams.get_by_itag(22).itag == '22'
def test_get_by_non_existent_itag(cipher_signature):
assert not cipher_signature.streams.get_by_itag(22983)
| 32.474576
| 78
| 0.591075
|
14957a50105ca0a0c194a4e749e1d3e7b5530ce7
| 5,439
|
py
|
Python
|
AppServer/_python_runtime.py
|
echoi-appscale/appscale
|
bff3d6a9d42b0c2dd58796c4fc6aa1ddd2c00bcc
|
[
"Apache-2.0"
] | 3
|
2016-06-12T01:18:49.000Z
|
2018-07-16T18:20:23.000Z
|
AppServer/_python_runtime.py
|
nlake44/appscale
|
6944af660ca4cb772c9b6c2332ab28e5ef4d849f
|
[
"Apache-2.0"
] | null | null | null |
AppServer/_python_runtime.py
|
nlake44/appscale
|
6944af660ca4cb772c9b6c2332ab28e5ef4d849f
|
[
"Apache-2.0"
] | 1
|
2020-05-25T02:59:15.000Z
|
2020-05-25T02:59:15.000Z
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Convenience wrapper for starting an appengine tool."""
import os
import sys
if not hasattr(sys, 'version_info'):
sys.stderr.write('Very old versions of Python are not supported. Please '
'use version 2.7.\n')
sys.exit(1)
version_tuple = tuple(sys.version_info[:2])
if version_tuple < (2, 7):
sys.stderr.write('Error: Python %d.%d is not supported. Please use '
'version 2.7.\n' % version_tuple)
sys.exit(1)
def _get_dir_path(sibling):
"""Get a path to the directory of this script.
By default, the canonical path (symlinks resolved) will be returned. In some
environments the canonical directory is not sufficient because different
parts of the SDK are referenced by symlinks, including this very module's
file. In this case, the non-canonical path to this file's directory will be
returned (i.e., the directory where the symlink lives, not the directory
where it points).
Args:
sibling: Relative path to a sibiling of this module file. Choose a sibling
that is potentially symlinked into the parent directory.
Returns:
A directory name.
Raises:
ValueError: If no proper path could be determined.
"""
py_file = __file__.replace('.pyc', '.py')
dir_paths = [os.path.abspath(os.path.dirname(os.path.realpath(py_file))),
os.path.abspath(os.path.dirname(py_file))]
for dir_path in dir_paths:
sibling_path = os.path.join(dir_path, sibling)
if os.path.exists(sibling_path):
return dir_path
raise ValueError('Could not determine directory that contains both, this '
'file and %s.' % sibling)
_DIR_PATH = _get_dir_path(os.path.join('lib', 'ipaddr'))
_SCRIPT_DIR = os.path.join(_DIR_PATH, 'google', 'appengine', 'tools')
_DEVAPPSERVER2_DIR = os.path.join(
_DIR_PATH, 'google', 'appengine', 'tools', 'devappserver2')
_PHP_RUNTIME_DIR = os.path.join(_DEVAPPSERVER2_DIR, 'php')
_PYTHON_RUNTIME_DIR = os.path.join(_DEVAPPSERVER2_DIR, 'python')
_STUB_DEPENDENCIES = [
os.path.join(_DIR_PATH, 'lib', 'antlr3'),
os.path.join(_DIR_PATH, 'lib', 'fancy_urllib'),
os.path.join(_DIR_PATH, 'lib', 'ipaddr'),
os.path.join(_DIR_PATH, 'lib', 'yaml-3.10'),
]
EXTRA_PATHS = _STUB_DEPENDENCIES + [
_DIR_PATH,
os.path.join(_DIR_PATH, 'lib', 'simplejson'),
os.path.join(_DIR_PATH, 'lib', 'django-1.4'),
os.path.join(_DIR_PATH, 'lib', 'jinja2-2.6'),
os.path.join(_DIR_PATH, 'lib', 'protorpc'),
os.path.join(_DIR_PATH, 'lib', 'PyAMF-0.6.1'),
os.path.join(_DIR_PATH, 'lib', 'markupsafe-0.15'),
os.path.join(_DIR_PATH, 'lib', 'webob-1.2.3'),
os.path.join(_DIR_PATH, 'lib', 'webapp2-2.5.2'),
]
_DEVAPPSERVER2_PATHS = _STUB_DEPENDENCIES + [
_DIR_PATH,
os.path.join(_DIR_PATH, 'lib', 'concurrent'),
os.path.join(_DIR_PATH, 'lib', 'cherrypy'),
os.path.join(_DIR_PATH, 'lib', 'jinja2-2.6'),
os.path.join(_DIR_PATH, 'lib', 'webob-1.2.3'),
os.path.join(_DIR_PATH, 'lib', 'webapp2-2.5.1'),
]
_PHP_RUNTIME_PATHS = [
os.path.join(_DIR_PATH, 'lib', 'concurrent'),
os.path.join(_DIR_PATH, 'lib', 'cherrypy'),
os.path.join(_DIR_PATH, 'lib', 'yaml-3.10'),
]
_PYTHON_RUNTIME_PATHS = [
_DIR_PATH,
os.path.join(_DIR_PATH, 'lib', 'concurrent'),
os.path.join(_DIR_PATH, 'lib', 'cherrypy'),
os.path.join(_DIR_PATH, 'lib', 'fancy_urllib'),
os.path.join(_DIR_PATH, 'lib', 'protorpc'),
os.path.join(_DIR_PATH, 'lib', 'yaml-3.10'),
]
_BOOTSTAP_NAME_TO_REAL_NAME = {
'dev_appserver.py': 'devappserver2.py',
'_php_runtime.py': 'runtime.py',
'_python_runtime.py': 'runtime.py',
}
_SCRIPT_TO_DIR = {
'dev_appserver.py': _DEVAPPSERVER2_DIR,
'_php_runtime.py': _PHP_RUNTIME_DIR,
'_python_runtime.py': _PYTHON_RUNTIME_DIR,
}
_SYS_PATH_ADDITIONS = {
'dev_appserver.py': _DEVAPPSERVER2_PATHS,
'_php_runtime.py': _PHP_RUNTIME_PATHS,
'_python_runtime.py': _PYTHON_RUNTIME_PATHS,
}
def fix_sys_path(extra_extra_paths=()):
"""Fix the sys.path to include our extra paths.
fix_sys_path should be called before running testbed-based unit tests so that
third-party modules are correctly added to sys.path.
"""
sys.path[1:1] = EXTRA_PATHS
def _run_file(file_path, globals_, script_dir=_SCRIPT_DIR):
"""Execute the file at the specified path with the passed-in globals."""
script_name = os.path.basename(file_path)
sys.path = _SYS_PATH_ADDITIONS[script_name] + sys.path
if 'google' in sys.modules:
del sys.modules['google']
script_dir = _SCRIPT_TO_DIR.get(script_name, script_dir)
script_name = _BOOTSTAP_NAME_TO_REAL_NAME.get(script_name, script_name)
script_path = os.path.join(script_dir, script_name)
execfile(script_path, globals_)
exit(0)
if __name__ == '__main__':
_run_file(__file__, globals())
| 29.559783
| 79
| 0.691487
|
9ee7968fe3c0186d9fe092c7871739afc7b39c27
| 8,036
|
py
|
Python
|
tests/ode_test.py
|
joshuagornall/jax
|
c97cd0a526c12ad81988fd58c1c66df4ddd71813
|
[
"ECL-2.0",
"Apache-2.0"
] | 14
|
2021-04-24T03:26:39.000Z
|
2022-01-28T14:25:13.000Z
|
tests/ode_test.py
|
joshuagornall/jax
|
c97cd0a526c12ad81988fd58c1c66df4ddd71813
|
[
"ECL-2.0",
"Apache-2.0"
] | 20
|
2021-08-17T20:31:56.000Z
|
2022-03-31T11:56:24.000Z
|
tests/ode_test.py
|
joshuagornall/jax
|
c97cd0a526c12ad81988fd58c1c66df4ddd71813
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2020-08-04T13:36:09.000Z
|
2020-11-04T02:58:55.000Z
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from functools import partial
from absl.testing import absltest
import numpy as np
import jax
from jax import test_util as jtu
import jax.numpy as jnp
from jax.experimental.ode import odeint
from jax.tree_util import tree_map
import scipy.integrate as osp_integrate
from jax.config import config
config.parse_flags_with_absl()
class ODETest(jtu.JaxTestCase):
def check_against_scipy(self, fun, y0, tspace, *args, tol=1e-1):
y0, tspace = np.array(y0), np.array(tspace)
np_fun = partial(fun, np)
scipy_result = jnp.asarray(osp_integrate.odeint(np_fun, y0, tspace, args))
y0, tspace = jnp.array(y0), jnp.array(tspace)
jax_fun = partial(fun, jnp)
jax_result = odeint(jax_fun, y0, tspace, *args)
self.assertAllClose(jax_result, scipy_result, check_dtypes=False, atol=tol, rtol=tol)
@jtu.skip_on_devices("tpu")
def test_pend_grads(self):
def pend(_np, y, _, m, g):
theta, omega = y
return [omega, -m * omega - g * _np.sin(theta)]
y0 = [np.pi - 0.1, 0.0]
ts = np.linspace(0., 1., 11)
args = (0.25, 9.8)
tol = 1e-1 if jtu.num_float_bits(np.float64) == 32 else 1e-3
self.check_against_scipy(pend, y0, ts, *args, tol=tol)
integrate = partial(odeint, partial(pend, jnp))
jtu.check_grads(integrate, (y0, ts, *args), modes=["rev"], order=2,
atol=tol, rtol=tol)
@jtu.skip_on_devices("tpu", "gpu")
def test_pytree_state(self):
"""Test calling odeint with y(t) values that are pytrees."""
def dynamics(y, _t):
return tree_map(jnp.negative, y)
y0 = (np.array(-0.1), np.array([[[0.1]]]))
ts = np.linspace(0., 1., 11)
tol = 1e-1 if jtu.num_float_bits(np.float64) == 32 else 1e-3
integrate = partial(odeint, dynamics)
jtu.check_grads(integrate, (y0, ts), modes=["rev"], order=2,
atol=tol, rtol=tol)
@jtu.skip_on_devices("tpu")
def test_weird_time_pendulum_grads(self):
"""Test that gradients are correct when the dynamics depend on t."""
def dynamics(_np, y, t):
return _np.array([y[1] * -t, -1 * y[1] - 9.8 * _np.sin(y[0])])
y0 = [np.pi - 0.1, 0.0]
ts = np.linspace(0., 1., 11)
tol = 1e-1 if jtu.num_float_bits(np.float64) == 32 else 1e-3
self.check_against_scipy(dynamics, y0, ts, tol=tol)
integrate = partial(odeint, partial(dynamics, jnp))
jtu.check_grads(integrate, (y0, ts), modes=["rev"], order=2,
rtol=tol, atol=tol)
@jtu.skip_on_devices("tpu", "gpu")
def test_decay(self):
def decay(_np, y, t, arg1, arg2):
return -_np.sqrt(t) - y + arg1 - _np.mean((y + arg2)**2)
rng = np.random.RandomState(0)
args = (rng.randn(3), rng.randn(3))
y0 = rng.randn(3)
ts = np.linspace(0.1, 0.2, 4)
tol = 1e-1 if jtu.num_float_bits(np.float64) == 32 else 1e-3
self.check_against_scipy(decay, y0, ts, *args, tol=tol)
integrate = partial(odeint, partial(decay, jnp))
jtu.check_grads(integrate, (y0, ts, *args), modes=["rev"], order=2,
rtol=tol, atol=tol)
@jtu.skip_on_devices("tpu", "gpu")
def test_swoop(self):
def swoop(_np, y, t, arg1, arg2):
return _np.array(y - _np.sin(t) - _np.cos(t) * arg1 + arg2)
ts = np.array([0.1, 0.2])
tol = 1e-1 if jtu.num_float_bits(np.float64) == 32 else 1e-3
y0 = np.linspace(0.1, 0.9, 10)
args = (0.1, 0.2)
self.check_against_scipy(swoop, y0, ts, *args, tol=tol)
integrate = partial(odeint, partial(swoop, jnp))
jtu.check_grads(integrate, (y0, ts, *args), modes=["rev"], order=2,
rtol=tol, atol=tol)
@jtu.skip_on_devices("tpu", "gpu")
def test_swoop_bigger(self):
def swoop(_np, y, t, arg1, arg2):
return _np.array(y - _np.sin(t) - _np.cos(t) * arg1 + arg2)
ts = np.array([0.1, 0.2])
tol = 1e-1 if jtu.num_float_bits(np.float64) == 32 else 1e-3
big_y0 = np.linspace(1.1, 10.9, 10)
args = (0.1, 0.3)
self.check_against_scipy(swoop, big_y0, ts, *args, tol=tol)
integrate = partial(odeint, partial(swoop, jnp))
jtu.check_grads(integrate, (big_y0, ts, *args), modes=["rev"], order=2,
rtol=tol, atol=tol)
@jtu.skip_on_devices("tpu", "gpu")
def test_odeint_vmap_grad(self):
# https://github.com/google/jax/issues/2531
def dx_dt(x, *args):
return 0.1 * x
def f(x, y):
y0 = jnp.array([x, y])
t = jnp.array([0., 5.])
y = odeint(dx_dt, y0, t)
return y[-1].sum()
def g(x):
# Two initial values for the ODE
y0_arr = jnp.array([[x, 0.1],
[x, 0.2]])
# Run ODE twice
t = jnp.array([0., 5.])
y = jax.vmap(lambda y0: odeint(dx_dt, y0, t))(y0_arr)
return y[:,-1].sum()
ans = jax.grad(g)(2.) # don't crash
expected = jax.grad(f, 0)(2., 0.1) + jax.grad(f, 0)(2., 0.2)
atol = {jnp.float64: 5e-15}
rtol = {jnp.float64: 2e-15}
self.assertAllClose(ans, expected, check_dtypes=False, atol=atol, rtol=rtol)
@jtu.skip_on_devices("tpu", "gpu")
def test_disable_jit_odeint_with_vmap(self):
# https://github.com/google/jax/issues/2598
with jax.disable_jit():
t = jnp.array([0.0, 1.0])
x0_eval = jnp.zeros((5, 2))
f = lambda x0: odeint(lambda x, _t: x, x0, t)
jax.vmap(f)(x0_eval) # doesn't crash
@jtu.skip_on_devices("tpu", "gpu")
def test_grad_closure(self):
# simplification of https://github.com/google/jax/issues/2718
def experiment(x):
def model(y, t):
return -x * y
history = odeint(model, 1., np.arange(0, 10, 0.1))
return history[-1]
jtu.check_grads(experiment, (0.01,), modes=["rev"], order=1)
@jtu.skip_on_devices("tpu", "gpu")
def test_grad_closure_with_vmap(self):
# https://github.com/google/jax/issues/2718
@jax.jit
def experiment(x):
def model(y, t):
return -x * y
history = odeint(model, 1., np.arange(0, 10, 0.1))
return history[-1]
gradfun = jax.value_and_grad(experiment)
t = np.arange(0., 1., 0.01)
h, g = jax.vmap(gradfun)(t) # doesn't crash
ans = h[11], g[11]
expected_h = experiment(t[11])
expected_g = (experiment(t[11] + 1e-5) - expected_h) / 1e-5
expected = expected_h, expected_g
self.assertAllClose(ans, expected, check_dtypes=False, atol=1e-2, rtol=1e-2)
@jtu.skip_on_devices("tpu", "gpu")
def test_forward_mode_error(self):
# https://github.com/google/jax/issues/3558
def f(k):
return odeint(lambda x, t: k*x, 1., jnp.linspace(0, 1., 50)).sum()
with self.assertRaisesRegex(TypeError, "can't apply forward-mode.*"):
jax.jacfwd(f)(3.)
@jtu.skip_on_devices("tpu", "gpu")
def test_closure_nondiff(self):
# https://github.com/google/jax/issues/3584
def dz_dt(z, t):
return jnp.stack([z[0], z[1]])
def f(z):
y = odeint(dz_dt, z, jnp.arange(10.))
return jnp.sum(y)
jax.grad(f)(jnp.ones(2)) # doesn't crash
@jtu.skip_on_devices("tpu", "gpu")
def test_complex_odeint(self):
# https://github.com/google/jax/issues/3986
def dy_dt(y, t, alpha):
return alpha * y
def f(y0, ts, alpha):
return odeint(dy_dt, y0, ts, alpha).real
alpha = 3 + 4j
y0 = 1 + 2j
ts = jnp.linspace(0., 1., 11)
tol = 1e-1 if jtu.num_float_bits(np.float64) == 32 else 1e-3
jtu.check_grads(f, (y0, ts, alpha), modes=["rev"], order=2, atol=tol, rtol=tol)
if __name__ == '__main__':
absltest.main(testLoader=jtu.JaxTestLoader())
| 31.513725
| 89
| 0.621578
|
90c3a62712c78bdd3a02f4e32897b46d96df8c6b
| 276
|
py
|
Python
|
tests/artificial/transf_Integration/trend_ConstantTrend/cycle_7/ar_/test_artificial_128_Integration_ConstantTrend_7__100.py
|
jmabry/pyaf
|
afbc15a851a2445a7824bf255af612dc429265af
|
[
"BSD-3-Clause"
] | null | null | null |
tests/artificial/transf_Integration/trend_ConstantTrend/cycle_7/ar_/test_artificial_128_Integration_ConstantTrend_7__100.py
|
jmabry/pyaf
|
afbc15a851a2445a7824bf255af612dc429265af
|
[
"BSD-3-Clause"
] | 1
|
2019-11-30T23:39:38.000Z
|
2019-12-01T04:34:35.000Z
|
tests/artificial/transf_Integration/trend_ConstantTrend/cycle_7/ar_/test_artificial_128_Integration_ConstantTrend_7__100.py
|
jmabry/pyaf
|
afbc15a851a2445a7824bf255af612dc429265af
|
[
"BSD-3-Clause"
] | null | null | null |
import pyaf.Bench.TS_datasets as tsds
import pyaf.tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 128 , FREQ = 'D', seed = 0, trendtype = "ConstantTrend", cycle_length = 7, transform = "Integration", sigma = 0.0, exog_count = 100, ar_order = 0);
| 39.428571
| 171
| 0.73913
|
2755eda48119f9d087571943619e9b66904f7715
| 17
|
py
|
Python
|
pyitm/__init__.py
|
tmd224/pyitm
|
2930765eb6cb5c9d0c02b10c64f62ec9f985ce2b
|
[
"MIT"
] | 3
|
2020-05-29T18:02:17.000Z
|
2021-02-07T13:11:08.000Z
|
pyitm/__init__.py
|
tmd224/pyitm
|
2930765eb6cb5c9d0c02b10c64f62ec9f985ce2b
|
[
"MIT"
] | 1
|
2022-03-29T08:57:24.000Z
|
2022-03-29T08:57:24.000Z
|
pyitm/__init__.py
|
tmd224/pyitm
|
2930765eb6cb5c9d0c02b10c64f62ec9f985ce2b
|
[
"MIT"
] | 2
|
2021-08-30T13:20:20.000Z
|
2022-03-29T08:45:04.000Z
|
from . import itm
| 17
| 17
| 0.764706
|