code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
return [Socket(self._device, i) for i in range(len(self.raw))]
def sockets(self)
Return socket objects of the socket control.
10.465815
7.632806
1.371162
res_payload = res.payload.decode('utf-8') output = res_payload.strip() _LOGGER.debug('Status: %s, Received: %s', res.code, output) if not output: return None if not res.code.is_successful(): if 128 <= res.code < 160: raise ClientError(output) elif 160 <= r...
def _process_output(res, parse_json=True)
Process output.
3.388819
3.289483
1.030198
if self._protocol is None: self._protocol = asyncio.Task(Context.create_client_context( loop=self._loop)) return (await self._protocol)
async def _get_protocol(self)
Get the protocol for the request.
6.853505
5.613828
1.220826
# Be responsible and clean up. protocol = await self._get_protocol() await protocol.shutdown() self._protocol = None # Let any observers know the protocol has been shutdown. for ob_error in self._observations_err_callbacks: ob_error(exc) self....
async def _reset_protocol(self, exc=None)
Reset the protocol if an error occurs.
7.493239
6.505764
1.151785
try: protocol = await self._get_protocol() pr = protocol.request(msg) r = await pr.response return pr, r except ConstructionRenderableError as e: raise ClientError("There was an error with the request.", e) except RequestTimedO...
async def _get_response(self, msg)
Perform the request, get the response.
4.46406
4.236474
1.053721
if api_command.observe: await self._observe(api_command) return method = api_command.method path = api_command.path data = api_command.data parse_json = api_command.parse_json url = api_command.url(self._host) kwargs = {} ...
async def _execute(self, api_command)
Execute the command.
2.635517
2.569653
1.025632
if not isinstance(api_commands, list): result = await self._execute(api_commands) return result commands = (self._execute(api_command) for api_command in api_commands) command_results = await asyncio.gather(*commands, loop=self._loop) return command_res...
async def request(self, api_commands)
Make a request.
3.063554
2.848501
1.075497
duration = api_command.observe_duration url = api_command.url(self._host) err_callback = api_command.err_callback msg = Message(code=Code.GET, uri=url, observe=duration) # Note that this is necessary to start observing pr, r = await self._get_response(msg) ...
async def _observe(self, api_command)
Observe an endpoint.
6.122834
5.83895
1.048619
if not self._psk: PatchedDTLSSecurityStore.IDENTITY = 'Client_identity'.encode( 'utf-8') PatchedDTLSSecurityStore.KEY = security_key.encode('utf-8') command = Gateway().generate_psk(self._psk_id) self._psk = await self.request(command) ...
async def generate_psk(self, security_key)
Generate and set a psk from the security key.
5.081549
4.902538
1.036514
info = self.raw.get(ATTR_MEMBERS, {}) if not info or ROOT_DEVICES2 not in info: return [] return info[ROOT_DEVICES2].get(ATTR_ID, [])
def member_ids(self)
Members of this group.
8.565978
7.263086
1.179385
values = { ATTR_LIGHT_DIMMER: dimmer, } if transition_time is not None: values[ATTR_TRANSITION_TIME] = transition_time return self.set_values(values)
def set_dimmer(self, dimmer, transition_time=None)
Set dimmer value of a group. dimmer: Integer between 0..255 transition_time: Integer representing tenth of a second (default None)
2.604921
3.72335
0.699618
print("Printing information about the Gateway") data = api(gateway.get_gateway_info()).raw print(jsonify(data))
def print_gateway()
Print gateway info as JSON
11.942122
9.537248
1.252156
print("Printing information about all devices paired to the Gateway") if len(devices) == 0: exit(bold("No devices paired")) container = [] for dev in devices: container.append(dev.raw) print(jsonify(container))
def print_all_devices()
Print all devices as JSON
7.62807
6.679534
1.142006
print("Printing information about all lamps paired to the Gateway") lights = [dev for dev in devices if dev.has_light_control] if len(lights) == 0: exit(bold("No lamps paired")) container = [] for l in lights: container.append(l.raw) print(jsonify(container))
def print_lamps()
Print all lamp devices as JSON
6.803607
5.729262
1.187519
print("Printing information about smart tasks") tasks = api(gateway.get_smart_tasks()) if len(tasks) == 0: exit(bold("No smart tasks defined")) container = [] for task in tasks: container.append(api(task).task_control.raw) print(jsonify(container))
def print_smart_tasks()
Print smart tasks as JSON
8.044561
7.155881
1.124189
print("Printing information about all groups defined in the Gateway") groups = api(gateway.get_groups()) if len(groups) == 0: exit(bold("No groups defined")) container = [] for group in groups: container.append(api(group).raw) print(jsonify(container))
def print_groups()
Print all groups as JSON
7.664134
6.626082
1.156661
if not GDAL_AVAILABLE: raise Exception("richdem.LoadGDAL() requires GDAL.") allowed_types = {gdal.GDT_Byte,gdal.GDT_Int16,gdal.GDT_Int32,gdal.GDT_UInt16,gdal.GDT_UInt32,gdal.GDT_Float32,gdal.GDT_Float64} #Read in data src_ds = gdal.Open(filename) srcband = src_ds.GetRasterBand(1) if no_data is ...
def LoadGDAL(filename, no_data=None)
Read a GDAL file. Opens any file GDAL can read, selects the first raster band, and loads it and its metadata into a RichDEM array of the appropriate data type. If you need to do something more complicated, look at the source of this function. Args: filename (str): Name of the ras...
3.412883
3.448624
0.989636
if type(rda) is not rdarray: raise Exception("A richdem.rdarray or numpy.ndarray is required!") if not GDAL_AVAILABLE: raise Exception("richdem.SaveGDAL() requires GDAL.") driver = gdal.GetDriverByName('GTiff') data_type = gdal.GDT_Float32 #TODO data_set = driver.Create(filename, xsize=rda.sh...
def SaveGDAL(filename, rda)
Save a GDAL file. Saves a RichDEM array to a data file in GeoTIFF format. If you need to do something more complicated, look at the source of this function. Args: filename (str): Name of the raster file to be created rda (rdarray): Data to save. Returns: ...
2.539926
2.388869
1.063233
if type(dem) is not rdarray: raise Exception("A richdem.rdarray or numpy.ndarray is required!") if topology not in ['D8','D4']: raise Exception("Unknown topology!") if not in_place: dem = dem.copy() _AddAnalysis(dem, "FillDepressions(dem, epsilon={0})".format(epsilon)) demw = dem.wrap() ...
def FillDepressions( dem, epsilon = False, in_place = False, topology = 'D8' )
Fills all depressions in a DEM. Args: dem (rdarray): An elevation model epsilon (float): If True, an epsilon gradient is imposed to all flat regions. This ensures that there is always a local gradient. in_place (bool): If True, the DEM is modified in ...
3.370652
3.411494
0.988028
if type(dem) is not rdarray: raise Exception("A richdem.rdarray or numpy.ndarray is required!") if topology not in ['D8','D4']: raise Exception("Unknown topology!") if not in_place: dem = dem.copy() _AddAnalysis(dem, "BreachDepressions(dem)") demw = dem.wrap() if topology=='D8': _ric...
def BreachDepressions( dem, in_place = False, topology = 'D8' )
Breaches all depressions in a DEM. Args: dem (rdarray): An elevation model in_place (bool): If True, the DEM is modified in place and there is no return; otherwise, a new, altered DEM is returned. topology (string): ...
4.185639
4.127036
1.0142
if type(dem) is not rdarray: raise Exception("A richdem.rdarray or numpy.ndarray is required!") if not in_place: dem = dem.copy() _AddAnalysis(dem, "ResolveFlats(dem, in_place={in_place})".format(in_place=in_place)) demw = dem.wrap() _richdem.rdResolveFlatsEpsilon(demw) dem.copyFromWrapped(d...
def ResolveFlats( dem, in_place = False )
Attempts to resolve flats by imposing a local gradient Args: dem (rdarray): An elevation model in_place (bool): If True, the DEM is modified in place and there is no return; otherwise, a new, altered DEM is returned. Returns: DEM m...
6.772937
6.805242
0.995253
if type(props) is not rd3array: raise Exception("A richdem.rd3array or numpy.ndarray is required!") if weights is not None and in_place: accum = rdarray(weights, no_data=-1) elif weights is not None and not in_place: accum = rdarray(weights, copy=True, meta_obj=props, no_data=-1) elif weig...
def FlowAccumFromProps( props, weights = None, in_place = False )
Calculates flow accumulation from flow proportions. Args: props (rdarray): An elevation model weights (rdarray): Flow accumulation weights to use. This is the amount of flow generated by each cell. If this is not provided, each cell w...
4.655315
4.575643
1.017412
return delimiter.join(filter(lambda s: s != '', map(lambda s: s.lstrip(delimiter), args)))
def _join(*args)
Join S3 bucket args together. Remove empty entries and strip left-leading ``/``
6.395042
5.964279
1.072224
self.bookstore_settings = BookstoreSettings(config=self.config) self.session = aiobotocore.get_session()
def initialize(self)
Initialize a helper to get bookstore settings and session information quickly
8.154492
3.864939
2.109863
self.log.info("Attempt publishing to %s", path) if path == '' or path == '/': raise web.HTTPError(400, "Must provide a path for publishing") model = self.get_json_body() if model: await self._publish(model, path.lstrip('/')) else: ra...
async def put(self, path='')
Publish a notebook on a given path. The payload directly matches the contents API for PUT.
4.032966
3.845405
1.048775
if model['type'] != 'notebook': raise web.HTTPError(400, "bookstore only publishes notebooks") content = model['content'] full_s3_path = s3_path( self.bookstore_settings.s3_bucket, self.bookstore_settings.published_prefix, path ) file_key = s3_ke...
async def _publish(self, model, path)
Publish notebook model to the path
2.327378
2.272197
1.024285
general_settings = [settings.s3_bucket != "", settings.s3_endpoint_url != ""] archive_settings = [settings.workspace_prefix != ""] published_settings = [settings.published_prefix != ""] validation_checks = { "bookstore_valid": all(general_settings), "archive_valid": all(archive_set...
def validate_bookstore(settings: BookstoreSettings)
Validate bookstore configuration settings. Parameters ---------- settings: bookstore.bookstore_config.BookstoreSettings The instantiated `Settings` object to be validated.
4.253714
4.486779
0.948055
self.token = self.nb_record.token first = requests.get(f"{self.url}/login") self.xsrf_token = first.cookies.get("_xsrf", "")
def setup_auth(self)
Sets up token access for authorizing requests to notebook server. This sets the notebook token as self.token and the xsrf_token as self.xsrf_token.
9.505254
6.820478
1.393635
self.req_session = requests.Session() self.req_session.headers.update(self.headers)
def setup_request_sessions(self)
Sets up a requests.Session object for sharing headers across API requests.
3.240341
2.351883
1.377765
async with self.path_lock_ready: lock = self.path_locks.get(record.filepath) if lock is None: lock = Lock() self.path_locks[record.filepath] = lock # Skip writes when a given path is already locked if lock.locked(): s...
async def archive(self, record: ArchiveRecord)
Process a record to write to storage. Acquire a path lock before archive. Writing to storage will only be allowed to a path if a valid `path_lock` is held and the path is not locked by another process. Parameters ---------- record : ArchiveRecord A notebook ...
2.440537
2.300235
1.060995
if model["type"] != "notebook": return content = json.dumps(model["content"]) loop = ioloop.IOLoop.current() # Offload archival and schedule write to storage with the current event loop loop.spawn_callback( self.archive, ArchiveReco...
def run_pre_save_hook(self, model, path, **kwargs)
Send request to store notebook to S3. This hook offloads the storage request to the event loop. When the event loop is available for execution of the request, the storage of the notebook will be done and the write to storage occurs. Parameters ---------- model : str ...
9.357324
7.859406
1.190589
app = Flask(__name__) app.config.from_object(CONFIG[config_name]) BOOTSTRAP.init_app(app) # call controllers from flask_seguro.controllers.main import main as main_blueprint app.register_blueprint(main_blueprint) return app
def create_app(config_name)
Factory Function
3.937856
3.833851
1.027128
params = kwargs or {} params['reference'] = self.reference params['preApprovalCode'] = self.code for i, item in enumerate(self.items, 1): params['itemId%s' % i] = item.get('id') params['itemDescription%s' % i] = item.get('description') para...
def build_pre_approval_payment_params(self, **kwargs)
build a dict with params
2.314905
2.279108
1.015706
return requests.get(url, params=self.data, headers=self.config.HEADERS)
def get(self, url)
do a get transaction
5.901121
6.431768
0.917496
return requests.post(url, data=self.data, headers=self.config.HEADERS)
def post(self, url)
do a post request
5.245187
5.878664
0.892241
self.data['currency'] = self.config.CURRENCY self.build_checkout_params(**kwargs) if transparent: response = self.post(url=self.config.TRANSPARENT_CHECKOUT_URL) else: response = self.post(url=self.config.CHECKOUT_URL) return PagSeguroCheckoutRespo...
def checkout(self, transparent=False, **kwargs)
create a pagseguro checkout
3.290725
2.941825
1.1186
response = self.get(url=self.config.NOTIFICATION_URL % code) return PagSeguroNotificationResponse(response.content, self.config)
def check_notification(self, code)
check a notification by its code
7.494095
6.937065
1.080298
response = self.get( url=self.config.PRE_APPROVAL_NOTIFICATION_URL % code) return PagSeguroPreApprovalNotificationResponse( response.content, self.config)
def check_pre_approval_notification(self, code)
check a notification by its code
5.656956
5.128829
1.102972
self.build_pre_approval_payment_params(**kwargs) response = self.post(url=self.config.PRE_APPROVAL_PAYMENT_URL) return PagSeguroPreApprovalPayment(response.content, self.config)
def pre_approval_ask_payment(self, **kwargs)
ask form a subscribe payment
4.806258
4.521984
1.062865
response = self.get(url=self.config.PRE_APPROVAL_CANCEL_URL % code) return PagSeguroPreApprovalCancel(response.content, self.config)
def pre_approval_cancel(self, code)
cancel a subscribe
5.0931
5.329726
0.955603
response = self.get(url=self.config.TRANSACTION_URL % code) return PagSeguroNotificationResponse(response.content, self.config)
def check_transaction(self, code)
check a transaction by its code
8.704833
7.641752
1.139115
last_page = False results = [] while last_page is False: search_result = self._consume_query_transactions( initial_date, final_date, page, max_results) results.extend(search_result.transactions) if search_result.current_page is None or...
def query_transactions(self, initial_date, final_date, page=None, max_results=None)
query transaction by date range
2.192976
2.248615
0.975257
last_page = False results = [] while last_page is False: search_result = self._consume_query_pre_approvals( initial_date, final_date, page, max_results) results.extend(search_result.pre_approvals) if search_result.current_page is None ...
def query_pre_approvals(self, initial_date, final_date, page=None, max_results=None)
query pre-approvals by date range
2.213883
2.262547
0.978491
cart = Cart(session['cart']) if cart.change_item(item_id, 'add'): session['cart'] = cart.to_dict() return list_products()
def add_to_cart(item_id)
Cart with Product
4.540049
5.105798
0.889195
return { "total": self.total, "subtotal": self.subtotal, "items": self.items, "extra_amount": self.extra_amount }
def to_dict(self)
Attribute values to dict
4.710343
3.934988
1.197041
product = Products().get_one(item_id) if product: if operation == 'add': self.items.append(product) elif operation == 'remove': cart_p = [x for x in self.items if x['id'] == product['id']] self.items.remove(cart_p[0]) ...
def change_item(self, item_id, operation)
Remove items in cart
2.908866
2.609492
1.114725
subtotal = float(0) total = float(0) for product in self.items: subtotal += float(product["price"]) if subtotal > 0: total = subtotal + self.extra_amount self.subtotal = subtotal self.total = total
def update(self)
Remove items in cart
3.940155
3.22341
1.222356
u = urlparse(url) if u.netloc.find('@') > -1 and (u.scheme == 'bolt' or u.scheme == 'bolt+routing'): credentials, hostname = u.netloc.rsplit('@', 1) username, password, = credentials.split(':') else: raise ValueError("Expecting url format: bolt://use...
def set_connection(self, url)
Sets the connection URL to the address a Neo4j server is set up at
3.225122
3.111593
1.036486
if self._active_transaction: raise SystemError("Transaction in progress") self._active_transaction = self.driver.session(access_mode=access_mode).begin_transaction()
def begin(self, access_mode=None)
Begins a new transaction, raises SystemError exception if a transaction is in progress
4.923153
3.532456
1.393691
# Object resolution occurs in-place for a_result_item in enumerate(result_list): for a_result_attribute in enumerate(a_result_item[1]): try: # Primitive types should remain primitive types, # Node...
def _object_resolution(self, result_list)
Performs in place automatic object resolution on a set of results returned by cypher_query. The function operates recursively in order to be able to resolve Nodes within nested list structures. Not meant to be called directly, used primarily by cypher_query. :param resu...
5.030807
4.618991
1.089157
if self._pid != os.getpid(): self.set_connection(self.url) if self._active_transaction: session = self._active_transaction else: session = self.driver.session() try: # Retrieve the data start = time.time() ...
def cypher_query(self, query, params=None, handle_unique=True, retry_on_session_expire=False, resolve_objects=False)
Runs a query on the database and returns a list of results and their headers. :param query: A CYPHER query :type: str :param params: Dictionary of parameters :type: dict :param handle_unique: Whether or not to raise UniqueProperty exception on Cypher's ConstraintValidati...
2.904351
2.838932
1.023044
if direction == OUTGOING: stmt = '-{0}->' elif direction == INCOMING: stmt = '<-{0}-' else: stmt = '-{0}-' rel_props = '' if relation_properties: rel_props = ' {{{0}}}'.format(', '.join( ['{0}: {1}'.format(key, value) for key, value in relation_pro...
def _rel_helper(lhs, rhs, ident=None, relation_type=None, direction=None, relation_properties=None, **kwargs)
Generate a relationship matching string, with specified parameters. Examples: relation_direction = OUTGOING: (lhs)-[relation_ident:relation_type]->(rhs) relation_direction = INCOMING: (lhs)<-[relation_ident:relation_type]-(rhs) relation_direction = EITHER: (lhs)-[relation_ident:relation_type]-(rhs) ...
3.511023
3.252255
1.079566
rels = cls.defined_properties(rels=True, aliases=False, properties=False) for key, value in rels.items(): if hasattr(node_set, key): raise ValueError("Can't install traversal '{0}' exists on NodeSet".format(key)) rel = getattr(cls, key) rel._lookup_node_class() ...
def install_traversals(cls, node_set)
For a StructuredNode class install Traversal objects for each relationship definition on a NodeSet instance
5.157004
4.758096
1.083838
output = {} for key, value in kwargs.items(): if '__' in key: prop, operator = key.rsplit('__') operator = OPERATOR_TABLE[operator] else: prop = key operator = '=' if prop not in cls.defined_properties(rels=False): raise...
def process_filter_args(cls, kwargs)
loop through properties in filter parameters check they match class definition deflate them and convert into something easy to generate cypher from
2.718337
2.600133
1.04546
rel_definitions = cls.defined_properties(properties=False, rels=True, aliases=False) match, dont_match = {}, {} for key, value in kwargs.items(): if key not in rel_definitions: raise ValueError("No such relation {0} defined on a {1}".format(key, cls.__name__)) rhs_ident =...
def process_has_args(cls, kwargs)
loop through has parameters check they correspond to class rels defined
4.587238
4.267045
1.075039
# build source rhs_label = ':' + traversal.target_class.__label__ # build source lhs_ident = self.build_source(traversal.source) rhs_ident = traversal.name + rhs_label self._ast['return'] = traversal.name self._ast['result_class'] = traversal.target_clas...
def build_traversal(self, traversal)
traverse a relationship from a node to a set of nodes
6.117619
5.988289
1.021597
ident_w_label = ident + ':' + cls.__label__ self._ast['match'].append('({0})'.format(ident_w_label)) self._ast['return'] = ident self._ast['result_class'] = cls return ident
def build_label(self, ident, cls)
match nodes by a label
7.125247
6.215385
1.146389
source_ident = ident for key, value in node_set.must_match.items(): if isinstance(value, dict): label = ':' + value['node_class'].__label__ stmt = _rel_helper(lhs=source_ident, rhs=label, ident='', **value) self._ast['where'].append(s...
def build_additional_match(self, ident, node_set)
handle additional matches supplied by 'has()' calls
3.601422
3.461776
1.040339
if q_filters is not None: stmts = self._parse_q_filters(ident, q_filters, source_class) if stmts: self._ast['where'].append(stmts) else: stmts = [] for row in filters: negate = False # pre-process N...
def build_where_stmt(self, ident, filters, q_filters=None, source_class=None)
construct a where statement from some filters
3.060202
3.006049
1.018015
return self.query_cls(self).build_ast()._execute(lazy)
def all(self, lazy=False)
Return all nodes belonging to the set :param lazy: False by default, specify True to get nodes with id only without the parameters. :return: list of nodes :rtype: list
28.96841
42.823608
0.676459
result = self._get(limit=2, lazy=lazy, **kwargs) if len(result) > 1: raise MultipleNodesReturned(repr(kwargs)) elif not result: raise self.source_class.DoesNotExist(repr(kwargs)) else: return result[0]
def get(self, lazy=False, **kwargs)
Retrieve one node from the set matching supplied parameters :param lazy: False by default, specify True to get nodes with id only without the parameters. :param kwargs: same syntax as `filter()` :return: node
4.247032
4.522357
0.939119
result = result = self._get(limit=1, **kwargs) if result: return result[0] else: raise self.source_class.DoesNotExist(repr(kwargs))
def first(self, **kwargs)
Retrieve the first node from the set matching supplied parameters :param kwargs: same syntax as `filter()` :return: node
6.113148
7.581825
0.80629
if args or kwargs: self.q_filters = Q(self.q_filters & Q(*args, **kwargs)) return self
def filter(self, *args, **kwargs)
Apply filters to the existing nodes in the set. :param kwargs: filter parameters Filters mimic Django's syntax with the double '__' to separate field and operators. e.g `.filter(salary__gt=20000)` results in `salary > 20000`. The following operators are available: ...
5.534263
9.564728
0.578612
if args or kwargs: self.q_filters = Q(self.q_filters & ~Q(*args, **kwargs)) return self
def exclude(self, *args, **kwargs)
Exclude nodes from the NodeSet via filters. :param kwargs: filter parameters see syntax for the filter method :return: self
5.914576
9.257101
0.638923
should_remove = len(props) == 1 and props[0] is None if not hasattr(self, '_order_by') or should_remove: self._order_by = [] if should_remove: return self if '?' in props: self._order_by.append('?') else: for prop i...
def order_by(self, *props)
Order by properties. Prepend with minus to do descending. Pass None to remove ordering.
2.699689
2.587935
1.043183
if kwargs: if self.definition.get('model') is None: raise ValueError("match() with filter only available on relationships with a model") output = process_filter_args(self.definition['model'], kwargs) if output: self.filters.append(outp...
def match(self, **kwargs)
Traverse relationships with properties matching the given parameters. e.g: `.match(price__lt=10)` :param kwargs: see `NodeSet.filter()` for syntax :return: self
7.350298
7.208261
1.019705
if not isinstance(value,neo4j.types.spatial.Point): raise TypeError('Invalid datatype to inflate. Expected POINT datatype, received {}'.format(type(value))) try: value_point_crs = SRID_TO_CRS[value.srid] except KeyError: raise ValueError('Invalid SRI...
def inflate(self, value)
Handles the marshalling from Neo4J POINT to NeomodelPoint :param value: Value returned from the database :type value: Neo4J POINT :return: NeomodelPoint
2.708978
2.555041
1.060248
if not isinstance(value, NeomodelPoint): raise TypeError('Invalid datatype to deflate. Expected NeomodelPoint, received {}'.format(type(value))) if not value.crs == self._crs: raise ValueError('Invalid CRS. ' 'Expected NeomodelPoint defined ...
def deflate(self, value)
Handles the marshalling from NeomodelPoint to Neo4J POINT :param value: The point that was assigned as value to a property in the model :type value: NeomodelPoint :return: Neo4J POINT
2.62896
2.431615
1.081158
obj = QBase(children, connector, negated) obj.__class__ = cls return obj
def _new_instance(cls, children=None, connector=None, negated=False)
Create a new instance of this class when new Nodes (or subclasses) are needed in the internal code in this class. Normally, it just shadows __init__(). However, subclasses with an __init__ signature that aren't an extension of Node.__init__ might need to implement this method to allow a ...
6.503788
8.148236
0.798184
if data in self.children: return data if not squash: self.children.append(data) return data if self.connector == conn_type: # We can reuse self.children to append or squash the node other. if (isinstance(data, QBase) and not da...
def add(self, data, conn_type, squash=True)
Combine this tree and the data represented by data using the connector conn_type. The combine is done by squashing the node other away if possible. This tree (self) will never be pushed to a child node of the combined tree, nor will the connector or negated properties change. R...
5.855376
5.348523
1.094765
if not issubclass(type(obj), self.definition['node_class']): raise ValueError("Expected node of class " + self.definition['node_class'].__name__) if not hasattr(obj, 'id'): raise ValueError("Can't perform operation on unsaved node " + repr(obj))
def _check_node(self, obj)
check for valid node i.e correct class and is saved
4.218899
3.825861
1.102732
self._check_node(node) if not self.definition['model'] and properties: raise NotImplementedError( "Relationship properties without using a relationship model " "is no longer supported." ) params = {} rel_model = self.defi...
def connect(self, node, properties=None)
Connect a node :param node: :param properties: for the new relationship :type: dict :return:
6.104908
6.01889
1.014291
self.disconnect_all() self.connect(node, properties)
def replace(self, node, properties=None)
Disconnect all existing nodes and connect the supplied node :param node: :param properties: for the new relationship :type: dict :return:
9.705636
6.286666
1.543845
self._check_node(node) my_rel = _rel_helper(lhs='us', rhs='them', ident='r', **self.definition) q = "MATCH " + my_rel + " WHERE id(them)={them} and id(us)={self} RETURN r LIMIT 1" rels = self.source.cypher(q, {'them': node.id})[0] if not rels: return ...
def relationship(self, node)
Retrieve the relationship object for this first relationship between self and node. :param node: :return: StructuredRel
7.922826
7.44643
1.063976
self._check_node(old_node) self._check_node(new_node) if old_node.id == new_node.id: return old_rel = _rel_helper(lhs='us', rhs='old', ident='r', **self.definition) # get list of properties on the existing rel result, meta = self.source.cypher( ...
def reconnect(self, old_node, new_node)
Disconnect old_node and connect new_node copying over any properties on the original relationship. Useful for preventing cardinality violations :param old_node: :param new_node: :return: None
3.601073
3.478624
1.0352
rel = _rel_helper(lhs='a', rhs='b', ident='r', **self.definition) q = "MATCH (a), (b) WHERE id(a)={self} and id(b)={them} " \ "MATCH " + rel + " DELETE r" self.source.cypher(q, {'them': node.id})
def disconnect(self, node)
Disconnect a node :param node: :return:
9.31967
9.863824
0.944833
rhs = 'b:' + self.definition['node_class'].__label__ rel = _rel_helper(lhs='a', rhs=rhs, ident='r', **self.definition) q = 'MATCH (a) WHERE id(a)={self} MATCH ' + rel + ' DELETE r' self.source.cypher(q)
def disconnect_all(self)
Disconnect all nodes :return:
13.735942
14.892576
0.922335
nodes = super(ZeroOrOne, self).all() if len(nodes) == 1: return nodes[0] if len(nodes) > 1: raise CardinalityViolation(self, len(nodes))
def single(self)
Return the associated node. :return: node
4.177344
4.588619
0.910371
if len(self): raise AttemptedCardinalityViolation( "Node already has {0} can't connect more".format(self)) else: return super(ZeroOrOne, self).connect(node, properties)
def connect(self, node, properties=None)
Connect to a node. :param node: :type: StructuredNode :param properties: relationship properties :type: dict :return: True / rel instance
10.015276
11.289262
0.887151
nodes = super(OneOrMore, self).all() if nodes: return nodes[0] raise CardinalityViolation(self, 'none')
def single(self)
Fetch one of the related nodes :return: Node
10.530188
11.125192
0.946517
if super(OneOrMore, self).__len__() < 2: raise AttemptedCardinalityViolation("One or more expected") return super(OneOrMore, self).disconnect(node)
def disconnect(self, node)
Disconnect node :param node: :return:
8.723251
9.291857
0.938806
nodes = super(One, self).all() if nodes: if len(nodes) == 1: return nodes[0] else: raise CardinalityViolation(self, len(nodes)) else: raise CardinalityViolation(self, 'none')
def single(self)
Return the associated node. :return: node
4.148165
4.482632
0.925386
if not hasattr(self.source, 'id'): raise ValueError("Node has not been saved cannot connect!") if len(self): raise AttemptedCardinalityViolation( "Node already has one relationship" ) else: return super(One, self).connect(n...
def connect(self, node, properties=None)
Connect a node :param node: :param properties: relationship properties :return: True / rel instance
10.710651
9.83777
1.088728
results, meta = db.cypher_query("CALL db.constraints()") pattern = re.compile(':(.*) \).*\.(\w*)') for constraint in results: db.cypher_query('DROP ' + constraint[0]) match = pattern.search(constraint[0]) stdout.write(''' - Droping unique constraint and index on label {0} with ...
def drop_constraints(quiet=True, stdout=None)
Discover and drop all constraints. :type: bool :return: None
6.30243
6.467038
0.974547
results, meta = db.cypher_query("CALL db.indexes()") pattern = re.compile(':(.*)\((.*)\)') for index in results: db.cypher_query('DROP ' + index[0]) match = pattern.search(index[0]) stdout.write(' - Dropping index on label {0} with property {1}.\n'.format( match.gro...
def drop_indexes(quiet=True, stdout=None)
Discover and drop all indexes. :type: bool :return: None
4.443593
4.476065
0.992745
if not stdout: stdout = sys.stdout stdout.write("Droping constraints...\n") drop_constraints(quiet=False, stdout=stdout) stdout.write('Droping indexes...\n') drop_indexes(quiet=False, stdout=stdout)
def remove_all_labels(stdout=None)
Calls functions for dropping constraints and indexes. :param stdout: output stream :return: None
4.093357
2.917747
1.402917
if not hasattr(cls, '__label__'): if not quiet: stdout.write(' ! Skipping class {0}.{1} is abstract\n'.format(cls.__module__, cls.__name__)) return for name, property in cls.defined_properties(aliases=False, rels=False).items(): db_property = property.db_property or na...
def install_labels(cls, quiet=True, stdout=None)
Setup labels with indexes and constraints for a given class :param cls: StructuredNode class :type: class :param quiet: (default true) enable standard output :param stdout: stdout stream :type: bool :return: None
2.986118
2.823066
1.057757
if not stdout: stdout = sys.stdout def subsub(kls): # recursively return all subclasses return kls.__subclasses__() + [g for s in kls.__subclasses__() for g in subsub(s)] stdout.write("Setting up indexes and constraints...\n\n") i = 0 for cls in subsub(StructuredNode): ...
def install_all_labels(stdout=None)
Discover all subclasses of StructuredNode in your application and execute install_labels on each. Note: code must be loaded (imported) in order for a class to be discovered. :param stdout: output stream :return: None
3.957535
3.43084
1.153518
query_params = dict(merge_params=merge_params) n_merge = "n:{0} {{{1}}}".format( ":".join(cls.inherited_labels()), ", ".join("{0}: params.create.{0}".format(getattr(cls, p).db_property or p) for p in cls.__required_properties__)) if relationship is None: ...
def _build_merge_query(cls, merge_params, update_existing=False, lazy=False, relationship=None)
Get a tuple of a CYPHER query and a params dict for the specified MERGE query. :param merge_params: The target node match parameters, each node must have a "create" key and optional "update". :type merge_params: list of dict :param update_existing: True to update properties of existing nodes, d...
4.683854
4.495694
1.041853
if 'streaming' in kwargs: warnings.warn('streaming is not supported by bolt, please remove the kwarg', category=DeprecationWarning, stacklevel=1) lazy = kwargs.get('lazy', False) # create mapped query query = "CREATE (n:{0} {{create_params...
def create(cls, *props, **kwargs)
Call to CREATE with parameters map. A new instance will be created and saved. :param props: dict of properties to create the nodes. :type props: tuple :param lazy: False by default, specify True to get nodes with id only without the parameters. :type: bool :rtype: list
5.562369
5.21945
1.0657
lazy = kwargs.get('lazy', False) relationship = kwargs.get('relationship') # build merge query, make sure to update only explicitly specified properties create_or_update_params = [] for specified, deflated in [(p, cls.deflate(p, skip_empty=True)) for p in props]: ...
def create_or_update(cls, *props, **kwargs)
Call to MERGE with parameters map. A new instance will be created and saved if does not already exists, this is an atomic operation. If an instance already exists all optional properties specified will be updated. Note that the post_create hook isn't called after create_or_update :param props:...
5.258245
4.953494
1.061522
self._pre_action_check('cypher') params = params or {} params.update({'self': self.id}) return db.cypher_query(query, params)
def cypher(self, query, params=None)
Execute a cypher query with the param 'self' pre-populated with the node's neo4j id. :param query: cypher query string :type: string :param params: query parameters :type: dict :return: list containing query results :rtype: list
6.020092
5.284304
1.13924
self._pre_action_check('delete') self.cypher("MATCH (self) WHERE id(self)={self} " "OPTIONAL MATCH (self)-[r]-()" " DELETE r, self") delattr(self, 'id') self.deleted = True return True
def delete(self)
Delete a node and its relationships :return: True
5.793626
5.521559
1.049274
lazy = kwargs.get('lazy', False) relationship = kwargs.get('relationship') # build merge query get_or_create_params = [{"create": cls.deflate(p, skip_empty=True)} for p in props] query, params = cls._build_merge_query(get_or_create_params, relationship=relationship, laz...
def get_or_create(cls, *props, **kwargs)
Call to MERGE with parameters map. A new instance will be created and saved if does not already exists, this is an atomic operation. Parameters must contain all required properties, any non required properties with defaults will be generated. Note that the post_create hook isn't called after ge...
5.751872
5.386629
1.067805
# support lazy loading if isinstance(node, int): snode = cls() snode.id = node else: node_properties = _get_node_properties(node) props = {} for key, prop in cls.__all_properties__: # map property name from data...
def inflate(cls, node)
Inflate a raw neo4j_driver node to a neomodel node :param node: raw neo4j driver node, or an int node id (lazy loading) :return: node object
3.423647
3.362659
1.018137
return [scls.__label__ for scls in cls.mro() if hasattr(scls, '__label__') and not hasattr( scls, '__abstract_node__')]
def inherited_labels(cls)
Return list of labels from the node's class hierarchy. :return: list
5.55596
6.55151
0.848043
self._pre_action_check('refresh') if hasattr(self, 'id'): request = self.cypher("MATCH (n) WHERE id(n)={self}" " RETURN n")[0] if not request or not request[0]: raise self.__class__.DoesNotExist("Can't refresh n...
def refresh(self)
Reload the node from neo4j
5.197277
4.619745
1.125014
# create or update instance node if hasattr(self, 'id'): # update params = self.deflate(self.__properties__, self) query = "MATCH (n) WHERE id(n)={self} \n" query += "\n".join(["SET n.{0} = {{{1}}}".format(key, key) + "\n" ...
def save(self)
Save the node to neo4j or raise an exception :return: the node instance
5.381872
4.973657
1.082076
if self.has_default: if hasattr(self.default, '__call__'): return self.default() else: return self.default else: raise Exception("No default value specified")
def default_value(self)
Generate a default value :return: the value
3.019315
3.262582
0.925437
props = self.deflate(self.__properties__) query = "MATCH ()-[r]->() WHERE id(r)={self} " for key in props: query += " SET r.{0} = {{{1}}}".format(key, key) props['self'] = self.id db.cypher_query(query, props) return self
def save(self)
Save the relationship :return: self
6.509632
6.076499
1.07128