before: stringlengths (0 – 955k)
after: stringlengths (0 – 877k)
repo: stringlengths (1 – 74)
type: stringclasses (1 value)
def get_conn_leftSibling_ctx(parse_dict, DocID, sent_index, conn_indices): <DeepExtract> C_String = ' '.join([parse_dict[DocID]['sentences'][sent_index]['words'][word_token][0] for word_token in conn_indices]) conn_name = C_String </DeepExtract> parse_tree = parse_dict[DocID]['sentences'][sent_index]['parsetree'].strip() syntax_tree = Syntax_tree(parse_tree) if syntax_tree.tree == None: leftSiblingCtx = 'NONE_TREE' else: leftSibling_node = syntax_tree.get_left_sibling_category_node_by_token_indices(conn_indices) <DeepExtract> if leftSibling_node == None: leftSiblingCtx = 'None' Ctx = [] Ctx.append(leftSibling_node.name) if leftSibling_node.up == None: Ctx.append('NULL') else: Ctx.append(leftSibling_node.up.name) for child in leftSibling_node.get_children(): Ctx.append(child.name) leftSiblingCtx = '-'.join(Ctx) </DeepExtract> conn_leftSiblingCtx = '%s|%s' % (conn_name, leftSiblingCtx) return conn_leftSiblingCtx
def get_conn_leftSibling_ctx(parse_dict, DocID, sent_index, conn_indices): C_String = ' '.join([parse_dict[DocID]['sentences'][sent_index]['words'][word_token][0] for word_token in conn_indices]) conn_name = C_String parse_tree = parse_dict[DocID]['sentences'][sent_index]['parsetree'].strip() syntax_tree = Syntax_tree(parse_tree) if syntax_tree.tree == None: leftSiblingCtx = 'NONE_TREE' else: leftSibling_node = syntax_tree.get_left_sibling_category_node_by_token_indices(conn_indices) if leftSibling_node == None: leftSiblingCtx = 'None' Ctx = [] Ctx.append(leftSibling_node.name) if leftSibling_node.up == None: Ctx.append('NULL') else: Ctx.append(leftSibling_node.up.name) for child in leftSibling_node.get_children(): Ctx.append(child.name) leftSiblingCtx = '-'.join(Ctx) conn_leftSiblingCtx = '%s|%s' % (conn_name, leftSiblingCtx) return conn_leftSiblingCtx
conll2015_discourse
positive
def get_url_params_from_item(self, item): <DeepExtract> param_map = dict(self.url_param_map) </DeepExtract> return self.get_index().get_url_params_from_item(item, param_map)
def get_url_params_from_item(self, item): param_map = dict(self.url_param_map) return self.get_index().get_url_params_from_item(item, param_map)
django-hyperadmin
positive
def test_html_default_encoding(self): body = encode_string('<html><head><title>Тест</title></head></html>') <DeepExtract> fn = self._get_temporary_file() with open(fn, 'wb') as tmpfile: tmpfile.write(body) from chameleon.template import BaseTemplateFile class DummyTemplateFile(BaseTemplateFile): def cook(self, body): self.body = body template = DummyTemplateFile(fn) template.cook_check() template = template </DeepExtract> self.assertEqual(template.body, body.decode('utf-8'))
def test_html_default_encoding(self): body = encode_string('<html><head><title>Тест</title></head></html>') fn = self._get_temporary_file() with open(fn, 'wb') as tmpfile: tmpfile.write(body) from chameleon.template import BaseTemplateFile class DummyTemplateFile(BaseTemplateFile): def cook(self, body): self.body = body template = DummyTemplateFile(fn) template.cook_check() template = template self.assertEqual(template.body, body.decode('utf-8'))
chameleon
positive
def test_region_set_multiple_values(self): <DeepExtract> _region_args = {} for cls in reversed(self.__class__.__mro__): if 'region_args' in cls.__dict__: _region_args.update(cls.__dict__['region_args']) _region_args.update(**region_args) _config_args = self.config_args.copy() _config_args.update(config_args) def _store_keys(key): if existing_key_mangler: key = existing_key_mangler(key) self._keys.add(key) reg = key self._region_inst = reg = CacheRegion(**_region_args) existing_key_mangler = self._region_inst.key_mangler self._region_inst.key_mangler = _store_keys self._region_inst._user_defined_key_mangler = _store_keys reg.configure(backend or self.backend, **_config_args) reg = reg </DeepExtract> values = {'key1': 'value1', 'key2': 'value2', 'key3': 'value3'} reg.set_multi(values) eq_(values['key1'], reg.get('key1')) eq_(values['key2'], reg.get('key2')) eq_(values['key3'], reg.get('key3'))
def test_region_set_multiple_values(self): _region_args = {} for cls in reversed(self.__class__.__mro__): if 'region_args' in cls.__dict__: _region_args.update(cls.__dict__['region_args']) _region_args.update(**region_args) _config_args = self.config_args.copy() _config_args.update(config_args) def _store_keys(key): if existing_key_mangler: key = existing_key_mangler(key) self._keys.add(key) reg = key self._region_inst = reg = CacheRegion(**_region_args) existing_key_mangler = self._region_inst.key_mangler self._region_inst.key_mangler = _store_keys self._region_inst._user_defined_key_mangler = _store_keys reg.configure(backend or self.backend, **_config_args) reg = reg values = {'key1': 'value1', 'key2': 'value2', 'key3': 'value3'} reg.set_multi(values) eq_(values['key1'], reg.get('key1')) eq_(values['key2'], reg.get('key2')) eq_(values['key3'], reg.get('key3'))
dogpile.cache
positive
def replace(self, key, value): """Replace the value of a key-value pair. If a pair with *key* exists, then its value is updated to *value*. If no such pair exists, then nothing is done. If a value was replaced, return the old value. Otherwise return the default value. """ <DeepExtract> node = self._head distance = 0 for i in reversed(range(self.level)): nnode = node[2 + i] while nnode is not self._tail and nnode[0] < key: (nnode, node) = (nnode[2 + i], nnode) distance += 1 if i == 0 else node[-1] self._path[i] = node self._distance[i] = distance (path, _) = (self._path, self._distance) </DeepExtract> node = path[0][2] if node is self._tail or node[0] != key: return self.default (node[1], oldvalue) = (value, node[1]) return oldvalue
def replace(self, key, value): """Replace the value of a key-value pair. If a pair with *key* exists, then its value is updated to *value*. If no such pair exists, then nothing is done. If a value was replaced, return the old value. Otherwise return the default value. """ node = self._head distance = 0 for i in reversed(range(self.level)): nnode = node[2 + i] while nnode is not self._tail and nnode[0] < key: (nnode, node) = (nnode[2 + i], nnode) distance += 1 if i == 0 else node[-1] self._path[i] = node self._distance[i] = distance (path, _) = (self._path, self._distance) node = path[0][2] if node is self._tail or node[0] != key: return self.default (node[1], oldvalue) = (value, node[1]) return oldvalue
bluepass
positive
@combined_distances_ex.capture def latex_table(vals_filtered: ValsFiltered, pretty_models: Mapping[str, common_config.RewardCfg], log_dir: str) -> None: """ Writes tables of data from `vals_filtered`. Args: vals_filtered: Filtered values returned by `filter_values`. pretty_models: A Mapping from short-form ("pretty") labels to reward configurations. A model matching that reward configuration has the associated short label. log_dir: Directory to write table to. """ for (k, v) in vals_filtered.items(): v = vals_filtered[k] path = os.path.join(log_dir, f'{k}.csv') logger.info(f"Writing table to '{path}'") with open(path, 'wb') as f: <DeepExtract> y_reward_cfgs = common_keys(v.values()) experiment_kinds = _get_sorted_experiment_kinds() first_row = '' second_row = '' for (distance, experiments) in experiment_kinds.items(): first_row += ' & & \\multicolumn{' + str(len(experiments)) + '}{c}{' + distance + '}' second_row += ' & & ' + ' & '.join(experiments) rows = [first_row, second_row] for model in y_reward_cfgs: cols = [] label = _pretty_label(model, pretty_models) row = f'{label} & & ' for (distance, experiments) in experiment_kinds.items(): for visitation in experiments: k = (distance, visitation) if k in v: val = v[k].loc[model] if distance == 'rl': multiplier = 1 elif k.endswith('relative'): multiplier = 100 else: multiplier = 1000 val = val * multiplier col = _fixed_width_format(val) try: float(col) col = '\\num{' + col + '}' except ValueError: pass else: col = '---' cols.append(col) cols.append('') row += ' & '.join(cols[:-1]) rows.append(row) rows.append('') table = ' \\\\\n'.join(rows) </DeepExtract> f.write(table.encode())
@combined_distances_ex.capture def latex_table(vals_filtered: ValsFiltered, pretty_models: Mapping[str, common_config.RewardCfg], log_dir: str) -> None: """ Writes tables of data from `vals_filtered`. Args: vals_filtered: Filtered values returned by `filter_values`. pretty_models: A Mapping from short-form ("pretty") labels to reward configurations. A model matching that reward configuration has the associated short label. log_dir: Directory to write table to. """ for (k, v) in vals_filtered.items(): v = vals_filtered[k] path = os.path.join(log_dir, f'{k}.csv') logger.info(f"Writing table to '{path}'") with open(path, 'wb') as f: y_reward_cfgs = common_keys(v.values()) experiment_kinds = _get_sorted_experiment_kinds() first_row = '' second_row = '' for (distance, experiments) in experiment_kinds.items(): first_row += ' & & \\multicolumn{' + str(len(experiments)) + '}{c}{' + distance + '}' second_row += ' & & ' + ' & '.join(experiments) rows = [first_row, second_row] for model in y_reward_cfgs: cols = [] label = _pretty_label(model, pretty_models) row = f'{label} & & ' for (distance, experiments) in experiment_kinds.items(): for visitation in experiments: k = (distance, visitation) if k in v: val = v[k].loc[model] if distance == 'rl': multiplier = 1 elif k.endswith('relative'): multiplier = 100 else: multiplier = 1000 val = val * multiplier col = _fixed_width_format(val) try: float(col) col = '\\num{' + col + '}' except ValueError: pass else: col = '---' cols.append(col) cols.append('') row += ' & '.join(cols[:-1]) rows.append(row) rows.append('') table = ' \\\\\n'.join(rows) f.write(table.encode())
evaluating-rewards
positive
def Line(self, line, eol='\n'): if self.head: if self.headCounter < 10: <DeepExtract> if self.fOut == None or self.console: try: print(line, end=eol) except UnicodeEncodeError: encoding = sys.stdout.encoding print(line.encode(encoding, errors='backslashreplace').decode(encoding), end=eol) if self.fOut != None: self.fOut.write(line + '\n') self.fOut.flush() </DeepExtract> elif self.tail: self.tailQueue = self.tailQueue[-9:] + [[line, eol]] self.headCounter += 1 elif self.tail: self.tailQueue = self.tailQueue[-9:] + [[line, eol]] else: <DeepExtract> if self.fOut == None or self.console: try: print(line, end=eol) except UnicodeEncodeError: encoding = sys.stdout.encoding print(line.encode(encoding, errors='backslashreplace').decode(encoding), end=eol) if self.fOut != None: self.fOut.write(line + '\n') self.fOut.flush() </DeepExtract>
def Line(self, line, eol='\n'): if self.head: if self.headCounter < 10: if self.fOut == None or self.console: try: print(line, end=eol) except UnicodeEncodeError: encoding = sys.stdout.encoding print(line.encode(encoding, errors='backslashreplace').decode(encoding), end=eol) if self.fOut != None: self.fOut.write(line + '\n') self.fOut.flush() elif self.tail: self.tailQueue = self.tailQueue[-9:] + [[line, eol]] self.headCounter += 1 elif self.tail: self.tailQueue = self.tailQueue[-9:] + [[line, eol]] else: if self.fOut == None or self.console: try: print(line, end=eol) except UnicodeEncodeError: encoding = sys.stdout.encoding print(line.encode(encoding, errors='backslashreplace').decode(encoding), end=eol) if self.fOut != None: self.fOut.write(line + '\n') self.fOut.flush()
Beta
positive
def test_match_os_any(): rule = textwrap.dedent('\n rule:\n meta:\n name: test rule\n features:\n - or:\n - and:\n - or:\n - os: windows\n - os: linux\n - os: macos\n - string: "Hello world"\n - and:\n - os: any\n - string: "Goodbye world"\n ') r = capa.rules.Rule.from_yaml(rule) <DeepExtract> (features1, matches1) = capa.engine.match([r], {OS(OS_ANY): {1}, String('Hello world'): {1}}, 0) ruleset = capa.rules.RuleSet([r]) (features2, matches2) = ruleset.match(scope, {OS(OS_ANY): {1}, String('Hello world'): {1}}, 0) for (feature, locations) in features1.items(): assert feature in features2 assert locations == features2[feature] for (rulename, results) in matches1.items(): assert rulename in matches2 assert len(results) == len(matches2[rulename]) (_, matches) = (features1, matches1) </DeepExtract> assert 'test rule' in matches <DeepExtract> (features1, matches1) = capa.engine.match([r], {OS(OS_WINDOWS): {1}, String('Hello world'): {1}}, 0) ruleset = capa.rules.RuleSet([r]) (features2, matches2) = ruleset.match(scope, {OS(OS_WINDOWS): {1}, String('Hello world'): {1}}, 0) for (feature, locations) in features1.items(): assert feature in features2 assert locations == features2[feature] for (rulename, results) in matches1.items(): assert rulename in matches2 assert len(results) == len(matches2[rulename]) (_, matches) = (features1, matches1) </DeepExtract> assert 'test rule' in matches <DeepExtract> (features1, matches1) = capa.engine.match([r], {OS(OS_ANY): {1}, String('Goodbye world'): {1}}, 0) ruleset = capa.rules.RuleSet([r]) (features2, matches2) = ruleset.match(scope, {OS(OS_ANY): {1}, String('Goodbye world'): {1}}, 0) for (feature, locations) in features1.items(): assert feature in features2 assert locations == features2[feature] for (rulename, results) in matches1.items(): assert rulename in matches2 assert len(results) == len(matches2[rulename]) (_, matches) = (features1, matches1) </DeepExtract> assert 'test rule' in matches <DeepExtract> (features1, matches1) = capa.engine.match([r], {OS(OS_WINDOWS): {1}, String('Goodbye world'): {1}}, 0) ruleset = capa.rules.RuleSet([r]) (features2, matches2) = ruleset.match(scope, {OS(OS_WINDOWS): {1}, String('Goodbye world'): {1}}, 0) for (feature, locations) in features1.items(): assert feature in features2 assert locations == features2[feature] for (rulename, results) in matches1.items(): assert rulename in matches2 assert len(results) == len(matches2[rulename]) (_, matches) = (features1, matches1) </DeepExtract> assert 'test rule' in matches
def test_match_os_any(): rule = textwrap.dedent('\n rule:\n meta:\n name: test rule\n features:\n - or:\n - and:\n - or:\n - os: windows\n - os: linux\n - os: macos\n - string: "Hello world"\n - and:\n - os: any\n - string: "Goodbye world"\n ') r = capa.rules.Rule.from_yaml(rule) (features1, matches1) = capa.engine.match([r], {OS(OS_ANY): {1}, String('Hello world'): {1}}, 0) ruleset = capa.rules.RuleSet([r]) (features2, matches2) = ruleset.match(scope, {OS(OS_ANY): {1}, String('Hello world'): {1}}, 0) for (feature, locations) in features1.items(): assert feature in features2 assert locations == features2[feature] for (rulename, results) in matches1.items(): assert rulename in matches2 assert len(results) == len(matches2[rulename]) (_, matches) = (features1, matches1) assert 'test rule' in matches (features1, matches1) = capa.engine.match([r], {OS(OS_WINDOWS): {1}, String('Hello world'): {1}}, 0) ruleset = capa.rules.RuleSet([r]) (features2, matches2) = ruleset.match(scope, {OS(OS_WINDOWS): {1}, String('Hello world'): {1}}, 0) for (feature, locations) in features1.items(): assert feature in features2 assert locations == features2[feature] for (rulename, results) in matches1.items(): assert rulename in matches2 assert len(results) == len(matches2[rulename]) (_, matches) = (features1, matches1) assert 'test rule' in matches (features1, matches1) = capa.engine.match([r], {OS(OS_ANY): {1}, String('Goodbye world'): {1}}, 0) ruleset = capa.rules.RuleSet([r]) (features2, matches2) = ruleset.match(scope, {OS(OS_ANY): {1}, String('Goodbye world'): {1}}, 0) for (feature, locations) in features1.items(): assert feature in features2 assert locations == features2[feature] for (rulename, results) in matches1.items(): assert rulename in matches2 assert len(results) == len(matches2[rulename]) (_, matches) = (features1, matches1) assert 'test rule' in matches (features1, matches1) = capa.engine.match([r], {OS(OS_WINDOWS): {1}, String('Goodbye world'): {1}}, 0) ruleset = capa.rules.RuleSet([r]) (features2, matches2) = ruleset.match(scope, {OS(OS_WINDOWS): {1}, String('Goodbye world'): {1}}, 0) for (feature, locations) in features1.items(): assert feature in features2 assert locations == features2[feature] for (rulename, results) in matches1.items(): assert rulename in matches2 assert len(results) == len(matches2[rulename]) (_, matches) = (features1, matches1) assert 'test rule' in matches
capa
positive
def save(self, *args, **kwargs): <DeepExtract> if self.operator not in self.operator_table: raise TypeError('Incorrect operator "{operator}" for class {cls}'.format(operator=self.operator, cls=self.__class__.__name__)) </DeepExtract> return super(Operator, self).save(*args, **kwargs)
def save(self, *args, **kwargs): if self.operator not in self.operator_table: raise TypeError('Incorrect operator "{operator}" for class {cls}'.format(operator=self.operator, cls=self.__class__.__name__)) return super(Operator, self).save(*args, **kwargs)
django-business-logic
positive
def warmup(self, shape=(480, 640, 3), iteration=5): """Warmup pipeline engine Use all-zero numpy array as input to warmup pipeline engine. Args: meta: Metadata of image data shape: Warmup image shape in (w, h, c) format iteration: How many times to feed in warmup image Returns: N/A """ logger.debug('Warmup shape: {}'.format(shape)) input_data = [np.zeros(shape=shape, dtype=np.uint8)] * iteration <DeepExtract> dl_comp_config = [] try: pipeline_def = self.pipeline_config['pipeline_def'] except KeyError: logger.warning('Invalid pipeline config') pipeline_def = [] for comp_config in pipeline_def: if 'classifier' in comp_config['name'] or 'detector' in comp_config['name']: dl_comp_config.append(comp_config) dl_comp_config = dl_comp_config </DeepExtract> for comp_config in dl_comp_config: t_start = time.time() comp_name = comp_config['name'] inst = self.launcher.pipeline.pipeline[comp_name]['instance'] inst.input_data = input_data inst.main_process() t_duration = time.time() - t_start logger.debug('Warmup {0} costs {1} sec'.format(comp_name, t_duration))
def warmup(self, shape=(480, 640, 3), iteration=5): """Warmup pipeline engine Use all-zero numpy array as input to warmup pipeline engine. Args: meta: Metadata of image data shape: Warmup image shape in (w, h, c) format iteration: How many times to feed in warmup image Returns: N/A """ logger.debug('Warmup shape: {}'.format(shape)) input_data = [np.zeros(shape=shape, dtype=np.uint8)] * iteration dl_comp_config = [] try: pipeline_def = self.pipeline_config['pipeline_def'] except KeyError: logger.warning('Invalid pipeline config') pipeline_def = [] for comp_config in pipeline_def: if 'classifier' in comp_config['name'] or 'detector' in comp_config['name']: dl_comp_config.append(comp_config) dl_comp_config = dl_comp_config for comp_config in dl_comp_config: t_start = time.time() comp_name = comp_config['name'] inst = self.launcher.pipeline.pipeline[comp_name]['instance'] inst.input_data = input_data inst.main_process() t_duration = time.time() - t_start logger.debug('Warmup {0} costs {1} sec'.format(comp_name, t_duration))
BerryNet
positive
def load_data_wiki(batch_size, max_len): """Load the WikiText-2 dataset. Defined in :numref:`subsec_prepare_mlm_data`""" num_workers = d2l.get_dataloader_workers() data_dir = d2l.download_extract('wikitext-2', 'wikitext-2') <DeepExtract> file_name = os.path.join(data_dir, 'wiki.train.tokens') with open(file_name, 'r') as f: lines = f.readlines() paragraphs = [line.strip().lower().split(' . ') for line in lines if len(line.split(' . ')) >= 2] random.shuffle(paragraphs) paragraphs = paragraphs </DeepExtract> train_set = _WikiTextDataset(paragraphs, max_len) train_iter = gluon.data.DataLoader(train_set, batch_size, shuffle=True, num_workers=num_workers) return (train_iter, train_set.vocab)
def load_data_wiki(batch_size, max_len): """Load the WikiText-2 dataset. Defined in :numref:`subsec_prepare_mlm_data`""" num_workers = d2l.get_dataloader_workers() data_dir = d2l.download_extract('wikitext-2', 'wikitext-2') file_name = os.path.join(data_dir, 'wiki.train.tokens') with open(file_name, 'r') as f: lines = f.readlines() paragraphs = [line.strip().lower().split(' . ') for line in lines if len(line.split(' . ')) >= 2] random.shuffle(paragraphs) paragraphs = paragraphs train_set = _WikiTextDataset(paragraphs, max_len) train_iter = gluon.data.DataLoader(train_set, batch_size, shuffle=True, num_workers=num_workers) return (train_iter, train_set.vocab)
d2l-zh
positive
def RemoveUser(self, user): """Remove a Linux user account. Args: user: string, the Linux user account to remove. """ self.logger.info('Removing user %s.', user) if self.remove: command = self.userdel_cmd.format(user=user) try: subprocess.check_call(command.split(' ')) except subprocess.CalledProcessError as e: self.logger.warning('Could not remove user %s. %s.', user, str(e)) else: self.logger.info('Removed user account %s.', user) <DeepExtract> pw_entry = self._GetUser(user) if not pw_entry: return home_dir = pw_entry.pw_dir authorized_keys_file = os.path.join(home_dir, '.ssh', 'authorized_keys') if os.path.exists(authorized_keys_file): try: os.remove(authorized_keys_file) except OSError as e: message = 'Could not remove authorized keys for user %s. %s.' self.logger.warning(message, user, str(e)) </DeepExtract> <DeepExtract> if False: self.logger.info('Adding user %s to the Google sudoers group.', user) command = self.gpasswd_add_cmd.format(user=user, group=self.google_sudoers_group) else: self.logger.info('Removing user %s from the Google sudoers group.', user) command = self.gpasswd_remove_cmd.format(user=user, group=self.google_sudoers_group) try: subprocess.check_call(command.split(' ')) except subprocess.CalledProcessError as e: self.logger.warning('Could not update user %s. %s.', user, str(e)) return False else: self.logger.debug('Removed user %s from the Google sudoers group.', user) return True </DeepExtract>
def RemoveUser(self, user): """Remove a Linux user account. Args: user: string, the Linux user account to remove. """ self.logger.info('Removing user %s.', user) if self.remove: command = self.userdel_cmd.format(user=user) try: subprocess.check_call(command.split(' ')) except subprocess.CalledProcessError as e: self.logger.warning('Could not remove user %s. %s.', user, str(e)) else: self.logger.info('Removed user account %s.', user) pw_entry = self._GetUser(user) if not pw_entry: return home_dir = pw_entry.pw_dir authorized_keys_file = os.path.join(home_dir, '.ssh', 'authorized_keys') if os.path.exists(authorized_keys_file): try: os.remove(authorized_keys_file) except OSError as e: message = 'Could not remove authorized keys for user %s. %s.' self.logger.warning(message, user, str(e)) if False: self.logger.info('Adding user %s to the Google sudoers group.', user) command = self.gpasswd_add_cmd.format(user=user, group=self.google_sudoers_group) else: self.logger.info('Removing user %s from the Google sudoers group.', user) command = self.gpasswd_remove_cmd.format(user=user, group=self.google_sudoers_group) try: subprocess.check_call(command.split(' ')) except subprocess.CalledProcessError as e: self.logger.warning('Could not update user %s. %s.', user, str(e)) return False else: self.logger.debug('Removed user %s from the Google sudoers group.', user) return True
compute-image-packages
positive
def __init__(self, conv_body_func, fpn_level_info, P2only=False): super().__init__() self.fpn_level_info = fpn_level_info self.P2only = P2only self.dim_out = fpn_dim = cfg.FPN.DIM <DeepExtract> min_level = LOWEST_BACKBONE_LVL max_level = HIGHEST_BACKBONE_LVL if cfg.FPN.MULTILEVEL_RPN and (not cfg.FPN.MULTILEVEL_ROIS): max_level = cfg.FPN.RPN_MAX_LEVEL min_level = cfg.FPN.RPN_MIN_LEVEL if not cfg.FPN.MULTILEVEL_RPN and cfg.FPN.MULTILEVEL_ROIS: max_level = cfg.FPN.ROI_MAX_LEVEL min_level = cfg.FPN.ROI_MIN_LEVEL if cfg.FPN.MULTILEVEL_RPN and cfg.FPN.MULTILEVEL_ROIS: max_level = max(cfg.FPN.RPN_MAX_LEVEL, cfg.FPN.ROI_MAX_LEVEL) min_level = min(cfg.FPN.RPN_MIN_LEVEL, cfg.FPN.ROI_MIN_LEVEL) (min_level, max_level) = (min_level, max_level) </DeepExtract> self.num_backbone_stages = len(fpn_level_info.blobs) - (min_level - LOWEST_BACKBONE_LVL) fpn_dim_lateral = fpn_level_info.dims self.spatial_scale = [] self.conv_top = nn.Conv2d(fpn_dim_lateral[0], fpn_dim, 1, 1, 0) if cfg.FPN.USE_GN: self.conv_top = nn.Sequential(nn.Conv2d(fpn_dim_lateral[0], fpn_dim, 1, 1, 0, bias=False), nn.GroupNorm(net_utils.get_group_gn(fpn_dim), fpn_dim, eps=cfg.GROUP_NORM.EPSILON)) else: self.conv_top = nn.Conv2d(fpn_dim_lateral[0], fpn_dim, 1, 1, 0) self.topdown_lateral_modules = nn.ModuleList() self.posthoc_modules = nn.ModuleList() for i in range(self.num_backbone_stages - 1): self.topdown_lateral_modules.append(topdown_lateral_module(fpn_dim, fpn_dim_lateral[i + 1])) for i in range(self.num_backbone_stages): if cfg.FPN.USE_GN: self.posthoc_modules.append(nn.Sequential(nn.Conv2d(fpn_dim, fpn_dim, 3, 1, 1, bias=False), nn.GroupNorm(net_utils.get_group_gn(fpn_dim), fpn_dim, eps=cfg.GROUP_NORM.EPSILON))) else: self.posthoc_modules.append(nn.Conv2d(fpn_dim, fpn_dim, 3, 1, 1)) self.spatial_scale.append(fpn_level_info.spatial_scales[i]) if not cfg.FPN.EXTRA_CONV_LEVELS and max_level == HIGHEST_BACKBONE_LVL + 1: self.maxpool_p6 = nn.MaxPool2d(kernel_size=1, stride=2, padding=0) self.spatial_scale.insert(0, self.spatial_scale[0] * 0.5) if cfg.FPN.EXTRA_CONV_LEVELS and max_level > HIGHEST_BACKBONE_LVL: self.extra_pyramid_modules = nn.ModuleList() dim_in = fpn_level_info.dims[0] for i in range(HIGHEST_BACKBONE_LVL + 1, max_level + 1): self.extra_pyramid_modules(nn.Conv2d(dim_in, fpn_dim, 3, 2, 1)) dim_in = fpn_dim self.spatial_scale.insert(0, self.spatial_scale[0] * 0.5) if self.P2only: self.spatial_scale = self.spatial_scale[-1] <DeepExtract> def init_func(m): if isinstance(m, nn.Conv2d): mynn.init.XavierFill(m.weight) if m.bias is not None: init.constant_(m.bias, 0) for child_m in self.children(): if not isinstance(child_m, nn.ModuleList) or not isinstance(child_m[0], topdown_lateral_module): child_m.apply(init_func) </DeepExtract> self.conv_body = conv_body_func()
def __init__(self, conv_body_func, fpn_level_info, P2only=False): super().__init__() self.fpn_level_info = fpn_level_info self.P2only = P2only self.dim_out = fpn_dim = cfg.FPN.DIM min_level = LOWEST_BACKBONE_LVL max_level = HIGHEST_BACKBONE_LVL if cfg.FPN.MULTILEVEL_RPN and (not cfg.FPN.MULTILEVEL_ROIS): max_level = cfg.FPN.RPN_MAX_LEVEL min_level = cfg.FPN.RPN_MIN_LEVEL if not cfg.FPN.MULTILEVEL_RPN and cfg.FPN.MULTILEVEL_ROIS: max_level = cfg.FPN.ROI_MAX_LEVEL min_level = cfg.FPN.ROI_MIN_LEVEL if cfg.FPN.MULTILEVEL_RPN and cfg.FPN.MULTILEVEL_ROIS: max_level = max(cfg.FPN.RPN_MAX_LEVEL, cfg.FPN.ROI_MAX_LEVEL) min_level = min(cfg.FPN.RPN_MIN_LEVEL, cfg.FPN.ROI_MIN_LEVEL) (min_level, max_level) = (min_level, max_level) self.num_backbone_stages = len(fpn_level_info.blobs) - (min_level - LOWEST_BACKBONE_LVL) fpn_dim_lateral = fpn_level_info.dims self.spatial_scale = [] self.conv_top = nn.Conv2d(fpn_dim_lateral[0], fpn_dim, 1, 1, 0) if cfg.FPN.USE_GN: self.conv_top = nn.Sequential(nn.Conv2d(fpn_dim_lateral[0], fpn_dim, 1, 1, 0, bias=False), nn.GroupNorm(net_utils.get_group_gn(fpn_dim), fpn_dim, eps=cfg.GROUP_NORM.EPSILON)) else: self.conv_top = nn.Conv2d(fpn_dim_lateral[0], fpn_dim, 1, 1, 0) self.topdown_lateral_modules = nn.ModuleList() self.posthoc_modules = nn.ModuleList() for i in range(self.num_backbone_stages - 1): self.topdown_lateral_modules.append(topdown_lateral_module(fpn_dim, fpn_dim_lateral[i + 1])) for i in range(self.num_backbone_stages): if cfg.FPN.USE_GN: self.posthoc_modules.append(nn.Sequential(nn.Conv2d(fpn_dim, fpn_dim, 3, 1, 1, bias=False), nn.GroupNorm(net_utils.get_group_gn(fpn_dim), fpn_dim, eps=cfg.GROUP_NORM.EPSILON))) else: self.posthoc_modules.append(nn.Conv2d(fpn_dim, fpn_dim, 3, 1, 1)) self.spatial_scale.append(fpn_level_info.spatial_scales[i]) if not cfg.FPN.EXTRA_CONV_LEVELS and max_level == HIGHEST_BACKBONE_LVL + 1: self.maxpool_p6 = nn.MaxPool2d(kernel_size=1, stride=2, padding=0) self.spatial_scale.insert(0, self.spatial_scale[0] * 0.5) if cfg.FPN.EXTRA_CONV_LEVELS and max_level > HIGHEST_BACKBONE_LVL: self.extra_pyramid_modules = nn.ModuleList() dim_in = fpn_level_info.dims[0] for i in range(HIGHEST_BACKBONE_LVL + 1, max_level + 1): self.extra_pyramid_modules(nn.Conv2d(dim_in, fpn_dim, 3, 2, 1)) dim_in = fpn_dim self.spatial_scale.insert(0, self.spatial_scale[0] * 0.5) if self.P2only: self.spatial_scale = self.spatial_scale[-1] def init_func(m): if isinstance(m, nn.Conv2d): mynn.init.XavierFill(m.weight) if m.bias is not None: init.constant_(m.bias, 0) for child_m in self.children(): if not isinstance(child_m, nn.ModuleList) or not isinstance(child_m[0], topdown_lateral_module): child_m.apply(init_func) self.conv_body = conv_body_func()
Amodal-Instance-Segmentation-through-KINS-Dataset
positive
def sparse(tensor, sparsity, std=0.01): """Fills the 2D input Tensor or Variable as a sparse matrix, where the non-zero elements will be drawn from a normal distribution with mean=0 and std=`std`. Args: tensor: a n-dimension torch.Tensor sparsity: The fraction of elements in each column to be set to zero std: the standard deviation of the normal distribution used to generate the non-zero values Examples: >>> w = torch.Tensor(3, 5) >>> nninit.sparse(w, sparsity=0.1) """ if isinstance(tensor, Variable): <DeepExtract> if isinstance(tensor.data, Variable): sparse(tensor.data.data, sparsity, std=std) return tensor.data else: if tensor.data.ndimension() != 2: raise ValueError('Sparse initialization only supported for 2D inputs') tensor.data.normal_(0, std) (rows, cols) = (tensor.data.size(0), tensor.data.size(1)) num_zeros = int(np.ceil(cols * sparsity)) for col_idx in range(tensor.data.size(1)): row_indices = np.arange(rows) np.random.shuffle(row_indices) zero_indices = row_indices[:num_zeros] tensor.data.numpy()[zero_indices, col_idx] = 0 return tensor.data </DeepExtract> return tensor else: if tensor.ndimension() != 2: raise ValueError('Sparse initialization only supported for 2D inputs') tensor.normal_(0, std) (rows, cols) = (tensor.size(0), tensor.size(1)) num_zeros = int(np.ceil(cols * sparsity)) for col_idx in range(tensor.size(1)): row_indices = np.arange(rows) np.random.shuffle(row_indices) zero_indices = row_indices[:num_zeros] tensor.numpy()[zero_indices, col_idx] = 0 return tensor
def sparse(tensor, sparsity, std=0.01): """Fills the 2D input Tensor or Variable as a sparse matrix, where the non-zero elements will be drawn from a normal distribution with mean=0 and std=`std`. Args: tensor: a n-dimension torch.Tensor sparsity: The fraction of elements in each column to be set to zero std: the standard deviation of the normal distribution used to generate the non-zero values Examples: >>> w = torch.Tensor(3, 5) >>> nninit.sparse(w, sparsity=0.1) """ if isinstance(tensor, Variable): if isinstance(tensor.data, Variable): sparse(tensor.data.data, sparsity, std=std) return tensor.data else: if tensor.data.ndimension() != 2: raise ValueError('Sparse initialization only supported for 2D inputs') tensor.data.normal_(0, std) (rows, cols) = (tensor.data.size(0), tensor.data.size(1)) num_zeros = int(np.ceil(cols * sparsity)) for col_idx in range(tensor.data.size(1)): row_indices = np.arange(rows) np.random.shuffle(row_indices) zero_indices = row_indices[:num_zeros] tensor.data.numpy()[zero_indices, col_idx] = 0 return tensor.data return tensor else: if tensor.ndimension() != 2: raise ValueError('Sparse initialization only supported for 2D inputs') tensor.normal_(0, std) (rows, cols) = (tensor.size(0), tensor.size(1)) num_zeros = int(np.ceil(cols * sparsity)) for col_idx in range(tensor.size(1)): row_indices = np.arange(rows) np.random.shuffle(row_indices) zero_indices = row_indices[:num_zeros] tensor.numpy()[zero_indices, col_idx] = 0 return tensor
Danesfield
positive
def __init__(self, description, version, command_manager, stdin=None, stdout=None, stderr=None, interactive_app_factory=None, deferred_help=False): """Initialize the application. """ self.command_manager = command_manager self.command_manager.add_command('help', help.HelpCommand) self.command_manager.add_command('complete', complete.CompleteCommand) <DeepExtract> try: locale.setlocale(locale.LC_ALL, '') except locale.Error: pass self.stdin = stdin or sys.stdin self.stdout = stdout or sys.stdout self.stderr = stderr or sys.stderr </DeepExtract> self.interactive_app_factory = interactive_app_factory self.deferred_help = deferred_help <DeepExtract> argparse_kwargs = argparse_kwargs or {} parser = _argparse.ArgumentParser(description=description, add_help=False, **argparse_kwargs) parser.add_argument('--version', action='version', version='{0} {1}'.format(App.NAME, version)) verbose_group = parser.add_mutually_exclusive_group() verbose_group.add_argument('-v', '--verbose', action='count', dest='verbose_level', default=self.DEFAULT_VERBOSE_LEVEL, help='Increase verbosity of output. Can be repeated.') verbose_group.add_argument('-q', '--quiet', action='store_const', dest='verbose_level', const=0, help='Suppress output except warnings and errors.') parser.add_argument('--log-file', action='store', default=None, help='Specify a file to log output. Disabled by default.') if self.deferred_help: parser.add_argument('-h', '--help', dest='deferred_help', action='store_true', help='Show help message and exit.') else: parser.add_argument('-h', '--help', action=help.HelpAction, nargs=0, default=self, help='Show help message and exit.') parser.add_argument('--debug', default=False, action='store_true', help='Show tracebacks on errors.') self.parser = parser </DeepExtract> self.interactive_mode = False self.interpreter = None
def __init__(self, description, version, command_manager, stdin=None, stdout=None, stderr=None, interactive_app_factory=None, deferred_help=False): """Initialize the application. """ self.command_manager = command_manager self.command_manager.add_command('help', help.HelpCommand) self.command_manager.add_command('complete', complete.CompleteCommand) try: locale.setlocale(locale.LC_ALL, '') except locale.Error: pass self.stdin = stdin or sys.stdin self.stdout = stdout or sys.stdout self.stderr = stderr or sys.stderr self.interactive_app_factory = interactive_app_factory self.deferred_help = deferred_help argparse_kwargs = argparse_kwargs or {} parser = _argparse.ArgumentParser(description=description, add_help=False, **argparse_kwargs) parser.add_argument('--version', action='version', version='{0} {1}'.format(App.NAME, version)) verbose_group = parser.add_mutually_exclusive_group() verbose_group.add_argument('-v', '--verbose', action='count', dest='verbose_level', default=self.DEFAULT_VERBOSE_LEVEL, help='Increase verbosity of output. Can be repeated.') verbose_group.add_argument('-q', '--quiet', action='store_const', dest='verbose_level', const=0, help='Suppress output except warnings and errors.') parser.add_argument('--log-file', action='store', default=None, help='Specify a file to log output. Disabled by default.') if self.deferred_help: parser.add_argument('-h', '--help', dest='deferred_help', action='store_true', help='Show help message and exit.') else: parser.add_argument('-h', '--help', action=help.HelpAction, nargs=0, default=self, help='Show help message and exit.') parser.add_argument('--debug', default=False, action='store_true', help='Show tracebacks on errors.') self.parser = parser self.interactive_mode = False self.interpreter = None
cliff
positive
def authorized_delete_list(self, object_list, bundle): """ Handles checking of permissions to see if the user has authorization to DELETE this resource. """ try: auth_result = self._meta.authorization.delete_list(object_list, bundle) except Unauthorized as e: <DeepExtract> raise ImmediateHttpResponse(response=http.HttpUnauthorized()) </DeepExtract> return auth_result
def authorized_delete_list(self, object_list, bundle): """ Handles checking of permissions to see if the user has authorization to DELETE this resource. """ try: auth_result = self._meta.authorization.delete_list(object_list, bundle) except Unauthorized as e: raise ImmediateHttpResponse(response=http.HttpUnauthorized()) return auth_result
django-tastypie
positive
def send(self, req, **kwargs): """Return the file specified by the given request @type req: C{PreparedRequest} @todo: Should I bother filling `response.headers` and processing If-Modified-Since and friends using `os.stat`? """ path = os.path.normcase(os.path.normpath(url2pathname(req.path_url))) response = requests.Response() <DeepExtract> if req.method.lower() in ('put', 'delete'): (response.status_code, response.reason) = (501, 'Not Implemented') elif req.method.lower() not in ('get', 'head'): (response.status_code, response.reason) = (405, 'Method Not Allowed') elif os.path.isdir(path): (response.status_code, response.reason) = (400, 'Path Not A File') elif not os.path.isfile(path): (response.status_code, response.reason) = (404, 'File Not Found') elif not os.access(path, os.R_OK): (response.status_code, response.reason) = (403, 'Access Denied') else: (response.status_code, response.reason) = (200, 'OK') </DeepExtract> if response.status_code == 200 and req.method.lower() != 'head': try: response.raw = open(path, 'rb') except (OSError, IOError) as err: response.status_code = 500 response.reason = str(err) if isinstance(req.url, bytes): response.url = req.url.decode('utf-8') else: response.url = req.url response.request = req response.connection = self return response
def send(self, req, **kwargs): """Return the file specified by the given request @type req: C{PreparedRequest} @todo: Should I bother filling `response.headers` and processing If-Modified-Since and friends using `os.stat`? """ path = os.path.normcase(os.path.normpath(url2pathname(req.path_url))) response = requests.Response() if req.method.lower() in ('put', 'delete'): (response.status_code, response.reason) = (501, 'Not Implemented') elif req.method.lower() not in ('get', 'head'): (response.status_code, response.reason) = (405, 'Method Not Allowed') elif os.path.isdir(path): (response.status_code, response.reason) = (400, 'Path Not A File') elif not os.path.isfile(path): (response.status_code, response.reason) = (404, 'File Not Found') elif not os.access(path, os.R_OK): (response.status_code, response.reason) = (403, 'Access Denied') else: (response.status_code, response.reason) = (200, 'OK') if response.status_code == 200 and req.method.lower() != 'head': try: response.raw = open(path, 'rb') except (OSError, IOError) as err: response.status_code = 500 response.reason = str(err) if isinstance(req.url, bytes): response.url = req.url.decode('utf-8') else: response.url = req.url response.request = req response.connection = self return response
eulfedora
positive
def generate_anchors(base_size=16, ratios=[0.5, 1, 2], scales=2 ** np.arange(3, 6)): """ Generate anchor (reference) windows by enumerating aspect ratios X scales wrt a reference (0, 0, 15, 15) window. """ base_anchor = np.array([1, 1, base_size, base_size]) - 1 <DeepExtract> (w, h, x_ctr, y_ctr) = _whctrs(base_anchor) size = w * h size_ratios = size / ratios ws = np.round(np.sqrt(size_ratios)) hs = np.round(ws * ratios) anchors = _mkanchors(ws, hs, x_ctr, y_ctr) ratio_anchors = anchors </DeepExtract> anchors = np.vstack([_scale_enum(ratio_anchors[i, :], scales) for i in xrange(ratio_anchors.shape[0])]) return anchors
def generate_anchors(base_size=16, ratios=[0.5, 1, 2], scales=2 ** np.arange(3, 6)): """ Generate anchor (reference) windows by enumerating aspect ratios X scales wrt a reference (0, 0, 15, 15) window. """ base_anchor = np.array([1, 1, base_size, base_size]) - 1 (w, h, x_ctr, y_ctr) = _whctrs(base_anchor) size = w * h size_ratios = size / ratios ws = np.round(np.sqrt(size_ratios)) hs = np.round(ws * ratios) anchors = _mkanchors(ws, hs, x_ctr, y_ctr) ratio_anchors = anchors anchors = np.vstack([_scale_enum(ratio_anchors[i, :], scales) for i in xrange(ratio_anchors.shape[0])]) return anchors
3d-vehicle-tracking
positive
def generate_item_sim_matrix(self): """Calculate the similarity matrix between items. :return: item_sim_matrix """ item_sim_matrix = np.ones((self.n_items, self.n_items), dtype=np.float32) for item in range(self.n_items): user_sequence = self.binary_user_item.getcol(item).nonzero()[0] <DeepExtract> sparse_sequence = get_sparse_vector(user_sequence, self.n_users) overlap = self.binary_user_item.T.dot(sparse_sequence).toarray().ravel() overlap[overlap != 0] /= np.sqrt(self._users_count_per_item()[overlap != 0]) item_sim_matrix[item] = overlap </DeepExtract> return item_sim_matrix
def generate_item_sim_matrix(self): """Calculate the similarity matrix between items. :return: item_sim_matrix """ item_sim_matrix = np.ones((self.n_items, self.n_items), dtype=np.float32) for item in range(self.n_items): user_sequence = self.binary_user_item.getcol(item).nonzero()[0] sparse_sequence = get_sparse_vector(user_sequence, self.n_users) overlap = self.binary_user_item.T.dot(sparse_sequence).toarray().ravel() overlap[overlap != 0] /= np.sqrt(self._users_count_per_item()[overlap != 0]) item_sim_matrix[item] = overlap return item_sim_matrix
beta-recsys
positive
def convert_date_time(date_time, settings): date_format = get_date_format(settings) time_format = get_time_format(settings) full_format = get_full_format(settings) if date_time[0] == '"': return natural_parser(date_time, settings) if date_time.startswith('wn'): return (get_date_from_week_number(date_time), date_format) date_time_str = str(date_time) if date_time_str.lower() in DATE_FUNCTION_MAP.keys(): return DATE_FUNCTION_MAP[date_time_str.lower()](settings) <DeepExtract> absolute = False if date_time_str.lower().startswith('^'): date_time_str.lower() = date_time_str.lower().lstrip('^') absolute = True for anniversary in settings['anniversaries'].keys(): if date_time_str.lower().lower() == anniversary: anniversary_date_str = settings['anniversaries'][anniversary] anniversary_date = dateutil.parser.parse(anniversary_date_str) if absolute: anniversary_date = anniversary_date else: anniversary_date = get_anniversary(anniversary_date) anniversary_date = None </DeepExtract> if anniversary_date is not None: return (datetime.combine(anniversary_date, datetime.max.time()), date_format) try: time_preprocessor = get_time_preprocessor(settings) if time_preprocessor: date_time_str = time_preprocessor(date_time_str) date_and_time = datetime.strptime(date_time_str.upper(), full_format) return (date_and_time, full_format) except ValueError: try: process_date = datetime.strptime(date_time_str.upper(), date_format) date_and_time = datetime.combine(process_date, datetime.max.time()) return (date_and_time, date_format) except ValueError: try: process_time = datetime.strptime(date_time_str.upper(), time_format).time() date_and_time = datetime.combine(datetime.today(), process_time) return (date_and_time, time_format) except: raise ValueError
def convert_date_time(date_time, settings): date_format = get_date_format(settings) time_format = get_time_format(settings) full_format = get_full_format(settings) if date_time[0] == '"': return natural_parser(date_time, settings) if date_time.startswith('wn'): return (get_date_from_week_number(date_time), date_format) date_time_str = str(date_time) if date_time_str.lower() in DATE_FUNCTION_MAP.keys(): return DATE_FUNCTION_MAP[date_time_str.lower()](settings) absolute = False if date_time_str.lower().startswith('^'): date_time_str.lower() = date_time_str.lower().lstrip('^') absolute = True for anniversary in settings['anniversaries'].keys(): if date_time_str.lower().lower() == anniversary: anniversary_date_str = settings['anniversaries'][anniversary] anniversary_date = dateutil.parser.parse(anniversary_date_str) if absolute: anniversary_date = anniversary_date else: anniversary_date = get_anniversary(anniversary_date) anniversary_date = None if anniversary_date is not None: return (datetime.combine(anniversary_date, datetime.max.time()), date_format) try: time_preprocessor = get_time_preprocessor(settings) if time_preprocessor: date_time_str = time_preprocessor(date_time_str) date_and_time = datetime.strptime(date_time_str.upper(), full_format) return (date_and_time, full_format) except ValueError: try: process_date = datetime.strptime(date_time_str.upper(), date_format) date_and_time = datetime.combine(process_date, datetime.max.time()) return (date_and_time, date_format) except ValueError: try: process_time = datetime.strptime(date_time_str.upper(), time_format).time() date_and_time = datetime.combine(datetime.today(), process_time) return (date_and_time, time_format) except: raise ValueError
Alfred-Workflows-DateCalculator
positive
def gif(self, file_id=None, url=None, title=None, content=None, thumb_url=None, width=None, height=None, duration=None, caption=None, syntax=None, attach=None): """Render an inline gif result""" <DeepExtract> if hasattr('gif', 'id'): 'gif' = 'gif'.id args = {'chat_id': self.id} if 'gif' is not None: args['reply_to_message_id'] = 'gif' if title is not None: _deprecated_message('The extra parameter', '1.0', 'use the attach parameter', -4) args['reply_markup'] = json.dumps(title.serialize()) if attach is not None: if not hasattr(attach, '_serialize_attachment'): raise ValueError('%s is not an attachment' % attach) args['reply_markup'] = json.dumps(attach._serialize_attachment(self)) if not content: args['disable_notification'] = True args = args </DeepExtract> <DeepExtract> result_type = args['type'] if file_id is not None and url is None: args[result_type + '_file_id'] = file_id elif file_id is None and url is not None: args[result_type + '_url'] = url elif file_id is None and url is None: raise TypeError('file_id or URL is missing') else: raise TypeError('Only one among file_id and URL must be passed') args = args </DeepExtract> <DeepExtract> if thumb_url is not None: args['thumb_url'] = thumb_url if None is not None: args['thumb_width'] = None if None is not None: args['thumb_height'] = None args = args </DeepExtract> <DeepExtract> if caption is not None: args['caption'] = caption if syntax is not None: args['parse_mode'] = syntax else: args['parse_mode'] = syntaxes.guess_syntax(caption, syntax) args = args </DeepExtract> if width is not None: args['gif_width'] = width if height is not None: args['gif_height'] = height if duration is not None: args['gif_duration'] = duration return args
def gif(self, file_id=None, url=None, title=None, content=None, thumb_url=None, width=None, height=None, duration=None, caption=None, syntax=None, attach=None): """Render an inline gif result""" if hasattr('gif', 'id'): 'gif' = 'gif'.id args = {'chat_id': self.id} if 'gif' is not None: args['reply_to_message_id'] = 'gif' if title is not None: _deprecated_message('The extra parameter', '1.0', 'use the attach parameter', -4) args['reply_markup'] = json.dumps(title.serialize()) if attach is not None: if not hasattr(attach, '_serialize_attachment'): raise ValueError('%s is not an attachment' % attach) args['reply_markup'] = json.dumps(attach._serialize_attachment(self)) if not content: args['disable_notification'] = True args = args result_type = args['type'] if file_id is not None and url is None: args[result_type + '_file_id'] = file_id elif file_id is None and url is not None: args[result_type + '_url'] = url elif file_id is None and url is None: raise TypeError('file_id or URL is missing') else: raise TypeError('Only one among file_id and URL must be passed') args = args if thumb_url is not None: args['thumb_url'] = thumb_url if None is not None: args['thumb_width'] = None if None is not None: args['thumb_height'] = None args = args if caption is not None: args['caption'] = caption if syntax is not None: args['parse_mode'] = syntax else: args['parse_mode'] = syntaxes.guess_syntax(caption, syntax) args = args if width is not None: args['gif_width'] = width if height is not None: args['gif_height'] = height if duration is not None: args['gif_duration'] = duration return args
botogram
positive
def test_well_known_kusto_endpoints_random_kusto_clusters(): for c in ['https://127.0.0.1', 'https://127.1.2.3', 'https://kustozszokb5yrauyq.westeurope.kusto.windows.net', 'https://kustofrbwrznltavls.centralus.kusto.windows.net', 'https://kusto7j53clqswr4he.germanywestcentral.kusto.windows.net', 'https://rpe2e0422132101fct2.eastus2euap.kusto.windows.net', 'https://kustooq2gdfraeaxtq.westcentralus.kusto.windows.net', 'https://kustoesp3ewo4s5cow.westcentralus.kusto.windows.net', 'https://kustowmd43nx4ihnjs.southeastasia.kusto.windows.net', 'https://createt210723t0601.westus2.kusto.windows.net', 'https://kusto2rkgmaskub3fy.eastus2.kusto.windows.net', 'https://kustou7u32pue4eij4.australiaeast.kusto.windows.net', 'https://kustohme3e2jnolxys.northeurope.kusto.windows.net', 'https://kustoas7cx3achaups.southcentralus.kusto.windows.net', 'https://rpe2e0104160100act.westus2.kusto.windows.net', 'https://kustox5obddk44367y.southcentralus.kusto.windows.net', 'https://kustortnjlydpe5l6u.canadacentral.kusto.windows.net', 'https://kustoz74sj7ikkvftk.southeastasia.kusto.windows.net', 'https://rpe2e1004182350fctf.westus2.kusto.windows.net', 'https://rpe2e1115095448act.westus2.kusto.windows.net', 'https://kustoxenx32x3tuznw.southafricawest.kusto.windows.net', 'https://kustowc3m5jpqtembw.canadacentral.kusto.windows.net', 'https://rpe2e1011182056fctf.westus2.kusto.windows.net', 'https://kusto3ge6xthiafqug.eastus.kusto.windows.net', 'https://teamsauditservice.westus.kusto.windows.net', 'https://kustooubnzekmh4doy.canadacentral.kusto.windows.net', 'https://rpe2e1206081632fct2f.westus2.kusto.windows.net', 'https://stopt402211020t0606.automationtestworkspace402.kusto.azuresynapse.net', 'https://delt402210818t2309.automationtestworkspace402.kusto.azuresynapse.net', 'https://kusto42iuqj4bejjxq.koreacentral.kusto.windows.net', 'https://kusto3rv75hibmg6vu.southeastasia.kusto.windows.net', 'https://kustogmhxb56nqjrje.westus2.kusto.windows.net', 'https://kustozu5wg2p3aw3um.koreasouth.kusto.windows.net', 'https://kustos36f2amn2agwk.australiaeast.kusto.windows.net', 'https://kustop4htq3k676jau.eastus.kusto.windows.net', 'https://kustojdny5lga53cts.southcentralus.kusto.windows.net', 'https://customerportalprodeast.kusto.windows.net', 'https://rpe2e0730231650und.westus2.kusto.windows.net', 'https://kusto7lxdbebadivjw.southeastasia.kusto.windows.net', 'https://alprd2neu000003s.northeurope.kusto.windows.net', 'https://kustontnwqy3eler5g.northeurope.kusto.windows.net', 'https://kustoap2wpozj7qpio.eastus.kusto.windows.net', 'https://kustoajnxslghxlee4.japaneast.kusto.windows.net', 'https://oiprdseau234x.australiasoutheast.kusto.windows.net', 'https://kusto7yevbo7ypsnx4.germanywestcentral.kusto.windows.net', 'https://kustoagph5odbqyquq.westus3.kusto.windows.net', 'https://kustovs2hxo3ftud5e.westeurope.kusto.windows.net', 'https://kustorzuk2dgiwdryc.uksouth.kusto.windows.net', 'https://kustovsb4ogsdniwqk.eastus2.kusto.windows.net', 'https://kusto3g3mpmkm3p3xc.switzerlandnorth.kusto.windows.net', 'https://kusto2e2o7er7ypx2o.westus2.kusto.windows.net', 'https://kustoa3qqlh23yksim.southafricawest.kusto.windows.net', 'https://rpe2evnt11021711comp.rpe2evnt11021711-wksp.kusto.azuresynapse.net', 'https://cdpkustoausas01.australiasoutheast.kusto.windows.net', 'https://testinge16cluster.uksouth.kusto.windows.net', 'https://testkustopoolbs6ond.workspacebs6ond.kusto.azuresynapse.net', 'https://offnodereportingbcdr1.southcentralus.kusto.windows.net', 'https://mhstorage16red.westus.kusto.windows.net', 
'https://kusto7kza5q2fmnh2w.northeurope.kusto.windows.net', 'https://tvmquerycanc.centralus.kusto.windows.net', 'https://kustowrcde4olp4zho.eastus.kusto.windows.net', 'https://delt403210910t0727.automationtestworkspace403.kusto.azuresynapse.net', 'https://foprdcq0004.brazilsouth.kusto.windows.net', 'https://rpe2e0827133746fctf.eastus2euap.kusto.windows.net', 'https://kustoz7yrvoaoa2yaa.australiaeast.kusto.windows.net', 'https://rpe2e1203125809und.westus2.kusto.windows.net', 'https://kustoywilbpggrltk4.francecentral.kusto.windows.net', 'https://stopt402210825t0408.automationtestworkspace402.kusto.azuresynapse.net', 'https://kustonryfjo5klvrh4.westeurope.kusto.windows.net', 'https://kustowwqgogzpseg6o.eastus2.kusto.windows.net', 'https://kustor3gjpwqum3olw.canadacentral.kusto.windows.net', 'https://dflskfdslfkdslkdsfldfs.westeurope.kusto.data.microsoft.com', 'https://dflskfdslfkdslkdsfldfs.z7.kusto.fabric.microsoft.com']: <DeepExtract> well_known_kusto_endpoints.validate_trusted_endpoint(c, DEFAULT_PUBLIC_LOGIN_URL) </DeepExtract> cluster_name = c.upper() <DeepExtract> well_known_kusto_endpoints.validate_trusted_endpoint(cluster_name, DEFAULT_PUBLIC_LOGIN_URL) </DeepExtract> special_urls = 'synapse' in c or 'data.microsoft.com' in c or 'fabric.microsoft.com' in c if not special_urls: cluster_name = c.replace('.kusto.', '.kustomfa.') <DeepExtract> well_known_kusto_endpoints.validate_trusted_endpoint(cluster_name, DEFAULT_PUBLIC_LOGIN_URL) </DeepExtract> if not special_urls: cluster_name = c.replace('.kusto.', '.kustodev.') <DeepExtract> well_known_kusto_endpoints.validate_trusted_endpoint(cluster_name, DEFAULT_PUBLIC_LOGIN_URL) </DeepExtract>
def test_well_known_kusto_endpoints_random_kusto_clusters(): for c in ['https://127.0.0.1', 'https://127.1.2.3', 'https://kustozszokb5yrauyq.westeurope.kusto.windows.net', 'https://kustofrbwrznltavls.centralus.kusto.windows.net', 'https://kusto7j53clqswr4he.germanywestcentral.kusto.windows.net', 'https://rpe2e0422132101fct2.eastus2euap.kusto.windows.net', 'https://kustooq2gdfraeaxtq.westcentralus.kusto.windows.net', 'https://kustoesp3ewo4s5cow.westcentralus.kusto.windows.net', 'https://kustowmd43nx4ihnjs.southeastasia.kusto.windows.net', 'https://createt210723t0601.westus2.kusto.windows.net', 'https://kusto2rkgmaskub3fy.eastus2.kusto.windows.net', 'https://kustou7u32pue4eij4.australiaeast.kusto.windows.net', 'https://kustohme3e2jnolxys.northeurope.kusto.windows.net', 'https://kustoas7cx3achaups.southcentralus.kusto.windows.net', 'https://rpe2e0104160100act.westus2.kusto.windows.net', 'https://kustox5obddk44367y.southcentralus.kusto.windows.net', 'https://kustortnjlydpe5l6u.canadacentral.kusto.windows.net', 'https://kustoz74sj7ikkvftk.southeastasia.kusto.windows.net', 'https://rpe2e1004182350fctf.westus2.kusto.windows.net', 'https://rpe2e1115095448act.westus2.kusto.windows.net', 'https://kustoxenx32x3tuznw.southafricawest.kusto.windows.net', 'https://kustowc3m5jpqtembw.canadacentral.kusto.windows.net', 'https://rpe2e1011182056fctf.westus2.kusto.windows.net', 'https://kusto3ge6xthiafqug.eastus.kusto.windows.net', 'https://teamsauditservice.westus.kusto.windows.net', 'https://kustooubnzekmh4doy.canadacentral.kusto.windows.net', 'https://rpe2e1206081632fct2f.westus2.kusto.windows.net', 'https://stopt402211020t0606.automationtestworkspace402.kusto.azuresynapse.net', 'https://delt402210818t2309.automationtestworkspace402.kusto.azuresynapse.net', 'https://kusto42iuqj4bejjxq.koreacentral.kusto.windows.net', 'https://kusto3rv75hibmg6vu.southeastasia.kusto.windows.net', 'https://kustogmhxb56nqjrje.westus2.kusto.windows.net', 'https://kustozu5wg2p3aw3um.koreasouth.kusto.windows.net', 'https://kustos36f2amn2agwk.australiaeast.kusto.windows.net', 'https://kustop4htq3k676jau.eastus.kusto.windows.net', 'https://kustojdny5lga53cts.southcentralus.kusto.windows.net', 'https://customerportalprodeast.kusto.windows.net', 'https://rpe2e0730231650und.westus2.kusto.windows.net', 'https://kusto7lxdbebadivjw.southeastasia.kusto.windows.net', 'https://alprd2neu000003s.northeurope.kusto.windows.net', 'https://kustontnwqy3eler5g.northeurope.kusto.windows.net', 'https://kustoap2wpozj7qpio.eastus.kusto.windows.net', 'https://kustoajnxslghxlee4.japaneast.kusto.windows.net', 'https://oiprdseau234x.australiasoutheast.kusto.windows.net', 'https://kusto7yevbo7ypsnx4.germanywestcentral.kusto.windows.net', 'https://kustoagph5odbqyquq.westus3.kusto.windows.net', 'https://kustovs2hxo3ftud5e.westeurope.kusto.windows.net', 'https://kustorzuk2dgiwdryc.uksouth.kusto.windows.net', 'https://kustovsb4ogsdniwqk.eastus2.kusto.windows.net', 'https://kusto3g3mpmkm3p3xc.switzerlandnorth.kusto.windows.net', 'https://kusto2e2o7er7ypx2o.westus2.kusto.windows.net', 'https://kustoa3qqlh23yksim.southafricawest.kusto.windows.net', 'https://rpe2evnt11021711comp.rpe2evnt11021711-wksp.kusto.azuresynapse.net', 'https://cdpkustoausas01.australiasoutheast.kusto.windows.net', 'https://testinge16cluster.uksouth.kusto.windows.net', 'https://testkustopoolbs6ond.workspacebs6ond.kusto.azuresynapse.net', 'https://offnodereportingbcdr1.southcentralus.kusto.windows.net', 'https://mhstorage16red.westus.kusto.windows.net', 
'https://kusto7kza5q2fmnh2w.northeurope.kusto.windows.net', 'https://tvmquerycanc.centralus.kusto.windows.net', 'https://kustowrcde4olp4zho.eastus.kusto.windows.net', 'https://delt403210910t0727.automationtestworkspace403.kusto.azuresynapse.net', 'https://foprdcq0004.brazilsouth.kusto.windows.net', 'https://rpe2e0827133746fctf.eastus2euap.kusto.windows.net', 'https://kustoz7yrvoaoa2yaa.australiaeast.kusto.windows.net', 'https://rpe2e1203125809und.westus2.kusto.windows.net', 'https://kustoywilbpggrltk4.francecentral.kusto.windows.net', 'https://stopt402210825t0408.automationtestworkspace402.kusto.azuresynapse.net', 'https://kustonryfjo5klvrh4.westeurope.kusto.windows.net', 'https://kustowwqgogzpseg6o.eastus2.kusto.windows.net', 'https://kustor3gjpwqum3olw.canadacentral.kusto.windows.net', 'https://dflskfdslfkdslkdsfldfs.westeurope.kusto.data.microsoft.com', 'https://dflskfdslfkdslkdsfldfs.z7.kusto.fabric.microsoft.com']: well_known_kusto_endpoints.validate_trusted_endpoint(c, DEFAULT_PUBLIC_LOGIN_URL) cluster_name = c.upper() well_known_kusto_endpoints.validate_trusted_endpoint(cluster_name, DEFAULT_PUBLIC_LOGIN_URL) special_urls = 'synapse' in c or 'data.microsoft.com' in c or 'fabric.microsoft.com' in c if not special_urls: cluster_name = c.replace('.kusto.', '.kustomfa.') well_known_kusto_endpoints.validate_trusted_endpoint(cluster_name, DEFAULT_PUBLIC_LOGIN_URL) if not special_urls: cluster_name = c.replace('.kusto.', '.kustodev.') well_known_kusto_endpoints.validate_trusted_endpoint(cluster_name, DEFAULT_PUBLIC_LOGIN_URL)
azure-kusto-python
positive
def get_feature_state(env, mask=None): <DeepExtract> if isinstance(env.get_curr_handcards(), list) or isinstance(env.get_curr_handcards(), np.ndarray): if len(env.get_curr_handcards()) == 0: curr_cards = [] chars = [card.Card.cards[c - 3] for c in env.get_curr_handcards()] curr_cards = chars else: curr_cards = card.Card.cards[env.get_curr_handcards() - 3] </DeepExtract> (curr_val, curr_round) = env.get_cards_value(card.Card.char2color(curr_cards)) if mask is None: <DeepExtract> mask = np.zeros([len(action_space)]) if not curr_cards: mask = mask for j in range(mask.size): if counter_subset(action_space[j], curr_cards): mask[j] = 1 if not to_char(env.get_last_outcards()): mask[0] = 0 mask = mask if len(to_char(env.get_last_outcards())) > 0: for j in range(1, mask.size): if mask[j] == 1 and (not card.CardGroup.to_cardgroup(action_space[j]).bigger_than(card.CardGroup.to_cardgroup(to_char(env.get_last_outcards())))): mask[j] = 0 mask = mask </DeepExtract> features = np.zeros([len(mask), 9]) features[:, 0] = mask.astype(np.int32) for i in range(mask.shape[0]): m = mask[i] if m: a = action_space[i] if not a: features[i, 1] = 1 continue next_cards = curr_cards.copy() for c in a: next_cards.remove(c) (next_val, next_round) = env.get_cards_value(card.Card.char2color(next_cards)) lose_control = env.will_lose_control(card.Card.char2value_3_17(a) + 3) if lose_control: features[i, 1] = 1 if len(a) >= len(curr_cards): features[i, 2] = 1 if next_val > curr_val: features[i, 3] = 1 if next_round < curr_round: features[i, 4] = 1 cnt = len(a) if cnt > 15: cnt = 15 features[i, 5] = cnt & 8 >> 3 features[i, 6] = cnt & 4 >> 2 features[i, 7] = cnt & 2 >> 1 features[i, 8] = cnt & 1 return features
def get_feature_state(env, mask=None): if isinstance(env.get_curr_handcards(), list) or isinstance(env.get_curr_handcards(), np.ndarray): if len(env.get_curr_handcards()) == 0: curr_cards = [] chars = [card.Card.cards[c - 3] for c in env.get_curr_handcards()] curr_cards = chars else: curr_cards = card.Card.cards[env.get_curr_handcards() - 3] (curr_val, curr_round) = env.get_cards_value(card.Card.char2color(curr_cards)) if mask is None: mask = np.zeros([len(action_space)]) if not curr_cards: mask = mask for j in range(mask.size): if counter_subset(action_space[j], curr_cards): mask[j] = 1 if not to_char(env.get_last_outcards()): mask[0] = 0 mask = mask if len(to_char(env.get_last_outcards())) > 0: for j in range(1, mask.size): if mask[j] == 1 and (not card.CardGroup.to_cardgroup(action_space[j]).bigger_than(card.CardGroup.to_cardgroup(to_char(env.get_last_outcards())))): mask[j] = 0 mask = mask features = np.zeros([len(mask), 9]) features[:, 0] = mask.astype(np.int32) for i in range(mask.shape[0]): m = mask[i] if m: a = action_space[i] if not a: features[i, 1] = 1 continue next_cards = curr_cards.copy() for c in a: next_cards.remove(c) (next_val, next_round) = env.get_cards_value(card.Card.char2color(next_cards)) lose_control = env.will_lose_control(card.Card.char2value_3_17(a) + 3) if lose_control: features[i, 1] = 1 if len(a) >= len(curr_cards): features[i, 2] = 1 if next_val > curr_val: features[i, 3] = 1 if next_round < curr_round: features[i, 4] = 1 cnt = len(a) if cnt > 15: cnt = 15 features[i, 5] = cnt & 8 >> 3 features[i, 6] = cnt & 4 >> 2 features[i, 7] = cnt & 2 >> 1 features[i, 8] = cnt & 1 return features
doudizhu-C
positive
def set_user_details(self): if self.account_client.get_real_name() is not None: self.real_name = self.account_client.get_real_name() <DeepExtract> self.realname_label.set_text(self.real_name) </DeepExtract> if self.account_client.get_face_path() is not None: self.face_image.set_from_path(self.account_client.get_face_path()) self.face_image.show()
def set_user_details(self): if self.account_client.get_real_name() is not None: self.real_name = self.account_client.get_real_name() self.realname_label.set_text(self.real_name) if self.account_client.get_face_path() is not None: self.face_image.set_from_path(self.account_client.get_face_path()) self.face_image.show()
cinnamon-screensaver
positive
def overlay_keypoints(image, predictions): keypoints = predictions.get_field('keypoints') kps = keypoints.keypoints scores = keypoints.get_field('logits') kps = torch.cat((kps[:, :, 0:2], scores[:, :, None]), dim=2).numpy() for region in kps: <DeepExtract> dataset_keypoints = PersonKeypoints.NAMES kp_lines = PersonKeypoints.CONNECTIONS cmap = plt.get_cmap('rainbow') colors = [cmap(i) for i in np.linspace(0, 1, len(kp_lines) + 2)] colors = [(c[2] * 255, c[1] * 255, c[0] * 255) for c in colors] kp_mask = np.copy(image) mid_shoulder = (region.transpose((1, 0))[:2, dataset_keypoints.index('right_shoulder')] + region.transpose((1, 0))[:2, dataset_keypoints.index('left_shoulder')]) / 2.0 sc_mid_shoulder = np.minimum(region.transpose((1, 0))[2, dataset_keypoints.index('right_shoulder')], region.transpose((1, 0))[2, dataset_keypoints.index('left_shoulder')]) mid_hip = (region.transpose((1, 0))[:2, dataset_keypoints.index('right_hip')] + region.transpose((1, 0))[:2, dataset_keypoints.index('left_hip')]) / 2.0 sc_mid_hip = np.minimum(region.transpose((1, 0))[2, dataset_keypoints.index('right_hip')], region.transpose((1, 0))[2, dataset_keypoints.index('left_hip')]) nose_idx = dataset_keypoints.index('nose') if sc_mid_shoulder > kp_thresh and region.transpose((1, 0))[2, nose_idx] > kp_thresh: cv2.line(kp_mask, tuple(mid_shoulder), tuple(region.transpose((1, 0))[:2, nose_idx]), color=colors[len(kp_lines)], thickness=2, lineType=cv2.LINE_AA) if sc_mid_shoulder > kp_thresh and sc_mid_hip > kp_thresh: cv2.line(kp_mask, tuple(mid_shoulder), tuple(mid_hip), color=colors[len(kp_lines) + 1], thickness=2, lineType=cv2.LINE_AA) for l in range(len(kp_lines)): i1 = kp_lines[l][0] i2 = kp_lines[l][1] p1 = (region.transpose((1, 0))[0, i1], region.transpose((1, 0))[1, i1]) p2 = (region.transpose((1, 0))[0, i2], region.transpose((1, 0))[1, i2]) if region.transpose((1, 0))[2, i1] > kp_thresh and region.transpose((1, 0))[2, i2] > kp_thresh: cv2.line(kp_mask, p1, p2, color=colors[l], thickness=2, lineType=cv2.LINE_AA) if region.transpose((1, 0))[2, i1] > kp_thresh: cv2.circle(kp_mask, p1, radius=3, color=colors[l], thickness=-1, lineType=cv2.LINE_AA) if region.transpose((1, 0))[2, i2] > kp_thresh: cv2.circle(kp_mask, p2, radius=3, color=colors[l], thickness=-1, lineType=cv2.LINE_AA) image = cv2.addWeighted(image, 1.0 - alpha, kp_mask, alpha, 0) </DeepExtract> return image
def overlay_keypoints(image, predictions): keypoints = predictions.get_field('keypoints') kps = keypoints.keypoints scores = keypoints.get_field('logits') kps = torch.cat((kps[:, :, 0:2], scores[:, :, None]), dim=2).numpy() for region in kps: dataset_keypoints = PersonKeypoints.NAMES kp_lines = PersonKeypoints.CONNECTIONS cmap = plt.get_cmap('rainbow') colors = [cmap(i) for i in np.linspace(0, 1, len(kp_lines) + 2)] colors = [(c[2] * 255, c[1] * 255, c[0] * 255) for c in colors] kp_mask = np.copy(image) mid_shoulder = (region.transpose((1, 0))[:2, dataset_keypoints.index('right_shoulder')] + region.transpose((1, 0))[:2, dataset_keypoints.index('left_shoulder')]) / 2.0 sc_mid_shoulder = np.minimum(region.transpose((1, 0))[2, dataset_keypoints.index('right_shoulder')], region.transpose((1, 0))[2, dataset_keypoints.index('left_shoulder')]) mid_hip = (region.transpose((1, 0))[:2, dataset_keypoints.index('right_hip')] + region.transpose((1, 0))[:2, dataset_keypoints.index('left_hip')]) / 2.0 sc_mid_hip = np.minimum(region.transpose((1, 0))[2, dataset_keypoints.index('right_hip')], region.transpose((1, 0))[2, dataset_keypoints.index('left_hip')]) nose_idx = dataset_keypoints.index('nose') if sc_mid_shoulder > kp_thresh and region.transpose((1, 0))[2, nose_idx] > kp_thresh: cv2.line(kp_mask, tuple(mid_shoulder), tuple(region.transpose((1, 0))[:2, nose_idx]), color=colors[len(kp_lines)], thickness=2, lineType=cv2.LINE_AA) if sc_mid_shoulder > kp_thresh and sc_mid_hip > kp_thresh: cv2.line(kp_mask, tuple(mid_shoulder), tuple(mid_hip), color=colors[len(kp_lines) + 1], thickness=2, lineType=cv2.LINE_AA) for l in range(len(kp_lines)): i1 = kp_lines[l][0] i2 = kp_lines[l][1] p1 = (region.transpose((1, 0))[0, i1], region.transpose((1, 0))[1, i1]) p2 = (region.transpose((1, 0))[0, i2], region.transpose((1, 0))[1, i2]) if region.transpose((1, 0))[2, i1] > kp_thresh and region.transpose((1, 0))[2, i2] > kp_thresh: cv2.line(kp_mask, p1, p2, color=colors[l], thickness=2, lineType=cv2.LINE_AA) if region.transpose((1, 0))[2, i1] > kp_thresh: cv2.circle(kp_mask, p1, radius=3, color=colors[l], thickness=-1, lineType=cv2.LINE_AA) if region.transpose((1, 0))[2, i2] > kp_thresh: cv2.circle(kp_mask, p2, radius=3, color=colors[l], thickness=-1, lineType=cv2.LINE_AA) image = cv2.addWeighted(image, 1.0 - alpha, kp_mask, alpha, 0) return image
dgmn
positive
def validate(val_loader, model, criterion, args): batch_time = AverageMeter('Time', ':6.3f') losses = AverageMeter('Loss', ':.4e') top1 = AverageMeter('Acc@1', ':6.2f') top5 = AverageMeter('Acc@5', ':6.2f') progress = ProgressMeter(len(val_loader), [batch_time, losses, top1, top5], prefix='Test: ') model.eval() with torch.no_grad(): end = time.time() for (i, (images, target)) in enumerate(val_loader): if args.gpu is not None: images = images.cuda(args.gpu, non_blocking=True) target = target.cuda(args.gpu, non_blocking=True) output = model(images) loss = criterion(output, target) <DeepExtract> with torch.no_grad(): maxk = max((1, 5)) batch_size = target.size(0) (_, pred) = output.topk(maxk, 1, True, True) pred = pred.t() correct = pred.eq(target.view(1, -1).expand_as(pred)) res = [] for k in (1, 5): correct_k = correct[:k].view(-1).float().sum(0, keepdim=True) res.append(correct_k.mul_(100.0 / batch_size)) (acc1, acc5) = res </DeepExtract> losses.update(loss.item(), images.size(0)) top1.update(acc1[0], images.size(0)) top5.update(acc5[0], images.size(0)) batch_time.update(time.time() - end) end = time.time() if i % args.print_freq == 0: progress.display(i) print(' * Acc@1 {top1.avg:.3f} Acc@5 {top5.avg:.3f}'.format(top1=top1, top5=top5)) return top1.avg
def validate(val_loader, model, criterion, args): batch_time = AverageMeter('Time', ':6.3f') losses = AverageMeter('Loss', ':.4e') top1 = AverageMeter('Acc@1', ':6.2f') top5 = AverageMeter('Acc@5', ':6.2f') progress = ProgressMeter(len(val_loader), [batch_time, losses, top1, top5], prefix='Test: ') model.eval() with torch.no_grad(): end = time.time() for (i, (images, target)) in enumerate(val_loader): if args.gpu is not None: images = images.cuda(args.gpu, non_blocking=True) target = target.cuda(args.gpu, non_blocking=True) output = model(images) loss = criterion(output, target) with torch.no_grad(): maxk = max((1, 5)) batch_size = target.size(0) (_, pred) = output.topk(maxk, 1, True, True) pred = pred.t() correct = pred.eq(target.view(1, -1).expand_as(pred)) res = [] for k in (1, 5): correct_k = correct[:k].view(-1).float().sum(0, keepdim=True) res.append(correct_k.mul_(100.0 / batch_size)) (acc1, acc5) = res losses.update(loss.item(), images.size(0)) top1.update(acc1[0], images.size(0)) top5.update(acc5[0], images.size(0)) batch_time.update(time.time() - end) end = time.time() if i % args.print_freq == 0: progress.display(i) print(' * Acc@1 {top1.avg:.3f} Acc@5 {top5.avg:.3f}'.format(top1=top1, top5=top5)) return top1.avg
CV_LTH_Pre-training
positive
@mock.patch.object(time, 'sleep') def testUploadPassphraseWithRequestError(self, sleep_mock): <DeepExtract> self.c._GetMetadata = mock.Mock(return_value={'foo': 'bar'}) self.c._FetchXsrfToken = mock.Mock(return_value='token') side_effect = [] for code in [403]: if code == httplib.OK: mock_response = mock.Mock() mock_response.code = code side_effect.append(mock_response) else: mock_fp = StringIO.StringIO('Detailed error message for %s.' % code) exc = base_client.urllib2.HTTPError('url', code, httplib.responses[code], {}, mock_fp) side_effect.append(exc) self.c.opener = mock.Mock(spec=urllib2.OpenerDirector) self.c.opener.open.side_effect = side_effect </DeepExtract> with self.assertRaisesRegexp(base_client.RequestError, 'Uploading passphrase failed: HTTP Error 403: Forbidden: Detailed error message for 403.'): self.c.UploadPassphrase('foo', 'bar') sleep_mock.assert_not_called()
@mock.patch.object(time, 'sleep') def testUploadPassphraseWithRequestError(self, sleep_mock): self.c._GetMetadata = mock.Mock(return_value={'foo': 'bar'}) self.c._FetchXsrfToken = mock.Mock(return_value='token') side_effect = [] for code in [403]: if code == httplib.OK: mock_response = mock.Mock() mock_response.code = code side_effect.append(mock_response) else: mock_fp = StringIO.StringIO('Detailed error message for %s.' % code) exc = base_client.urllib2.HTTPError('url', code, httplib.responses[code], {}, mock_fp) side_effect.append(exc) self.c.opener = mock.Mock(spec=urllib2.OpenerDirector) self.c.opener.open.side_effect = side_effect with self.assertRaisesRegexp(base_client.RequestError, 'Uploading passphrase failed: HTTP Error 403: Forbidden: Detailed error message for 403.'): self.c.UploadPassphrase('foo', 'bar') sleep_mock.assert_not_called()
cauliflowervest
positive
def _iterencode_default(self, o, markers=None): <DeepExtract> raise TypeError('%r is not JSON serializable' % (o,)) </DeepExtract> return self._iterencode(newobj, markers)
def _iterencode_default(self, o, markers=None): raise TypeError('%r is not JSON serializable' % (o,)) return self._iterencode(newobj, markers)
delicious2google
positive
def setup_model(args): """Setup model.""" <DeepExtract> print_rank_0('building CPM model ...') model = GPT2Model(num_layers=args.num_layers, vocab_size=args.vocab_size, hidden_size=args.hidden_size, num_attention_heads=args.num_attention_heads, embedding_dropout_prob=args.hidden_dropout, attention_dropout_prob=args.attention_dropout, output_dropout_prob=args.hidden_dropout, max_sequence_length=args.max_position_embeddings, checkpoint_activations=args.checkpoint_activations, checkpoint_num_layers=args.checkpoint_num_layers, parallel_output=args.parallel_output) if mpu.get_data_parallel_rank() == 0: print(' > number of parameters on model parallel rank {}: {}'.format(mpu.get_model_parallel_rank(), sum([p.nelement() for p in model.parameters()])), flush=True) model.cuda(torch.cuda.current_device()) if args.fp16: model = FP16_Module(model) if USE_TORCH_DDP: i = torch.cuda.current_device() model = DDP(model, device_ids=[i], output_device=i, process_group=mpu.get_data_parallel_group()) else: model = DDP(model) model = model </DeepExtract> args.iteration = load_checkpoint_model(model, args) return model
def setup_model(args): """Setup model.""" print_rank_0('building CPM model ...') model = GPT2Model(num_layers=args.num_layers, vocab_size=args.vocab_size, hidden_size=args.hidden_size, num_attention_heads=args.num_attention_heads, embedding_dropout_prob=args.hidden_dropout, attention_dropout_prob=args.attention_dropout, output_dropout_prob=args.hidden_dropout, max_sequence_length=args.max_position_embeddings, checkpoint_activations=args.checkpoint_activations, checkpoint_num_layers=args.checkpoint_num_layers, parallel_output=args.parallel_output) if mpu.get_data_parallel_rank() == 0: print(' > number of parameters on model parallel rank {}: {}'.format(mpu.get_model_parallel_rank(), sum([p.nelement() for p in model.parameters()])), flush=True) model.cuda(torch.cuda.current_device()) if args.fp16: model = FP16_Module(model) if USE_TORCH_DDP: i = torch.cuda.current_device() model = DDP(model, device_ids=[i], output_device=i, process_group=mpu.get_data_parallel_group()) else: model = DDP(model) model = model args.iteration = load_checkpoint_model(model, args) return model
CPM-1-Generate
positive
def OnContextMenu(self, event=None): removed_expand = None allow_rename = True allow_trash = True <DeepExtract> base_selected = wx.TreeCtrl.GetSelections(self) if len(base_selected) == 1 and self.root_item in base_selected: selected = tuple(base_selected) selected = [] for BASE in base_selected: for ITEM in self.item_list: if ITEM.GetBaseItem() == BASE: selected.append(ITEM) selected = tuple(selected) </DeepExtract> if len(selected) > 1: allow_rename = False for ITEM in selected: if ITEM.Type != 'folder': removed_expand = self.ctx_menu.Remove(menuid.EXPAND) break elif isinstance(selected[0], PathItem) and selected[0].Type != 'folder': removed_expand = self.ctx_menu.Remove(menuid.EXPAND) elif selected and isinstance(selected[0], wx.TreeItemId) and (selected[0] == self.root_item): logger.debug('Root item selected') removed_menus = [] menu_ids = [wx.ID_ADD, None, menuid.RENAME] if self.trash: menu_ids.append(wx.ID_DELETE) for MENU_ID in menu_ids: if not MENU_ID: removed_menus.append(None) else: removed_menus.append(self.ctx_menu.Remove(MENU_ID)) expand_label = GT('Collapse') if not self.IsExpanded(self.root_item): expand_label = GT('Expand') self.ctx_menu.SetLabel(menuid.EXPAND, expand_label) self.PopupMenu(self.ctx_menu) for INDEX in range(len(removed_menus)): menu = removed_menus[INDEX] if menu: self.ctx_menu.InsertItem(INDEX, menu) return if selected: if not removed_expand: expand_label = GT('Collapse') for ITEM in selected: if not self.IsExpanded(ITEM): expand_label = GT('Expand') break self.ctx_menu.SetLabel(menuid.EXPAND, expand_label) for ITEM in self.mount_list: if ITEM in selected: allow_rename = False allow_trash = False break self.ctx_menu.Enable(menuid.RENAME, allow_rename) if self.trash: self.ctx_menu.Enable(wx.ID_DELETE, allow_trash) self.PopupMenu(self.ctx_menu) if removed_expand: self.ctx_menu.InsertItem(1, removed_expand) else: logger.debug('No items were selected')
def OnContextMenu(self, event=None): removed_expand = None allow_rename = True allow_trash = True base_selected = wx.TreeCtrl.GetSelections(self) if len(base_selected) == 1 and self.root_item in base_selected: selected = tuple(base_selected) selected = [] for BASE in base_selected: for ITEM in self.item_list: if ITEM.GetBaseItem() == BASE: selected.append(ITEM) selected = tuple(selected) if len(selected) > 1: allow_rename = False for ITEM in selected: if ITEM.Type != 'folder': removed_expand = self.ctx_menu.Remove(menuid.EXPAND) break elif isinstance(selected[0], PathItem) and selected[0].Type != 'folder': removed_expand = self.ctx_menu.Remove(menuid.EXPAND) elif selected and isinstance(selected[0], wx.TreeItemId) and (selected[0] == self.root_item): logger.debug('Root item selected') removed_menus = [] menu_ids = [wx.ID_ADD, None, menuid.RENAME] if self.trash: menu_ids.append(wx.ID_DELETE) for MENU_ID in menu_ids: if not MENU_ID: removed_menus.append(None) else: removed_menus.append(self.ctx_menu.Remove(MENU_ID)) expand_label = GT('Collapse') if not self.IsExpanded(self.root_item): expand_label = GT('Expand') self.ctx_menu.SetLabel(menuid.EXPAND, expand_label) self.PopupMenu(self.ctx_menu) for INDEX in range(len(removed_menus)): menu = removed_menus[INDEX] if menu: self.ctx_menu.InsertItem(INDEX, menu) return if selected: if not removed_expand: expand_label = GT('Collapse') for ITEM in selected: if not self.IsExpanded(ITEM): expand_label = GT('Expand') break self.ctx_menu.SetLabel(menuid.EXPAND, expand_label) for ITEM in self.mount_list: if ITEM in selected: allow_rename = False allow_trash = False break self.ctx_menu.Enable(menuid.RENAME, allow_rename) if self.trash: self.ctx_menu.Enable(wx.ID_DELETE, allow_trash) self.PopupMenu(self.ctx_menu) if removed_expand: self.ctx_menu.InsertItem(1, removed_expand) else: logger.debug('No items were selected')
debreate
positive
def show(self): if self.property('animatable'): <DeepExtract> self._size_anim.start() self._opacity_anim.start() </DeepExtract> self.move(QtGui.QCursor.pos()) super(MPopup, self).show() self.activateWindow()
def show(self): if self.property('animatable'): self._size_anim.start() self._opacity_anim.start() self.move(QtGui.QCursor.pos()) super(MPopup, self).show() self.activateWindow()
dayu_widgets
positive
def edta(s, t, indel_cost=1, replace_cost=lambda a, b: 1): """ EDTA Edit Distance Alignment http://rosalind.info/problems/edta/ Given: Two protein strings s and t (with each string having length at most 1000 aa). Return: The edit distance dE(s,t) followed by two augmented strings s' and t' representing an optimal alignment of s and t. Most of the work is already done by EDIT; we merely need to backtrack through the matrix and to construct the alignment. """ def backtrack(): (m, n) = matrix.shape m -= 1 n -= 1 s1 = [] t1 = [] while m > 0 and n > 0: moves = [(m - 1, n), (m, n - 1), (m - 1, n - 1)] scores = [matrix[m - 1, n] + indel_cost, matrix[m, n - 1] + indel_cost, matrix[m - 1, n - 1] + (0 if s[m - 1] == t[n - 1] else replace_cost(s[m - 1], t[n - 1]))] ss = [s[m - 1], '-', s[m - 1]] ts = ['-', t[n - 1], t[n - 1]] index = np.argmin(scores) (m, n) = moves[index] s1.append(ss[index]) t1.append(ts[index]) s1.reverse() t1.reverse() return (''.join(s1), ''.join(t1)) <DeepExtract> m = len(s) n = len(t) matrix = np.full((m + 1, n + 1), np.nan) matrix[0, :] = list(range(n + 1)) matrix[:, 0] = list(range(m + 1)) for i in range(1, len(s) + 1): for j in range(1, len(t) + 1): matrix[i, j] = min(matrix[i - 1, j] + indel_cost, matrix[i, j - 1] + indel_cost, matrix[i - 1, j - 1] + (0 if s[i - 1] == t[j - 1] else replace_cost(s[i - 1], t[j - 1]))) (dist, matrix) = (matrix[m, n], matrix) </DeepExtract> <DeepExtract> i = len(s) j = len(t) score = matrix[i][j] s1 = [] t1 = [] if showPath: print('Path') print(i, j) while i > 0 and j > 0: (i, j, di, dj) = moves[i, j] if di == 0: s1.append('-') t1.append(t[j]) elif dj == 0: s1.append(s[i]) t1.append('-') else: s1.append(s[i]) t1.append(t[j]) if showPath: print(i, j, di, dj, s1[-1], t1[-1]) (s1, t1) = (score, s1[::-1], t1[::-1]) </DeepExtract> return (dist, s1, t1)
def edta(s, t, indel_cost=1, replace_cost=lambda a, b: 1): """ EDTA Edit Distance Alignment http://rosalind.info/problems/edta/ Given: Two protein strings s and t (with each string having length at most 1000 aa). Return: The edit distance dE(s,t) followed by two augmented strings s' and t' representing an optimal alignment of s and t. Most of the work is already done by EDIT; we merely need to backtrack through the matrix and to construct the alignment. """ def backtrack(): (m, n) = matrix.shape m -= 1 n -= 1 s1 = [] t1 = [] while m > 0 and n > 0: moves = [(m - 1, n), (m, n - 1), (m - 1, n - 1)] scores = [matrix[m - 1, n] + indel_cost, matrix[m, n - 1] + indel_cost, matrix[m - 1, n - 1] + (0 if s[m - 1] == t[n - 1] else replace_cost(s[m - 1], t[n - 1]))] ss = [s[m - 1], '-', s[m - 1]] ts = ['-', t[n - 1], t[n - 1]] index = np.argmin(scores) (m, n) = moves[index] s1.append(ss[index]) t1.append(ts[index]) s1.reverse() t1.reverse() return (''.join(s1), ''.join(t1)) m = len(s) n = len(t) matrix = np.full((m + 1, n + 1), np.nan) matrix[0, :] = list(range(n + 1)) matrix[:, 0] = list(range(m + 1)) for i in range(1, len(s) + 1): for j in range(1, len(t) + 1): matrix[i, j] = min(matrix[i - 1, j] + indel_cost, matrix[i, j - 1] + indel_cost, matrix[i - 1, j - 1] + (0 if s[i - 1] == t[j - 1] else replace_cost(s[i - 1], t[j - 1]))) (dist, matrix) = (matrix[m, n], matrix) i = len(s) j = len(t) score = matrix[i][j] s1 = [] t1 = [] if showPath: print('Path') print(i, j) while i > 0 and j > 0: (i, j, di, dj) = moves[i, j] if di == 0: s1.append('-') t1.append(t[j]) elif dj == 0: s1.append(s[i]) t1.append('-') else: s1.append(s[i]) t1.append(t[j]) if showPath: print(i, j, di, dj, s1[-1], t1[-1]) (s1, t1) = (score, s1[::-1], t1[::-1]) return (dist, s1, t1)
bioinformatics
positive
def assign(self, bboxes, gt_bboxes, gt_bboxes_ignore=None, gt_labels=None): """Assign gt to bboxes. This method assign a gt bbox to every bbox (proposal/anchor), each bbox will be assigned with -1, 0, or a positive number. -1 means don't care, 0 means negative sample, positive number is the index (1-based) of assigned gt. The assignment is done in following steps, the order matters. 1. assign every bbox to -1 2. assign proposals whose iou with all gts < neg_iou_thr to 0 3. for each bbox, if the iou with its nearest gt >= pos_iou_thr, assign it to that bbox 4. for each gt bbox, assign its nearest proposals (may be more than one) to itself Args: bboxes (Tensor): Bounding boxes to be assigned, shape(n, 4). gt_bboxes (Tensor): Groundtruth boxes, shape (k, 4). gt_bboxes_ignore (Tensor, optional): Ground truth bboxes that are labelled as `ignored`, e.g., crowd boxes in COCO. gt_labels (Tensor, optional): Label of gt_bboxes, shape (k, ). Returns: :obj:`AssignResult`: The assign result. """ if bboxes.shape[0] == 0 or gt_bboxes.shape[0] == 0: raise ValueError('No gt or bboxes') bboxes = bboxes[:, :4] overlaps = bbox_overlaps(gt_bboxes, bboxes) if self.ignore_iof_thr > 0 and gt_bboxes_ignore is not None and (gt_bboxes_ignore.numel() > 0): if self.ignore_wrt_candidates: ignore_overlaps = bbox_overlaps(bboxes, gt_bboxes_ignore, mode='iof') (ignore_max_overlaps, _) = ignore_overlaps.max(dim=1) else: ignore_overlaps = bbox_overlaps(gt_bboxes_ignore, bboxes, mode='iof') (ignore_max_overlaps, _) = ignore_overlaps.max(dim=0) overlaps[:, ignore_max_overlaps > self.ignore_iof_thr] = -1 <DeepExtract> if overlaps.numel() == 0: raise ValueError('No gt or proposals') (num_gts, num_bboxes) = (overlaps.size(0), overlaps.size(1)) assigned_gt_inds = overlaps.new_full((num_bboxes,), -1, dtype=torch.long) (max_overlaps, argmax_overlaps) = overlaps.max(dim=0) (gt_max_overlaps, gt_argmax_overlaps) = overlaps.max(dim=1) if isinstance(self.neg_iou_thr, float): assigned_gt_inds[(max_overlaps >= 0) & (max_overlaps < self.neg_iou_thr)] = 0 elif isinstance(self.neg_iou_thr, tuple): assert len(self.neg_iou_thr) == 2 assigned_gt_inds[(max_overlaps >= self.neg_iou_thr[0]) & (max_overlaps < self.neg_iou_thr[1])] = 0 pos_inds = max_overlaps >= self.pos_iou_thr assigned_gt_inds[pos_inds] = argmax_overlaps[pos_inds] + 1 for i in range(num_gts): if gt_max_overlaps[i] >= self.min_pos_iou: if self.gt_max_assign_all: max_iou_inds = overlaps[i, :] == gt_max_overlaps[i] assigned_gt_inds[max_iou_inds] = i + 1 else: assigned_gt_inds[gt_argmax_overlaps[i]] = i + 1 if gt_labels is not None: assigned_labels = assigned_gt_inds.new_zeros((num_bboxes,)) pos_inds = torch.nonzero(assigned_gt_inds > 0).squeeze() if pos_inds.numel() > 0: assigned_labels[pos_inds] = gt_labels[assigned_gt_inds[pos_inds] - 1] else: assigned_labels = None assign_result = AssignResult(num_gts, assigned_gt_inds, max_overlaps, labels=assigned_labels) </DeepExtract> return assign_result
def assign(self, bboxes, gt_bboxes, gt_bboxes_ignore=None, gt_labels=None): """Assign gt to bboxes. This method assign a gt bbox to every bbox (proposal/anchor), each bbox will be assigned with -1, 0, or a positive number. -1 means don't care, 0 means negative sample, positive number is the index (1-based) of assigned gt. The assignment is done in following steps, the order matters. 1. assign every bbox to -1 2. assign proposals whose iou with all gts < neg_iou_thr to 0 3. for each bbox, if the iou with its nearest gt >= pos_iou_thr, assign it to that bbox 4. for each gt bbox, assign its nearest proposals (may be more than one) to itself Args: bboxes (Tensor): Bounding boxes to be assigned, shape(n, 4). gt_bboxes (Tensor): Groundtruth boxes, shape (k, 4). gt_bboxes_ignore (Tensor, optional): Ground truth bboxes that are labelled as `ignored`, e.g., crowd boxes in COCO. gt_labels (Tensor, optional): Label of gt_bboxes, shape (k, ). Returns: :obj:`AssignResult`: The assign result. """ if bboxes.shape[0] == 0 or gt_bboxes.shape[0] == 0: raise ValueError('No gt or bboxes') bboxes = bboxes[:, :4] overlaps = bbox_overlaps(gt_bboxes, bboxes) if self.ignore_iof_thr > 0 and gt_bboxes_ignore is not None and (gt_bboxes_ignore.numel() > 0): if self.ignore_wrt_candidates: ignore_overlaps = bbox_overlaps(bboxes, gt_bboxes_ignore, mode='iof') (ignore_max_overlaps, _) = ignore_overlaps.max(dim=1) else: ignore_overlaps = bbox_overlaps(gt_bboxes_ignore, bboxes, mode='iof') (ignore_max_overlaps, _) = ignore_overlaps.max(dim=0) overlaps[:, ignore_max_overlaps > self.ignore_iof_thr] = -1 if overlaps.numel() == 0: raise ValueError('No gt or proposals') (num_gts, num_bboxes) = (overlaps.size(0), overlaps.size(1)) assigned_gt_inds = overlaps.new_full((num_bboxes,), -1, dtype=torch.long) (max_overlaps, argmax_overlaps) = overlaps.max(dim=0) (gt_max_overlaps, gt_argmax_overlaps) = overlaps.max(dim=1) if isinstance(self.neg_iou_thr, float): assigned_gt_inds[(max_overlaps >= 0) & (max_overlaps < self.neg_iou_thr)] = 0 elif isinstance(self.neg_iou_thr, tuple): assert len(self.neg_iou_thr) == 2 assigned_gt_inds[(max_overlaps >= self.neg_iou_thr[0]) & (max_overlaps < self.neg_iou_thr[1])] = 0 pos_inds = max_overlaps >= self.pos_iou_thr assigned_gt_inds[pos_inds] = argmax_overlaps[pos_inds] + 1 for i in range(num_gts): if gt_max_overlaps[i] >= self.min_pos_iou: if self.gt_max_assign_all: max_iou_inds = overlaps[i, :] == gt_max_overlaps[i] assigned_gt_inds[max_iou_inds] = i + 1 else: assigned_gt_inds[gt_argmax_overlaps[i]] = i + 1 if gt_labels is not None: assigned_labels = assigned_gt_inds.new_zeros((num_bboxes,)) pos_inds = torch.nonzero(assigned_gt_inds > 0).squeeze() if pos_inds.numel() > 0: assigned_labels[pos_inds] = gt_labels[assigned_gt_inds[pos_inds] - 1] else: assigned_labels = None assign_result = AssignResult(num_gts, assigned_gt_inds, max_overlaps, labels=assigned_labels) return assign_result
Cross-iterationBatchNorm
positive
def _benchmark_dtw(): size1 = 2 ** 10 size2 = 2 ** 10 freq1 = 1 / size1 freq2 = 1 / size2 input1 = np.cos(np.arange(size1) * 2 * np.pi * freq1) input2 = np.sin(np.arange(size2) * 2 * np.pi * freq2) + 0.1 * np.random.random(size=size2) global series1, series2 <DeepExtract> series1 = TimeSeries.from_values(np.array(input1) if isinstance(input1, list) else input1) </DeepExtract> <DeepExtract> series2 = TimeSeries.from_values(np.array(input2) if isinstance(input2, list) else input2) </DeepExtract> import cProfile cProfile.run('_dtw_exact()', sort='tottime') cProfile.run('_dtw_multigrid()', sort='tottime')
def _benchmark_dtw(): size1 = 2 ** 10 size2 = 2 ** 10 freq1 = 1 / size1 freq2 = 1 / size2 input1 = np.cos(np.arange(size1) * 2 * np.pi * freq1) input2 = np.sin(np.arange(size2) * 2 * np.pi * freq2) + 0.1 * np.random.random(size=size2) global series1, series2 series1 = TimeSeries.from_values(np.array(input1) if isinstance(input1, list) else input1) series2 = TimeSeries.from_values(np.array(input2) if isinstance(input2, list) else input2) import cProfile cProfile.run('_dtw_exact()', sort='tottime') cProfile.run('_dtw_multigrid()', sort='tottime')
darts
positive
def run(self): fetch = self.fetch.split(',') if isinstance(self.fetch, str) else self.fetch feeds = self.feeds.split(',') if isinstance(self.feeds, str) else self.feeds subdirs = [str(path) for path in Path(self.path_saved_model).iterdir() if path.is_dir() and 'temp' not in str(path)] latest = str(sorted(subdirs)[-1]) LOGGER.info(f'Using SavedModel {latest}') with tf.Session(graph=tf.Graph()) as sess: meta_graph_def = tf.saved_model.loader.load(sess, ['serve'], latest) graph_def = meta_graph_def.graph_def if INIT_ALL_TABLES in {node.name for node in graph_def.node}: fetch.append(INIT_ALL_TABLES) else: table_initializers = tf.get_collection(tf.GraphKeys.TABLE_INITIALIZERS) if table_initializers: LOGGER.info(f'Adding {INIT_ALL_TABLES} Node to the graph') table_init_op = tf.group(*table_initializers, name=INIT_ALL_TABLES) node_def = table_init_op.node_def graph_def.node.append(node_def) fetch.append(INIT_ALL_TABLES) <DeepExtract> nodes = [] for node in graph_def.node: new_node = tf.NodeDef() new_node.CopyFrom(node) nodes.append(new_node) if node.name in self.new_names: new_node.name = self.new_names[node.name] LOGGER.info(f'Node renamed: {node.name} -> {new_node.name}') if not set(self.new_names.values()) <= set((node.name for node in nodes)): missing = set(self.new_names.values()) - set((node.name for node in nodes)) raise TensorsNotFoundError(missing) for node in nodes: for (idx, name) in enumerate(node.input): node.input[idx] = self.new_names[name] if name in self.new_names else name if '_class' in node.attr: attr = node.attr['_class'] for (idx, item) in enumerate(attr.list.s): loc_match = re.match('^loc:@(.+)$', item.decode()) if loc_match and loc_match.groups()[0] in self.new_names: new_name = self.new_names[loc_match.groups()[0]] attr.list.s[idx] = f'loc:@{new_name}'.encode() new_graph = tf.GraphDef() new_graph.node.extend(nodes) graph_def = new_graph </DeepExtract> <DeepExtract> nodes = [] for node in graph_def.node: if node.name not in feeds and node.op == 'Placeholder': LOGGER.info(f'Removing placeholder {node.name}') continue new_node = tf.NodeDef() if node.name in feeds and node.op != 'Placeholder': LOGGER.info(f'Creating placeholder {node.name}') new_node.name = node.name new_node.op = 'Placeholder' new_node.attr['shape'].CopyFrom(tf.AttrValue(shape=node.attr['_output_shapes'].list.shape[0])) new_node.attr['dtype'].CopyFrom(node.attr['T']) else: new_node.CopyFrom(node) nodes.append(new_node) if not set(feeds) <= set((node.name for node in nodes)): missing = set(feeds) - set((node.name for node in nodes)) raise TensorsNotFoundError(missing) new_graph = tf.GraphDef() new_graph.node.extend(nodes) graph_def = new_graph </DeepExtract> graph_def = extract_sub_graph(graph_def, fetch) graph_def = freeze_graph_with_def_protos(input_graph_def=graph_def, input_saver_def=None, input_checkpoint=None, output_node_names=','.join(fetch), restore_op_name=None, filename_tensor_name=None, output_graph=None, clear_devices=True, initializer_nodes=None, variable_names_blacklist=','.join(self.blacklisted_variables), input_saved_model_dir=latest, saved_model_tags=['serve']) tf.io.write_graph(graph_def, logdir=self.path_optimized_model, name=self.graph_name, as_text=False) LOGGER.info(f'Optimized Model successfully exported to {self.path_optimized_model}/{self.graph_name}')
def run(self): fetch = self.fetch.split(',') if isinstance(self.fetch, str) else self.fetch feeds = self.feeds.split(',') if isinstance(self.feeds, str) else self.feeds subdirs = [str(path) for path in Path(self.path_saved_model).iterdir() if path.is_dir() and 'temp' not in str(path)] latest = str(sorted(subdirs)[-1]) LOGGER.info(f'Using SavedModel {latest}') with tf.Session(graph=tf.Graph()) as sess: meta_graph_def = tf.saved_model.loader.load(sess, ['serve'], latest) graph_def = meta_graph_def.graph_def if INIT_ALL_TABLES in {node.name for node in graph_def.node}: fetch.append(INIT_ALL_TABLES) else: table_initializers = tf.get_collection(tf.GraphKeys.TABLE_INITIALIZERS) if table_initializers: LOGGER.info(f'Adding {INIT_ALL_TABLES} Node to the graph') table_init_op = tf.group(*table_initializers, name=INIT_ALL_TABLES) node_def = table_init_op.node_def graph_def.node.append(node_def) fetch.append(INIT_ALL_TABLES) nodes = [] for node in graph_def.node: new_node = tf.NodeDef() new_node.CopyFrom(node) nodes.append(new_node) if node.name in self.new_names: new_node.name = self.new_names[node.name] LOGGER.info(f'Node renamed: {node.name} -> {new_node.name}') if not set(self.new_names.values()) <= set((node.name for node in nodes)): missing = set(self.new_names.values()) - set((node.name for node in nodes)) raise TensorsNotFoundError(missing) for node in nodes: for (idx, name) in enumerate(node.input): node.input[idx] = self.new_names[name] if name in self.new_names else name if '_class' in node.attr: attr = node.attr['_class'] for (idx, item) in enumerate(attr.list.s): loc_match = re.match('^loc:@(.+)$', item.decode()) if loc_match and loc_match.groups()[0] in self.new_names: new_name = self.new_names[loc_match.groups()[0]] attr.list.s[idx] = f'loc:@{new_name}'.encode() new_graph = tf.GraphDef() new_graph.node.extend(nodes) graph_def = new_graph nodes = [] for node in graph_def.node: if node.name not in feeds and node.op == 'Placeholder': LOGGER.info(f'Removing placeholder {node.name}') continue new_node = tf.NodeDef() if node.name in feeds and node.op != 'Placeholder': LOGGER.info(f'Creating placeholder {node.name}') new_node.name = node.name new_node.op = 'Placeholder' new_node.attr['shape'].CopyFrom(tf.AttrValue(shape=node.attr['_output_shapes'].list.shape[0])) new_node.attr['dtype'].CopyFrom(node.attr['T']) else: new_node.CopyFrom(node) nodes.append(new_node) if not set(feeds) <= set((node.name for node in nodes)): missing = set(feeds) - set((node.name for node in nodes)) raise TensorsNotFoundError(missing) new_graph = tf.GraphDef() new_graph.node.extend(nodes) graph_def = new_graph graph_def = extract_sub_graph(graph_def, fetch) graph_def = freeze_graph_with_def_protos(input_graph_def=graph_def, input_saver_def=None, input_checkpoint=None, output_node_names=','.join(fetch), restore_op_name=None, filename_tensor_name=None, output_graph=None, clear_devices=True, initializer_nodes=None, variable_names_blacklist=','.join(self.blacklisted_variables), input_saved_model_dir=latest, saved_model_tags=['serve']) tf.io.write_graph(graph_def, logdir=self.path_optimized_model, name=self.graph_name, as_text=False) LOGGER.info(f'Optimized Model successfully exported to {self.path_optimized_model}/{self.graph_name}')
deepr
positive
def _set_frequency(self, frequency): if frequency is None: <DeepExtract> if self._closed: return if self.gpio_pin is None: raise PinUnsupported('GPIO was enabled, but is not supported on pin %r' % self.gpio_pin) self._disable_pwm() if not self.gpio_active: self.gpio_pin.open() self.gpio_active = True </DeepExtract> else: <DeepExtract> if self._closed: return if self.pwm_pin is None: raise PinPWMUnsupported('PWM was enabled, but is not supported on pin %r' % self.pwm_pin) self._disable_gpio() if not self.pwm_active: self.pwm_pin.open() self.pwm_active = True </DeepExtract> self.pwm_pin.set_period_ns(_NS_PER_SECOND / frequency)
def _set_frequency(self, frequency): if frequency is None: if self._closed: return if self.gpio_pin is None: raise PinUnsupported('GPIO was enabled, but is not supported on pin %r' % self.gpio_pin) self._disable_pwm() if not self.gpio_active: self.gpio_pin.open() self.gpio_active = True else: if self._closed: return if self.pwm_pin is None: raise PinPWMUnsupported('PWM was enabled, but is not supported on pin %r' % self.pwm_pin) self._disable_gpio() if not self.pwm_active: self.pwm_pin.open() self.pwm_active = True self.pwm_pin.set_period_ns(_NS_PER_SECOND / frequency)
aiyprojects-raspbian
positive
@conductor_only def generate_orchestration_playbook(self, url=None, namespace=None, vault_files=None, **kwargs): """ Generate an Ansible playbook to orchestrate services. :param url: registry URL where images will be pulled from :param namespace: registry namespace :return: playbook dict """ states = ['start', 'restart', 'stop', 'destroy'] service_def = {} for (service_name, service) in iteritems(self.services): service_definition = {} if service.get('roles'): if url and namespace: service_definition[u'image'] = '{}/{}/{}'.format(re.sub('/$', '', url), namespace, self.image_name_for_service(service_name)) else: <DeepExtract> try: image = self.client.images.get('%s:latest' % self.image_name_for_service(service_name)) except docker_errors.ImageNotFound: images = self.client.images.list(name=self.image_name_for_service(service_name)) logger.debug(u'Could not find the latest image for service, searching for other tags with same image name', image_name=self.image_name_for_service(service_name), service=service_name) if not images: image = None def tag_sort(i): image = [t for t in i.tags if t.startswith(self.image_name_for_service(service_name))][0] images = sorted(images, key=tag_sort) logger.debug('Found images for service', service=service_name, images=images) image = images[-1] else: image = image </DeepExtract> if image is None: raise exceptions.AnsibleContainerConductorException(u"No image found for service {}, make sure you've run `ansible-container build`".format(service_name)) service_definition[u'image'] = image.tags[0] else: try: image = self.client.images.get(service['from']) image_from = image.tags[0] except docker.errors.ImageNotFound: image_from = service['from'] logger.warning(u'Image {} for service {} not found. An attempt will be made to pull it.'.format(service['from'], service_name)) service_definition[u'image'] = image_from for extra in self.COMPOSE_WHITELIST: if extra in service: service_definition[extra] = service[extra] if 'secrets' in service: service_secrets = [] for (secret, secret_engines) in iteritems(service[u'secrets']): if 'docker' in secret_engines: service_secrets += secret_engines[u'docker'] if service_secrets: service_definition[u'secrets'] = service_secrets if self.CAP_SIM_SECRETS: if not 'volumes' in service_definition: service_definition['volumes'] = [] service_definition['volumes'].append('{}:/run/secrets:ro'.format(self.secrets_volume_name)) logger.debug(u'Adding new service to definition', service=service_name, definition=service_definition) service_def[service_name] = service_definition tasks = [] <DeepExtract> top_level_secrets = dict() if self.secrets: for (secret, secret_definition) in iteritems(self.secrets): if isinstance(secret_definition, dict): for (key, value) in iteritems(secret_definition): name = '{}_{}'.format(secret, key) top_level_secrets[name] = dict(external=True) elif isinstance(secret_definition, string_types): top_level_secrets[secret] = dict(external=True) top_level_secrets = top_level_secrets </DeepExtract> if self.CAP_SIM_SECRETS and top_level_secrets: if not isinstance(self.volumes, dict): self.volumes = dict() self.volumes[self.secrets_volume_name] = dict(external=True) for desired_state in states: task_params = {u'project_name': self.project_name, u'definition': {u'version': u'3.1' if top_level_secrets else u'2', u'services': service_def}} if self.secrets: task_params[u'definition'][u'secrets'] = top_level_secrets if self.volumes: task_params[u'definition'][u'volumes'] = dict(self.volumes) if desired_state in {'restart', 
'start', 'stop'}: task_params[u'state'] = u'present' if desired_state == 'restart': task_params[u'restarted'] = True if desired_state == 'stop': task_params[u'stopped'] = True elif desired_state == 'destroy': task_params[u'state'] = u'absent' task_params[u'remove_volumes'] = u'yes' tasks.append({u'docker_service': task_params, u'tags': [desired_state]}) playbook = [] if self.secrets and self.CAP_SIM_SECRETS: playbook.append(self.generate_secrets_play(vault_files=vault_files)) playbook.append(CommentedMap([(u'name', 'Deploy {}'.format(self.project_name)), (u'hosts', u'localhost'), (u'gather_facts', False)])) if vault_files: playbook[len(playbook) - 1][u'vars_files'] = [os.path.normpath(os.path.abspath(v)) for v in vault_files] playbook[len(playbook) - 1][u'tasks'] = tasks for service in list(self.services.keys()) + ['conductor']: <DeepExtract> if service == 'conductor': image_name = u'%s-%s' % (self.project_name.lower(), service.lower()) result = None for (name, service) in iteritems(self.services): if service.get('containers'): for c in service['containers']: container_service_name = u'%s-%s' % (name, c['container_name']) if container_service_name == service: if c.get('roles'): result = u'%s-%s' % (self.project_name.lower(), container_service_name.lower()) else: result = c.get('from') break elif name == service: if service.get('roles'): result = u'%s-%s' % (self.project_name.lower(), name.lower()) else: result = service.get('from') if result: break if result is None: raise exceptions.AnsibleContainerConfigException(u"Failed to resolve image for service {}. The service or container definition is likely missing a 'from' attribute".format(service)) image_name = result </DeepExtract> for image in self.client.images.list(name=image_name): logger.debug('Found image for service', tags=image.tags, id=image.short_id) for tag in image.tags: if tag.startswith(self.project_name): logger.debug('Adding task to destroy image', tag=tag) playbook[len(playbook) - 1][u'tasks'].append({u'docker_image': {u'name': tag, u'state': u'absent', u'force': u'yes'}, u'tags': u'destroy'}) if self.secrets and self.CAP_SIM_SECRETS: playbook.append(self.generate_remove_volume_play()) logger.debug(u'Created playbook to run project', playbook=playbook) return playbook
@conductor_only def generate_orchestration_playbook(self, url=None, namespace=None, vault_files=None, **kwargs): """ Generate an Ansible playbook to orchestrate services. :param url: registry URL where images will be pulled from :param namespace: registry namespace :return: playbook dict """ states = ['start', 'restart', 'stop', 'destroy'] service_def = {} for (service_name, service) in iteritems(self.services): service_definition = {} if service.get('roles'): if url and namespace: service_definition[u'image'] = '{}/{}/{}'.format(re.sub('/$', '', url), namespace, self.image_name_for_service(service_name)) else: try: image = self.client.images.get('%s:latest' % self.image_name_for_service(service_name)) except docker_errors.ImageNotFound: images = self.client.images.list(name=self.image_name_for_service(service_name)) logger.debug(u'Could not find the latest image for service, searching for other tags with same image name', image_name=self.image_name_for_service(service_name), service=service_name) if not images: image = None def tag_sort(i): image = [t for t in i.tags if t.startswith(self.image_name_for_service(service_name))][0] images = sorted(images, key=tag_sort) logger.debug('Found images for service', service=service_name, images=images) image = images[-1] else: image = image if image is None: raise exceptions.AnsibleContainerConductorException(u"No image found for service {}, make sure you've run `ansible-container build`".format(service_name)) service_definition[u'image'] = image.tags[0] else: try: image = self.client.images.get(service['from']) image_from = image.tags[0] except docker.errors.ImageNotFound: image_from = service['from'] logger.warning(u'Image {} for service {} not found. An attempt will be made to pull it.'.format(service['from'], service_name)) service_definition[u'image'] = image_from for extra in self.COMPOSE_WHITELIST: if extra in service: service_definition[extra] = service[extra] if 'secrets' in service: service_secrets = [] for (secret, secret_engines) in iteritems(service[u'secrets']): if 'docker' in secret_engines: service_secrets += secret_engines[u'docker'] if service_secrets: service_definition[u'secrets'] = service_secrets if self.CAP_SIM_SECRETS: if not 'volumes' in service_definition: service_definition['volumes'] = [] service_definition['volumes'].append('{}:/run/secrets:ro'.format(self.secrets_volume_name)) logger.debug(u'Adding new service to definition', service=service_name, definition=service_definition) service_def[service_name] = service_definition tasks = [] top_level_secrets = dict() if self.secrets: for (secret, secret_definition) in iteritems(self.secrets): if isinstance(secret_definition, dict): for (key, value) in iteritems(secret_definition): name = '{}_{}'.format(secret, key) top_level_secrets[name] = dict(external=True) elif isinstance(secret_definition, string_types): top_level_secrets[secret] = dict(external=True) top_level_secrets = top_level_secrets if self.CAP_SIM_SECRETS and top_level_secrets: if not isinstance(self.volumes, dict): self.volumes = dict() self.volumes[self.secrets_volume_name] = dict(external=True) for desired_state in states: task_params = {u'project_name': self.project_name, u'definition': {u'version': u'3.1' if top_level_secrets else u'2', u'services': service_def}} if self.secrets: task_params[u'definition'][u'secrets'] = top_level_secrets if self.volumes: task_params[u'definition'][u'volumes'] = dict(self.volumes) if desired_state in {'restart', 'start', 'stop'}: task_params[u'state'] = u'present' if 
desired_state == 'restart': task_params[u'restarted'] = True if desired_state == 'stop': task_params[u'stopped'] = True elif desired_state == 'destroy': task_params[u'state'] = u'absent' task_params[u'remove_volumes'] = u'yes' tasks.append({u'docker_service': task_params, u'tags': [desired_state]}) playbook = [] if self.secrets and self.CAP_SIM_SECRETS: playbook.append(self.generate_secrets_play(vault_files=vault_files)) playbook.append(CommentedMap([(u'name', 'Deploy {}'.format(self.project_name)), (u'hosts', u'localhost'), (u'gather_facts', False)])) if vault_files: playbook[len(playbook) - 1][u'vars_files'] = [os.path.normpath(os.path.abspath(v)) for v in vault_files] playbook[len(playbook) - 1][u'tasks'] = tasks for service in list(self.services.keys()) + ['conductor']: if service == 'conductor': image_name = u'%s-%s' % (self.project_name.lower(), service.lower()) result = None for (name, service) in iteritems(self.services): if service.get('containers'): for c in service['containers']: container_service_name = u'%s-%s' % (name, c['container_name']) if container_service_name == service: if c.get('roles'): result = u'%s-%s' % (self.project_name.lower(), container_service_name.lower()) else: result = c.get('from') break elif name == service: if service.get('roles'): result = u'%s-%s' % (self.project_name.lower(), name.lower()) else: result = service.get('from') if result: break if result is None: raise exceptions.AnsibleContainerConfigException(u"Failed to resolve image for service {}. The service or container definition is likely missing a 'from' attribute".format(service)) image_name = result for image in self.client.images.list(name=image_name): logger.debug('Found image for service', tags=image.tags, id=image.short_id) for tag in image.tags: if tag.startswith(self.project_name): logger.debug('Adding task to destroy image', tag=tag) playbook[len(playbook) - 1][u'tasks'].append({u'docker_image': {u'name': tag, u'state': u'absent', u'force': u'yes'}, u'tags': u'destroy'}) if self.secrets and self.CAP_SIM_SECRETS: playbook.append(self.generate_remove_volume_play()) logger.debug(u'Created playbook to run project', playbook=playbook) return playbook
ansible-container
positive
def validate(self, value): value = super().validate(value) if not self.indexed or value is None: return value if not self.repeated: <DeepExtract> if len(value) > _max_indexed_length and len(value.encode(self.encoding)) > _max_indexed_length: raise ValueError(f'String value is longer than the maximum allowed length ({_max_indexed_length}) for indexed properties. Set indexed to False if the value should not be indexed.') </DeepExtract> return value
def validate(self, value): value = super().validate(value) if not self.indexed or value is None: return value if not self.repeated: if len(value) > _max_indexed_length and len(value.encode(self.encoding)) > _max_indexed_length: raise ValueError(f'String value is longer than the maximum allowed length ({_max_indexed_length}) for indexed properties. Set indexed to False if the value should not be indexed.') return value
anom-py
positive
def build(): """Build the application""" logger.info('Deleting directories build and dist') shutil.rmtree('build', ignore_errors=True) shutil.rmtree('dist', ignore_errors=True) shutil.rmtree('BleachBit-Portable', ignore_errors=True) logger.info('Running py2exe') shutil.copyfile('bleachbit.py', 'bleachbit_console.py') cmd = sys.executable + ' -OO setup.py py2exe' <DeepExtract> logger.info(cmd) p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) (stdout, stderr) = p.communicate() logger.info(stdout.decode(setup_encoding)) if stderr: logger.error(stderr.decode(setup_encoding)) </DeepExtract> <DeepExtract> if not os.path.exists('dist\\bleachbit.exe'): logger.error('dist\\bleachbit.exe' + ' not found') if msg: logger.error(msg) sys.exit(1) </DeepExtract> <DeepExtract> if not os.path.exists('dist\\bleachbit_console.exe'): logger.error('dist\\bleachbit_console.exe' + ' not found') if msg: logger.error(msg) sys.exit(1) </DeepExtract> os.remove('bleachbit_console.py') if not os.path.exists('dist'): os.makedirs('dist') logger.info('Copying GTK helpers') for exe in glob.glob1(GTK_LIBDIR, 'gspawn-win*-helper*.exe'): shutil.copyfile(os.path.join(GTK_LIBDIR, exe), os.path.join('dist', exe)) for exe in ('fc-cache.exe',): shutil.copyfile(os.path.join(GTK_LIBDIR, exe), os.path.join('dist', exe)) logger.info('Copying GTK files and icon') for d in ('dbus-1', 'fonts', 'gtk-3.0', 'pango'): path = os.path.join(GTK_DIR, 'etc', d) if os.path.exists(path): <DeepExtract> logger.info('copying {} to {}'.format(path, os.path.join('dist', 'etc', d))) cmd = 'xcopy {} {} /i /s /q'.format(path, os.path.join('dist', 'etc', d)) os.system(cmd) </DeepExtract> for d in ('girepository-1.0', 'glade', 'gtk-3.0'): path = os.path.join(GTK_DIR, 'lib', d) if os.path.exists(path): <DeepExtract> logger.info('copying {} to {}'.format(path, os.path.join('dist', 'lib', d))) cmd = 'xcopy {} {} /i /s /q'.format(path, os.path.join('dist', 'lib', d)) os.system(cmd) </DeepExtract> logger.info('Remove windows fonts dir from fonts.conf file') for d in ('icons',): path = os.path.join(GTK_DIR, 'share', d) if os.path.exists(path): <DeepExtract> logger.info('copying {} to {}'.format(path, os.path.join('dist', 'share', d))) cmd = 'xcopy {} {} /i /s /q'.format(path, os.path.join('dist', 'share', d)) os.system(cmd) </DeepExtract> SCHEMAS_DIR = 'share\\glib-2.0\\schemas' gschemas_compiled = os.path.join(GTK_DIR, SCHEMAS_DIR, 'gschemas.compiled') if os.path.exists(gschemas_compiled): os.makedirs(os.path.join('dist', SCHEMAS_DIR)) shutil.copyfile(gschemas_compiled, os.path.join('dist', SCHEMAS_DIR, 'gschemas.compiled')) os.makedirs(os.path.join('dist', 'share'), exist_ok=True) shutil.copyfile('bleachbit.png', 'dist\\share\\bleachbit.png') shutil.copyfile('windows\\bleachbit.ico', 'dist\\share\\bleachbit.ico') for dll in glob.glob1(GTK_LIBDIR, '*.dll'): shutil.copyfile(os.path.join(GTK_LIBDIR, dll), 'dist\\' + dll) os.mkdir('dist\\data') shutil.copyfile('data\\app-menu.ui', 'dist\\data\\app-menu.ui') logger.info('Copying themes') <DeepExtract> logger.info('copying {} to {}'.format('themes', 'dist\\themes')) cmd = 'xcopy {} {} /i /s /q'.format('themes', 'dist\\themes') os.system(cmd) </DeepExtract> logger.info('Copying CA bundle') import requests shutil.copyfile(requests.utils.DEFAULT_CA_BUNDLE_PATH, os.path.join('dist', 'cacert.pem')) dist_locale_dir = 'dist\\share\\locale' shutil.rmtree(dist_locale_dir, ignore_errors=True) os.makedirs(dist_locale_dir) logger.info('Copying GTK localizations') locale_dir = 
os.path.join(GTK_DIR, 'share\\locale\\') for f in recursive_glob(locale_dir, ['gtk30.mo']): if not f.startswith(locale_dir): continue rel_f = f[len(locale_dir):] os.makedirs(os.path.join(dist_locale_dir, os.path.dirname(rel_f))) shutil.copyfile(f, os.path.join(dist_locale_dir, rel_f)) <DeepExtract> if not os.path.exists(os.path.join(dist_locale_dir, 'es\\LC_MESSAGES\\gtk30.mo')): logger.error(os.path.join(dist_locale_dir, 'es\\LC_MESSAGES\\gtk30.mo') + ' not found') if msg: logger.error(msg) sys.exit(1) </DeepExtract> logger.info('Copying BleachBit localizations') <DeepExtract> logger.info('copying {} to {}'.format('locale', dist_locale_dir)) cmd = 'xcopy {} {} /i /s /q'.format('locale', dist_locale_dir) os.system(cmd) </DeepExtract> <DeepExtract> if not os.path.exists(os.path.join(dist_locale_dir, 'es\\LC_MESSAGES\\bleachbit.mo')): logger.error(os.path.join(dist_locale_dir, 'es\\LC_MESSAGES\\bleachbit.mo') + ' not found') if msg: logger.error(msg) sys.exit(1) </DeepExtract> logger.info('Copying BleachBit cleaners') if not os.path.exists('dist\\share\\cleaners'): os.makedirs('dist\\share\\cleaners') <DeepExtract> cleaners_files = [os.path.join(looproot, filename) for (looproot, _, filenames) in os.walk('cleaners') for filename in filenames if any((fnmatch.fnmatch(filename, pattern) for pattern in ['*.xml']))] </DeepExtract> for file in cleaners_files: shutil.copy(file, 'dist\\share\\cleaners') logger.info('Checking for CleanerML') <DeepExtract> if not os.path.exists('dist\\share\\cleaners\\internet_explorer.xml'): logger.error('dist\\share\\cleaners\\internet_explorer.xml' + ' not found') if msg: logger.error(msg) sys.exit(1) </DeepExtract> logger.info('Copying license') shutil.copy('COPYING', 'dist') <DeepExtract> if os.path.exists('CodeSign.bat'): logger.info('Signing code: %s' % 'dist\\bleachbit.exe') cmd = 'CodeSign.bat %s' % 'dist\\bleachbit.exe' run_cmd(cmd) else: logger.warning('CodeSign.bat not available for %s' % 'dist\\bleachbit.exe') </DeepExtract> <DeepExtract> if os.path.exists('CodeSign.bat'): logger.info('Signing code: %s' % 'dist\\bleachbit_console.exe') cmd = 'CodeSign.bat %s' % 'dist\\bleachbit_console.exe' run_cmd(cmd) else: logger.warning('CodeSign.bat not available for %s' % 'dist\\bleachbit_console.exe') </DeepExtract> <DeepExtract> logger.info('Checking bleachbit_console.exe starts') assert_execute(['dist\\bleachbit_console.exe', '--gui', '--exit', '--no-uac'], 'Success') </DeepExtract>
def build(): """Build the application""" logger.info('Deleting directories build and dist') shutil.rmtree('build', ignore_errors=True) shutil.rmtree('dist', ignore_errors=True) shutil.rmtree('BleachBit-Portable', ignore_errors=True) logger.info('Running py2exe') shutil.copyfile('bleachbit.py', 'bleachbit_console.py') cmd = sys.executable + ' -OO setup.py py2exe' logger.info(cmd) p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) (stdout, stderr) = p.communicate() logger.info(stdout.decode(setup_encoding)) if stderr: logger.error(stderr.decode(setup_encoding)) if not os.path.exists('dist\\bleachbit.exe'): logger.error('dist\\bleachbit.exe' + ' not found') if msg: logger.error(msg) sys.exit(1) if not os.path.exists('dist\\bleachbit_console.exe'): logger.error('dist\\bleachbit_console.exe' + ' not found') if msg: logger.error(msg) sys.exit(1) os.remove('bleachbit_console.py') if not os.path.exists('dist'): os.makedirs('dist') logger.info('Copying GTK helpers') for exe in glob.glob1(GTK_LIBDIR, 'gspawn-win*-helper*.exe'): shutil.copyfile(os.path.join(GTK_LIBDIR, exe), os.path.join('dist', exe)) for exe in ('fc-cache.exe',): shutil.copyfile(os.path.join(GTK_LIBDIR, exe), os.path.join('dist', exe)) logger.info('Copying GTK files and icon') for d in ('dbus-1', 'fonts', 'gtk-3.0', 'pango'): path = os.path.join(GTK_DIR, 'etc', d) if os.path.exists(path): logger.info('copying {} to {}'.format(path, os.path.join('dist', 'etc', d))) cmd = 'xcopy {} {} /i /s /q'.format(path, os.path.join('dist', 'etc', d)) os.system(cmd) for d in ('girepository-1.0', 'glade', 'gtk-3.0'): path = os.path.join(GTK_DIR, 'lib', d) if os.path.exists(path): logger.info('copying {} to {}'.format(path, os.path.join('dist', 'lib', d))) cmd = 'xcopy {} {} /i /s /q'.format(path, os.path.join('dist', 'lib', d)) os.system(cmd) logger.info('Remove windows fonts dir from fonts.conf file') for d in ('icons',): path = os.path.join(GTK_DIR, 'share', d) if os.path.exists(path): logger.info('copying {} to {}'.format(path, os.path.join('dist', 'share', d))) cmd = 'xcopy {} {} /i /s /q'.format(path, os.path.join('dist', 'share', d)) os.system(cmd) SCHEMAS_DIR = 'share\\glib-2.0\\schemas' gschemas_compiled = os.path.join(GTK_DIR, SCHEMAS_DIR, 'gschemas.compiled') if os.path.exists(gschemas_compiled): os.makedirs(os.path.join('dist', SCHEMAS_DIR)) shutil.copyfile(gschemas_compiled, os.path.join('dist', SCHEMAS_DIR, 'gschemas.compiled')) os.makedirs(os.path.join('dist', 'share'), exist_ok=True) shutil.copyfile('bleachbit.png', 'dist\\share\\bleachbit.png') shutil.copyfile('windows\\bleachbit.ico', 'dist\\share\\bleachbit.ico') for dll in glob.glob1(GTK_LIBDIR, '*.dll'): shutil.copyfile(os.path.join(GTK_LIBDIR, dll), 'dist\\' + dll) os.mkdir('dist\\data') shutil.copyfile('data\\app-menu.ui', 'dist\\data\\app-menu.ui') logger.info('Copying themes') logger.info('copying {} to {}'.format('themes', 'dist\\themes')) cmd = 'xcopy {} {} /i /s /q'.format('themes', 'dist\\themes') os.system(cmd) logger.info('Copying CA bundle') import requests shutil.copyfile(requests.utils.DEFAULT_CA_BUNDLE_PATH, os.path.join('dist', 'cacert.pem')) dist_locale_dir = 'dist\\share\\locale' shutil.rmtree(dist_locale_dir, ignore_errors=True) os.makedirs(dist_locale_dir) logger.info('Copying GTK localizations') locale_dir = os.path.join(GTK_DIR, 'share\\locale\\') for f in recursive_glob(locale_dir, ['gtk30.mo']): if not f.startswith(locale_dir): continue rel_f = f[len(locale_dir):] os.makedirs(os.path.join(dist_locale_dir, 
os.path.dirname(rel_f))) shutil.copyfile(f, os.path.join(dist_locale_dir, rel_f)) if not os.path.exists(os.path.join(dist_locale_dir, 'es\\LC_MESSAGES\\gtk30.mo')): logger.error(os.path.join(dist_locale_dir, 'es\\LC_MESSAGES\\gtk30.mo') + ' not found') if msg: logger.error(msg) sys.exit(1) logger.info('Copying BleachBit localizations') logger.info('copying {} to {}'.format('locale', dist_locale_dir)) cmd = 'xcopy {} {} /i /s /q'.format('locale', dist_locale_dir) os.system(cmd) if not os.path.exists(os.path.join(dist_locale_dir, 'es\\LC_MESSAGES\\bleachbit.mo')): logger.error(os.path.join(dist_locale_dir, 'es\\LC_MESSAGES\\bleachbit.mo') + ' not found') if msg: logger.error(msg) sys.exit(1) logger.info('Copying BleachBit cleaners') if not os.path.exists('dist\\share\\cleaners'): os.makedirs('dist\\share\\cleaners') cleaners_files = [os.path.join(looproot, filename) for (looproot, _, filenames) in os.walk('cleaners') for filename in filenames if any((fnmatch.fnmatch(filename, pattern) for pattern in ['*.xml']))] for file in cleaners_files: shutil.copy(file, 'dist\\share\\cleaners') logger.info('Checking for CleanerML') if not os.path.exists('dist\\share\\cleaners\\internet_explorer.xml'): logger.error('dist\\share\\cleaners\\internet_explorer.xml' + ' not found') if msg: logger.error(msg) sys.exit(1) logger.info('Copying license') shutil.copy('COPYING', 'dist') if os.path.exists('CodeSign.bat'): logger.info('Signing code: %s' % 'dist\\bleachbit.exe') cmd = 'CodeSign.bat %s' % 'dist\\bleachbit.exe' run_cmd(cmd) else: logger.warning('CodeSign.bat not available for %s' % 'dist\\bleachbit.exe') if os.path.exists('CodeSign.bat'): logger.info('Signing code: %s' % 'dist\\bleachbit_console.exe') cmd = 'CodeSign.bat %s' % 'dist\\bleachbit_console.exe' run_cmd(cmd) else: logger.warning('CodeSign.bat not available for %s' % 'dist\\bleachbit_console.exe') logger.info('Checking bleachbit_console.exe starts') assert_execute(['dist\\bleachbit_console.exe', '--gui', '--exit', '--no-uac'], 'Success') </DeepExtract>
bleachbit
positive
def execute(self, context): pcv = context.object.point_cloud_visualizer pcv2 = pcv.filter_join_object.point_cloud_visualizer c = PCVManager.cache[pcv.uuid] c2 = PCVManager.cache[pcv2.uuid] ovs = c['vertices'] ons = c['normals'] ocs = c['colors'] nvs = c2['vertices'] nns = c2['normals'] ncs = c2['colors'] def apply_matrix(m, vs, ns=None): vs.shape = (-1, 3) vs = np.c_[vs, np.ones(vs.shape[0])] vs = np.dot(m, vs.T)[0:3].T.reshape(-1) vs.shape = (-1, 3) if ns is not None: (_, rot, _) = m.decompose() rmat = rot.to_matrix().to_4x4() ns.shape = (-1, 3) ns = np.c_[ns, np.ones(ns.shape[0])] ns = np.dot(rmat, ns.T)[0:3].T.reshape(-1) ns.shape = (-1, 3) return (vs, ns) <DeepExtract> matrot = nvs.decompose()[1].to_matrix().to_4x4() r = [None] * len(pcv.filter_join_object.matrix_world) for (i, p) in enumerate(pcv.filter_join_object.matrix_world): co = nvs @ Vector((p[0], p[1], p[2])) no = matrot @ Vector((p[3], p[4], p[5])) r[i] = (co.x, co.y, co.z, no.x, no.y, no.z, p[6], p[7], p[8]) (nvs, nns) = r </DeepExtract> <DeepExtract> matrot = nvs.decompose()[1].to_matrix().to_4x4() r = [None] * len(context.object.matrix_world.inverted()) for (i, p) in enumerate(context.object.matrix_world.inverted()): co = nvs @ Vector((p[0], p[1], p[2])) no = matrot @ Vector((p[3], p[4], p[5])) r[i] = (co.x, co.y, co.z, no.x, no.y, no.z, p[6], p[7], p[8]) (nvs, nns) = r </DeepExtract> vs = np.concatenate((ovs, nvs)) ns = np.concatenate((ons, nns)) cs = np.concatenate((ocs, ncs)) preferences = bpy.context.preferences addon_prefs = preferences.addons[__name__].preferences if addon_prefs.shuffle_points: l = len(vs) dt = [('x', '<f8'), ('y', '<f8'), ('z', '<f8'), ('nx', '<f8'), ('ny', '<f8'), ('nz', '<f8'), ('red', '<f8'), ('green', '<f8'), ('blue', '<f8'), ('alpha', '<f8')] a = np.empty(l, dtype=dt) a['x'] = vs[:, 0] a['y'] = vs[:, 1] a['z'] = vs[:, 2] a['nx'] = ns[:, 0] a['ny'] = ns[:, 1] a['nz'] = ns[:, 2] a['red'] = cs[:, 0] a['green'] = cs[:, 1] a['blue'] = cs[:, 2] a['alpha'] = cs[:, 3] np.random.shuffle(a) vs = np.column_stack((a['x'], a['y'], a['z'])) ns = np.column_stack((a['nx'], a['ny'], a['nz'])) cs = np.column_stack((a['red'], a['green'], a['blue'], a['alpha'])) vs = vs.astype(np.float32) ns = ns.astype(np.float32) cs = cs.astype(np.float32) PCVManager.update(pcv.uuid, vs, ns, cs) c2['draw'] = False context.area.tag_redraw() return {'FINISHED'}
def execute(self, context): pcv = context.object.point_cloud_visualizer pcv2 = pcv.filter_join_object.point_cloud_visualizer c = PCVManager.cache[pcv.uuid] c2 = PCVManager.cache[pcv2.uuid] ovs = c['vertices'] ons = c['normals'] ocs = c['colors'] nvs = c2['vertices'] nns = c2['normals'] ncs = c2['colors'] def apply_matrix(m, vs, ns=None): vs.shape = (-1, 3) vs = np.c_[vs, np.ones(vs.shape[0])] vs = np.dot(m, vs.T)[0:3].T.reshape(-1) vs.shape = (-1, 3) if ns is not None: (_, rot, _) = m.decompose() rmat = rot.to_matrix().to_4x4() ns.shape = (-1, 3) ns = np.c_[ns, np.ones(ns.shape[0])] ns = np.dot(rmat, ns.T)[0:3].T.reshape(-1) ns.shape = (-1, 3) return (vs, ns) matrot = nvs.decompose()[1].to_matrix().to_4x4() r = [None] * len(pcv.filter_join_object.matrix_world) for (i, p) in enumerate(pcv.filter_join_object.matrix_world): co = nvs @ Vector((p[0], p[1], p[2])) no = matrot @ Vector((p[3], p[4], p[5])) r[i] = (co.x, co.y, co.z, no.x, no.y, no.z, p[6], p[7], p[8]) (nvs, nns) = r matrot = nvs.decompose()[1].to_matrix().to_4x4() r = [None] * len(context.object.matrix_world.inverted()) for (i, p) in enumerate(context.object.matrix_world.inverted()): co = nvs @ Vector((p[0], p[1], p[2])) no = matrot @ Vector((p[3], p[4], p[5])) r[i] = (co.x, co.y, co.z, no.x, no.y, no.z, p[6], p[7], p[8]) (nvs, nns) = r vs = np.concatenate((ovs, nvs)) ns = np.concatenate((ons, nns)) cs = np.concatenate((ocs, ncs)) preferences = bpy.context.preferences addon_prefs = preferences.addons[__name__].preferences if addon_prefs.shuffle_points: l = len(vs) dt = [('x', '<f8'), ('y', '<f8'), ('z', '<f8'), ('nx', '<f8'), ('ny', '<f8'), ('nz', '<f8'), ('red', '<f8'), ('green', '<f8'), ('blue', '<f8'), ('alpha', '<f8')] a = np.empty(l, dtype=dt) a['x'] = vs[:, 0] a['y'] = vs[:, 1] a['z'] = vs[:, 2] a['nx'] = ns[:, 0] a['ny'] = ns[:, 1] a['nz'] = ns[:, 2] a['red'] = cs[:, 0] a['green'] = cs[:, 1] a['blue'] = cs[:, 2] a['alpha'] = cs[:, 3] np.random.shuffle(a) vs = np.column_stack((a['x'], a['y'], a['z'])) ns = np.column_stack((a['nx'], a['ny'], a['nz'])) cs = np.column_stack((a['red'], a['green'], a['blue'], a['alpha'])) vs = vs.astype(np.float32) ns = ns.astype(np.float32) cs = cs.astype(np.float32) PCVManager.update(pcv.uuid, vs, ns, cs) c2['draw'] = False context.area.tag_redraw() return {'FINISHED'}
bpy
positive
def sample(self, **kwargs): """ Iteratively samples from sequence of `Trials`. Sampling loop:: - until Trial_sequence is exhausted or .reset(): - sample next Trial in Trial_sequence; Args: kwargs: not used. Returns: Trial as `BTgymBaseDataTrial` instance; None, if trial's sequence is exhausted. """ <DeepExtract> if self.sample_num > self.total_samples: self.is_ready = False self.log.warning('Sampling sequence exhausted at {}-th Trial'.format(self.sample_num)) self.sample_instance = None else: (interval, time) = self._get_interval(self.sample_num) self.log.notice('Trial #{} @: {} <--> {};'.format(self.sample_num, time[0], time[-1])) self.log.debug('Trial #{} rows: {} <--> {}'.format(self.sample_num, interval[0], interval[-1])) trial = self._sample_interval(interval, name='sequential_trial_') self.sample_num += 1 self.sample_instance = trial </DeepExtract> if self.sample_instance is None: return False else: self.sample_instance.metadata['type'] = 0 self.sample_instance.metadata['sample_num'] = self.sample_num self.log.debug('got new trial <{}> with metadata: {}'.format(self.sample_instance.filename, self.sample_instance.metadata)) return self.sample_instance
def sample(self, **kwargs): """ Iteratively samples from sequence of `Trials`. Sampling loop:: - until Trial_sequence is exhausted or .reset(): - sample next Trial in Trial_sequence; Args: kwargs: not used. Returns: Trial as `BTgymBaseDataTrial` instance; None, if trial's sequence is exhausted. """ if self.sample_num > self.total_samples: self.is_ready = False self.log.warning('Sampling sequence exhausted at {}-th Trial'.format(self.sample_num)) self.sample_instance = None else: (interval, time) = self._get_interval(self.sample_num) self.log.notice('Trial #{} @: {} <--> {};'.format(self.sample_num, time[0], time[-1])) self.log.debug('Trial #{} rows: {} <--> {}'.format(self.sample_num, interval[0], interval[-1])) trial = self._sample_interval(interval, name='sequential_trial_') self.sample_num += 1 self.sample_instance = trial if self.sample_instance is None: return False else: self.sample_instance.metadata['type'] = 0 self.sample_instance.metadata['sample_num'] = self.sample_num self.log.debug('got new trial <{}> with metadata: {}'.format(self.sample_instance.filename, self.sample_instance.metadata)) return self.sample_instance
btgym
positive
def main(self, argv=None): """ parse command line""" if argv is None: argv = sys.argv else: sys.argv.extend(argv) try: signal.signal(signal.SIGTERM, signalHandler) signal.signal(signal.SIGQUIT, signalHandler) signal.signal(signal.SIGINT, signalHandler) self.parser.add_argument('--runsolver-path', dest='runsolver', default='./target_algorithm/runsolver/runsolver', help='path to runsolver binary (if None, the runsolver is deactivated)') self.parser.add_argument('--temp-file-dir', dest='tmp_dir', default=None, help="directory for temporary files (relative to -exec-dir in SMAC scenario). If 'NONE' use $TMPDIR if available, otherwise './'") self.parser.add_argument('--temp-file-dir-algo', dest='tmp_dir_algo', default=True, type=bool, help='create a directory for temporary files from target algo') self.parser.add_argument('--mem-limit', dest='mem_limit', default=self._mem_limit, type=int, help='memory limit in MB') self.parser.add_argument('--internal', dest='internal', default=False, type=bool, help='skip calling an external target algorithm') self.parser.add_argument('--log', dest='log', default=True, type=bool, help='logs all runs in "target_algo_runs.json" in --temp-file-dir') self.parser.add_argument('--ext-callstring', dest='ext_callstring', default=None, help='Command to get call string via external program;' + 'your programm gets a file with' + 'first line: instance name,' + 'second line: seed' + 'further lines: paramter name, paramater value;' + 'output: one line with callstring for target algorithm') self.parser.add_argument('--ext-parsing', dest='ext_parsing', default=None, help='Command to use an external program to parse the output of your target algorihm;' + 'only paramter: name of output file;' + 'output of your progam:' + 'status: SAT|UNSAT|TIMEOUT|CRASHED\n' + 'quality: <integer>\n' + 'misc: <string>') self.parser.add_argument('--help', dest='show_help', default=False, type=bool, help='shows help') (self.args, target_args) = self.parser.parse_cmd(sys.argv[1:]) args = self.args if args.show_help: self.parser.print_help() self._ta_status = 'ABORT' self._ta_misc = 'help was requested...' self._exit_code = 1 sys.exit(1) if args.runsolver != 'None' and (not os.path.isfile(args.runsolver)) and (not args.internal): self._ta_status = 'ABORT' self._ta_misc = 'runsolver is missing - should have been at %s.' % args.runsolver self._exit_code = 1 sys.exit(1) else: self._runsolver = args.runsolver self._mem_limit = args.mem_limit if args.tmp_dir is None: if 'TMPDIR' in os.environ: args.tmp_dir = os.environ['TMPDIR'] else: args.tmp_dir = '.' if not os.path.isdir(args.tmp_dir): self._ta_status = 'ABORT' self._ta_misc = 'temp directory is missing - should have been at %s.' % args.tmp_dir self._exit_code = 1 sys.exit(1) else: self._tmp_dir = args.tmp_dir if len(target_args) < 5: self._ta_status = 'ABORT' self._ta_misc = 'some required TA parameters (instance, specifics, cutoff, runlength, seed) missing - was [%s].' % ' '.join(target_args) self._exit_code = 1 sys.exit(1) <DeepExtract> self._instance = target_args[0] self._specifics = target_args[1] self._cutoff = int(float(target_args[2]) + 1) self._ta_runtime = self._cutoff self._runlength = int(target_args[3]) self._seed = int(target_args[4]) params = target_args[5:] if len(params) / 2 * 2 != len(params): self._ta_status = 'ABORT' self._ta_misc = 'target algorithm parameter list MUST have even length - found %d arguments.' 
% len(params) self.print_d(' '.join(params)) self._exit_code = 1 sys.exit(1) self._config_dict = dict(((name, value.strip("'")) for (name, value) in zip(params[::2], params[1::2]))) </DeepExtract> if args.tmp_dir_algo: try: self._tmp_dir_algo = mkdtemp(dir='/tmp/') except OSError: sys.stderr.write('Creating directory for temporary files failed') pass runargs = {'instance': self._instance, 'specifics': self._specifics, 'cutoff': self._cutoff, 'runlength': self._runlength, 'seed': self._seed, 'tmp': self._tmp_dir_algo} if args.ext_callstring: <DeepExtract> callstring_in = NamedTemporaryFile(suffix='.csv', prefix='callstring', dir=self._tmp_dir, delete=False) callstring_in.write('%s\n' % runargs['instance']) callstring_in.write('%d\n' % runargs['seed']) for (name, value) in self._config_dict.items(): callstring_in.write('%s,%s\n' % (name, value)) callstring_in.flush() cmd = args.ext_callstring.split(' ') cmd.append(callstring_in.name) self.print_d(' '.join(cmd)) try: io = Popen(cmd, shell=False, preexec_fn=os.setpgrp, stdout=PIPE, universal_newlines=True) self._subprocesses.append(io) (out_, _) = io.communicate() self._subprocesses.remove(io) except OSError: self._ta_misc = 'failed to run external program for output parsing : %s' % ' '.join(cmd) self._ta_runtime = self._cutoff self._exit_code = 2 sys.exit(2) if not out_: self._ta_misc = 'external program for output parsing yielded empty output: %s' % ' '.join(cmd) self._ta_runtime = self._cutoff self._exit_code = 2 sys.exit(2) callstring_in.close() os.remove(callstring_in.name) self._instance = runargs['instance'] target_cmd = out_.strip('\n\r\x08') </DeepExtract> else: <DeepExtract> raise NotImplementedError() </DeepExtract> target_cmd = target_cmd.split(' ') target_cmd = filter(lambda x: x != '', target_cmd) if not args.internal: <DeepExtract> random_id = random.randint(0, 1000000) self._watcher_file = NamedTemporaryFile(suffix='.log', prefix='watcher-%d-' % random_id, dir=self._tmp_dir, delete=False) self._solver_file = NamedTemporaryFile(suffix='.log', prefix='solver-%d-' % random_id, dir=self._tmp_dir, delete=False) runsolver_cmd = [] if self._runsolver != 'None': runsolver_cmd = [self._runsolver, '-M', self._mem_limit, '-C', self._cutoff, '-w', self._watcher_file.name, '-o', self._solver_file.name] runsolver_cmd.extend(target_cmd) self.print_d('Calling runsolver. 
Command-line:') self.print_d(' '.join(map(str, runsolver_cmd))) try: if self._runsolver != 'None': if '"' in runsolver_cmd: runsolver_cmd = ' '.join(map(str, runsolver_cmd)) io = Popen(runsolver_cmd, shell=True, preexec_fn=os.setpgrp, universal_newlines=True) else: io = Popen(map(str, runsolver_cmd), shell=False, preexec_fn=os.setpgrp, universal_newlines=True) else: io = Popen(map(str, runsolver_cmd), stdout=self._solver_file, shell=False, preexec_fn=os.setpgrp, universal_newlines=True) self._subprocesses.append(io) io.wait() self._subprocesses.remove(io) if io.stdout: io.stdout.flush() except OSError: self._ta_status = 'ABORT' self._ta_misc = 'execution failed: %s' % ' '.join(map(str, runsolver_cmd)) self._exit_code = 1 sys.exit(1) self._solver_file.seek(0) </DeepExtract> <DeepExtract> if self._runsolver == 'None': self._ta_exit_code = 0 return self.print_d('Reading runsolver output from %s' % self._watcher_file.name) data = str(self._watcher_file.read()) if re.search('runsolver_max_cpu_time_exceeded', data) or re.search('Maximum CPU time exceeded', data): self._ta_status = 'TIMEOUT' if re.search('runsolver_max_memory_limit_exceeded', data) or re.search('Maximum VSize exceeded', data): self._ta_status = 'TIMEOUT' self._ta_misc = 'memory limit was exceeded' cpu_pattern1 = re.compile('runsolver_cputime: (%s)' % self.float_regex()) cpu_match1 = re.search(cpu_pattern1, data) cpu_pattern2 = re.compile('CPU time \\(s\\): (%s)' % self.float_regex()) cpu_match2 = re.search(cpu_pattern2, data) if cpu_match1: self._ta_runtime = float(cpu_match1.group(1)) if cpu_match2: self._ta_runtime = float(cpu_match2.group(1)) exitcode_pattern = re.compile('Child status: ([0-9]+)') exitcode_match = re.search(exitcode_pattern, data) if exitcode_match: self._ta_exit_code = int(exitcode_match.group(1)) </DeepExtract> try: if 'core' in os.listdir('.'): os.remove('core') except: traceback.print_exc() if args.ext_parsing: <DeepExtract> cmd = args.ext_parsing.split(' ') cmd.append(self._solver_file.name) self.print_d(' '.join(cmd)) try: io = Popen(cmd, shell=False, preexec_fn=os.setpgrp, stdout=PIPE, universal_newlines=True) self._subprocesses.append(io) (out_, _) = io.communicate() self._subprocesses.remove(io) except OSError: self._ta_misc = 'failed to run external program for output parsing' self._ta_runtime = self._cutoff self._exit_code = 2 sys.exit(2) result_map = {} for line in out_.split('\n'): if line.startswith('status:'): result_map['status'] = line.split(':')[1].strip(' ') elif line.startswith('quality:'): result_map['quality'] = line.split(':')[1].strip(' ') elif line.startswith('misc:'): result_map['misc'] = line.split(':')[1] resultMap = result_map </DeepExtract> else: <DeepExtract> raise NotImplementedError() </DeepExtract> if 'status' in resultMap: self._ta_status = self.RESULT_MAPPING.get(resultMap['status'], resultMap['status']) if 'runtime' in resultMap: self._ta_runtime = resultMap['runtime'] if 'quality' in resultMap: self._ta_quality = resultMap['quality'] if 'misc' in resultMap and (not self._ta_misc): self._ta_misc = resultMap['misc'] if 'misc' in resultMap and self._ta_misc: self._ta_misc += ' - ' + resultMap['misc'] if self._ta_status is 'EXTERNALKILL': self._ta_status = 'CRASHED' sys.exit() except (KeyboardInterrupt, SystemExit): <DeepExtract> if len(self._subprocesses) > 0: print('killing the target run!') try: for sub in self._subprocesses: Popen(['pkill', '-TERM', '-P', str(sub.pid)]) self.print_d('Wait %d seconds ...' 
% self._DELAY2KILL) time.sleep(self._DELAY2KILL) if sub.returncode is None: sub.kill() self.print_d("done... If anything in the subprocess tree fork'd a new process group, we may not have caught everything...") self._ta_misc = 'forced to exit by signal or keyboard interrupt.' self._ta_runtime = self._cutoff except (OSError, KeyboardInterrupt, SystemExit): self._ta_misc = 'forced to exit by multiple signals/interrupts.' self._ta_runtime = self._cutoff if self._ta_status is 'ABORT' or self._ta_status is 'CRASHED': if len(self._ta_misc) == 0: if self._ta_exit_code: self._ta_misc = 'Problem with run. Exit code was %d.' % self._ta_exit_code else: self._ta_misc = 'Problem with run. Exit code was N/A.' if self._watcher_file and self._solver_file: self._ta_misc = self._ta_misc + '; Preserving runsolver output at %s - preserving target algorithm output at %s' % (self._watcher_file.name or '<none>', self._solver_file.name or '<none>') try: if self._watcher_file: self._watcher_file.close() if self._solver_file: self._solver_file.close() if self._ta_status is not 'ABORT' and self._ta_status is not 'CRASHED': os.remove(self._watcher_file.name) os.remove(self._solver_file.name) if self._tmp_dir_algo: shutil.rmtree(self._tmp_dir_algo) except (OSError, KeyboardInterrupt, SystemExit): self._ta_misc = 'problems removing temporary files during cleanup.' except AttributeError: pass if self._ta_status is 'EXTERNALKILL': self._ta_status = 'CRASHED' self._exit_code = 3 </DeepExtract> <DeepExtract> if self.args and self.args.log: with open('target_algo_runs.json', 'a') as fp: out_dict = {'instance': self._instance, 'seed': self._seed, 'status': self._ta_status, 'time': self._ta_runtime, 'config': self._config_dict, 'misc': self._ta_misc} json.dump(out_dict, fp) fp.write('\n') fp.flush() sys.stdout.write('Result for ParamILS: %s, %s, %s, %s, %s' % (self._ta_status, str(self._ta_runtime), str(self._ta_runlength), str(self._ta_quality), str(self._seed))) if len(self._ta_misc) > 0: sys.stdout.write(', %s' % self._ta_misc) print('') sys.stdout.flush() </DeepExtract> if self._ta_exit_code: sys.exit(self._ta_exit_code) elif self._exit_code: sys.exit(self._exit_code) else: sys.exit(0)
def main(self, argv=None): """ parse command line""" if argv is None: argv = sys.argv else: sys.argv.extend(argv) try: signal.signal(signal.SIGTERM, signalHandler) signal.signal(signal.SIGQUIT, signalHandler) signal.signal(signal.SIGINT, signalHandler) self.parser.add_argument('--runsolver-path', dest='runsolver', default='./target_algorithm/runsolver/runsolver', help='path to runsolver binary (if None, the runsolver is deactivated)') self.parser.add_argument('--temp-file-dir', dest='tmp_dir', default=None, help="directory for temporary files (relative to -exec-dir in SMAC scenario). If 'NONE' use $TMPDIR if available, otherwise './'") self.parser.add_argument('--temp-file-dir-algo', dest='tmp_dir_algo', default=True, type=bool, help='create a directory for temporary files from target algo') self.parser.add_argument('--mem-limit', dest='mem_limit', default=self._mem_limit, type=int, help='memory limit in MB') self.parser.add_argument('--internal', dest='internal', default=False, type=bool, help='skip calling an external target algorithm') self.parser.add_argument('--log', dest='log', default=True, type=bool, help='logs all runs in "target_algo_runs.json" in --temp-file-dir') self.parser.add_argument('--ext-callstring', dest='ext_callstring', default=None, help='Command to get call string via external program;' + 'your programm gets a file with' + 'first line: instance name,' + 'second line: seed' + 'further lines: paramter name, paramater value;' + 'output: one line with callstring for target algorithm') self.parser.add_argument('--ext-parsing', dest='ext_parsing', default=None, help='Command to use an external program to parse the output of your target algorihm;' + 'only paramter: name of output file;' + 'output of your progam:' + 'status: SAT|UNSAT|TIMEOUT|CRASHED\n' + 'quality: <integer>\n' + 'misc: <string>') self.parser.add_argument('--help', dest='show_help', default=False, type=bool, help='shows help') (self.args, target_args) = self.parser.parse_cmd(sys.argv[1:]) args = self.args if args.show_help: self.parser.print_help() self._ta_status = 'ABORT' self._ta_misc = 'help was requested...' self._exit_code = 1 sys.exit(1) if args.runsolver != 'None' and (not os.path.isfile(args.runsolver)) and (not args.internal): self._ta_status = 'ABORT' self._ta_misc = 'runsolver is missing - should have been at %s.' % args.runsolver self._exit_code = 1 sys.exit(1) else: self._runsolver = args.runsolver self._mem_limit = args.mem_limit if args.tmp_dir is None: if 'TMPDIR' in os.environ: args.tmp_dir = os.environ['TMPDIR'] else: args.tmp_dir = '.' if not os.path.isdir(args.tmp_dir): self._ta_status = 'ABORT' self._ta_misc = 'temp directory is missing - should have been at %s.' % args.tmp_dir self._exit_code = 1 sys.exit(1) else: self._tmp_dir = args.tmp_dir if len(target_args) < 5: self._ta_status = 'ABORT' self._ta_misc = 'some required TA parameters (instance, specifics, cutoff, runlength, seed) missing - was [%s].' % ' '.join(target_args) self._exit_code = 1 sys.exit(1) self._instance = target_args[0] self._specifics = target_args[1] self._cutoff = int(float(target_args[2]) + 1) self._ta_runtime = self._cutoff self._runlength = int(target_args[3]) self._seed = int(target_args[4]) params = target_args[5:] if len(params) / 2 * 2 != len(params): self._ta_status = 'ABORT' self._ta_misc = 'target algorithm parameter list MUST have even length - found %d arguments.' 
% len(params) self.print_d(' '.join(params)) self._exit_code = 1 sys.exit(1) self._config_dict = dict(((name, value.strip("'")) for (name, value) in zip(params[::2], params[1::2]))) if args.tmp_dir_algo: try: self._tmp_dir_algo = mkdtemp(dir='/tmp/') except OSError: sys.stderr.write('Creating directory for temporary files failed') pass runargs = {'instance': self._instance, 'specifics': self._specifics, 'cutoff': self._cutoff, 'runlength': self._runlength, 'seed': self._seed, 'tmp': self._tmp_dir_algo} if args.ext_callstring: callstring_in = NamedTemporaryFile(suffix='.csv', prefix='callstring', dir=self._tmp_dir, delete=False) callstring_in.write('%s\n' % runargs['instance']) callstring_in.write('%d\n' % runargs['seed']) for (name, value) in self._config_dict.items(): callstring_in.write('%s,%s\n' % (name, value)) callstring_in.flush() cmd = args.ext_callstring.split(' ') cmd.append(callstring_in.name) self.print_d(' '.join(cmd)) try: io = Popen(cmd, shell=False, preexec_fn=os.setpgrp, stdout=PIPE, universal_newlines=True) self._subprocesses.append(io) (out_, _) = io.communicate() self._subprocesses.remove(io) except OSError: self._ta_misc = 'failed to run external program for output parsing : %s' % ' '.join(cmd) self._ta_runtime = self._cutoff self._exit_code = 2 sys.exit(2) if not out_: self._ta_misc = 'external program for output parsing yielded empty output: %s' % ' '.join(cmd) self._ta_runtime = self._cutoff self._exit_code = 2 sys.exit(2) callstring_in.close() os.remove(callstring_in.name) self._instance = runargs['instance'] target_cmd = out_.strip('\n\r\x08') else: raise NotImplementedError() target_cmd = target_cmd.split(' ') target_cmd = filter(lambda x: x != '', target_cmd) if not args.internal: random_id = random.randint(0, 1000000) self._watcher_file = NamedTemporaryFile(suffix='.log', prefix='watcher-%d-' % random_id, dir=self._tmp_dir, delete=False) self._solver_file = NamedTemporaryFile(suffix='.log', prefix='solver-%d-' % random_id, dir=self._tmp_dir, delete=False) runsolver_cmd = [] if self._runsolver != 'None': runsolver_cmd = [self._runsolver, '-M', self._mem_limit, '-C', self._cutoff, '-w', self._watcher_file.name, '-o', self._solver_file.name] runsolver_cmd.extend(target_cmd) self.print_d('Calling runsolver. 
Command-line:') self.print_d(' '.join(map(str, runsolver_cmd))) try: if self._runsolver != 'None': if '"' in runsolver_cmd: runsolver_cmd = ' '.join(map(str, runsolver_cmd)) io = Popen(runsolver_cmd, shell=True, preexec_fn=os.setpgrp, universal_newlines=True) else: io = Popen(map(str, runsolver_cmd), shell=False, preexec_fn=os.setpgrp, universal_newlines=True) else: io = Popen(map(str, runsolver_cmd), stdout=self._solver_file, shell=False, preexec_fn=os.setpgrp, universal_newlines=True) self._subprocesses.append(io) io.wait() self._subprocesses.remove(io) if io.stdout: io.stdout.flush() except OSError: self._ta_status = 'ABORT' self._ta_misc = 'execution failed: %s' % ' '.join(map(str, runsolver_cmd)) self._exit_code = 1 sys.exit(1) self._solver_file.seek(0) if self._runsolver == 'None': self._ta_exit_code = 0 return self.print_d('Reading runsolver output from %s' % self._watcher_file.name) data = str(self._watcher_file.read()) if re.search('runsolver_max_cpu_time_exceeded', data) or re.search('Maximum CPU time exceeded', data): self._ta_status = 'TIMEOUT' if re.search('runsolver_max_memory_limit_exceeded', data) or re.search('Maximum VSize exceeded', data): self._ta_status = 'TIMEOUT' self._ta_misc = 'memory limit was exceeded' cpu_pattern1 = re.compile('runsolver_cputime: (%s)' % self.float_regex()) cpu_match1 = re.search(cpu_pattern1, data) cpu_pattern2 = re.compile('CPU time \\(s\\): (%s)' % self.float_regex()) cpu_match2 = re.search(cpu_pattern2, data) if cpu_match1: self._ta_runtime = float(cpu_match1.group(1)) if cpu_match2: self._ta_runtime = float(cpu_match2.group(1)) exitcode_pattern = re.compile('Child status: ([0-9]+)') exitcode_match = re.search(exitcode_pattern, data) if exitcode_match: self._ta_exit_code = int(exitcode_match.group(1)) try: if 'core' in os.listdir('.'): os.remove('core') except: traceback.print_exc() if args.ext_parsing: cmd = args.ext_parsing.split(' ') cmd.append(self._solver_file.name) self.print_d(' '.join(cmd)) try: io = Popen(cmd, shell=False, preexec_fn=os.setpgrp, stdout=PIPE, universal_newlines=True) self._subprocesses.append(io) (out_, _) = io.communicate() self._subprocesses.remove(io) except OSError: self._ta_misc = 'failed to run external program for output parsing' self._ta_runtime = self._cutoff self._exit_code = 2 sys.exit(2) result_map = {} for line in out_.split('\n'): if line.startswith('status:'): result_map['status'] = line.split(':')[1].strip(' ') elif line.startswith('quality:'): result_map['quality'] = line.split(':')[1].strip(' ') elif line.startswith('misc:'): result_map['misc'] = line.split(':')[1] resultMap = result_map else: raise NotImplementedError() if 'status' in resultMap: self._ta_status = self.RESULT_MAPPING.get(resultMap['status'], resultMap['status']) if 'runtime' in resultMap: self._ta_runtime = resultMap['runtime'] if 'quality' in resultMap: self._ta_quality = resultMap['quality'] if 'misc' in resultMap and (not self._ta_misc): self._ta_misc = resultMap['misc'] if 'misc' in resultMap and self._ta_misc: self._ta_misc += ' - ' + resultMap['misc'] if self._ta_status is 'EXTERNALKILL': self._ta_status = 'CRASHED' sys.exit() except (KeyboardInterrupt, SystemExit): if len(self._subprocesses) > 0: print('killing the target run!') try: for sub in self._subprocesses: Popen(['pkill', '-TERM', '-P', str(sub.pid)]) self.print_d('Wait %d seconds ...' % self._DELAY2KILL) time.sleep(self._DELAY2KILL) if sub.returncode is None: sub.kill() self.print_d("done... 
If anything in the subprocess tree fork'd a new process group, we may not have caught everything...") self._ta_misc = 'forced to exit by signal or keyboard interrupt.' self._ta_runtime = self._cutoff except (OSError, KeyboardInterrupt, SystemExit): self._ta_misc = 'forced to exit by multiple signals/interrupts.' self._ta_runtime = self._cutoff if self._ta_status is 'ABORT' or self._ta_status is 'CRASHED': if len(self._ta_misc) == 0: if self._ta_exit_code: self._ta_misc = 'Problem with run. Exit code was %d.' % self._ta_exit_code else: self._ta_misc = 'Problem with run. Exit code was N/A.' if self._watcher_file and self._solver_file: self._ta_misc = self._ta_misc + '; Preserving runsolver output at %s - preserving target algorithm output at %s' % (self._watcher_file.name or '<none>', self._solver_file.name or '<none>') try: if self._watcher_file: self._watcher_file.close() if self._solver_file: self._solver_file.close() if self._ta_status is not 'ABORT' and self._ta_status is not 'CRASHED': os.remove(self._watcher_file.name) os.remove(self._solver_file.name) if self._tmp_dir_algo: shutil.rmtree(self._tmp_dir_algo) except (OSError, KeyboardInterrupt, SystemExit): self._ta_misc = 'problems removing temporary files during cleanup.' except AttributeError: pass if self._ta_status is 'EXTERNALKILL': self._ta_status = 'CRASHED' self._exit_code = 3 if self.args and self.args.log: with open('target_algo_runs.json', 'a') as fp: out_dict = {'instance': self._instance, 'seed': self._seed, 'status': self._ta_status, 'time': self._ta_runtime, 'config': self._config_dict, 'misc': self._ta_misc} json.dump(out_dict, fp) fp.write('\n') fp.flush() sys.stdout.write('Result for ParamILS: %s, %s, %s, %s, %s' % (self._ta_status, str(self._ta_runtime), str(self._ta_runlength), str(self._ta_quality), str(self._seed))) if len(self._ta_misc) > 0: sys.stdout.write(', %s' % self._ta_misc) print('') sys.stdout.flush() if self._ta_exit_code: sys.exit(self._ta_exit_code) elif self._exit_code: sys.exit(self._exit_code) else: sys.exit(0)
CAVE
positive
def compute_inputs_targets(self, group): """ Compute inputs and target outputs for the network. """ <DeepExtract> image_group = [self.load_image(image_index) for image_index in group] </DeepExtract> <DeepExtract> annotations_group = [self.load_annotations(image_index) for image_index in group] for annotations in annotations_group: assert isinstance(annotations, dict), "'load_annotations' should return a list of dictionaries, received: {}".format(type(annotations)) assert 'labels' in annotations, "'load_annotations' should return a list of dictionaries that contain 'labels' and 'bboxes'." assert 'bboxes' in annotations, "'load_annotations' should return a list of dictionaries that contain 'labels' and 'bboxes'." annotations_group = annotations_group </DeepExtract> <DeepExtract> for (index, (image, annotations)) in enumerate(zip(image_group, annotations_group)): invalid_indices = np.where((annotations['bboxes'][:, 2] <= annotations['bboxes'][:, 0]) | (annotations['bboxes'][:, 3] <= annotations['bboxes'][:, 1]) | (annotations['bboxes'][:, 0] < 0) | (annotations['bboxes'][:, 1] < 0) | (annotations['bboxes'][:, 2] <= 0) | (annotations['bboxes'][:, 3] <= 0) | (annotations['bboxes'][:, 2] > image.shape[1]) | (annotations['bboxes'][:, 3] > image.shape[0]))[0] if len(invalid_indices): warnings.warn('Image with id {} (shape {}) contains the following invalid boxes: {}.'.format(group[index], image.shape, annotations['bboxes'][invalid_indices, :])) for k in annotations_group[index].keys(): annotations_group[index][k] = np.delete(annotations[k], invalid_indices, axis=0) (image_group, annotations_group) = (image_group, annotations_group) </DeepExtract> <DeepExtract> assert len(image_group) == len(annotations_group) if self.visual_effect is None: (image_group, annotations_group) = (image_group, annotations_group) for index in range(len(image_group)): (image_group[index], annotations_group[index]) = self.random_visual_effect_group_entry(image_group[index], annotations_group[index]) (image_group, annotations_group) = (image_group, annotations_group) </DeepExtract> <DeepExtract> assert len(image_group) == len(annotations_group) if self.misc_effect is None: (image_group, annotations_group) = (image_group, annotations_group) for index in range(len(image_group)): (image_group[index], annotations_group[index]) = self.random_misc_group_entry(image_group[index], annotations_group[index]) (image_group, annotations_group) = (image_group, annotations_group) </DeepExtract> <DeepExtract> assert len(image_group) == len(annotations_group) for index in range(len(image_group)): (image_group[index], annotations_group[index]) = self.preprocess_group_entry(image_group[index], annotations_group[index]) (image_group, annotations_group) = (image_group, annotations_group) </DeepExtract> <DeepExtract> filtered_image_group = [] filtered_annotations_group = [] for (index, (image, annotations)) in enumerate(zip(image_group, annotations_group)): image_height = image.shape[0] image_width = image.shape[1] annotations['bboxes'][:, 0] = np.clip(annotations['bboxes'][:, 0], 0, image_width - 2) annotations['bboxes'][:, 1] = np.clip(annotations['bboxes'][:, 1], 0, image_height - 2) annotations['bboxes'][:, 2] = np.clip(annotations['bboxes'][:, 2], 1, image_width - 1) annotations['bboxes'][:, 3] = np.clip(annotations['bboxes'][:, 3], 1, image_height - 1) small_indices = np.where((annotations['bboxes'][:, 2] - annotations['bboxes'][:, 0] < 10) | (annotations['bboxes'][:, 3] - annotations['bboxes'][:, 1] < 10))[0] if len(small_indices): for k 
in annotations_group[index].keys(): annotations_group[index][k] = np.delete(annotations[k], small_indices, axis=0) filtered_image_group.append(image) filtered_annotations_group.append(annotations_group[index]) (image_group, annotations_group) = (filtered_image_group, filtered_annotations_group) </DeepExtract> if len(image_group) == 0: return (None, None) <DeepExtract> batch_images = np.array(image_group).astype(np.float32) inputs = [batch_images] </DeepExtract> <DeepExtract> batches_targets = anchor_targets_bbox(self.anchors, image_group, annotations_group, self.num_classes()) targets = list(batches_targets) </DeepExtract> return (inputs, targets)
def compute_inputs_targets(self, group): """ Compute inputs and target outputs for the network. """ image_group = [self.load_image(image_index) for image_index in group] annotations_group = [self.load_annotations(image_index) for image_index in group] for annotations in annotations_group: assert isinstance(annotations, dict), "'load_annotations' should return a list of dictionaries, received: {}".format(type(annotations)) assert 'labels' in annotations, "'load_annotations' should return a list of dictionaries that contain 'labels' and 'bboxes'." assert 'bboxes' in annotations, "'load_annotations' should return a list of dictionaries that contain 'labels' and 'bboxes'." annotations_group = annotations_group for (index, (image, annotations)) in enumerate(zip(image_group, annotations_group)): invalid_indices = np.where((annotations['bboxes'][:, 2] <= annotations['bboxes'][:, 0]) | (annotations['bboxes'][:, 3] <= annotations['bboxes'][:, 1]) | (annotations['bboxes'][:, 0] < 0) | (annotations['bboxes'][:, 1] < 0) | (annotations['bboxes'][:, 2] <= 0) | (annotations['bboxes'][:, 3] <= 0) | (annotations['bboxes'][:, 2] > image.shape[1]) | (annotations['bboxes'][:, 3] > image.shape[0]))[0] if len(invalid_indices): warnings.warn('Image with id {} (shape {}) contains the following invalid boxes: {}.'.format(group[index], image.shape, annotations['bboxes'][invalid_indices, :])) for k in annotations_group[index].keys(): annotations_group[index][k] = np.delete(annotations[k], invalid_indices, axis=0) (image_group, annotations_group) = (image_group, annotations_group) assert len(image_group) == len(annotations_group) if self.visual_effect is None: (image_group, annotations_group) = (image_group, annotations_group) for index in range(len(image_group)): (image_group[index], annotations_group[index]) = self.random_visual_effect_group_entry(image_group[index], annotations_group[index]) (image_group, annotations_group) = (image_group, annotations_group) assert len(image_group) == len(annotations_group) if self.misc_effect is None: (image_group, annotations_group) = (image_group, annotations_group) for index in range(len(image_group)): (image_group[index], annotations_group[index]) = self.random_misc_group_entry(image_group[index], annotations_group[index]) (image_group, annotations_group) = (image_group, annotations_group) assert len(image_group) == len(annotations_group) for index in range(len(image_group)): (image_group[index], annotations_group[index]) = self.preprocess_group_entry(image_group[index], annotations_group[index]) (image_group, annotations_group) = (image_group, annotations_group) filtered_image_group = [] filtered_annotations_group = [] for (index, (image, annotations)) in enumerate(zip(image_group, annotations_group)): image_height = image.shape[0] image_width = image.shape[1] annotations['bboxes'][:, 0] = np.clip(annotations['bboxes'][:, 0], 0, image_width - 2) annotations['bboxes'][:, 1] = np.clip(annotations['bboxes'][:, 1], 0, image_height - 2) annotations['bboxes'][:, 2] = np.clip(annotations['bboxes'][:, 2], 1, image_width - 1) annotations['bboxes'][:, 3] = np.clip(annotations['bboxes'][:, 3], 1, image_height - 1) small_indices = np.where((annotations['bboxes'][:, 2] - annotations['bboxes'][:, 0] < 10) | (annotations['bboxes'][:, 3] - annotations['bboxes'][:, 1] < 10))[0] if len(small_indices): for k in annotations_group[index].keys(): annotations_group[index][k] = np.delete(annotations[k], small_indices, axis=0) filtered_image_group.append(image) 
filtered_annotations_group.append(annotations_group[index]) (image_group, annotations_group) = (filtered_image_group, filtered_annotations_group) if len(image_group) == 0: return (None, None) batch_images = np.array(image_group).astype(np.float32) inputs = [batch_images] batches_targets = anchor_targets_bbox(self.anchors, image_group, annotations_group, self.num_classes()) targets = list(batches_targets) return (inputs, targets)
ensembleObjectDetection
positive
def _compile_rule(self, grammar: Grammar, rule: Rule) -> CompiledRule: assert rule.include is None, rule if rule.match is not None: <DeepExtract> captures_ref = tuple(((n, self._visit_rule(grammar, r)) for (n, r) in rule.captures)) </DeepExtract> return MatchRule(rule.name, captures_ref) elif rule.begin is not None and rule.end is not None: (regs, rules) = self._patterns(grammar, rule.patterns) return EndRule(rule.name, rule.content_name, self._captures_ref(grammar, rule.begin_captures), self._captures_ref(grammar, rule.end_captures), rule.end, make_regset(*regs), rules) elif rule.begin is not None and rule.while_ is not None: (regs, rules) = self._patterns(grammar, rule.patterns) return WhileRule(rule.name, rule.content_name, self._captures_ref(grammar, rule.begin_captures), self._captures_ref(grammar, rule.while_captures), rule.while_, make_regset(*regs), rules) else: (regs, rules) = self._patterns(grammar, rule.patterns) return PatternRule(rule.name, make_regset(*regs), rules)
def _compile_rule(self, grammar: Grammar, rule: Rule) -> CompiledRule: assert rule.include is None, rule if rule.match is not None: captures_ref = tuple(((n, self._visit_rule(grammar, r)) for (n, r) in rule.captures)) return MatchRule(rule.name, captures_ref) elif rule.begin is not None and rule.end is not None: (regs, rules) = self._patterns(grammar, rule.patterns) return EndRule(rule.name, rule.content_name, self._captures_ref(grammar, rule.begin_captures), self._captures_ref(grammar, rule.end_captures), rule.end, make_regset(*regs), rules) elif rule.begin is not None and rule.while_ is not None: (regs, rules) = self._patterns(grammar, rule.patterns) return WhileRule(rule.name, rule.content_name, self._captures_ref(grammar, rule.begin_captures), self._captures_ref(grammar, rule.while_captures), rule.while_, make_regset(*regs), rules) else: (regs, rules) = self._patterns(grammar, rule.patterns) return PatternRule(rule.name, make_regset(*regs), rules)
babi
positive
def update(self) -> None: """ Ensures that all remote sources are up-to-date. """ for source_old in self: if isinstance(source_old, RemoteSource): repo = git.Repo(source_old.location) origin = repo.remotes.origin origin.pull() sha = repo.head.object.hexsha version = repo.git.rev_parse(sha, short=8) if version != source_old.version: source_new = RemoteSource(source_old.name, source_old.location, source_old.url, version) logger.info('updated source: %s [%s -> %s]', source_old.name, source_old.version, source_new.version) <DeepExtract> logger.info('loading source %s at %s', source_new.name, source_new.location) if source_new.name in self.__sources: self.unload(source_new) bugs = [] blueprints = [] tools = [] glob_pattern = '{}/**/*.bugzoo.y*ml'.format(source_new.location) for fn in glob.iglob(glob_pattern, recursive=True): if fn.endswith('.yml') or fn.endswith('.yaml'): logger.debug('found manifest file: %s', fn) self.__parse_file(source_new, fn, bugs, blueprints, tools) logger.debug('parsed manifest file: %s', fn) for bug in bugs: self.__installation.bugs.add(bug) for blueprint in blueprints: self.__installation.build.add(blueprint) for tool in tools: self.__installation.tools.add(tool) contents = SourceContents([b.name for b in blueprints], [b.name for b in bugs], [t.name for t in tools]) self.__sources[source_new.name] = source_new self.__contents[source_new.name] = contents logger.info('loaded source: %s', source_new.name) </DeepExtract> else: logger.debug('no updates for source: %s', source_old.name) <DeepExtract> logger.info('saving registry to: %s', self.__registry_fn) d = [s.to_dict() for s in self] os.makedirs(self.__path, exist_ok=True) with open(self.__registry_fn, 'w') as f: yaml.dump(d, f, indent=2, default_flow_style=False) logger.info('saved registry to: %s', self.__registry_fn) </DeepExtract>
def update(self) -> None: """ Ensures that all remote sources are up-to-date. """ for source_old in self: if isinstance(source_old, RemoteSource): repo = git.Repo(source_old.location) origin = repo.remotes.origin origin.pull() sha = repo.head.object.hexsha version = repo.git.rev_parse(sha, short=8) if version != source_old.version: source_new = RemoteSource(source_old.name, source_old.location, source_old.url, version) logger.info('updated source: %s [%s -> %s]', source_old.name, source_old.version, source_new.version) logger.info('loading source %s at %s', source_new.name, source_new.location) if source_new.name in self.__sources: self.unload(source_new) bugs = [] blueprints = [] tools = [] glob_pattern = '{}/**/*.bugzoo.y*ml'.format(source_new.location) for fn in glob.iglob(glob_pattern, recursive=True): if fn.endswith('.yml') or fn.endswith('.yaml'): logger.debug('found manifest file: %s', fn) self.__parse_file(source_new, fn, bugs, blueprints, tools) logger.debug('parsed manifest file: %s', fn) for bug in bugs: self.__installation.bugs.add(bug) for blueprint in blueprints: self.__installation.build.add(blueprint) for tool in tools: self.__installation.tools.add(tool) contents = SourceContents([b.name for b in blueprints], [b.name for b in bugs], [t.name for t in tools]) self.__sources[source_new.name] = source_new self.__contents[source_new.name] = contents logger.info('loaded source: %s', source_new.name) else: logger.debug('no updates for source: %s', source_old.name) logger.info('saving registry to: %s', self.__registry_fn) d = [s.to_dict() for s in self] os.makedirs(self.__path, exist_ok=True) with open(self.__registry_fn, 'w') as f: yaml.dump(d, f, indent=2, default_flow_style=False) logger.info('saved registry to: %s', self.__registry_fn) </DeepExtract>
BugZoo
positive
def normalize_param(self, slf, args, kwargs): """this is where all the magic happens, this will try and find the param and put its value in kwargs if it has a default and stuff""" if self.is_kwarg: <DeepExtract> flags = self.flags name = self.name try: pdefault = self.normalize_default(flags.get('default', None)) prequired = flags['required'] dest_name = flags.get('dest', name) has_val = True (found_name, val) = self.find_kwarg(slf.request, self.names, prequired, pdefault, kwargs) if found_name: kwargs.pop(found_name) else: has_val = 'default' in flags if has_val: kwargs[dest_name] = self.normalize_val(slf.request, val) except ValueError as e: raise CallError(400, '{} failed with {}'.format(name, String(e))) kwargs = kwargs </DeepExtract> else: <DeepExtract> flags = self.flags index = self.index args = list(args) paction = flags['action'] if paction not in set(['store', 'store_false', 'store_true']): raise RuntimeError('unsupported positional param action {}'.format(paction)) if 'dest' in flags: logger.warn('dest is ignored in positional param') try: val = args.pop(index) except IndexError: if flags['required']: raise CallError(400, 'required positional param at index {} does not exist'.format(index)) else: val = self.normalize_default(flags.get('default', None)) try: val = self.normalize_val(slf.request, val) except ValueError as e: raise CallError(400, 'Positional arg {} failed with {}'.format(index, String(e))) args.insert(index, val) args = args </DeepExtract> return (slf, args, kwargs)
def normalize_param(self, slf, args, kwargs): """this is where all the magic happens, this will try and find the param and put its value in kwargs if it has a default and stuff""" if self.is_kwarg: flags = self.flags name = self.name try: pdefault = self.normalize_default(flags.get('default', None)) prequired = flags['required'] dest_name = flags.get('dest', name) has_val = True (found_name, val) = self.find_kwarg(slf.request, self.names, prequired, pdefault, kwargs) if found_name: kwargs.pop(found_name) else: has_val = 'default' in flags if has_val: kwargs[dest_name] = self.normalize_val(slf.request, val) except ValueError as e: raise CallError(400, '{} failed with {}'.format(name, String(e))) kwargs = kwargs else: flags = self.flags index = self.index args = list(args) paction = flags['action'] if paction not in set(['store', 'store_false', 'store_true']): raise RuntimeError('unsupported positional param action {}'.format(paction)) if 'dest' in flags: logger.warn('dest is ignored in positional param') try: val = args.pop(index) except IndexError: if flags['required']: raise CallError(400, 'required positional param at index {} does not exist'.format(index)) else: val = self.normalize_default(flags.get('default', None)) try: val = self.normalize_val(slf.request, val) except ValueError as e: raise CallError(400, 'Positional arg {} failed with {}'.format(index, String(e))) args.insert(index, val) args = args return (slf, args, kwargs)
endpoints
positive
def local_pairwise_distances(x, y, max_distance=9): """Computes pairwise squared l2 distances using a local search window. Optimized implementation using correlation_cost. Args: x: Float32 tensor of shape [height, width, feature_dim]. y: Float32 tensor of shape [height, width, feature_dim]. max_distance: Integer, the maximum distance in pixel coordinates per dimension which is considered to be in the search window. Returns: Float32 distances tensor of shape [height, width, (2 * max_distance + 1) ** 2]. """ if cfg.MODEL_LOCAL_DOWNSAMPLE: (ori_h, ori_w, _) = x.size() x = x.permute(2, 0, 1).unsqueeze(0) x = F.avg_pool2d(x, (2, 2), (2, 2)) y = y.permute(2, 0, 1).unsqueeze(0) y = F.avg_pool2d(y, (2, 2), (2, 2)) x = x.squeeze(0).permute(1, 2, 0) y = y.squeeze(0).permute(1, 2, 0) <DeepExtract> corr_op = SpatialCorrelationSampler(kernel_size=1, patch_size=2 * max_distance + 1, stride=1, dilation_patch=1, padding=0) xs = x.permute(2, 0, 1) xs = torch.unsqueeze(xs, 0) ys = y.permute(2, 0, 1) ys = torch.unsqueeze(ys, 0) corr = corr_op(xs, ys) (bs, _, _, hh, ww) = corr.size() corr = corr.view(bs, -1, hh, ww) corr = torch.squeeze(corr, 0) corr = corr.permute(1, 2, 0) corr = corr </DeepExtract> xs = torch.sum(x * x, 2, keepdim=True) ys = torch.sum(y * y, 2, keepdim=True) ones_ys = torch.ones_like(ys) <DeepExtract> corr_op = SpatialCorrelationSampler(kernel_size=1, patch_size=2 * max_distance + 1, stride=1, dilation_patch=1, padding=0) xs = ones_ys.permute(2, 0, 1) xs = torch.unsqueeze(xs, 0) ys = ys.permute(2, 0, 1) ys = torch.unsqueeze(ys, 0) corr = corr_op(xs, ys) (bs, _, _, hh, ww) = corr.size() corr = corr.view(bs, -1, hh, ww) corr = torch.squeeze(corr, 0) corr = corr.permute(1, 2, 0) ys = corr </DeepExtract> d = xs + ys - 2 * corr tmp = torch.zeros_like(d) boundary = torch.eq(cross_correlate(ones_ys, ones_ys, max_distance=max_distance), 0) d = torch.where(boundary, tmp.fill_(float('inf')), d) d = (torch.sigmoid(d) - 0.5) * 2 d = d.permute(2, 0, 1).unsqueeze(0) d = F.interpolate(d, size=(ori_h, ori_w), mode='bilinear', align_corners=True) d = d.squeeze(0).permute(1, 2, 0) else: <DeepExtract> corr_op = SpatialCorrelationSampler(kernel_size=1, patch_size=2 * max_distance + 1, stride=1, dilation_patch=1, padding=0) xs = x.permute(2, 0, 1) xs = torch.unsqueeze(xs, 0) ys = y.permute(2, 0, 1) ys = torch.unsqueeze(ys, 0) corr = corr_op(xs, ys) (bs, _, _, hh, ww) = corr.size() corr = corr.view(bs, -1, hh, ww) corr = torch.squeeze(corr, 0) corr = corr.permute(1, 2, 0) corr = corr </DeepExtract> xs = torch.sum(x * x, 2, keepdim=True) ys = torch.sum(y * y, 2, keepdim=True) ones_ys = torch.ones_like(ys) <DeepExtract> corr_op = SpatialCorrelationSampler(kernel_size=1, patch_size=2 * max_distance + 1, stride=1, dilation_patch=1, padding=0) xs = ones_ys.permute(2, 0, 1) xs = torch.unsqueeze(xs, 0) ys = ys.permute(2, 0, 1) ys = torch.unsqueeze(ys, 0) corr = corr_op(xs, ys) (bs, _, _, hh, ww) = corr.size() corr = corr.view(bs, -1, hh, ww) corr = torch.squeeze(corr, 0) corr = corr.permute(1, 2, 0) ys = corr </DeepExtract> d = xs + ys - 2 * corr tmp = torch.zeros_like(d) boundary = torch.eq(cross_correlate(ones_ys, ones_ys, max_distance=max_distance), 0) d = torch.where(boundary, tmp.fill_(float('inf')), d) return d
def local_pairwise_distances(x, y, max_distance=9): """Computes pairwise squared l2 distances using a local search window. Optimized implementation using correlation_cost. Args: x: Float32 tensor of shape [height, width, feature_dim]. y: Float32 tensor of shape [height, width, feature_dim]. max_distance: Integer, the maximum distance in pixel coordinates per dimension which is considered to be in the search window. Returns: Float32 distances tensor of shape [height, width, (2 * max_distance + 1) ** 2]. """ if cfg.MODEL_LOCAL_DOWNSAMPLE: (ori_h, ori_w, _) = x.size() x = x.permute(2, 0, 1).unsqueeze(0) x = F.avg_pool2d(x, (2, 2), (2, 2)) y = y.permute(2, 0, 1).unsqueeze(0) y = F.avg_pool2d(y, (2, 2), (2, 2)) x = x.squeeze(0).permute(1, 2, 0) y = y.squeeze(0).permute(1, 2, 0) corr_op = SpatialCorrelationSampler(kernel_size=1, patch_size=2 * max_distance + 1, stride=1, dilation_patch=1, padding=0) xs = x.permute(2, 0, 1) xs = torch.unsqueeze(xs, 0) ys = y.permute(2, 0, 1) ys = torch.unsqueeze(ys, 0) corr = corr_op(xs, ys) (bs, _, _, hh, ww) = corr.size() corr = corr.view(bs, -1, hh, ww) corr = torch.squeeze(corr, 0) corr = corr.permute(1, 2, 0) corr = corr xs = torch.sum(x * x, 2, keepdim=True) ys = torch.sum(y * y, 2, keepdim=True) ones_ys = torch.ones_like(ys) corr_op = SpatialCorrelationSampler(kernel_size=1, patch_size=2 * max_distance + 1, stride=1, dilation_patch=1, padding=0) xs = ones_ys.permute(2, 0, 1) xs = torch.unsqueeze(xs, 0) ys = ys.permute(2, 0, 1) ys = torch.unsqueeze(ys, 0) corr = corr_op(xs, ys) (bs, _, _, hh, ww) = corr.size() corr = corr.view(bs, -1, hh, ww) corr = torch.squeeze(corr, 0) corr = corr.permute(1, 2, 0) ys = corr d = xs + ys - 2 * corr tmp = torch.zeros_like(d) boundary = torch.eq(cross_correlate(ones_ys, ones_ys, max_distance=max_distance), 0) d = torch.where(boundary, tmp.fill_(float('inf')), d) d = (torch.sigmoid(d) - 0.5) * 2 d = d.permute(2, 0, 1).unsqueeze(0) d = F.interpolate(d, size=(ori_h, ori_w), mode='bilinear', align_corners=True) d = d.squeeze(0).permute(1, 2, 0) else: corr_op = SpatialCorrelationSampler(kernel_size=1, patch_size=2 * max_distance + 1, stride=1, dilation_patch=1, padding=0) xs = x.permute(2, 0, 1) xs = torch.unsqueeze(xs, 0) ys = y.permute(2, 0, 1) ys = torch.unsqueeze(ys, 0) corr = corr_op(xs, ys) (bs, _, _, hh, ww) = corr.size() corr = corr.view(bs, -1, hh, ww) corr = torch.squeeze(corr, 0) corr = corr.permute(1, 2, 0) corr = corr xs = torch.sum(x * x, 2, keepdim=True) ys = torch.sum(y * y, 2, keepdim=True) ones_ys = torch.ones_like(ys) corr_op = SpatialCorrelationSampler(kernel_size=1, patch_size=2 * max_distance + 1, stride=1, dilation_patch=1, padding=0) xs = ones_ys.permute(2, 0, 1) xs = torch.unsqueeze(xs, 0) ys = ys.permute(2, 0, 1) ys = torch.unsqueeze(ys, 0) corr = corr_op(xs, ys) (bs, _, _, hh, ww) = corr.size() corr = corr.view(bs, -1, hh, ww) corr = torch.squeeze(corr, 0) corr = corr.permute(1, 2, 0) ys = corr d = xs + ys - 2 * corr tmp = torch.zeros_like(d) boundary = torch.eq(cross_correlate(ones_ys, ones_ys, max_distance=max_distance), 0) d = torch.where(boundary, tmp.fill_(float('inf')), d) return d
CVPR2020_MANet
positive
def test_mount_nfs_tls_netns(mocker): <DeepExtract> popen_mock = MagicMock() popen_mock.communicate.return_value = (stdout, stderr) popen_mock.returncode = returncode mock = mocker.patch('subprocess.Popen', return_value=popen_mock) </DeepExtract> optimize_readahead_window_mock = mocker.patch('mount_efs.optimize_readahead_window') options = dict(DEFAULT_OPTIONS) options['tls'] = None options['netns'] = NETNS mount_efs.mount_nfs(_get_config(mount_nfs_command_retry='false'), DNS_NAME, '/', '/mnt', options) (args, _) = mock.call_args args = args[0] assert 'nsenter' == args[NETNS_NSENTER_ARG_IDX] assert '--net=' + NETNS == args[NETNS_PATH_ARG_IDX] assert '/sbin/mount.nfs4' == args[NFS_BIN_ARG_IDX + NETNS_NFS_OFFSET] assert DNS_NAME not in args[NFS_MOUNT_PATH_IDX + NETNS_NFS_OFFSET] assert '127.0.0.1' in args[NFS_MOUNT_PATH_IDX + NETNS_NFS_OFFSET] assert '/mnt' in args[NFS_MOUNT_POINT_IDX + NETNS_NFS_OFFSET] utils.assert_called_once(optimize_readahead_window_mock)
def test_mount_nfs_tls_netns(mocker): popen_mock = MagicMock() popen_mock.communicate.return_value = (stdout, stderr) popen_mock.returncode = returncode mock = mocker.patch('subprocess.Popen', return_value=popen_mock) optimize_readahead_window_mock = mocker.patch('mount_efs.optimize_readahead_window') options = dict(DEFAULT_OPTIONS) options['tls'] = None options['netns'] = NETNS mount_efs.mount_nfs(_get_config(mount_nfs_command_retry='false'), DNS_NAME, '/', '/mnt', options) (args, _) = mock.call_args args = args[0] assert 'nsenter' == args[NETNS_NSENTER_ARG_IDX] assert '--net=' + NETNS == args[NETNS_PATH_ARG_IDX] assert '/sbin/mount.nfs4' == args[NFS_BIN_ARG_IDX + NETNS_NFS_OFFSET] assert DNS_NAME not in args[NFS_MOUNT_PATH_IDX + NETNS_NFS_OFFSET] assert '127.0.0.1' in args[NFS_MOUNT_PATH_IDX + NETNS_NFS_OFFSET] assert '/mnt' in args[NFS_MOUNT_POINT_IDX + NETNS_NFS_OFFSET] utils.assert_called_once(optimize_readahead_window_mock)
efs-utils
positive
def update(key, E, P): (k, ko, faus) = key if p.conf_mult: sigma = mu[key] + p.conf_mult * stats.spread[key] * [[1], [-1]] lines_s[0].set_ydata(wrap(sigma[0, p.dims])) lines_s[1].set_ydata(wrap(sigma[1, p.dims])) line_mu.set_ydata(wrap(mu[key][p.dims])) else: for (n, line) in enumerate(lines_E): line.set_ydata(wrap(E[n, p.dims])) <DeepExtract> (k, ko, faus) = key if ko is None: return if faus == 'f': return if not hasattr(stats, 'w'): return w = stats.w[key] alpha = (w / w.max()).clip(0.1, 0.4) for (line, a) in zip(lines_E, alpha): line.set_alpha(a) if scatters is not None: colors = scatters.get_facecolor()[:, :3] if len(colors) == 1: colors = colors.repeat(len(w), axis=0) scatters.set_color(np.hstack([colors, alpha[:, None]])) </DeepExtract> line_x.set_ydata(wrap(xx[k, p.dims])) text_t.set_text(format_time(k, ko, stats.HMM.tseq.tt[k])) if 'f' in faus: if p.obs_inds is not None: line_y.set_ydata(yy[ko]) line_y.set_zorder(5) line_y.set_visible(True) if 'u' in faus: if p.obs_inds is not None: line_y.set_visible(False) return
def update(key, E, P): (k, ko, faus) = key if p.conf_mult: sigma = mu[key] + p.conf_mult * stats.spread[key] * [[1], [-1]] lines_s[0].set_ydata(wrap(sigma[0, p.dims])) lines_s[1].set_ydata(wrap(sigma[1, p.dims])) line_mu.set_ydata(wrap(mu[key][p.dims])) else: for (n, line) in enumerate(lines_E): line.set_ydata(wrap(E[n, p.dims])) (k, ko, faus) = key if ko is None: return if faus == 'f': return if not hasattr(stats, 'w'): return w = stats.w[key] alpha = (w / w.max()).clip(0.1, 0.4) for (line, a) in zip(lines_E, alpha): line.set_alpha(a) if scatters is not None: colors = scatters.get_facecolor()[:, :3] if len(colors) == 1: colors = colors.repeat(len(w), axis=0) scatters.set_color(np.hstack([colors, alpha[:, None]])) line_x.set_ydata(wrap(xx[k, p.dims])) text_t.set_text(format_time(k, ko, stats.HMM.tseq.tt[k])) if 'f' in faus: if p.obs_inds is not None: line_y.set_ydata(yy[ko]) line_y.set_zorder(5) line_y.set_visible(True) if 'u' in faus: if p.obs_inds is not None: line_y.set_visible(False) return
DAPPER
positive
def get_usr_passwd(decrypt_file): """ Read the decrypted file and returns the username and password Input: (str) decrypted file Output:(str) username Output:(str) password """ try: with open(decrypt_file, 'r') as f: arr = f.read().split(' ') except IOError as e: <DeepExtract> log('ERROR: Failed to open file {0}: {1}.'.format(decrypt_file, e.strerror) + '\n') if verbose >= 0: print('ERROR: Failed to open file {0}: {1}.'.format(decrypt_file, e.strerror)) </DeepExtract> sys.exit(3) return arr
def get_usr_passwd(decrypt_file): """ Read the decrypted file and returns the username and password Input: (str) decrypted file Output:(str) username Output:(str) password """ try: with open(decrypt_file, 'r') as f: arr = f.read().split(' ') except IOError as e: log('ERROR: Failed to open file {0}: {1}.'.format(decrypt_file, e.strerror) + '\n') if verbose >= 0: print('ERROR: Failed to open file {0}: {1}.'.format(decrypt_file, e.strerror)) sys.exit(3) return arr
ansible-power-aix
positive
def test_should_execute_build_dags_command(self): <DeepExtract> self.prj.project_dir = self.prj.project_dir or TEST_PROJECT_PATH mkdir(self.prj.project_dir / '.dags') (self.prj.project_dir / '.dags' / 'leftover').touch() </DeepExtract> clear_image_leftovers(self.prj) clear_package_leftovers(self.prj) <DeepExtract> return self.subprocess_run('python setup.py build_project --build-dags', text=True).stdout </DeepExtract> self.assertFalse(dags_leftovers_exist(TEST_PROJECT_PATH)) self.assertTrue(dags_contain(TEST_PROJECT_PATH, repr(datetime.datetime.now().replace(hour=0, minute=0, second=0, microsecond=0) - timedelta(hours=24)))) <DeepExtract> return self.subprocess_run(['python', 'setup.py', 'build_project', '--build-dags', '--start-time', '2020-01-02 00:00:00'], text=True).stdout </DeepExtract> self.assertTrue(dags_contain(TEST_PROJECT_PATH, 'datetime.datetime(2020, 1, 1, 0, 0)')) <DeepExtract> return self.subprocess_run('python setup.py build_project --build-dags --workflow workflow1', text=True).stdout </DeepExtract> self.assertTrue(self.single_dag_for_workflow_exists('workflow1'))
def test_should_execute_build_dags_command(self): self.prj.project_dir = self.prj.project_dir or TEST_PROJECT_PATH mkdir(self.prj.project_dir / '.dags') (self.prj.project_dir / '.dags' / 'leftover').touch() clear_image_leftovers(self.prj) clear_package_leftovers(self.prj) return self.subprocess_run('python setup.py build_project --build-dags', text=True).stdout self.assertFalse(dags_leftovers_exist(TEST_PROJECT_PATH)) self.assertTrue(dags_contain(TEST_PROJECT_PATH, repr(datetime.datetime.now().replace(hour=0, minute=0, second=0, microsecond=0) - timedelta(hours=24)))) return self.subprocess_run(['python', 'setup.py', 'build_project', '--build-dags', '--start-time', '2020-01-02 00:00:00'], text=True).stdout self.assertTrue(dags_contain(TEST_PROJECT_PATH, 'datetime.datetime(2020, 1, 1, 0, 0)')) return self.subprocess_run('python setup.py build_project --build-dags --workflow workflow1', text=True).stdout self.assertTrue(self.single_dag_for_workflow_exists('workflow1'))
bigflow
positive
def resample_random_block(self, n_perturbations: int): self.keep_heuristic = 'WeightOnly' if self.keep_heuristic == 'WeightOnly': sorted_idx = torch.argsort(self.perturbed_edge_weight) idx_keep = (self.perturbed_edge_weight <= self.eps).sum().long() if idx_keep < sorted_idx.size(0) // 2: idx_keep = sorted_idx.size(0) // 2 else: raise NotImplementedError('Only keep_heuristic=`WeightOnly` supported') sorted_idx = sorted_idx[idx_keep:] self.current_search_space = self.current_search_space[sorted_idx] self.modified_edge_index = self.modified_edge_index[:, sorted_idx] self.perturbed_edge_weight = self.perturbed_edge_weight[sorted_idx] for i in range(self.max_final_samples): n_edges_resample = self.search_space_size - self.current_search_space.size(0) lin_index = torch.randint(self.n_possible_edges, (n_edges_resample,), device=self.device) (self.current_search_space, unique_idx) = torch.unique(torch.cat((self.current_search_space, lin_index)), sorted=True, return_inverse=True) if self.make_undirected: <DeepExtract> row_idx = (self.n - 2 - torch.floor(torch.sqrt(-8 * self.current_search_space.double() + 4 * self.n * (self.n - 1) - 7) / 2.0 - 0.5)).long() col_idx = self.current_search_space + row_idx + 1 - self.n * (self.n - 1) // 2 + (self.n - row_idx) * (self.n - row_idx - 1) // 2 self.modified_edge_index = torch.stack((row_idx, col_idx)) </DeepExtract> else: <DeepExtract> row_idx = self.current_search_space // self.n col_idx = self.current_search_space % self.n self.modified_edge_index = torch.stack((row_idx, col_idx)) </DeepExtract> perturbed_edge_weight_old = self.perturbed_edge_weight.clone() self.perturbed_edge_weight = torch.full_like(self.current_search_space, self.eps, dtype=torch.float32) self.perturbed_edge_weight[unique_idx[:perturbed_edge_weight_old.size(0)]] = perturbed_edge_weight_old if not self.make_undirected: is_not_self_loop = self.modified_edge_index[0] != self.modified_edge_index[1] self.current_search_space = self.current_search_space[is_not_self_loop] self.modified_edge_index = self.modified_edge_index[:, is_not_self_loop] self.perturbed_edge_weight = self.perturbed_edge_weight[is_not_self_loop] if self.current_search_space.size(0) > n_perturbations: return raise RuntimeError('Sampling random block was not successfull. Please decrease `n_perturbations`.')
def resample_random_block(self, n_perturbations: int): self.keep_heuristic = 'WeightOnly' if self.keep_heuristic == 'WeightOnly': sorted_idx = torch.argsort(self.perturbed_edge_weight) idx_keep = (self.perturbed_edge_weight <= self.eps).sum().long() if idx_keep < sorted_idx.size(0) // 2: idx_keep = sorted_idx.size(0) // 2 else: raise NotImplementedError('Only keep_heuristic=`WeightOnly` supported') sorted_idx = sorted_idx[idx_keep:] self.current_search_space = self.current_search_space[sorted_idx] self.modified_edge_index = self.modified_edge_index[:, sorted_idx] self.perturbed_edge_weight = self.perturbed_edge_weight[sorted_idx] for i in range(self.max_final_samples): n_edges_resample = self.search_space_size - self.current_search_space.size(0) lin_index = torch.randint(self.n_possible_edges, (n_edges_resample,), device=self.device) (self.current_search_space, unique_idx) = torch.unique(torch.cat((self.current_search_space, lin_index)), sorted=True, return_inverse=True) if self.make_undirected: row_idx = (self.n - 2 - torch.floor(torch.sqrt(-8 * self.current_search_space.double() + 4 * self.n * (self.n - 1) - 7) / 2.0 - 0.5)).long() col_idx = self.current_search_space + row_idx + 1 - self.n * (self.n - 1) // 2 + (self.n - row_idx) * (self.n - row_idx - 1) // 2 self.modified_edge_index = torch.stack((row_idx, col_idx)) else: row_idx = self.current_search_space // self.n col_idx = self.current_search_space % self.n self.modified_edge_index = torch.stack((row_idx, col_idx)) perturbed_edge_weight_old = self.perturbed_edge_weight.clone() self.perturbed_edge_weight = torch.full_like(self.current_search_space, self.eps, dtype=torch.float32) self.perturbed_edge_weight[unique_idx[:perturbed_edge_weight_old.size(0)]] = perturbed_edge_weight_old if not self.make_undirected: is_not_self_loop = self.modified_edge_index[0] != self.modified_edge_index[1] self.current_search_space = self.current_search_space[is_not_self_loop] self.modified_edge_index = self.modified_edge_index[:, is_not_self_loop] self.perturbed_edge_weight = self.perturbed_edge_weight[is_not_self_loop] if self.current_search_space.size(0) > n_perturbations: return raise RuntimeError('Sampling random block was not successfull. Please decrease `n_perturbations`.')
DeepRobust
positive
def _execute(root_task): """Implements task execution loop. The postcondition of this method is that all tasks in the dependency tree of root_task that aren't blocked on batch items waiting to be flushed should be executed until they are (or until they're computed). This is done by running a depth-first search on the dependency tree. :param root_task: root of the dependency tree :return: ``None`` """ init_num_tasks = len(self._tasks) self._tasks.append(root_task) while len(self._tasks) > init_num_tasks: if len(self._tasks) > _debug_options.MAX_TASK_STACK_SIZE: <DeepExtract> _state.reset() </DeepExtract> debug.dump(self) raise RuntimeError('Number of scheduled tasks exceeded maximum threshold.') task = self._tasks[-1] if _debug_options.DUMP_SCHEDULER_STATE: <DeepExtract> current_time = time.time() if current_time - self._last_dump_time < _debug_options.SCHEDULER_STATE_DUMP_INTERVAL: return self._last_dump_time = current_time debug.write('\n--- Scheduler state dump: --------------------------------------------') try: self.dump() if last_task is not None: debug.write('Last task: %s' % debug.str(last_task), 1) finally: debug.write('----------------------------------------------------------------------\n') stdout.flush() stderr.flush() </DeepExtract> if task.is_computed(): self._tasks.pop() elif isinstance(task, AsyncTask): <DeepExtract> if task.is_blocked(): if task._dependencies_scheduled: if _debug_options.DUMP_CONTINUE_TASK: debug.write('@async: skipping %s' % debug.str(task)) task._dependencies_scheduled = False task._pause_contexts() self._tasks.pop() else: task._dependencies_scheduled = True task._resume_contexts() for dependency in task._dependencies: if not dependency.is_computed(): if _debug_options.DUMP_SCHEDULE_TASK: debug.write('@async: scheduling task %s' % debug.str(dependency)) if _debug_options.DUMP_DEPENDENCIES: debug.write('@async: +dependency: %s needs %s' % (debug.str(task), debug.str(dependency))) self._tasks.append(dependency) else: self._continue_with_task(task) </DeepExtract> elif isinstance(task, batching.BatchItemBase): <DeepExtract> if task.batch.is_flushed(): if _debug_options.DUMP_SCHEDULE_BATCH: debug.write("@async: can't schedule flushed batch %s" % debug.str(task.batch)) return False if _debug_options.DUMP_SCHEDULE_BATCH and task.batch not in self._batches: debug.write('@async: scheduling batch %s' % debug.str(task.batch)) self._batches.add(task.batch) return True </DeepExtract> self._tasks.pop() else: task._compute() self._tasks.pop()
def _execute(root_task): """Implements task execution loop. The postcondition of this method is that all tasks in the dependency tree of root_task that aren't blocked on batch items waiting to be flushed should be executed until they are (or until they're computed). This is done by running a depth-first search on the dependency tree. :param root_task: root of the dependency tree :return: ``None`` """ init_num_tasks = len(self._tasks) self._tasks.append(root_task) while len(self._tasks) > init_num_tasks: if len(self._tasks) > _debug_options.MAX_TASK_STACK_SIZE: _state.reset() debug.dump(self) raise RuntimeError('Number of scheduled tasks exceeded maximum threshold.') task = self._tasks[-1] if _debug_options.DUMP_SCHEDULER_STATE: current_time = time.time() if current_time - self._last_dump_time < _debug_options.SCHEDULER_STATE_DUMP_INTERVAL: return self._last_dump_time = current_time debug.write('\n--- Scheduler state dump: --------------------------------------------') try: self.dump() if last_task is not None: debug.write('Last task: %s' % debug.str(last_task), 1) finally: debug.write('----------------------------------------------------------------------\n') stdout.flush() stderr.flush() if task.is_computed(): self._tasks.pop() elif isinstance(task, AsyncTask): if task.is_blocked(): if task._dependencies_scheduled: if _debug_options.DUMP_CONTINUE_TASK: debug.write('@async: skipping %s' % debug.str(task)) task._dependencies_scheduled = False task._pause_contexts() self._tasks.pop() else: task._dependencies_scheduled = True task._resume_contexts() for dependency in task._dependencies: if not dependency.is_computed(): if _debug_options.DUMP_SCHEDULE_TASK: debug.write('@async: scheduling task %s' % debug.str(dependency)) if _debug_options.DUMP_DEPENDENCIES: debug.write('@async: +dependency: %s needs %s' % (debug.str(task), debug.str(dependency))) self._tasks.append(dependency) else: self._continue_with_task(task) elif isinstance(task, batching.BatchItemBase): if task.batch.is_flushed(): if _debug_options.DUMP_SCHEDULE_BATCH: debug.write("@async: can't schedule flushed batch %s" % debug.str(task.batch)) return False if _debug_options.DUMP_SCHEDULE_BATCH and task.batch not in self._batches: debug.write('@async: scheduling batch %s' % debug.str(task.batch)) self._batches.add(task.batch) return True self._tasks.pop() else: task._compute() self._tasks.pop()
asynq
positive
def _statistics(table, convergence=False, ratios=False, participation=False): """ Common implementation to :func:`comment_statistics` and :func:`user_statistics` functions. """ col_names = {1: 'agree', -1: 'disagree', 0: 'skipped'} for col in col_names: if col not in table: table[col] = 0 table.columns = [col_names[k] for k in table.columns] table = table[['agree', 'disagree', 'skipped']].copy() if convergence: <DeepExtract> e = 1e-50 table['convergence'] = abs(table[agree] - table[disagree]) / (table[agree] + table[disagree] + e) </DeepExtract> if participation is not False: <DeepExtract> e = 1e-50 table['participation'] = (table[agree] + table[disagree] + table[skipped]) / (participation + e) </DeepExtract> if ratios: e = 1e-50 data = table[['agree', 'disagree', 'skipped']] norm = data.sum(axis=1).values norm = norm[:, None][:, [0, 0, 0]] data /= norm + e table[['agree', 'disagree', 'skipped']] = data return table
def _statistics(table, convergence=False, ratios=False, participation=False): """ Common implementation to :func:`comment_statistics` and :func:`user_statistics` functions. """ col_names = {1: 'agree', -1: 'disagree', 0: 'skipped'} for col in col_names: if col not in table: table[col] = 0 table.columns = [col_names[k] for k in table.columns] table = table[['agree', 'disagree', 'skipped']].copy() if convergence: e = 1e-50 table['convergence'] = abs(table[agree] - table[disagree]) / (table[agree] + table[disagree] + e) if participation is not False: e = 1e-50 table['participation'] = (table[agree] + table[disagree] + table[skipped]) / (participation + e) if ratios: e = 1e-50 data = table[['agree', 'disagree', 'skipped']] norm = data.sum(axis=1).values norm = norm[:, None][:, [0, 0, 0]] data /= norm + e table[['agree', 'disagree', 'skipped']] = data return table
ej-server
positive
def get_observation(self, portfolio_vector=False): """ Return observation df with prices and asset amounts :param portfolio_vector: bool: whether to include or not asset amounts :return: pandas DataFrame: """ try: <DeepExtract> while True: try: obs_list = [] keys = [] is_bounded = True if not end: end = self.timestamp is_bounded = False if not start: start = end - timedelta(minutes=self.period * self.obs_steps) index = pd.date_range(start=start, end=end, freq='%dT' % self.period).ceil('%dT' % self.period)[-self.obs_steps:] is_bounded = False else: index = pd.date_range(start=start, end=end, freq='%dT' % self.period).ceil('%dT' % self.period) if portfolio_vector: port_vec = self.get_sampled_portfolio(index) if port_vec.shape[0] == 0: port_vec = self.get_sampled_portfolio().iloc[-1:] port_vec.index = [index[0]] last_balance = self.get_balance() port_vec.at[port_vec.index[-1], list(last_balance.keys())] = list(last_balance.values()) for pair in self.pairs: keys.append(pair) history = self.get_ohlc(pair, index) history = pd.concat([history, port_vec[pair.split('_')[1]]], axis=1) obs_list.append(history) keys.append(self._fiat) obs_list.append(port_vec[self._fiat]) obs = pd.concat(obs_list, keys=keys, axis=1) cols_to_bfill = [col for col in zip(self.pairs, self.symbols)] + [(self._fiat, self._fiat)] obs = obs.fillna(obs[cols_to_bfill].ffill().bfill()) if not is_bounded: assert obs.shape[0] >= self.obs_steps, 'Dataframe is to small. Shape: %s' % str(obs.shape) self.obs_df = obs.apply(convert_to.decimal, raw=True) else: for pair in self.pairs: keys.append(pair) history = self.get_ohlc(pair, index) obs_list.append(history) obs = pd.concat(obs_list, keys=keys, axis=1) if not is_bounded: assert obs.shape[0] >= self.obs_steps, 'Dataframe is to small. Shape: %s' % str(obs.shape) self.obs_df = obs.apply(convert_to.decimal, raw=True) except MaxRetriesException: Logger.error(TradingEnvironment.get_history, 'Retries exhausted. Waiting for connection...') except Exception as e: Logger.error(TradingEnvironment.get_history, self.parse_error(e)) raise e </DeepExtract> return self.obs_df except Exception as e: Logger.error(TrainingEnvironment.get_observation, self.parse_error(e)) raise e
def get_observation(self, portfolio_vector=False): """ Return observation df with prices and asset amounts :param portfolio_vector: bool: whether to include or not asset amounts :return: pandas DataFrame: """ try: while True: try: obs_list = [] keys = [] is_bounded = True if not end: end = self.timestamp is_bounded = False if not start: start = end - timedelta(minutes=self.period * self.obs_steps) index = pd.date_range(start=start, end=end, freq='%dT' % self.period).ceil('%dT' % self.period)[-self.obs_steps:] is_bounded = False else: index = pd.date_range(start=start, end=end, freq='%dT' % self.period).ceil('%dT' % self.period) if portfolio_vector: port_vec = self.get_sampled_portfolio(index) if port_vec.shape[0] == 0: port_vec = self.get_sampled_portfolio().iloc[-1:] port_vec.index = [index[0]] last_balance = self.get_balance() port_vec.at[port_vec.index[-1], list(last_balance.keys())] = list(last_balance.values()) for pair in self.pairs: keys.append(pair) history = self.get_ohlc(pair, index) history = pd.concat([history, port_vec[pair.split('_')[1]]], axis=1) obs_list.append(history) keys.append(self._fiat) obs_list.append(port_vec[self._fiat]) obs = pd.concat(obs_list, keys=keys, axis=1) cols_to_bfill = [col for col in zip(self.pairs, self.symbols)] + [(self._fiat, self._fiat)] obs = obs.fillna(obs[cols_to_bfill].ffill().bfill()) if not is_bounded: assert obs.shape[0] >= self.obs_steps, 'Dataframe is to small. Shape: %s' % str(obs.shape) self.obs_df = obs.apply(convert_to.decimal, raw=True) else: for pair in self.pairs: keys.append(pair) history = self.get_ohlc(pair, index) obs_list.append(history) obs = pd.concat(obs_list, keys=keys, axis=1) if not is_bounded: assert obs.shape[0] >= self.obs_steps, 'Dataframe is to small. Shape: %s' % str(obs.shape) self.obs_df = obs.apply(convert_to.decimal, raw=True) except MaxRetriesException: Logger.error(TradingEnvironment.get_history, 'Retries exhausted. Waiting for connection...') except Exception as e: Logger.error(TradingEnvironment.get_history, self.parse_error(e)) raise e return self.obs_df except Exception as e: Logger.error(TrainingEnvironment.get_observation, self.parse_error(e)) raise e
cryptotrader
positive
def _handle_scale_action(script, spec, xclip, ident): """ Handles scale actions or dispatches them to the appropriate handler. """ if script['scl']: scl = script['scl'] if len(spec) == 1: <DeepExtract> if type(xclip) is Live.Clip.Clip: comp = script['scl'] xclip.name = '%s %s SCL %s %s %s %s %s' % (ident, script['top'].script_name, comp._scales.page_index, comp.tonic, comp.in_key, comp._offsets.page_index, comp.orientation_is_horizontal) </DeepExtract> return elif len(spec) >= 5: <DeepExtract> if len(spec) >= 5: scale = parse_int(spec[1], None, 0, scl._scales.num_pages - 1) if scale is not None: scl._scales.set_page_index(scale) tonic = parse_int(spec[2], None, 0, scl._tonics.num_pages - 1) if tonic is not None: scl._tonics.set_page_index(tonic) scl._in_key = spec[3].strip() == 'TRUE' if len(spec) == 5: self._toggle_scale_offset(scl, ['ON'] if spec[4].strip() == 'TRUE' else ['OFF']) else: offset = parse_int(spec[4], None, 0, scl._offsets.num_pages - 1) if offset is not None: scl._offsets.set_page_index(offset) scl._orientation_is_horizontal = spec[5].strip() == 'TRUE' </DeepExtract> elif spec[1] == 'INKEY': <DeepExtract> if spec[2:]: setattr(scl, '_in_key', spec[2:][0].strip() == 'ON') else: setattr(scl, '_in_key', not getattr(scl, '_in_key')) </DeepExtract> elif spec[1] == 'HORZ': <DeepExtract> if spec[2:]: setattr(scl, '_orientation_is_horizontal', spec[2:][0].strip() == 'ON') else: setattr(scl, '_orientation_is_horizontal', not getattr(scl, '_orientation_is_horizontal')) </DeepExtract> elif spec[1] == 'ROOT': <DeepExtract> if spec[2:]: spec[2:] = spec[2:][0].strip() current_v = getattr(scl._tonics, '_page_index') new_v = current_v if spec[2:].isdigit(): new_v = parse_int(spec[2:], current_v + 1, 0 + 1, scl._tonics.num_pages - 1 + 1) - 1 elif spec[2:] == '>': new_v = current_v + 1 if new_v > scl._tonics.num_pages - 1: new_v = 0 elif spec[2:] == '<': new_v = current_v - 1 if new_v < 0: new_v = scl._tonics.num_pages - 1 elif NOTE_NAMES and spec[2:] in NOTE_NAMES: new_v = NOTE_NAMES.index(spec[2:]) if 'set_page_index': getattr(scl._tonics, 'set_page_index')(new_v) else: setattr(scl._tonics, '_page_index', new_v) </DeepExtract> elif spec[1] == 'TYPE': <DeepExtract> if [' '.join(spec[2:])]: [' '.join(spec[2:])] = [' '.join(spec[2:])][0].strip() current_v = getattr(scl._scales, '_page_index') new_v = current_v if [' '.join(spec[2:])].isdigit(): new_v = parse_int([' '.join(spec[2:])], current_v + 1, 0 + 1, scl._scales.num_pages - 1 + 1) - 1 elif [' '.join(spec[2:])] == '>': new_v = current_v + 1 if new_v > scl._scales.num_pages - 1: new_v = 0 elif [' '.join(spec[2:])] == '<': new_v = current_v - 1 if new_v < 0: new_v = scl._scales.num_pages - 1 elif S_TYPES and [' '.join(spec[2:])] in S_TYPES: new_v = S_TYPES.index([' '.join(spec[2:])]) if 'set_page_index': getattr(scl._scales, 'set_page_index')(new_v) else: setattr(scl._scales, '_page_index', new_v) </DeepExtract> elif spec[1] == 'OFFSET': <DeepExtract> if spec[2:]: spec[2:] = spec[2:][0].strip() current_v = getattr(scl._offsets, '_page_index') new_v = current_v if spec[2:].isdigit(): new_v = parse_int(spec[2:], current_v + 1, 0 + 1, scl._offsets.num_pages - 1 + 1) - 1 elif spec[2:] == '>': new_v = current_v + 1 if new_v > scl._offsets.num_pages - 1: new_v = 0 elif spec[2:] == '<': new_v = current_v - 1 if new_v < 0: new_v = scl._offsets.num_pages - 1 elif O_NAMES and spec[2:] in O_NAMES: new_v = O_NAMES.index(spec[2:]) if 'set_page_index': getattr(scl._offsets, 'set_page_index')(new_v) else: setattr(scl._offsets, '_page_index', new_v) 
</DeepExtract> elif spec[1] == 'SEQ': <DeepExtract> offset = FOURTHS_OFFSET if spec[2:] and spec[2:][0].strip() == 'ON' or (not spec[2:] and scl._offsets.page_index): offset = SEQ_OFFSET scl._offsets.page_index = offset </DeepExtract> scl._notify_scale_settings()
def _handle_scale_action(script, spec, xclip, ident): """ Handles scale actions or dispatches them to the appropriate handler. """ if script['scl']: scl = script['scl'] if len(spec) == 1: if type(xclip) is Live.Clip.Clip: comp = script['scl'] xclip.name = '%s %s SCL %s %s %s %s %s' % (ident, script['top'].script_name, comp._scales.page_index, comp.tonic, comp.in_key, comp._offsets.page_index, comp.orientation_is_horizontal) return elif len(spec) >= 5: if len(spec) >= 5: scale = parse_int(spec[1], None, 0, scl._scales.num_pages - 1) if scale is not None: scl._scales.set_page_index(scale) tonic = parse_int(spec[2], None, 0, scl._tonics.num_pages - 1) if tonic is not None: scl._tonics.set_page_index(tonic) scl._in_key = spec[3].strip() == 'TRUE' if len(spec) == 5: self._toggle_scale_offset(scl, ['ON'] if spec[4].strip() == 'TRUE' else ['OFF']) else: offset = parse_int(spec[4], None, 0, scl._offsets.num_pages - 1) if offset is not None: scl._offsets.set_page_index(offset) scl._orientation_is_horizontal = spec[5].strip() == 'TRUE' elif spec[1] == 'INKEY': if spec[2:]: setattr(scl, '_in_key', spec[2:][0].strip() == 'ON') else: setattr(scl, '_in_key', not getattr(scl, '_in_key')) elif spec[1] == 'HORZ': if spec[2:]: setattr(scl, '_orientation_is_horizontal', spec[2:][0].strip() == 'ON') else: setattr(scl, '_orientation_is_horizontal', not getattr(scl, '_orientation_is_horizontal')) elif spec[1] == 'ROOT': if spec[2:]: spec[2:] = spec[2:][0].strip() current_v = getattr(scl._tonics, '_page_index') new_v = current_v if spec[2:].isdigit(): new_v = parse_int(spec[2:], current_v + 1, 0 + 1, scl._tonics.num_pages - 1 + 1) - 1 elif spec[2:] == '>': new_v = current_v + 1 if new_v > scl._tonics.num_pages - 1: new_v = 0 elif spec[2:] == '<': new_v = current_v - 1 if new_v < 0: new_v = scl._tonics.num_pages - 1 elif NOTE_NAMES and spec[2:] in NOTE_NAMES: new_v = NOTE_NAMES.index(spec[2:]) if 'set_page_index': getattr(scl._tonics, 'set_page_index')(new_v) else: setattr(scl._tonics, '_page_index', new_v) elif spec[1] == 'TYPE': if [' '.join(spec[2:])]: [' '.join(spec[2:])] = [' '.join(spec[2:])][0].strip() current_v = getattr(scl._scales, '_page_index') new_v = current_v if [' '.join(spec[2:])].isdigit(): new_v = parse_int([' '.join(spec[2:])], current_v + 1, 0 + 1, scl._scales.num_pages - 1 + 1) - 1 elif [' '.join(spec[2:])] == '>': new_v = current_v + 1 if new_v > scl._scales.num_pages - 1: new_v = 0 elif [' '.join(spec[2:])] == '<': new_v = current_v - 1 if new_v < 0: new_v = scl._scales.num_pages - 1 elif S_TYPES and [' '.join(spec[2:])] in S_TYPES: new_v = S_TYPES.index([' '.join(spec[2:])]) if 'set_page_index': getattr(scl._scales, 'set_page_index')(new_v) else: setattr(scl._scales, '_page_index', new_v) elif spec[1] == 'OFFSET': if spec[2:]: spec[2:] = spec[2:][0].strip() current_v = getattr(scl._offsets, '_page_index') new_v = current_v if spec[2:].isdigit(): new_v = parse_int(spec[2:], current_v + 1, 0 + 1, scl._offsets.num_pages - 1 + 1) - 1 elif spec[2:] == '>': new_v = current_v + 1 if new_v > scl._offsets.num_pages - 1: new_v = 0 elif spec[2:] == '<': new_v = current_v - 1 if new_v < 0: new_v = scl._offsets.num_pages - 1 elif O_NAMES and spec[2:] in O_NAMES: new_v = O_NAMES.index(spec[2:]) if 'set_page_index': getattr(scl._offsets, 'set_page_index')(new_v) else: setattr(scl._offsets, '_page_index', new_v) elif spec[1] == 'SEQ': offset = FOURTHS_OFFSET if spec[2:] and spec[2:][0].strip() == 'ON' or (not spec[2:] and scl._offsets.page_index): offset = SEQ_OFFSET scl._offsets.page_index = offset 
scl._notify_scale_settings()
clyphx-live10
positive
def mouseReleaseEvent(self, event): self.mouseButtons = event.buttons() ctrlPressed = event.modifiers() & QtCore.Qt.ControlModifier shiftPressed = event.modifiers() & QtCore.Qt.ShiftModifier altPressed = event.modifiers() & QtCore.Qt.AltModifier if event.button() == QtCore.Qt.LeftButton: if not self.config.correctionMode: if ctrlPressed: if shiftPressed and self.drawPolyClosed: <DeepExtract> if not self.annotation: return if self.mouseObj < 0: return obj = self.annotation.objects[self.mouseObj] intersection = self.drawPoly.intersected(self.getPolygon(obj)) if not intersection.isEmpty(): self.drawPoly = intersection (label, ok) = self.getLabelFromUser(obj.label) if ok and label: self.appendObject(label, intersection) self.clearPolygon() self.statusBar().showMessage(self.defaultStatusbar) self.deselectAllObjects() self.update() </DeepExtract> if altPressed and self.drawPolyClosed: <DeepExtract> if not self.annotation: return if self.mouseObj < 0: return obj = self.annotation.objects[self.mouseObj] union = self.drawPoly.united(self.getPolygon(obj)) if not union.isEmpty(): self.drawPoly = union (label, ok) = self.getLabelFromUser(obj.label) if ok and label: self.appendObject(label, union) self.clearPolygon() self.statusBar().showMessage(self.defaultStatusbar) self.deselectAllObjects() self.update() </DeepExtract> else: <DeepExtract> if self.mouseObj < 0: self.deselectObject() return if not self.mouseObj in self.selObjs: self.selObjs.append(self.mouseObj) else: self.deselectObject() self.initPolygonFromObject() if self.selObjs: for act in self.actSelObj + self.actPolyOrSelObj: act.setEnabled(True) for act in self.singleActSelObj: act.setEnabled(len(self.selObjs) == 1) self.infoOnSelectedObject() </DeepExtract> elif not self.drawPolyClosed: if self.ptClosesPoly(): <DeepExtract> self.drawPolyClosed = True for act in self.actClosedPoly: act.setEnabled(True) message = 'What should I do with the polygon? Press n to create a new object, press Ctrl + Left Click to intersect with another object' self.statusBar().showMessage(message) </DeepExtract> elif self.mousePosScaled is not None: if not self.drawPolyClosed and self.drawPoly.isEmpty(): self.mousePosOnZoom = self.mousePos <DeepExtract> self.drawPoly.append(self.mousePosScaled) for act in self.actPolyOrSelObj: act.setEnabled(True) </DeepExtract> elif self.drawPolyClosed: self.draggedPt = -1 elif self.in_progress_bbox is not None: if self.in_progress_bbox.width() > 20: description = QtGui.QInputDialog.getText(self, 'Error Description', 'Please describe the labeling error briefly.') if description[1] and description[0]: self.corrections.append(CorrectionBox(self.in_progress_bbox, annotation=description[0])) self.corrections[self.selected_correction].unselect() self.selected_correction = len(self.corrections) - 1 self.corrections[self.selected_correction].select() <DeepExtract> if not 'Added correction.': return self.changes.append('Added correction.') for act in self.actChanges: act.setEnabled(True) </DeepExtract> self.in_progress_annotation = None self.in_progress_bbox = None elif event.button() == QtCore.Qt.RightButton: <DeepExtract> self.config.zoom = not self.config.zoom if self.config.zoom: self.mousePosOnZoom = self.mousePos self.updateMousePos(event.posF()) else: self.updateMousePos(event.posF()) if not self.config.correctionMode and self.draggedPt >= 0: self.drawPoly.replace(self.draggedPt, self.mousePosScaled) </DeepExtract> self.update()
def mouseReleaseEvent(self, event): self.mouseButtons = event.buttons() ctrlPressed = event.modifiers() & QtCore.Qt.ControlModifier shiftPressed = event.modifiers() & QtCore.Qt.ShiftModifier altPressed = event.modifiers() & QtCore.Qt.AltModifier if event.button() == QtCore.Qt.LeftButton: if not self.config.correctionMode: if ctrlPressed: if shiftPressed and self.drawPolyClosed: if not self.annotation: return if self.mouseObj < 0: return obj = self.annotation.objects[self.mouseObj] intersection = self.drawPoly.intersected(self.getPolygon(obj)) if not intersection.isEmpty(): self.drawPoly = intersection (label, ok) = self.getLabelFromUser(obj.label) if ok and label: self.appendObject(label, intersection) self.clearPolygon() self.statusBar().showMessage(self.defaultStatusbar) self.deselectAllObjects() self.update() if altPressed and self.drawPolyClosed: if not self.annotation: return if self.mouseObj < 0: return obj = self.annotation.objects[self.mouseObj] union = self.drawPoly.united(self.getPolygon(obj)) if not union.isEmpty(): self.drawPoly = union (label, ok) = self.getLabelFromUser(obj.label) if ok and label: self.appendObject(label, union) self.clearPolygon() self.statusBar().showMessage(self.defaultStatusbar) self.deselectAllObjects() self.update() else: if self.mouseObj < 0: self.deselectObject() return if not self.mouseObj in self.selObjs: self.selObjs.append(self.mouseObj) else: self.deselectObject() self.initPolygonFromObject() if self.selObjs: for act in self.actSelObj + self.actPolyOrSelObj: act.setEnabled(True) for act in self.singleActSelObj: act.setEnabled(len(self.selObjs) == 1) self.infoOnSelectedObject() elif not self.drawPolyClosed: if self.ptClosesPoly(): self.drawPolyClosed = True for act in self.actClosedPoly: act.setEnabled(True) message = 'What should I do with the polygon? Press n to create a new object, press Ctrl + Left Click to intersect with another object' self.statusBar().showMessage(message) elif self.mousePosScaled is not None: if not self.drawPolyClosed and self.drawPoly.isEmpty(): self.mousePosOnZoom = self.mousePos self.drawPoly.append(self.mousePosScaled) for act in self.actPolyOrSelObj: act.setEnabled(True) elif self.drawPolyClosed: self.draggedPt = -1 elif self.in_progress_bbox is not None: if self.in_progress_bbox.width() > 20: description = QtGui.QInputDialog.getText(self, 'Error Description', 'Please describe the labeling error briefly.') if description[1] and description[0]: self.corrections.append(CorrectionBox(self.in_progress_bbox, annotation=description[0])) self.corrections[self.selected_correction].unselect() self.selected_correction = len(self.corrections) - 1 self.corrections[self.selected_correction].select() if not 'Added correction.': return self.changes.append('Added correction.') for act in self.actChanges: act.setEnabled(True) self.in_progress_annotation = None self.in_progress_bbox = None elif event.button() == QtCore.Qt.RightButton: self.config.zoom = not self.config.zoom if self.config.zoom: self.mousePosOnZoom = self.mousePos self.updateMousePos(event.posF()) else: self.updateMousePos(event.posF()) if not self.config.correctionMode and self.draggedPt >= 0: self.drawPoly.replace(self.draggedPt, self.mousePosScaled) self.update()
CBST
positive
def __iter__(self): for (train_index, test_index) in self.folds.split(self.files): <DeepExtract> for fileid in self.fileids(train_index): yield list(self.reader.docs(fileids=[fileid])) </DeepExtract> <DeepExtract> y_train = [self.reader.categories(fileids=[fileid])[0] for fileid in self.fileids(train_index)] </DeepExtract> <DeepExtract> for fileid in self.fileids(test_index): yield list(self.reader.docs(fileids=[fileid])) </DeepExtract> <DeepExtract> y_test = [self.reader.categories(fileids=[fileid])[0] for fileid in self.fileids(test_index)] </DeepExtract> yield (X_train, X_test, y_train, y_test)
def __iter__(self): for (train_index, test_index) in self.folds.split(self.files): for fileid in self.fileids(train_index): yield list(self.reader.docs(fileids=[fileid])) y_train = [self.reader.categories(fileids=[fileid])[0] for fileid in self.fileids(train_index)] for fileid in self.fileids(test_index): yield list(self.reader.docs(fileids=[fileid])) y_test = [self.reader.categories(fileids=[fileid])[0] for fileid in self.fileids(test_index)] yield (X_train, X_test, y_train, y_test)
atap
positive
def _eager_eval(input_val: tf.Tensor, label_val: tf.Tensor): layers = model.layers[1:] param_grads = {} tapes = {} regs = {} our_loss = -1 for op in scheduled_result.schedule: if isinstance(op, AllocateRegister): pass elif isinstance(op, DeallocateRegister): if op.register_id in regs: del regs[op.register_id] if op.register_id in tapes: del tapes[op.register_id] elif isinstance(op, OperatorEvaluation) and g.is_forward_node(op.id): idx = op.id layer = layers[idx] with tf.GradientTape(persistent=True, watch_accessed_variables=False) as tape: <DeepExtract> output_nodes = [] if not is_backward else [i for i in op.arg_regs.keys() if i not in g.args[idx]] input_nodes = [i for i in op.arg_regs.keys() if i not in output_nodes] layers_to_dep_position = {layer_id: position for (position, layer_id) in enumerate(g.args[idx])} input_layers = list(sorted(input_nodes, key=layers_to_dep_position.get)) + output_nodes </DeepExtract> inputs = [regs[op.arg_regs[arg_layer_id]] for arg_layer_id in input_layers] inputs = inputs if len(inputs) > 0 else [input_val] logger.debug(f'reg[{op.out_register}] ⟵ {layer.name}({[op.arg_regs[x] for x in input_layers]})') for var in itertools.chain(inputs, layer.variables): tape.watch(var) if len(inputs) > 1: regs[op.out_register] = tf.stop_gradient(layer(inputs)) else: regs[op.out_register] = tf.stop_gradient(layer(*inputs)) tapes[op.out_register] = tape elif isinstance(op, OperatorEvaluation) and g.is_loss_node(op.id): with tf.GradientTape(persistent=True, watch_accessed_variables=False) as tape: inputs = [regs[op.arg_regs[arg_idx]] for arg_idx in sorted(op.arg_regs.keys())] inputs += [label_val] for x in inputs: tape.watch(x) regs[op.out_register] = loss(*inputs) tapes[op.out_register] = tape our_loss = regs[op.out_register] logger.debug(f'reg[{op.out_register}] ⟵ loss_fn ({our_loss})') out_grads = None return (our_loss, out_grads)
def _eager_eval(input_val: tf.Tensor, label_val: tf.Tensor): layers = model.layers[1:] param_grads = {} tapes = {} regs = {} our_loss = -1 for op in scheduled_result.schedule: if isinstance(op, AllocateRegister): pass elif isinstance(op, DeallocateRegister): if op.register_id in regs: del regs[op.register_id] if op.register_id in tapes: del tapes[op.register_id] elif isinstance(op, OperatorEvaluation) and g.is_forward_node(op.id): idx = op.id layer = layers[idx] with tf.GradientTape(persistent=True, watch_accessed_variables=False) as tape: output_nodes = [] if not is_backward else [i for i in op.arg_regs.keys() if i not in g.args[idx]] input_nodes = [i for i in op.arg_regs.keys() if i not in output_nodes] layers_to_dep_position = {layer_id: position for (position, layer_id) in enumerate(g.args[idx])} input_layers = list(sorted(input_nodes, key=layers_to_dep_position.get)) + output_nodes inputs = [regs[op.arg_regs[arg_layer_id]] for arg_layer_id in input_layers] inputs = inputs if len(inputs) > 0 else [input_val] logger.debug(f'reg[{op.out_register}] ⟵ {layer.name}({[op.arg_regs[x] for x in input_layers]})') for var in itertools.chain(inputs, layer.variables): tape.watch(var) if len(inputs) > 1: regs[op.out_register] = tf.stop_gradient(layer(inputs)) else: regs[op.out_register] = tf.stop_gradient(layer(*inputs)) tapes[op.out_register] = tape elif isinstance(op, OperatorEvaluation) and g.is_loss_node(op.id): with tf.GradientTape(persistent=True, watch_accessed_variables=False) as tape: inputs = [regs[op.arg_regs[arg_idx]] for arg_idx in sorted(op.arg_regs.keys())] inputs += [label_val] for x in inputs: tape.watch(x) regs[op.out_register] = loss(*inputs) tapes[op.out_register] = tape our_loss = regs[op.out_register] logger.debug(f'reg[{op.out_register}] ⟵ loss_fn ({our_loss})') out_grads = None return (our_loss, out_grads)
dtr-prototype
positive
def _describe_autoscaling(autoscaling_config, name): targets = [] policies = [] for item in autoscaling_config: dimension = item['dimension'] resource_name = item['resource_name'] <DeepExtract> resource_id = 'table/{0}'.format(name) if 'table' in dimension else 'table/{0}/index/{1}'.format(name, resource_name) resource_id = resource_id </DeepExtract> sc_targets = self.app_autoscaling_conn.describe_scalable_targets(service_namespace='dynamodb', resources_ids=[resource_id], scalable_dimension=dimension) targets.extend(sc_targets) autoscaling_policy = item.get('config') if autoscaling_policy: policy_name = autoscaling_policy['policy_name'] sc_policies = self.app_autoscaling_conn.describe_scaling_policies(service_namespace='dynamodb', policy_names=[policy_name], resource_id=resource_id, scalable_dimension=dimension) policies.extend(sc_policies) return {'targets': targets, 'policies': policies}
def _describe_autoscaling(autoscaling_config, name): targets = [] policies = [] for item in autoscaling_config: dimension = item['dimension'] resource_name = item['resource_name'] resource_id = 'table/{0}'.format(name) if 'table' in dimension else 'table/{0}/index/{1}'.format(name, resource_name) resource_id = resource_id sc_targets = self.app_autoscaling_conn.describe_scalable_targets(service_namespace='dynamodb', resources_ids=[resource_id], scalable_dimension=dimension) targets.extend(sc_targets) autoscaling_policy = item.get('config') if autoscaling_policy: policy_name = autoscaling_policy['policy_name'] sc_policies = self.app_autoscaling_conn.describe_scaling_policies(service_namespace='dynamodb', policy_names=[policy_name], resource_id=resource_id, scalable_dimension=dimension) policies.extend(sc_policies) return {'targets': targets, 'policies': policies}
aws-syndicate
positive
def __init__(self): <DeepExtract> self.images = {} self.scatters = {} self.histograms = {} self.heatmaps = {} </DeepExtract> self.output_dirs = exp.OUT_DIRS self.prefix = exp._file_string('') self.image_scale = (-1, 1)
def __init__(self): self.images = {} self.scatters = {} self.histograms = {} self.heatmaps = {} self.output_dirs = exp.OUT_DIRS self.prefix = exp._file_string('') self.image_scale = (-1, 1)
cortex
positive
def perform(self): connection = http.client.HTTPConnection('timezoneapi.io') try: response = connection.request('GET', 'api/ip').getresponse() <DeepExtract> while True: if self.find('offset_hours"', response): while self.read_byte(response) != ord('"'): pass buf = bytearray(8) i = 0 b = self.read_byte(response) while b != ord('"'): buf[i] = b b = self.read_byte(response) i += 1 s = ''.join(map(chr, buf[:i])) self.offset_hours = int(s) </DeepExtract> logging.info('Timezoned: offset_hours: {}', self.offset_hours) return True finally: connection.close()
def perform(self): connection = http.client.HTTPConnection('timezoneapi.io') try: response = connection.request('GET', 'api/ip').getresponse() while True: if self.find('offset_hours"', response): while self.read_byte(response) != ord('"'): pass buf = bytearray(8) i = 0 b = self.read_byte(response) while b != ord('"'): buf[i] = b b = self.read_byte(response) i += 1 s = ''.join(map(chr, buf[:i])) self.offset_hours = int(s) logging.info('Timezoned: offset_hours: {}', self.offset_hours) return True finally: connection.close()
esp8266
positive
def main(self): try: if self.module.params['state'] == 'present': <DeepExtract> log('ModuleExecutor.update_or_create()') if not self.csvserver_exists(): self.module_result['changed'] = True self.prepared_list.append('Create cs vserver') if not self.module.check_mode: log('Csvserver group does not exist. Will create.') self.create_csvserver() elif not self.csvserver_identical(): log('Existing csvserver does not have identical values to configured. Will update.') self.module_result['changed'] = True if not self.module.check_mode: self.update_csvserver() else: log('Existing csvserver has identical values to configured.') self.sync_bindings() </DeepExtract> <DeepExtract> log('ModuleExecutor.do_state_change()') if self.module.check_mode: return post_data = {'csvserver': {'name': self.configured_csvserver['name']}} disabled = self.module.params['disabled'] if disabled: action = 'disable' else: action = 'enable' log('disable/enable post data %s' % post_data) result = self.fetcher.post(post_data=post_data, resource='csvserver', action=action) log('result of post %s' % result) if result['http_response_data']['status'] != 200: msg = 'Disable/Enable operation failed' self.module.fail_json(msg=msg, **self.module_result) </DeepExtract> elif self.module.params['state'] == 'absent': <DeepExtract> log('ModuleExecutor.delete()') if self.csvserver_exists(): self.module_result['changed'] = True self.prepared_list.append('Delete cs vserver') if not self.module.check_mode: self.delete_csvserver() </DeepExtract> if self.module._diff: self.module_result['diff'] = {'prepared': '\n'.join(self.prepared_list)} self.module.exit_json(**self.module_result) except NitroException as e: msg = 'nitro exception errorcode=%s, message=%s, severity=%s' % (str(e.errorcode), e.message, e.severity) self.module.fail_json(msg=msg, **self.module_result) except Exception as e: msg = 'Exception %s: %s' % (type(e), str(e)) self.module.fail_json(msg=msg, **self.module_result)
def main(self): try: if self.module.params['state'] == 'present': log('ModuleExecutor.update_or_create()') if not self.csvserver_exists(): self.module_result['changed'] = True self.prepared_list.append('Create cs vserver') if not self.module.check_mode: log('Csvserver group does not exist. Will create.') self.create_csvserver() elif not self.csvserver_identical(): log('Existing csvserver does not have identical values to configured. Will update.') self.module_result['changed'] = True if not self.module.check_mode: self.update_csvserver() else: log('Existing csvserver has identical values to configured.') self.sync_bindings() log('ModuleExecutor.do_state_change()') if self.module.check_mode: return post_data = {'csvserver': {'name': self.configured_csvserver['name']}} disabled = self.module.params['disabled'] if disabled: action = 'disable' else: action = 'enable' log('disable/enable post data %s' % post_data) result = self.fetcher.post(post_data=post_data, resource='csvserver', action=action) log('result of post %s' % result) if result['http_response_data']['status'] != 200: msg = 'Disable/Enable operation failed' self.module.fail_json(msg=msg, **self.module_result) elif self.module.params['state'] == 'absent': log('ModuleExecutor.delete()') if self.csvserver_exists(): self.module_result['changed'] = True self.prepared_list.append('Delete cs vserver') if not self.module.check_mode: self.delete_csvserver() if self.module._diff: self.module_result['diff'] = {'prepared': '\n'.join(self.prepared_list)} self.module.exit_json(**self.module_result) except NitroException as e: msg = 'nitro exception errorcode=%s, message=%s, severity=%s' % (str(e.errorcode), e.message, e.severity) self.module.fail_json(msg=msg, **self.module_result) except Exception as e: msg = 'Exception %s: %s' % (type(e), str(e)) self.module.fail_json(msg=msg, **self.module_result)
citrix-adc-ansible-modules
positive
def test_packages_with_protocol_exact(client): """Start with a blank database.""" <DeepExtract> license = License.query.filter_by(name='MIT').first() author = User.query.first() mod = Package() mod.state = PackageState.APPROVED mod.name = 'Bob'.lower() mod.title = 'Bob' mod.license = license mod.media_license = license mod.type = PackageType.MOD mod.author = author mod.short_desc = 'The content library should not be used yet as it is still in alpha' mod.desc = 'This is the long desc' db.session.add(mod) rels = [] for (minv, maxv) in [('5.0', '5.0')]: rel = PackageRelease() rel.package = mod rel.title = 'test' rel.url = 'https://github.com/ezhh/handholds/archive/master.zip' if minv: rel.min_rel = MinetestRelease.query.filter_by(name=minv).first() assert rel.min_rel if maxv: rel.max_rel = MinetestRelease.query.filter_by(name=maxv).first() assert rel.max_rel rel.approved = True db.session.add(rel) rels.append(rel) db.session.flush() return [rel.id for rel in rels] </DeepExtract> db.session.commit() packages = parse_json(client.get('/api/packages/?protocol_version=20').data) assert len(packages) == 0 packages = parse_json(client.get('/api/packages/?protocol_version=32').data) assert len(packages) == 0 packages = parse_json(client.get('/api/packages/?protocol_version=37').data) assert len(packages) == 1 assert packages[0]['name'] == 'bob' packages = parse_json(client.get('/api/packages/?protocol_version=38').data) assert len(packages) == 0 packages = parse_json(client.get('/api/packages/?protocol_version=40').data) assert len(packages) == 0 validate_package_list(packages, True)
def test_packages_with_protocol_exact(client): """Start with a blank database.""" license = License.query.filter_by(name='MIT').first() author = User.query.first() mod = Package() mod.state = PackageState.APPROVED mod.name = 'Bob'.lower() mod.title = 'Bob' mod.license = license mod.media_license = license mod.type = PackageType.MOD mod.author = author mod.short_desc = 'The content library should not be used yet as it is still in alpha' mod.desc = 'This is the long desc' db.session.add(mod) rels = [] for (minv, maxv) in [('5.0', '5.0')]: rel = PackageRelease() rel.package = mod rel.title = 'test' rel.url = 'https://github.com/ezhh/handholds/archive/master.zip' if minv: rel.min_rel = MinetestRelease.query.filter_by(name=minv).first() assert rel.min_rel if maxv: rel.max_rel = MinetestRelease.query.filter_by(name=maxv).first() assert rel.max_rel rel.approved = True db.session.add(rel) rels.append(rel) db.session.flush() return [rel.id for rel in rels] db.session.commit() packages = parse_json(client.get('/api/packages/?protocol_version=20').data) assert len(packages) == 0 packages = parse_json(client.get('/api/packages/?protocol_version=32').data) assert len(packages) == 0 packages = parse_json(client.get('/api/packages/?protocol_version=37').data) assert len(packages) == 1 assert packages[0]['name'] == 'bob' packages = parse_json(client.get('/api/packages/?protocol_version=38').data) assert len(packages) == 0 packages = parse_json(client.get('/api/packages/?protocol_version=40').data) assert len(packages) == 0 validate_package_list(packages, True)
contentdb
positive
def mapper(self, _, line): line = line.strip() match = self.logpat.search(line) date_time = None requester = None user_agent = None operation = None try: for n in range(self.NUM_ENTRIES_PER_LINE): group = match.group(1 + n) if n == self.S3_LOG_DATE_TIME: <DeepExtract> date_time = None time_zone_parsed = None date_parsed = group[:group.find(':')] time_parsed = group[group.find(':') + 1:group.find('+') - 1] time_zone_parsed = group[group.find('+'):] try: date_struct = time.strptime(date_parsed, '%d/%b/%Y') converted_date = time.strftime('%Y-%m-%d', date_struct) date_time = converted_date + ' ' + time_parsed except ValueError as error: raise ValueError(error) else: (date, date_time, time_zone_parsed) = (converted_date, date_time, time_zone_parsed) </DeepExtract> date_time = date + ' 00:00:00' elif n == self.S3_LOG_REQUESTER_ID: requester = group elif n == self.S3_LOG_USER_AGENT: user_agent = group elif n == self.S3_LOG_OPERATION: operation = group else: pass except Exception: yield (('Error while parsing line: %s', line), 1) else: yield ((date_time, requester, user_agent, operation), 1)
def mapper(self, _, line): line = line.strip() match = self.logpat.search(line) date_time = None requester = None user_agent = None operation = None try: for n in range(self.NUM_ENTRIES_PER_LINE): group = match.group(1 + n) if n == self.S3_LOG_DATE_TIME: date_time = None time_zone_parsed = None date_parsed = group[:group.find(':')] time_parsed = group[group.find(':') + 1:group.find('+') - 1] time_zone_parsed = group[group.find('+'):] try: date_struct = time.strptime(date_parsed, '%d/%b/%Y') converted_date = time.strftime('%Y-%m-%d', date_struct) date_time = converted_date + ' ' + time_parsed except ValueError as error: raise ValueError(error) else: (date, date_time, time_zone_parsed) = (converted_date, date_time, time_zone_parsed) date_time = date + ' 00:00:00' elif n == self.S3_LOG_REQUESTER_ID: requester = group elif n == self.S3_LOG_USER_AGENT: user_agent = group elif n == self.S3_LOG_OPERATION: operation = group else: pass except Exception: yield (('Error while parsing line: %s', line), 1) else: yield ((date_time, requester, user_agent, operation), 1)
data-science-ipython-notebooks
positive
def _iocp_send(self, buf, *args): """Internal use only; use 'send' with 'yield' instead. """ def _send(err, n): if self._timeout and self._notifier: self._notifier._del_timeout(self) if err or n == 0: self._write_overlap.object = self._write_result = None if not err: err = winerror.ERROR_CONNECTION_INVALID if err == winerror.ERROR_CONNECTION_INVALID or err == winerror.ERROR_OPERATION_ABORTED: self._write_coro._proceed_(0) else: self._write_coro.throw(socket.error(err)) else: self._write_overlap.object = None self._write_coro._proceed_(n) if not self._asyncoro: self._asyncoro = AsynCoro.scheduler() self._notifier = self._asyncoro._notifier <DeepExtract> pass </DeepExtract> if self._timeout: self._notifier._add_timeout(self) self._write_overlap.object = _send self._write_coro = AsynCoro.cur_coro(self._asyncoro) self._write_coro._await_() (err, n) = win32file.WSASend(self._fileno, buf, self._write_overlap, 0) if err != winerror.ERROR_IO_PENDING and err: self._write_overlap.object(err, n)
def _iocp_send(self, buf, *args): """Internal use only; use 'send' with 'yield' instead. """ def _send(err, n): if self._timeout and self._notifier: self._notifier._del_timeout(self) if err or n == 0: self._write_overlap.object = self._write_result = None if not err: err = winerror.ERROR_CONNECTION_INVALID if err == winerror.ERROR_CONNECTION_INVALID or err == winerror.ERROR_OPERATION_ABORTED: self._write_coro._proceed_(0) else: self._write_coro.throw(socket.error(err)) else: self._write_overlap.object = None self._write_coro._proceed_(n) if not self._asyncoro: self._asyncoro = AsynCoro.scheduler() self._notifier = self._asyncoro._notifier pass if self._timeout: self._notifier._add_timeout(self) self._write_overlap.object = _send self._write_coro = AsynCoro.cur_coro(self._asyncoro) self._write_coro._await_() (err, n) = win32file.WSASend(self._fileno, buf, self._write_overlap, 0) if err != winerror.ERROR_IO_PENDING and err: self._write_overlap.object(err, n)
asyncoro
positive
def with_config_overrides(config_overrides, emitted_fork=None, emit=True): """ WARNING: the spec_test decorator must wrap this, to ensure the decorated test actually runs. This decorator forces the test to yield, and pytest doesn't run generator tests, and instead silently passes it. Use 'spec_configured_state_test' instead of 'spec_state_test' if you are unsure. This is a decorator that applies a dict of config value overrides to the spec during execution. """ def decorator(fn): def wrapper(*args, spec: Spec, **kw): <DeepExtract> config = _get_copy_of_spec(spec).config._asdict() config.update(((k, config_overrides[k]) for k in config.keys() & config_overrides.keys())) config_types = _get_copy_of_spec(spec).Configuration.__annotations__ modified_config = {k: config_types[k](v) for (k, v) in config.items()} _get_copy_of_spec(spec).config = _get_copy_of_spec(spec).Configuration(**modified_config) output_config = _get_basic_dict(modified_config) (_get_copy_of_spec(spec), output_config) = (_get_copy_of_spec(spec), output_config) </DeepExtract> if 'phases' in kw: phases = {} for fork in kw['phases']: <DeepExtract> config = _get_copy_of_spec(kw['phases'][fork]).config._asdict() config.update(((k, config_overrides[k]) for k in config.keys() & config_overrides.keys())) config_types = _get_copy_of_spec(kw['phases'][fork]).Configuration.__annotations__ modified_config = {k: config_types[k](v) for (k, v) in config.items()} _get_copy_of_spec(kw['phases'][fork]).config = _get_copy_of_spec(kw['phases'][fork]).Configuration(**modified_config) output_config = _get_basic_dict(modified_config) (phases[fork], output) = (_get_copy_of_spec(kw['phases'][fork]), output_config) </DeepExtract> if emitted_fork == fork: output_config = output kw['phases'] = phases if emit: yield ('config', 'cfg', output_config) out = fn(*args, spec=spec, **kw) if out is not None: yield from out return wrapper return decorator
def with_config_overrides(config_overrides, emitted_fork=None, emit=True): """ WARNING: the spec_test decorator must wrap this, to ensure the decorated test actually runs. This decorator forces the test to yield, and pytest doesn't run generator tests, and instead silently passes it. Use 'spec_configured_state_test' instead of 'spec_state_test' if you are unsure. This is a decorator that applies a dict of config value overrides to the spec during execution. """ def decorator(fn): def wrapper(*args, spec: Spec, **kw): config = _get_copy_of_spec(spec).config._asdict() config.update(((k, config_overrides[k]) for k in config.keys() & config_overrides.keys())) config_types = _get_copy_of_spec(spec).Configuration.__annotations__ modified_config = {k: config_types[k](v) for (k, v) in config.items()} _get_copy_of_spec(spec).config = _get_copy_of_spec(spec).Configuration(**modified_config) output_config = _get_basic_dict(modified_config) (_get_copy_of_spec(spec), output_config) = (_get_copy_of_spec(spec), output_config) if 'phases' in kw: phases = {} for fork in kw['phases']: config = _get_copy_of_spec(kw['phases'][fork]).config._asdict() config.update(((k, config_overrides[k]) for k in config.keys() & config_overrides.keys())) config_types = _get_copy_of_spec(kw['phases'][fork]).Configuration.__annotations__ modified_config = {k: config_types[k](v) for (k, v) in config.items()} _get_copy_of_spec(kw['phases'][fork]).config = _get_copy_of_spec(kw['phases'][fork]).Configuration(**modified_config) output_config = _get_basic_dict(modified_config) (phases[fork], output) = (_get_copy_of_spec(kw['phases'][fork]), output_config) if emitted_fork == fork: output_config = output kw['phases'] = phases if emit: yield ('config', 'cfg', output_config) out = fn(*args, spec=spec, **kw) if out is not None: yield from out return wrapper return decorator
eth2.0-specs
positive
def riou_cc(rbboxes, qrbboxes, standup_thresh=0.0): <DeepExtract> corners = corners_nd(rbboxes[:, 2:4], origin=origin) if rbboxes[:, 4] is not None: corners = rotation_2d(corners, rbboxes[:, 4]) corners += rbboxes[:, :2].reshape([-1, 1, 2]) boxes_corners = corners </DeepExtract> <DeepExtract> assert len(boxes_corners.shape) == 3 standup_boxes = [] standup_boxes.append(np.min(boxes_corners, axis=1)) standup_boxes.append(np.max(boxes_corners, axis=1)) boxes_standup = np.concatenate(standup_boxes, -1) </DeepExtract> <DeepExtract> corners = corners_nd(qrbboxes[:, 2:4], origin=origin) if qrbboxes[:, 4] is not None: corners = rotation_2d(corners, qrbboxes[:, 4]) corners += qrbboxes[:, :2].reshape([-1, 1, 2]) qboxes_corners = corners </DeepExtract> <DeepExtract> assert len(qboxes_corners.shape) == 3 standup_boxes = [] standup_boxes.append(np.min(qboxes_corners, axis=1)) standup_boxes.append(np.max(qboxes_corners, axis=1)) qboxes_standup = np.concatenate(standup_boxes, -1) </DeepExtract> <DeepExtract> N = boxes_standup.shape[0] K = qboxes_standup.shape[0] overlaps = np.zeros((N, K), dtype=boxes_standup.dtype) for k in range(K): box_area = (qboxes_standup[k, 2] - qboxes_standup[k, 0] + 0.0) * (qboxes_standup[k, 3] - qboxes_standup[k, 1] + 0.0) for n in range(N): iw = min(boxes_standup[n, 2], qboxes_standup[k, 2]) - max(boxes_standup[n, 0], qboxes_standup[k, 0]) + 0.0 if iw > 0: ih = min(boxes_standup[n, 3], qboxes_standup[k, 3]) - max(boxes_standup[n, 1], qboxes_standup[k, 1]) + 0.0 if ih > 0: ua = (boxes_standup[n, 2] - boxes_standup[n, 0] + 0.0) * (boxes_standup[n, 3] - boxes_standup[n, 1] + 0.0) + box_area - iw * ih overlaps[n, k] = iw * ih / ua standup_iou = overlaps </DeepExtract> return rbbox_iou(boxes_corners, qboxes_corners, standup_iou, standup_thresh)
def riou_cc(rbboxes, qrbboxes, standup_thresh=0.0): corners = corners_nd(rbboxes[:, 2:4], origin=origin) if rbboxes[:, 4] is not None: corners = rotation_2d(corners, rbboxes[:, 4]) corners += rbboxes[:, :2].reshape([-1, 1, 2]) boxes_corners = corners assert len(boxes_corners.shape) == 3 standup_boxes = [] standup_boxes.append(np.min(boxes_corners, axis=1)) standup_boxes.append(np.max(boxes_corners, axis=1)) boxes_standup = np.concatenate(standup_boxes, -1) corners = corners_nd(qrbboxes[:, 2:4], origin=origin) if qrbboxes[:, 4] is not None: corners = rotation_2d(corners, qrbboxes[:, 4]) corners += qrbboxes[:, :2].reshape([-1, 1, 2]) qboxes_corners = corners assert len(qboxes_corners.shape) == 3 standup_boxes = [] standup_boxes.append(np.min(qboxes_corners, axis=1)) standup_boxes.append(np.max(qboxes_corners, axis=1)) qboxes_standup = np.concatenate(standup_boxes, -1) N = boxes_standup.shape[0] K = qboxes_standup.shape[0] overlaps = np.zeros((N, K), dtype=boxes_standup.dtype) for k in range(K): box_area = (qboxes_standup[k, 2] - qboxes_standup[k, 0] + 0.0) * (qboxes_standup[k, 3] - qboxes_standup[k, 1] + 0.0) for n in range(N): iw = min(boxes_standup[n, 2], qboxes_standup[k, 2]) - max(boxes_standup[n, 0], qboxes_standup[k, 0]) + 0.0 if iw > 0: ih = min(boxes_standup[n, 3], qboxes_standup[k, 3]) - max(boxes_standup[n, 1], qboxes_standup[k, 1]) + 0.0 if ih > 0: ua = (boxes_standup[n, 2] - boxes_standup[n, 0] + 0.0) * (boxes_standup[n, 3] - boxes_standup[n, 1] + 0.0) + box_area - iw * ih overlaps[n, k] = iw * ih / ua standup_iou = overlaps return rbbox_iou(boxes_corners, qboxes_corners, standup_iou, standup_thresh)
CenterPoint
positive
def set_yaw(self): try: yaw = self.config.animation_yaw except (TypeError, KeyError) as e: <DeepExtract> self.state = "Can't set yaw from config 'ANIMATION' section. {}".format(e) if True: logger.error("Can't set yaw from config 'ANIMATION' section. {}".format(e)) </DeepExtract> return for frame in self.original_frames: try: frame.set_yaw(yaw) except ValueError as e: <DeepExtract> self.state = "Can't set yaw from config 'ANIMATION' section. {}".format(e) if True: logger.error("Can't set yaw from config 'ANIMATION' section. {}".format(e)) </DeepExtract> return
def set_yaw(self): try: yaw = self.config.animation_yaw except (TypeError, KeyError) as e: self.state = "Can't set yaw from config 'ANIMATION' section. {}".format(e) if True: logger.error("Can't set yaw from config 'ANIMATION' section. {}".format(e)) return for frame in self.original_frames: try: frame.set_yaw(yaw) except ValueError as e: self.state = "Can't set yaw from config 'ANIMATION' section. {}".format(e) if True: logger.error("Can't set yaw from config 'ANIMATION' section. {}".format(e)) return
clever-show
positive
def get_subcommand_query(query_str: str) -> Optional[SubcommandQuery]: """ Determine whether current query is of a subcommand. If so first returned the corresponding SubcommandQeury object. """ if not query_str: return None query_parts = query_str.strip().split(None, maxsplit=1) if len(query_parts) < 2: query_str = '' else: query_str = query_parts[1] <DeepExtract> matching = [s for s in subcommands if s.name.lower() == query_parts[0].lower()] if matching: subcommand = matching[0] </DeepExtract> if subcommand: return SubcommandQuery(subcommand=subcommand, query=query_str)
def get_subcommand_query(query_str: str) -> Optional[SubcommandQuery]: """ Determine whether current query is of a subcommand. If so first returned the corresponding SubcommandQeury object. """ if not query_str: return None query_parts = query_str.strip().split(None, maxsplit=1) if len(query_parts) < 2: query_str = '' else: query_str = query_parts[1] matching = [s for s in subcommands if s.name.lower() == query_parts[0].lower()] if matching: subcommand = matching[0] if subcommand: return SubcommandQuery(subcommand=subcommand, query=query_str)
awesome-albert-plugins
positive
def __init__(self, info_params: Union[ParamsDict, ExpandedParamsDict], allow_renames=True, ignore_unused_sampled=False): self.set_logger() self.allow_renames = allow_renames self._infos = {} self._input: ParamValuesDict = {} self._input_funcs = {} self._input_args = {} self._input_dependencies: Dict[str, Set[str]] = {} self._dropped: Set[str] = set() self._output: ParamValuesDict = {} self._constant: ParamValuesDict = {} self._sampled: ParamValuesDict = {} self._sampled_renames: Dict[str, List[str]] = {} self._derived: ParamValuesDict = {} self._derived_inputs = [] self._derived_funcs = {} self._derived_args = {} self._derived_dependencies: Dict[str, Set[str]] = {} for (p, info) in info_params.items(): if isinstance(info, Mapping) and (not set(info).issubset(partags)): raise LoggedError(self.log, "Parameter '%s' has unknown options %s", p, set(info).difference(partags)) <DeepExtract> info = deepcopy_where_possible(info) if not isinstance(info, Mapping): if info is None: info = {} elif isinstance(info, Sequence) and (not isinstance(info, str)): values = list(info) allowed_lengths = [2, 4, 5] if len(values) not in allowed_lengths: raise LoggedError(__name__, 'Parameter info length not valid: %d. The allowed lengths are %r. See documentation.', len(values), allowed_lengths) info = {'prior': [values[0], values[1]]} if len(values) >= 4: info['ref'] = [values[2], values[3]] if len(values) == 5: info['proposal'] = values[4] else: info = {'value': info} if all((f not in info for f in ['prior', 'value', 'derived'])): info['derived'] = default_derived value = info.get('value') if isinstance(value, str) or callable(value): info['derived'] = info.get('derived', True) info = info </DeepExtract> self._infos[p] = info if is_fixed_or_function_param(info): if isinstance(info['value'], Real): self._constant[p] = float(info['value']) self._input[p] = self._constant[p] if info.get('drop'): self._dropped.add(p) else: self._input[p] = np.nan self._input_funcs[p] = get_external_function(info['value']) self._input_args[p] = getfullargspec(self._input_funcs[p]).args if is_sampled_param(info): self._sampled[p] = np.nan self._input[p] = np.nan if info.get('drop'): self._dropped.add(p) self._sampled_renames[p] = str_to_list(info.get('renames') or []) if is_derived_param(info): self._derived[p] = np.nan if info['derived'] is True and is_fixed_or_function_param(info): self._derived_inputs.append(p) elif info['derived'] is True: self._output[p] = np.nan else: self._derived_funcs[p] = get_external_function(info['derived']) self._derived_args[p] = getfullargspec(self._derived_funcs[p]).args for p in chain(self._sampled, self._derived): if not is_valid_variable_name(p): is_in = p in self._sampled eg_in = " p_prime:\n prior: ...\n %s: 'lambda p_prime: p_prime'\n" % p eg_out = " p_prime: 'lambda %s: %s'\n" % (p, p) raise LoggedError(self.log, "Parameter name '%s' is not a valid Python variable name (it needs to start with a letter or '_').\nIf this is an %s parameter of a likelihood or theory, whose name you cannot change,%s define an associated %s one with a valid name 'p_prime' as: \n\n%s", p, 'input' if is_in else 'output', '' if is_in else ' remove it and', 'sampled' if is_in else 'derived', eg_in if is_in else eg_out) known_input = set(self._input) all_input_arguments = set(chain(*self._input_args.values())) bad_input_dependencies = all_input_arguments - known_input if bad_input_dependencies: raise LoggedError(self.log, 'Input parameters defined as functions can only depend on other input parameters. In particular, an input parameter cannot depend on %r. Use an explicit Theory calculator for more complex dependencies.\nIf you intended to define a derived output parameter use derived: instead of value:', list(bad_input_dependencies)) for arg in all_input_arguments.union(*self._derived_args.values()).difference(known_input).difference(self._derived): self._output[arg] = np.nan self._directly_output = [p for p in self._derived if p in self._output] <DeepExtract> wrapped_funcs: Tuple[Dict[str, _WrappedFunc], Dict[str, _WrappedFunc]] = ({}, {}) known = set(chain(self._constant, self._sampled)) for (derived, wrapped_func) in zip((False, True), wrapped_funcs): if derived: inputs = self._derived_funcs.copy() input_args = self._derived_args known.update(self._output) output = self._derived dependencies = self._derived_dependencies else: inputs = self._input_funcs.copy() input_args = self._input_args output = self._input dependencies = self._input_dependencies while inputs: for (p, func) in inputs.items(): args = input_args[p] if not known.issuperset(args): continue known.add(p) dependencies[p] = set(chain(args, *(dependencies.get(arg, []) for arg in args))) if set(args).issubset(self._constant): self._constant[p] = self._call_param_func(p, func, {arg: self._constant[arg] for arg in args}) output[p] = self._constant[p] else: wrapped_func[p] = (func, {arg: self._constant.get(arg) for arg in args}, [arg for arg in args if arg not in self._constant]) del inputs[p] break else: raise LoggedError(self.log, 'Could not resolve arguments for parameters %s. Maybe there is a circular dependency between derived parameters?', list(inputs)) (self._wrapped_input_funcs, self._wrapped_derived_funcs) = wrapped_funcs </DeepExtract> self._sampled_input_dependence = {s: [i for i in self._input if s in self._input_dependencies.get(i, {})] for s in self._sampled} if not ignore_unused_sampled: self._dropped_not_directly_used = self._dropped.intersection((p for (p, v) in self._sampled_input_dependence.items() if not v)) else: self._dropped_not_directly_used = set() labels_inv_repeated = invert_dict(self.labels()) labels_inv_repeated = {k: v for (k, v) in labels_inv_repeated.items() if len(v) > 1} if labels_inv_repeated: self.mpi_warning('There are repeated parameter labels: %r', labels_inv_repeated)
def __init__(self, info_params: Union[ParamsDict, ExpandedParamsDict], allow_renames=True, ignore_unused_sampled=False): self.set_logger() self.allow_renames = allow_renames self._infos = {} self._input: ParamValuesDict = {} self._input_funcs = {} self._input_args = {} self._input_dependencies: Dict[str, Set[str]] = {} self._dropped: Set[str] = set() self._output: ParamValuesDict = {} self._constant: ParamValuesDict = {} self._sampled: ParamValuesDict = {} self._sampled_renames: Dict[str, List[str]] = {} self._derived: ParamValuesDict = {} self._derived_inputs = [] self._derived_funcs = {} self._derived_args = {} self._derived_dependencies: Dict[str, Set[str]] = {} for (p, info) in info_params.items(): if isinstance(info, Mapping) and (not set(info).issubset(partags)): raise LoggedError(self.log, "Parameter '%s' has unknown options %s", p, set(info).difference(partags)) info = deepcopy_where_possible(info) if not isinstance(info, Mapping): if info is None: info = {} elif isinstance(info, Sequence) and (not isinstance(info, str)): values = list(info) allowed_lengths = [2, 4, 5] if len(values) not in allowed_lengths: raise LoggedError(__name__, 'Parameter info length not valid: %d. The allowed lengths are %r. See documentation.', len(values), allowed_lengths) info = {'prior': [values[0], values[1]]} if len(values) >= 4: info['ref'] = [values[2], values[3]] if len(values) == 5: info['proposal'] = values[4] else: info = {'value': info} if all((f not in info for f in ['prior', 'value', 'derived'])): info['derived'] = default_derived value = info.get('value') if isinstance(value, str) or callable(value): info['derived'] = info.get('derived', True) info = info self._infos[p] = info if is_fixed_or_function_param(info): if isinstance(info['value'], Real): self._constant[p] = float(info['value']) self._input[p] = self._constant[p] if info.get('drop'): self._dropped.add(p) else: self._input[p] = np.nan self._input_funcs[p] = get_external_function(info['value']) self._input_args[p] = getfullargspec(self._input_funcs[p]).args if is_sampled_param(info): self._sampled[p] = np.nan self._input[p] = np.nan if info.get('drop'): self._dropped.add(p) self._sampled_renames[p] = str_to_list(info.get('renames') or []) if is_derived_param(info): self._derived[p] = np.nan if info['derived'] is True and is_fixed_or_function_param(info): self._derived_inputs.append(p) elif info['derived'] is True: self._output[p] = np.nan else: self._derived_funcs[p] = get_external_function(info['derived']) self._derived_args[p] = getfullargspec(self._derived_funcs[p]).args for p in chain(self._sampled, self._derived): if not is_valid_variable_name(p): is_in = p in self._sampled eg_in = " p_prime:\n prior: ...\n %s: 'lambda p_prime: p_prime'\n" % p eg_out = " p_prime: 'lambda %s: %s'\n" % (p, p) raise LoggedError(self.log, "Parameter name '%s' is not a valid Python variable name (it needs to start with a letter or '_').\nIf this is an %s parameter of a likelihood or theory, whose name you cannot change,%s define an associated %s one with a valid name 'p_prime' as: \n\n%s", p, 'input' if is_in else 'output', '' if is_in else ' remove it and', 'sampled' if is_in else 'derived', eg_in if is_in else eg_out) known_input = set(self._input) all_input_arguments = set(chain(*self._input_args.values())) bad_input_dependencies = all_input_arguments - known_input if bad_input_dependencies: raise LoggedError(self.log, 'Input parameters defined as functions can only depend on other input parameters. In particular, an input parameter cannot depend on %r. Use an explicit Theory calculator for more complex dependencies.\nIf you intended to define a derived output parameter use derived: instead of value:', list(bad_input_dependencies)) for arg in all_input_arguments.union(*self._derived_args.values()).difference(known_input).difference(self._derived): self._output[arg] = np.nan self._directly_output = [p for p in self._derived if p in self._output] wrapped_funcs: Tuple[Dict[str, _WrappedFunc], Dict[str, _WrappedFunc]] = ({}, {}) known = set(chain(self._constant, self._sampled)) for (derived, wrapped_func) in zip((False, True), wrapped_funcs): if derived: inputs = self._derived_funcs.copy() input_args = self._derived_args known.update(self._output) output = self._derived dependencies = self._derived_dependencies else: inputs = self._input_funcs.copy() input_args = self._input_args output = self._input dependencies = self._input_dependencies while inputs: for (p, func) in inputs.items(): args = input_args[p] if not known.issuperset(args): continue known.add(p) dependencies[p] = set(chain(args, *(dependencies.get(arg, []) for arg in args))) if set(args).issubset(self._constant): self._constant[p] = self._call_param_func(p, func, {arg: self._constant[arg] for arg in args}) output[p] = self._constant[p] else: wrapped_func[p] = (func, {arg: self._constant.get(arg) for arg in args}, [arg for arg in args if arg not in self._constant]) del inputs[p] break else: raise LoggedError(self.log, 'Could not resolve arguments for parameters %s. Maybe there is a circular dependency between derived parameters?', list(inputs)) (self._wrapped_input_funcs, self._wrapped_derived_funcs) = wrapped_funcs self._sampled_input_dependence = {s: [i for i in self._input if s in self._input_dependencies.get(i, {})] for s in self._sampled} if not ignore_unused_sampled: self._dropped_not_directly_used = self._dropped.intersection((p for (p, v) in self._sampled_input_dependence.items() if not v)) else: self._dropped_not_directly_used = set() labels_inv_repeated = invert_dict(self.labels()) labels_inv_repeated = {k: v for (k, v) in labels_inv_repeated.items() if len(v) > 1} if labels_inv_repeated: self.mpi_warning('There are repeated parameter labels: %r', labels_inv_repeated)
cobaya
positive
def _GetAdditionalManifestFiles(self, config, gyp_to_build_path): """Gets additional manifest files that are added to the default one generated by the linker.""" <DeepExtract> files = self._GetAndMunge(self.msvs_settings[config], ('VCManifestTool', 'AdditionalManifestFiles'), [], prefix, append, map) </DeepExtract> if isinstance(files, str): files = files.split(';') return [os.path.normpath(gyp_to_build_path(self.ConvertVSMacros(f, config=config))) for f in files]
def _GetAdditionalManifestFiles(self, config, gyp_to_build_path): """Gets additional manifest files that are added to the default one generated by the linker.""" files = self._GetAndMunge(self.msvs_settings[config], ('VCManifestTool', 'AdditionalManifestFiles'), [], prefix, append, map) if isinstance(files, str): files = files.split(';') return [os.path.normpath(gyp_to_build_path(self.ConvertVSMacros(f, config=config))) for f in files]
archived-pangyp
positive
def save_preprocessors(data, task_ids, sample_ids, foldid): for (i, taskid) in enumerate(task_ids): <DeepExtract> if isinstance(self.outdir, str): dumpdir = self.outdir self.outdir = [_ for _ in self.outdir] args = {'target': taskid, 'trial': sample_ids[i], 'fold': foldid} for (i, item) in enumerate(self.outdir): if isinstance(item, tuple): (name, patt) = item if args[name] is None: self.outdir[i] = None else: self.outdir[i] = patt % args[name] instdir = '/'.join([part for part in self.outdir if part is not None]) dumpdir = instdir </DeepExtract> data.dump_preprocessors(dumpdir, slice(i, i + 1))
def save_preprocessors(data, task_ids, sample_ids, foldid): for (i, taskid) in enumerate(task_ids): if isinstance(self.outdir, str): dumpdir = self.outdir self.outdir = [_ for _ in self.outdir] args = {'target': taskid, 'trial': sample_ids[i], 'fold': foldid} for (i, item) in enumerate(self.outdir): if isinstance(item, tuple): (name, patt) = item if args[name] is None: self.outdir[i] = None else: self.outdir[i] = patt % args[name] instdir = '/'.join([part for part in self.outdir if part is not None]) dumpdir = instdir data.dump_preprocessors(dumpdir, slice(i, i + 1))
DeepBind
positive
def test_canonical_atom_featurizer(): test_featurizer = CanonicalAtomFeaturizer() assert test_featurizer.feat_size() == 74 assert test_featurizer.feat_size('h') == 74 <DeepExtract> mol = Chem.MolFromSmiles('CCO') </DeepExtract> feats = test_featurizer(mol) assert list(feats.keys()) == ['h'] assert torch.allclose(feats['h'], torch.tensor([[1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0], [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0]]))
def test_canonical_atom_featurizer(): test_featurizer = CanonicalAtomFeaturizer() assert test_featurizer.feat_size() == 74 assert test_featurizer.feat_size('h') == 74 mol = Chem.MolFromSmiles('CCO') feats = test_featurizer(mol) assert list(feats.keys()) == ['h'] assert torch.allclose(feats['h'], torch.tensor([[1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0], [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0]]))
dgl-lifesci
positive
def purge(self): target_config = self.config.config['targets'][self.target_iqn] groups = target_config['groups'] if self.group_name in groups: for mbr in groups[self.group_name]['members']: <DeepExtract> target_config = self.config.config['targets'][self.target_iqn] client_md = target_config['clients'][mbr] client_md['group_name'] = '' self.config.update_item('targets', self.target_iqn, target_config) self.logger.info('Removed {} from group {}'.format(mbr, self.group_name)) </DeepExtract> groups.pop(self.group_name) self.config.update_item('targets', self.target_iqn, target_config) self.config.commit() self.logger.info('Group {} removed'.format(self.group_name)) else: <DeepExtract> self.error = True self.error_msg = 'Group name requested does not exist' self.logger.debug('Error: {}'.format(self.error_msg)) </DeepExtract> return
def purge(self): target_config = self.config.config['targets'][self.target_iqn] groups = target_config['groups'] if self.group_name in groups: for mbr in groups[self.group_name]['members']: target_config = self.config.config['targets'][self.target_iqn] client_md = target_config['clients'][mbr] client_md['group_name'] = '' self.config.update_item('targets', self.target_iqn, target_config) self.logger.info('Removed {} from group {}'.format(mbr, self.group_name)) groups.pop(self.group_name) self.config.update_item('targets', self.target_iqn, target_config) self.config.commit() self.logger.info('Group {} removed'.format(self.group_name)) else: self.error = True self.error_msg = 'Group name requested does not exist' self.logger.debug('Error: {}'.format(self.error_msg)) return
ceph-iscsi
positive
def oil_paint_filter(image: Union[list, np.ndarray], filter_size: int=5, edges_only: bool=True, rgb: bool=False) -> Union[list, np.ndarray]: """ Applies the oil paint filter on a single channel image (or more than one channel, where each channel is a replica of the other). This could be desired for corrupting rendered depth maps to appear more realistic. Also trims the redundant channels if they exist. :param image: Input image or list of images :param filter_size: Filter size, should be an odd number. :param edges_only: If true, applies the filter on the edges only. :param rgb: Apply the filter on an RGB image (if the image has 3 channels, they're assumed to not be replicated). :return: filtered image """ if rgb: if isinstance(image, list) or (hasattr(image, 'shape') and len(image.shape) > 3): return [oil_paint_filter(img, filter_size, edges_only, rgb) for img in image] intensity_img = np.sum(image, axis=2) / 3.0 neighbors = np.array(_PostProcessingUtility.get_pixel_neighbors_stacked(image, filter_size, return_list=True)) neighbors_intensity = _PostProcessingUtility.get_pixel_neighbors_stacked(intensity_img, filter_size) mode_intensity = stats.mode(neighbors_intensity, axis=2)[0].reshape(image.shape[0], image.shape[1]) mode_keys = np.argwhere(neighbors_intensity == np.expand_dims(mode_intensity, axis=3)) (_, unique_indices) = np.unique(mode_keys[:, 0:2], axis=0, return_index=True) unique_keys = mode_keys[unique_indices] filtered_img = neighbors[unique_keys[:, 2], unique_keys[:, 0], unique_keys[:, 1], :].reshape(image.shape[0], image.shape[1], image.shape[2]) if edges_only: edges = cv2.Canny(image, 0, np.max(image)) image[edges > 0] = filtered_img[edges > 0] filtered_img = image else: <DeepExtract> if isinstance(image, list): image = [trim_redundant_channels(ele) for ele in image] if hasattr(image, 'shape') and len(image.shape) > 3: image = np.array([trim_redundant_channels(ele) for ele in image]) if hasattr(image, 'shape') and len(image.shape) == 3 and (image.shape[2] == 3): image = image[:, :, 0] image = image </DeepExtract> if isinstance(image, list) or (hasattr(image, 'shape') and len(image.shape) > 2): return [oil_paint_filter(img, filter_size, edges_only, rgb) for img in image] if len(image.shape) == 3 and image.shape[2] > 1: image = image[:, :, 0] filtered_img = stats.mode(_PostProcessingUtility.get_pixel_neighbors_stacked(image, filter_size), axis=2)[0] filtered_img = filtered_img.reshape(filtered_img.shape[0], filtered_img.shape[1]) if edges_only: _image = np.copy(image) _max = np.max(_image) if np.max(_image) != np.inf else np.unique(_image)[-2] _image[_image > _max] = _max _image = _image / _max * 255.0 __img = np.uint8(_image) edges = cv2.Canny(__img, 0, np.max(__img)) image[edges > 0] = filtered_img[edges > 0] filtered_img = image return filtered_img
def oil_paint_filter(image: Union[list, np.ndarray], filter_size: int=5, edges_only: bool=True, rgb: bool=False) -> Union[list, np.ndarray]: """ Applies the oil paint filter on a single channel image (or more than one channel, where each channel is a replica of the other). This could be desired for corrupting rendered depth maps to appear more realistic. Also trims the redundant channels if they exist. :param image: Input image or list of images :param filter_size: Filter size, should be an odd number. :param edges_only: If true, applies the filter on the edges only. :param rgb: Apply the filter on an RGB image (if the image has 3 channels, they're assumed to not be replicated). :return: filtered image """ if rgb: if isinstance(image, list) or (hasattr(image, 'shape') and len(image.shape) > 3): return [oil_paint_filter(img, filter_size, edges_only, rgb) for img in image] intensity_img = np.sum(image, axis=2) / 3.0 neighbors = np.array(_PostProcessingUtility.get_pixel_neighbors_stacked(image, filter_size, return_list=True)) neighbors_intensity = _PostProcessingUtility.get_pixel_neighbors_stacked(intensity_img, filter_size) mode_intensity = stats.mode(neighbors_intensity, axis=2)[0].reshape(image.shape[0], image.shape[1]) mode_keys = np.argwhere(neighbors_intensity == np.expand_dims(mode_intensity, axis=3)) (_, unique_indices) = np.unique(mode_keys[:, 0:2], axis=0, return_index=True) unique_keys = mode_keys[unique_indices] filtered_img = neighbors[unique_keys[:, 2], unique_keys[:, 0], unique_keys[:, 1], :].reshape(image.shape[0], image.shape[1], image.shape[2]) if edges_only: edges = cv2.Canny(image, 0, np.max(image)) image[edges > 0] = filtered_img[edges > 0] filtered_img = image else: if isinstance(image, list): image = [trim_redundant_channels(ele) for ele in image] if hasattr(image, 'shape') and len(image.shape) > 3: image = np.array([trim_redundant_channels(ele) for ele in image]) if hasattr(image, 'shape') and len(image.shape) == 3 and (image.shape[2] == 3): image = image[:, :, 0] image = image if isinstance(image, list) or (hasattr(image, 'shape') and len(image.shape) > 2): return [oil_paint_filter(img, filter_size, edges_only, rgb) for img in image] if len(image.shape) == 3 and image.shape[2] > 1: image = image[:, :, 0] filtered_img = stats.mode(_PostProcessingUtility.get_pixel_neighbors_stacked(image, filter_size), axis=2)[0] filtered_img = filtered_img.reshape(filtered_img.shape[0], filtered_img.shape[1]) if edges_only: _image = np.copy(image) _max = np.max(_image) if np.max(_image) != np.inf else np.unique(_image)[-2] _image[_image > _max] = _max _image = _image / _max * 255.0 __img = np.uint8(_image) edges = cv2.Canny(__img, 0, np.max(__img)) image[edges > 0] = filtered_img[edges > 0] filtered_img = image return filtered_img
BlenderProc
positive
def source(self) -> str: num_frames = len(self.frames) if num_frames == 0: raise RuntimeError('No frame has been added yet.') if num_frames > 1 and 'center' not in self.frames: raise RuntimeError('JointView mode needs a center frame. Use `joint_view center ADD xxx` to add center track/frame.') (gr1, gr2) = self.current_range if gr1 is None: raise RuntimeError('No genome range found.Use `goto chr1:5000000-6000000` to set the genome range.') frame_dict = {} source = '' for (pos, src) in self.frames.items(): <DeepExtract> if pos != 'center': src += 'return frame\n' else: src += 'return list(frame.tracks.values())[0]\n' src = '\n'.join((' ' + line for line in src.split('\n'))) + '\n' frame_var = f'{pos}_frame' src = f'def fetch_{frame_var}():\n' + src src += f'{frame_var} = fetch_{frame_var}()\n' (frame_var, frame_src) = (frame_var, src) </DeepExtract> source += frame_src frame_dict[pos] = frame_var if 'center' in self.frames: source += f"frame = JointView({frame_dict['center']}, left={frame_dict.get('left')}, right={frame_dict.get('right')}, bottom={frame_dict.get('bottom')}, top={frame_dict.get('top')})\n" else: source += f'frame = {list(frame_dict.values())[0]}\n' return source
def source(self) -> str: num_frames = len(self.frames) if num_frames == 0: raise RuntimeError('No frame has been added yet.') if num_frames > 1 and 'center' not in self.frames: raise RuntimeError('JointView mode needs a center frame. Use `joint_view center ADD xxx` to add center track/frame.') (gr1, gr2) = self.current_range if gr1 is None: raise RuntimeError('No genome range found.Use `goto chr1:5000000-6000000` to set the genome range.') frame_dict = {} source = '' for (pos, src) in self.frames.items(): if pos != 'center': src += 'return frame\n' else: src += 'return list(frame.tracks.values())[0]\n' src = '\n'.join((' ' + line for line in src.split('\n'))) + '\n' frame_var = f'{pos}_frame' src = f'def fetch_{frame_var}():\n' + src src += f'{frame_var} = fetch_{frame_var}()\n' (frame_var, frame_src) = (frame_var, src) source += frame_src frame_dict[pos] = frame_var if 'center' in self.frames: source += f"frame = JointView({frame_dict['center']}, left={frame_dict.get('left')}, right={frame_dict.get('right')}, bottom={frame_dict.get('bottom')}, top={frame_dict.get('top')})\n" else: source += f'frame = {list(frame_dict.values())[0]}\n' return source
CoolBox
positive
def validate_interaction_types(interactionType, definition): if interactionType == 'choice' or interactionType == 'sequencing': if 'choices' in definition: <DeepExtract> interaction_components = set(['choices', 'scale', 'source', 'target', 'steps']) keys = set(definition.keys()) both = interaction_components.intersection(keys) not_allowed = list(both - set(['choices'])) if not_allowed: self.return_error('Only interaction component field(s) allowed (%s) - not allowed: %s' % (' '.join(['choices']), ' '.join(not_allowed))) </DeepExtract> choices = definition['choices'] <DeepExtract> if not isinstance(choices, list): self.return_error('%s is not a properly formatted array' % 'Activity definition choices') </DeepExtract> <DeepExtract> id_list = [] for act in choices: self.check_if_dict(act, '%s interaction component' % 'choices') self.check_allowed_fields(int_act_fields, act, 'Activity definition %s' % 'choices') self.check_required_fields(int_act_fields, act, 'Activity definition %s' % 'choices') if not isinstance(act['id'], str): self.return_error('Interaction activity in component %s has an id that is not a string' % 'choices') id_list.append(act['id']) if 'description' in act: self.check_if_dict(act['description'], '%s interaction component description' % 'choices') self.validate_lang_map(list(act['description'].keys()), '%s interaction component description' % 'choices') dups = set([i for i in id_list if id_list.count(i) > 1]) if dups: self.return_error('Interaction activities shared the same id(s) (%s) which is not allowed' % ' '.join(dups)) </DeepExtract> elif interactionType == 'likert': if 'scale' in definition: <DeepExtract> interaction_components = set(['choices', 'scale', 'source', 'target', 'steps']) keys = set(definition.keys()) both = interaction_components.intersection(keys) not_allowed = list(both - set(['scale'])) if not_allowed: self.return_error('Only interaction component field(s) allowed (%s) - not allowed: %s' % (' '.join(['scale']), ' '.join(not_allowed))) </DeepExtract> scale = definition['scale'] <DeepExtract> if not isinstance(scale, list): self.return_error('%s is not a properly formatted array' % 'Activity definition scale') </DeepExtract> <DeepExtract> id_list = [] for act in scale: self.check_if_dict(act, '%s interaction component' % 'scale') self.check_allowed_fields(int_act_fields, act, 'Activity definition %s' % 'scale') self.check_required_fields(int_act_fields, act, 'Activity definition %s' % 'scale') if not isinstance(act['id'], str): self.return_error('Interaction activity in component %s has an id that is not a string' % 'scale') id_list.append(act['id']) if 'description' in act: self.check_if_dict(act['description'], '%s interaction component description' % 'scale') self.validate_lang_map(list(act['description'].keys()), '%s interaction component description' % 'scale') dups = set([i for i in id_list if id_list.count(i) > 1]) if dups: self.return_error('Interaction activities shared the same id(s) (%s) which is not allowed' % ' '.join(dups)) </DeepExtract> elif interactionType == 'matching': if 'source' in definition: <DeepExtract> interaction_components = set(['choices', 'scale', 'source', 'target', 'steps']) keys = set(definition.keys()) both = interaction_components.intersection(keys) not_allowed = list(both - set(['target', 'source'])) if not_allowed: self.return_error('Only interaction component field(s) allowed (%s) - not allowed: %s' % (' '.join(['target', 'source']), ' '.join(not_allowed))) </DeepExtract> source = definition['source'] 
<DeepExtract> if not isinstance(source, list): self.return_error('%s is not a properly formatted array' % 'Activity definition source') </DeepExtract> <DeepExtract> id_list = [] for act in source: self.check_if_dict(act, '%s interaction component' % 'source') self.check_allowed_fields(int_act_fields, act, 'Activity definition %s' % 'source') self.check_required_fields(int_act_fields, act, 'Activity definition %s' % 'source') if not isinstance(act['id'], str): self.return_error('Interaction activity in component %s has an id that is not a string' % 'source') id_list.append(act['id']) if 'description' in act: self.check_if_dict(act['description'], '%s interaction component description' % 'source') self.validate_lang_map(list(act['description'].keys()), '%s interaction component description' % 'source') dups = set([i for i in id_list if id_list.count(i) > 1]) if dups: self.return_error('Interaction activities shared the same id(s) (%s) which is not allowed' % ' '.join(dups)) </DeepExtract> if 'target' in definition: <DeepExtract> interaction_components = set(['choices', 'scale', 'source', 'target', 'steps']) keys = set(definition.keys()) both = interaction_components.intersection(keys) not_allowed = list(both - set(['target', 'source'])) if not_allowed: self.return_error('Only interaction component field(s) allowed (%s) - not allowed: %s' % (' '.join(['target', 'source']), ' '.join(not_allowed))) </DeepExtract> target = definition['target'] <DeepExtract> if not isinstance(target, list): self.return_error('%s is not a properly formatted array' % 'Activity definition target') </DeepExtract> <DeepExtract> id_list = [] for act in target: self.check_if_dict(act, '%s interaction component' % 'target') self.check_allowed_fields(int_act_fields, act, 'Activity definition %s' % 'target') self.check_required_fields(int_act_fields, act, 'Activity definition %s' % 'target') if not isinstance(act['id'], str): self.return_error('Interaction activity in component %s has an id that is not a string' % 'target') id_list.append(act['id']) if 'description' in act: self.check_if_dict(act['description'], '%s interaction component description' % 'target') self.validate_lang_map(list(act['description'].keys()), '%s interaction component description' % 'target') dups = set([i for i in id_list if id_list.count(i) > 1]) if dups: self.return_error('Interaction activities shared the same id(s) (%s) which is not allowed' % ' '.join(dups)) </DeepExtract> elif interactionType == 'performance': if 'steps' in definition: <DeepExtract> interaction_components = set(['choices', 'scale', 'source', 'target', 'steps']) keys = set(definition.keys()) both = interaction_components.intersection(keys) not_allowed = list(both - set(['steps'])) if not_allowed: self.return_error('Only interaction component field(s) allowed (%s) - not allowed: %s' % (' '.join(['steps']), ' '.join(not_allowed))) </DeepExtract> steps = definition['steps'] <DeepExtract> if not isinstance(steps, list): self.return_error('%s is not a properly formatted array' % 'Activity definition steps') </DeepExtract> <DeepExtract> id_list = [] for act in steps: self.check_if_dict(act, '%s interaction component' % 'steps') self.check_allowed_fields(int_act_fields, act, 'Activity definition %s' % 'steps') self.check_required_fields(int_act_fields, act, 'Activity definition %s' % 'steps') if not isinstance(act['id'], str): self.return_error('Interaction activity in component %s has an id that is not a string' % 'steps') id_list.append(act['id']) if 'description' in act: 
self.check_if_dict(act['description'], '%s interaction component description' % 'steps') self.validate_lang_map(list(act['description'].keys()), '%s interaction component description' % 'steps') dups = set([i for i in id_list if id_list.count(i) > 1]) if dups: self.return_error('Interaction activities shared the same id(s) (%s) which is not allowed' % ' '.join(dups)) </DeepExtract>
def validate_interaction_types(interactionType, definition): if interactionType == 'choice' or interactionType == 'sequencing': if 'choices' in definition: interaction_components = set(['choices', 'scale', 'source', 'target', 'steps']) keys = set(definition.keys()) both = interaction_components.intersection(keys) not_allowed = list(both - set(['choices'])) if not_allowed: self.return_error('Only interaction component field(s) allowed (%s) - not allowed: %s' % (' '.join(['choices']), ' '.join(not_allowed))) choices = definition['choices'] if not isinstance(choices, list): self.return_error('%s is not a properly formatted array' % 'Activity definition choices') id_list = [] for act in choices: self.check_if_dict(act, '%s interaction component' % 'choices') self.check_allowed_fields(int_act_fields, act, 'Activity definition %s' % 'choices') self.check_required_fields(int_act_fields, act, 'Activity definition %s' % 'choices') if not isinstance(act['id'], str): self.return_error('Interaction activity in component %s has an id that is not a string' % 'choices') id_list.append(act['id']) if 'description' in act: self.check_if_dict(act['description'], '%s interaction component description' % 'choices') self.validate_lang_map(list(act['description'].keys()), '%s interaction component description' % 'choices') dups = set([i for i in id_list if id_list.count(i) > 1]) if dups: self.return_error('Interaction activities shared the same id(s) (%s) which is not allowed' % ' '.join(dups)) elif interactionType == 'likert': if 'scale' in definition: interaction_components = set(['choices', 'scale', 'source', 'target', 'steps']) keys = set(definition.keys()) both = interaction_components.intersection(keys) not_allowed = list(both - set(['scale'])) if not_allowed: self.return_error('Only interaction component field(s) allowed (%s) - not allowed: %s' % (' '.join(['scale']), ' '.join(not_allowed))) scale = definition['scale'] if not isinstance(scale, list): self.return_error('%s is not a properly formatted array' % 'Activity definition scale') id_list = [] for act in scale: self.check_if_dict(act, '%s interaction component' % 'scale') self.check_allowed_fields(int_act_fields, act, 'Activity definition %s' % 'scale') self.check_required_fields(int_act_fields, act, 'Activity definition %s' % 'scale') if not isinstance(act['id'], str): self.return_error('Interaction activity in component %s has an id that is not a string' % 'scale') id_list.append(act['id']) if 'description' in act: self.check_if_dict(act['description'], '%s interaction component description' % 'scale') self.validate_lang_map(list(act['description'].keys()), '%s interaction component description' % 'scale') dups = set([i for i in id_list if id_list.count(i) > 1]) if dups: self.return_error('Interaction activities shared the same id(s) (%s) which is not allowed' % ' '.join(dups)) elif interactionType == 'matching': if 'source' in definition: interaction_components = set(['choices', 'scale', 'source', 'target', 'steps']) keys = set(definition.keys()) both = interaction_components.intersection(keys) not_allowed = list(both - set(['target', 'source'])) if not_allowed: self.return_error('Only interaction component field(s) allowed (%s) - not allowed: %s' % (' '.join(['target', 'source']), ' '.join(not_allowed))) source = definition['source'] if not isinstance(source, list): self.return_error('%s is not a properly formatted array' % 'Activity definition source') id_list = [] for act in source: self.check_if_dict(act, '%s interaction component' % 'source') self.check_allowed_fields(int_act_fields, act, 'Activity definition %s' % 'source') self.check_required_fields(int_act_fields, act, 'Activity definition %s' % 'source') if not isinstance(act['id'], str): self.return_error('Interaction activity in component %s has an id that is not a string' % 'source') id_list.append(act['id']) if 'description' in act: self.check_if_dict(act['description'], '%s interaction component description' % 'source') self.validate_lang_map(list(act['description'].keys()), '%s interaction component description' % 'source') dups = set([i for i in id_list if id_list.count(i) > 1]) if dups: self.return_error('Interaction activities shared the same id(s) (%s) which is not allowed' % ' '.join(dups)) if 'target' in definition: interaction_components = set(['choices', 'scale', 'source', 'target', 'steps']) keys = set(definition.keys()) both = interaction_components.intersection(keys) not_allowed = list(both - set(['target', 'source'])) if not_allowed: self.return_error('Only interaction component field(s) allowed (%s) - not allowed: %s' % (' '.join(['target', 'source']), ' '.join(not_allowed))) target = definition['target'] if not isinstance(target, list): self.return_error('%s is not a properly formatted array' % 'Activity definition target') id_list = [] for act in target: self.check_if_dict(act, '%s interaction component' % 'target') self.check_allowed_fields(int_act_fields, act, 'Activity definition %s' % 'target') self.check_required_fields(int_act_fields, act, 'Activity definition %s' % 'target') if not isinstance(act['id'], str): self.return_error('Interaction activity in component %s has an id that is not a string' % 'target') id_list.append(act['id']) if 'description' in act: self.check_if_dict(act['description'], '%s interaction component description' % 'target') self.validate_lang_map(list(act['description'].keys()), '%s interaction component description' % 'target') dups = set([i for i in id_list if id_list.count(i) > 1]) if dups: self.return_error('Interaction activities shared the same id(s) (%s) which is not allowed' % ' '.join(dups)) elif interactionType == 'performance': if 'steps' in definition: interaction_components = set(['choices', 'scale', 'source', 'target', 'steps']) keys = set(definition.keys()) both = interaction_components.intersection(keys) not_allowed = list(both - set(['steps'])) if not_allowed: self.return_error('Only interaction component field(s) allowed (%s) - not allowed: %s' % (' '.join(['steps']), ' '.join(not_allowed))) steps = definition['steps'] if not isinstance(steps, list): self.return_error('%s is not a properly formatted array' % 'Activity definition steps') id_list = [] for act in steps: self.check_if_dict(act, '%s interaction component' % 'steps') self.check_allowed_fields(int_act_fields, act, 'Activity definition %s' % 'steps') self.check_required_fields(int_act_fields, act, 'Activity definition %s' % 'steps') if not isinstance(act['id'], str): self.return_error('Interaction activity in component %s has an id that is not a string' % 'steps') id_list.append(act['id']) if 'description' in act: self.check_if_dict(act['description'], '%s interaction component description' % 'steps') self.validate_lang_map(list(act['description'].keys()), '%s interaction component description' % 'steps') dups = set([i for i in id_list if id_list.count(i) > 1]) if dups: self.return_error('Interaction activities shared the same id(s) (%s) which is not allowed' % ' '.join(dups)) </DeepExtract>
ADL_LRS
positive
def updateLayer(self): """ data changed slot. """ <DeepExtract> self.sliderContrast.setEnabled(True) self.sliderSaturation.setEnabled(True) self.sliderVibrance.setEnabled(True) self.sliderBrightness.setEnabled(True) </DeepExtract> self.layer.applyToStack() self.layer.parentImage.onImageChanged() for intname in ['High', 'manualCurve']: item = self.listWidget2.items[intname] if self.options['Multi-Mode']: item.setFlags(item.flags() | Qt.ItemIsEnabled) else: item.setFlags(item.flags() & ~Qt.ItemIsEnabled) cf = getattr(self, 'dock', None) if cf is None: return if self.options['manualCurve'] and self.options['Multi-Mode']: cf.showNormal() else: cf.hide()
def updateLayer(self): """ data changed slot. """ self.sliderContrast.setEnabled(True) self.sliderSaturation.setEnabled(True) self.sliderVibrance.setEnabled(True) self.sliderBrightness.setEnabled(True) self.layer.applyToStack() self.layer.parentImage.onImageChanged() for intname in ['High', 'manualCurve']: item = self.listWidget2.items[intname] if self.options['Multi-Mode']: item.setFlags(item.flags() | Qt.ItemIsEnabled) else: item.setFlags(item.flags() & ~Qt.ItemIsEnabled) cf = getattr(self, 'dock', None) if cf is None: return if self.options['manualCurve'] and self.options['Multi-Mode']: cf.showNormal() else: cf.hide()
bLUe_PYSIDE2
positive
def write_column(context, column_name, column_spec, items, buf, types_check=False): column_options = {'context': context, 'types_check': types_check} <DeepExtract> context = column_options['context'] if use_numpy is None: use_numpy = context.client_settings['use_numpy'] if context else False if use_numpy: from .numpy.service import get_numpy_column_by_spec try: column = get_numpy_column_by_spec(column_spec, column_options) except errors.UnknownTypeError: use_numpy = False logger.warning('NumPy support is not implemented for %s. Using generic column', column_spec) def create_column_with_options(x): column = get_column_by_spec(x, column_options, use_numpy=use_numpy) if column_spec == 'String' or column_spec.startswith('FixedString'): column = create_string_column(column_spec, column_options) elif column_spec.startswith('Enum'): column = create_enum_column(column_spec, column_options) elif column_spec.startswith('DateTime'): column = create_datetime_column(column_spec, column_options) elif column_spec.startswith('Decimal'): column = create_decimal_column(column_spec, column_options) elif column_spec.startswith('Array'): column = create_array_column(column_spec, create_column_with_options, column_options) elif column_spec.startswith('Tuple'): column = create_tuple_column(column_spec, create_column_with_options, column_options) elif column_spec.startswith('Nested'): column = create_nested_column(column_spec, create_column_with_options, column_options) elif column_spec.startswith('Nullable'): column = create_nullable_column(column_spec, create_column_with_options) elif column_spec.startswith('LowCardinality'): column = create_low_cardinality_column(column_spec, create_column_with_options, column_options) elif column_spec.startswith('SimpleAggregateFunction'): column = create_simple_aggregate_function_column(column_spec, create_column_with_options) elif column_spec.startswith('Map'): column = create_map_column(column_spec, create_column_with_options, column_options) else: for (alias, primitive) in aliases: if column_spec.startswith(alias): column = create_column_with_options(primitive + column_spec[len(alias):]) try: cls = column_by_type[column_spec] column = cls(**column_options) except KeyError: raise errors.UnknownTypeError('Unknown type {}'.format(column_spec)) </DeepExtract> try: column.write_state_prefix(buf) column.write_data(items, buf) except column_exceptions.ColumnTypeMismatchException as e: raise errors.TypeMismatchError('Type mismatch in VALUES section. Expected {} got {}: {} for column "{}".'.format(column_spec, type(e.args[0]), e.args[0], column_name)) except (column_exceptions.StructPackException, OverflowError) as e: error = e.args[0] raise errors.TypeMismatchError('Type mismatch in VALUES section. Repeat query with types_check=True for detailed info. Column {}: {}'.format(column_name, str(error)))
def write_column(context, column_name, column_spec, items, buf, types_check=False): column_options = {'context': context, 'types_check': types_check} context = column_options['context'] if use_numpy is None: use_numpy = context.client_settings['use_numpy'] if context else False if use_numpy: from .numpy.service import get_numpy_column_by_spec try: column = get_numpy_column_by_spec(column_spec, column_options) except errors.UnknownTypeError: use_numpy = False logger.warning('NumPy support is not implemented for %s. Using generic column', column_spec) def create_column_with_options(x): column = get_column_by_spec(x, column_options, use_numpy=use_numpy) if column_spec == 'String' or column_spec.startswith('FixedString'): column = create_string_column(column_spec, column_options) elif column_spec.startswith('Enum'): column = create_enum_column(column_spec, column_options) elif column_spec.startswith('DateTime'): column = create_datetime_column(column_spec, column_options) elif column_spec.startswith('Decimal'): column = create_decimal_column(column_spec, column_options) elif column_spec.startswith('Array'): column = create_array_column(column_spec, create_column_with_options, column_options) elif column_spec.startswith('Tuple'): column = create_tuple_column(column_spec, create_column_with_options, column_options) elif column_spec.startswith('Nested'): column = create_nested_column(column_spec, create_column_with_options, column_options) elif column_spec.startswith('Nullable'): column = create_nullable_column(column_spec, create_column_with_options) elif column_spec.startswith('LowCardinality'): column = create_low_cardinality_column(column_spec, create_column_with_options, column_options) elif column_spec.startswith('SimpleAggregateFunction'): column = create_simple_aggregate_function_column(column_spec, create_column_with_options) elif column_spec.startswith('Map'): column = create_map_column(column_spec, create_column_with_options, column_options) else: for (alias, primitive) in aliases: if column_spec.startswith(alias): column = create_column_with_options(primitive + column_spec[len(alias):]) try: cls = column_by_type[column_spec] column = cls(**column_options) except KeyError: raise errors.UnknownTypeError('Unknown type {}'.format(column_spec)) try: column.write_state_prefix(buf) column.write_data(items, buf) except column_exceptions.ColumnTypeMismatchException as e: raise errors.TypeMismatchError('Type mismatch in VALUES section. Expected {} got {}: {} for column "{}".'.format(column_spec, type(e.args[0]), e.args[0], column_name)) except (column_exceptions.StructPackException, OverflowError) as e: error = e.args[0] raise errors.TypeMismatchError('Type mismatch in VALUES section. Repeat query with types_check=True for detailed info. Column {}: {}'.format(column_name, str(error)))
clickhouse-driver
positive
def __call__(self, batch, is_training, safe_key=None): c = self.config gc = self.global_config batch = dict(batch) dtype = jnp.bfloat16 if gc.bfloat16 else jnp.float32 if safe_key is None: safe_key = prng.SafeKey(hk.next_rng_key()) output = {} <DeepExtract> batch['msa_profile'] = utils.mask_mean(batch['msa_mask'][:, :, None], jax.nn.one_hot(batch['msa'], 22), axis=0) </DeepExtract> with utils.bfloat16_context(): target_feat = jax.nn.one_hot(batch['aatype'], 21).astype(dtype) preprocess_1d = common_modules.Linear(c.msa_channel, name='preprocess_1d')(target_feat) (safe_key, sample_key, mask_key) = safe_key.split(3) <DeepExtract> logits = (jnp.clip(jnp.sum(batch['msa_mask'], axis=-1), 0.0, 1.0) - 1.0) * 1000000.0 if 'cluster_bias_mask' not in batch: cluster_bias_mask = jnp.pad(jnp.zeros(batch['msa'].shape[0] - 1), (1, 0), constant_values=1.0) else: cluster_bias_mask = batch['cluster_bias_mask'] logits += cluster_bias_mask * 1000000.0 index_order = gumbel_argsort_sample_idx(sample_key.get(), logits) sel_idx = index_order[:c.num_msa] extra_idx = index_order[c.num_msa:] for k in ['msa', 'deletion_matrix', 'msa_mask', 'bert_mask']: if k in batch: batch['extra_' + k] = batch[k][extra_idx] batch[k] = batch[k][sel_idx] batch = batch </DeepExtract> <DeepExtract> random_aa = jnp.array([0.05] * 20 + [0.0, 0.0], dtype=jnp.float32) categorical_probs = c.masked_msa.uniform_prob * random_aa + c.masked_msa.profile_prob * batch['msa_profile'] + c.masked_msa.same_prob * jax.nn.one_hot(batch['msa'], 22) pad_shapes = [[0, 0] for _ in range(len(categorical_probs.shape))] pad_shapes[-1][1] = 1 mask_prob = 1.0 - c.masked_msa.profile_prob - c.masked_msa.same_prob - c.masked_msa.uniform_prob assert mask_prob >= 0.0 categorical_probs = jnp.pad(categorical_probs, pad_shapes, constant_values=mask_prob) sh = batch['msa'].shape (mask_key, mask_subkey, gumbel_subkey) = mask_key.split(3) uniform = utils.padding_consistent_rng(jax.random.uniform) mask_position = uniform(mask_subkey.get(), sh) < c.masked_msa.replace_fraction mask_position *= batch['msa_mask'] logits = jnp.log(categorical_probs + epsilon) bert_msa = gumbel_max_sample(gumbel_subkey.get(), logits) bert_msa = jnp.where(mask_position, jnp.argmax(bert_msa, axis=-1), batch['msa']) bert_msa *= batch['msa_mask'] if 'bert_mask' in batch: batch['bert_mask'] *= mask_position.astype(jnp.float32) else: batch['bert_mask'] = mask_position.astype(jnp.float32) batch['true_msa'] = batch['msa'] batch['msa'] = bert_msa batch = batch </DeepExtract> <DeepExtract> weights = jnp.array([1.0] * 21 + [gap_agreement_weight] + [0.0], dtype=jnp.float32) msa_mask = batch['msa_mask'] msa_one_hot = jax.nn.one_hot(batch['msa'], 23) extra_mask = batch['extra_msa_mask'] extra_one_hot = jax.nn.one_hot(batch['extra_msa'], 23) msa_one_hot_masked = msa_mask[:, :, None] * msa_one_hot extra_one_hot_masked = extra_mask[:, :, None] * extra_one_hot agreement = jnp.einsum('mrc, nrc->nm', extra_one_hot_masked, weights * msa_one_hot_masked) cluster_assignment = jax.nn.softmax(1000.0 * agreement, axis=0) cluster_assignment *= jnp.einsum('mr, nr->mn', msa_mask, extra_mask) cluster_count = jnp.sum(cluster_assignment, axis=-1) cluster_count += 1.0 msa_sum = jnp.einsum('nm, mrc->nrc', cluster_assignment, extra_one_hot_masked) msa_sum += msa_one_hot_masked cluster_profile = msa_sum / cluster_count[:, None, None] extra_deletion_matrix = batch['extra_deletion_matrix'] deletion_matrix = batch['deletion_matrix'] del_sum = jnp.einsum('nm, mc->nc', cluster_assignment, extra_mask * extra_deletion_matrix) del_sum += deletion_matrix cluster_deletion_mean = del_sum / cluster_count[:, None] (batch['cluster_profile'], batch['cluster_deletion_mean']) = (cluster_profile, cluster_deletion_mean) </DeepExtract> msa_feat = create_msa_feat(batch).astype(dtype) preprocess_msa = common_modules.Linear(c.msa_channel, name='preprocess_msa')(msa_feat) msa_activations = jnp.expand_dims(preprocess_1d, axis=0) + preprocess_msa left_single = common_modules.Linear(c.pair_channel, name='left_single')(target_feat) right_single = common_modules.Linear(c.pair_channel, name='right_single')(target_feat) pair_activations = left_single[:, None] + right_single[None] mask_2d = batch['seq_mask'][:, None] * batch['seq_mask'][None, :] mask_2d = mask_2d.astype(dtype) if c.recycle_pos: prev_pseudo_beta = modules.pseudo_beta_fn(batch['aatype'], batch['prev_pos'], None) dgram = modules.dgram_from_positions(prev_pseudo_beta, **self.config.prev_pos) dgram = dgram.astype(dtype) pair_activations += common_modules.Linear(c.pair_channel, name='prev_pos_linear')(dgram) if c.recycle_features: prev_msa_first_row = common_modules.LayerNorm(axis=[-1], create_scale=True, create_offset=True, name='prev_msa_first_row_norm')(batch['prev_msa_first_row']).astype(dtype) msa_activations = msa_activations.at[0].add(prev_msa_first_row) pair_activations += common_modules.LayerNorm(axis=[-1], create_scale=True, create_offset=True, name='prev_pair_norm')(batch['prev_pair']).astype(dtype) if c.max_relative_idx: pair_activations += self._relative_encoding(batch) if c.template.enabled: template_module = TemplateEmbedding(c.template, gc) template_batch = {'template_aatype': batch['template_aatype'], 'template_all_atom_positions': batch['template_all_atom_positions'], 'template_all_atom_mask': batch['template_all_atom_mask']} multichain_mask = batch['asym_id'][:, None] == batch['asym_id'][None, :] (safe_key, safe_subkey) = safe_key.split() template_act = template_module(query_embedding=pair_activations, template_batch=template_batch, padding_mask_2d=mask_2d, multichain_mask_2d=multichain_mask, is_training=is_training, safe_key=safe_subkey) pair_activations += template_act <DeepExtract> extra_msa = batch['extra_msa'][:c.num_extra_msa] deletion_matrix = batch['extra_deletion_matrix'][:c.num_extra_msa] msa_1hot = jax.nn.one_hot(extra_msa, 23) has_deletion = jnp.clip(deletion_matrix, 0.0, 1.0)[..., None] deletion_value = (jnp.arctan(deletion_matrix / 3.0) * (2.0 / jnp.pi))[..., None] extra_msa_mask = batch['extra_msa_mask'][:c.num_extra_msa] (extra_msa_feat, extra_msa_mask) = (jnp.concatenate([msa_1hot, has_deletion, deletion_value], axis=-1), extra_msa_mask) </DeepExtract> extra_msa_activations = common_modules.Linear(c.extra_msa_channel, name='extra_msa_activations')(extra_msa_feat).astype(dtype) extra_msa_mask = extra_msa_mask.astype(dtype) extra_evoformer_input = {'msa': extra_msa_activations, 'pair': pair_activations} extra_masks = {'msa': extra_msa_mask, 'pair': mask_2d} extra_evoformer_iteration = modules.EvoformerIteration(c.evoformer, gc, is_extra_msa=True, name='extra_msa_stack') def extra_evoformer_fn(x): (act, safe_key) = x (safe_key, safe_subkey) = safe_key.split() extra_evoformer_output = extra_evoformer_iteration(activations=act, masks=extra_masks, is_training=is_training, safe_key=safe_subkey) return (extra_evoformer_output, safe_key) if gc.use_remat: extra_evoformer_fn = hk.remat(extra_evoformer_fn) (safe_key, safe_subkey) = safe_key.split() extra_evoformer_stack = layer_stack.layer_stack(c.extra_msa_stack_num_block)(extra_evoformer_fn) (extra_evoformer_output, safe_key) = extra_evoformer_stack((extra_evoformer_input, safe_subkey)) pair_activations = extra_evoformer_output['pair'] num_msa_sequences = msa_activations.shape[0] evoformer_input = {'msa': msa_activations, 'pair': pair_activations} evoformer_masks = {'msa': batch['msa_mask'].astype(dtype), 'pair': mask_2d} if c.template.enabled: <DeepExtract> aatype_one_hot = jax.nn.one_hot(batch['template_aatype'], 22, axis=-1) num_templates = batch['template_aatype'].shape[0] all_chi_angles = [] all_chi_masks = [] for i in range(num_templates): atom_pos = geometry.Vec3Array.from_array(batch['template_all_atom_positions'][i, :, :, :]) (template_chi_angles, template_chi_mask) = all_atom_multimer.compute_chi_angles(atom_pos, batch['template_all_atom_mask'][i, :, :], batch['template_aatype'][i, :]) all_chi_angles.append(template_chi_angles) all_chi_masks.append(template_chi_mask) chi_angles = jnp.stack(all_chi_angles, axis=0) chi_mask = jnp.stack(all_chi_masks, axis=0) template_features = jnp.concatenate([aatype_one_hot, jnp.sin(chi_angles) * chi_mask, jnp.cos(chi_angles) * chi_mask, chi_mask], axis=-1) template_mask = chi_mask[:, :, 0] if gc.bfloat16: template_features = template_features.astype(jnp.bfloat16) template_mask = template_mask.astype(jnp.bfloat16) template_activations = common_modules.Linear(c.msa_channel, initializer='relu', name='template_single_embedding')(template_features) template_activations = jax.nn.relu(template_activations) template_activations = common_modules.Linear(c.msa_channel, initializer='relu', name='template_projection')(template_activations) (template_features, template_masks) = (template_activations, template_mask) </DeepExtract> evoformer_input['msa'] = jnp.concatenate([evoformer_input['msa'], template_features], axis=0) evoformer_masks['msa'] = jnp.concatenate([evoformer_masks['msa'], template_masks], axis=0) evoformer_iteration = modules.EvoformerIteration(c.evoformer, gc, is_extra_msa=False, name='evoformer_iteration') def evoformer_fn(x): (act, safe_key) = x (safe_key, safe_subkey) = safe_key.split() evoformer_output = evoformer_iteration(activations=act, masks=evoformer_masks, is_training=is_training, safe_key=safe_subkey) return (evoformer_output, safe_key) if gc.use_remat: evoformer_fn = hk.remat(evoformer_fn) (safe_key, safe_subkey) = safe_key.split() evoformer_stack = layer_stack.layer_stack(c.evoformer_num_block)(evoformer_fn) def run_evoformer(evoformer_input): (evoformer_output, _) = evoformer_stack((evoformer_input, safe_subkey)) return evoformer_output <DeepExtract> (evoformer_output, _) = evoformer_stack((evoformer_input, safe_subkey)) evoformer_output = evoformer_output </DeepExtract> msa_activations = evoformer_output['msa'] pair_activations = evoformer_output['pair'] single_activations = common_modules.Linear(c.seq_channel, name='single_activations')(msa_activations[0]) output.update({'single': single_activations, 'pair': pair_activations, 'msa': msa_activations[:num_msa_sequences, :, :], 'msa_first_row': msa_activations[0]}) if not gc.bfloat16_output: for (k, v) in output.items(): if v.dtype == jnp.bfloat16: output[k] = v.astype(jnp.float32) return output
def __call__(self, batch, is_training, safe_key=None): c = self.config gc = self.global_config batch = dict(batch) dtype = jnp.bfloat16 if gc.bfloat16 else jnp.float32 if safe_key is None: safe_key = prng.SafeKey(hk.next_rng_key()) output = {} batch['msa_profile'] = utils.mask_mean(batch['msa_mask'][:, :, None], jax.nn.one_hot(batch['msa'], 22), axis=0) with utils.bfloat16_context(): target_feat = jax.nn.one_hot(batch['aatype'], 21).astype(dtype) preprocess_1d = common_modules.Linear(c.msa_channel, name='preprocess_1d')(target_feat) (safe_key, sample_key, mask_key) = safe_key.split(3) logits = (jnp.clip(jnp.sum(batch['msa_mask'], axis=-1), 0.0, 1.0) - 1.0) * 1000000.0 if 'cluster_bias_mask' not in batch: cluster_bias_mask = jnp.pad(jnp.zeros(batch['msa'].shape[0] - 1), (1, 0), constant_values=1.0) else: cluster_bias_mask = batch['cluster_bias_mask'] logits += cluster_bias_mask * 1000000.0 index_order = gumbel_argsort_sample_idx(sample_key.get(), logits) sel_idx = index_order[:c.num_msa] extra_idx = index_order[c.num_msa:] for k in ['msa', 'deletion_matrix', 'msa_mask', 'bert_mask']: if k in batch: batch['extra_' + k] = batch[k][extra_idx] batch[k] = batch[k][sel_idx] batch = batch random_aa = jnp.array([0.05] * 20 + [0.0, 0.0], dtype=jnp.float32) categorical_probs = c.masked_msa.uniform_prob * random_aa + c.masked_msa.profile_prob * batch['msa_profile'] + c.masked_msa.same_prob * jax.nn.one_hot(batch['msa'], 22) pad_shapes = [[0, 0] for _ in range(len(categorical_probs.shape))] pad_shapes[-1][1] = 1 mask_prob = 1.0 - c.masked_msa.profile_prob - c.masked_msa.same_prob - c.masked_msa.uniform_prob assert mask_prob >= 0.0 categorical_probs = jnp.pad(categorical_probs, pad_shapes, constant_values=mask_prob) sh = batch['msa'].shape (mask_key, mask_subkey, gumbel_subkey) = mask_key.split(3) uniform = utils.padding_consistent_rng(jax.random.uniform) mask_position = uniform(mask_subkey.get(), sh) < c.masked_msa.replace_fraction mask_position *= batch['msa_mask'] logits = jnp.log(categorical_probs + epsilon) bert_msa = gumbel_max_sample(gumbel_subkey.get(), logits) bert_msa = jnp.where(mask_position, jnp.argmax(bert_msa, axis=-1), batch['msa']) bert_msa *= batch['msa_mask'] if 'bert_mask' in batch: batch['bert_mask'] *= mask_position.astype(jnp.float32) else: batch['bert_mask'] = mask_position.astype(jnp.float32) batch['true_msa'] = batch['msa'] batch['msa'] = bert_msa batch = batch weights = jnp.array([1.0] * 21 + [gap_agreement_weight] + [0.0], dtype=jnp.float32) msa_mask = batch['msa_mask'] msa_one_hot = jax.nn.one_hot(batch['msa'], 23) extra_mask = batch['extra_msa_mask'] extra_one_hot = jax.nn.one_hot(batch['extra_msa'], 23) msa_one_hot_masked = msa_mask[:, :, None] * msa_one_hot extra_one_hot_masked = extra_mask[:, :, None] * extra_one_hot agreement = jnp.einsum('mrc, nrc->nm', extra_one_hot_masked, weights * msa_one_hot_masked) cluster_assignment = jax.nn.softmax(1000.0 * agreement, axis=0) cluster_assignment *= jnp.einsum('mr, nr->mn', msa_mask, extra_mask) cluster_count = jnp.sum(cluster_assignment, axis=-1) cluster_count += 1.0 msa_sum = jnp.einsum('nm, mrc->nrc', cluster_assignment, extra_one_hot_masked) msa_sum += msa_one_hot_masked cluster_profile = msa_sum / cluster_count[:, None, None] extra_deletion_matrix = batch['extra_deletion_matrix'] deletion_matrix = batch['deletion_matrix'] del_sum = jnp.einsum('nm, mc->nc', cluster_assignment, extra_mask * extra_deletion_matrix) del_sum += deletion_matrix cluster_deletion_mean = del_sum / cluster_count[:, None] (batch['cluster_profile'], 
batch['cluster_deletion_mean']) = (cluster_profile, cluster_deletion_mean) msa_feat = create_msa_feat(batch).astype(dtype) preprocess_msa = common_modules.Linear(c.msa_channel, name='preprocess_msa')(msa_feat) msa_activations = jnp.expand_dims(preprocess_1d, axis=0) + preprocess_msa left_single = common_modules.Linear(c.pair_channel, name='left_single')(target_feat) right_single = common_modules.Linear(c.pair_channel, name='right_single')(target_feat) pair_activations = left_single[:, None] + right_single[None] mask_2d = batch['seq_mask'][:, None] * batch['seq_mask'][None, :] mask_2d = mask_2d.astype(dtype) if c.recycle_pos: prev_pseudo_beta = modules.pseudo_beta_fn(batch['aatype'], batch['prev_pos'], None) dgram = modules.dgram_from_positions(prev_pseudo_beta, **self.config.prev_pos) dgram = dgram.astype(dtype) pair_activations += common_modules.Linear(c.pair_channel, name='prev_pos_linear')(dgram) if c.recycle_features: prev_msa_first_row = common_modules.LayerNorm(axis=[-1], create_scale=True, create_offset=True, name='prev_msa_first_row_norm')(batch['prev_msa_first_row']).astype(dtype) msa_activations = msa_activations.at[0].add(prev_msa_first_row) pair_activations += common_modules.LayerNorm(axis=[-1], create_scale=True, create_offset=True, name='prev_pair_norm')(batch['prev_pair']).astype(dtype) if c.max_relative_idx: pair_activations += self._relative_encoding(batch) if c.template.enabled: template_module = TemplateEmbedding(c.template, gc) template_batch = {'template_aatype': batch['template_aatype'], 'template_all_atom_positions': batch['template_all_atom_positions'], 'template_all_atom_mask': batch['template_all_atom_mask']} multichain_mask = batch['asym_id'][:, None] == batch['asym_id'][None, :] (safe_key, safe_subkey) = safe_key.split() template_act = template_module(query_embedding=pair_activations, template_batch=template_batch, padding_mask_2d=mask_2d, multichain_mask_2d=multichain_mask, is_training=is_training, safe_key=safe_subkey) pair_activations += template_act extra_msa = batch['extra_msa'][:c.num_extra_msa] deletion_matrix = batch['extra_deletion_matrix'][:c.num_extra_msa] msa_1hot = jax.nn.one_hot(extra_msa, 23) has_deletion = jnp.clip(deletion_matrix, 0.0, 1.0)[..., None] deletion_value = (jnp.arctan(deletion_matrix / 3.0) * (2.0 / jnp.pi))[..., None] extra_msa_mask = batch['extra_msa_mask'][:c.num_extra_msa] (extra_msa_feat, extra_msa_mask) = (jnp.concatenate([msa_1hot, has_deletion, deletion_value], axis=-1), extra_msa_mask) extra_msa_activations = common_modules.Linear(c.extra_msa_channel, name='extra_msa_activations')(extra_msa_feat).astype(dtype) extra_msa_mask = extra_msa_mask.astype(dtype) extra_evoformer_input = {'msa': extra_msa_activations, 'pair': pair_activations} extra_masks = {'msa': extra_msa_mask, 'pair': mask_2d} extra_evoformer_iteration = modules.EvoformerIteration(c.evoformer, gc, is_extra_msa=True, name='extra_msa_stack') def extra_evoformer_fn(x): (act, safe_key) = x (safe_key, safe_subkey) = safe_key.split() extra_evoformer_output = extra_evoformer_iteration(activations=act, masks=extra_masks, is_training=is_training, safe_key=safe_subkey) return (extra_evoformer_output, safe_key) if gc.use_remat: extra_evoformer_fn = hk.remat(extra_evoformer_fn) (safe_key, safe_subkey) = safe_key.split() extra_evoformer_stack = layer_stack.layer_stack(c.extra_msa_stack_num_block)(extra_evoformer_fn) (extra_evoformer_output, safe_key) = extra_evoformer_stack((extra_evoformer_input, safe_subkey)) pair_activations = extra_evoformer_output['pair'] 
num_msa_sequences = msa_activations.shape[0] evoformer_input = {'msa': msa_activations, 'pair': pair_activations} evoformer_masks = {'msa': batch['msa_mask'].astype(dtype), 'pair': mask_2d} if c.template.enabled: aatype_one_hot = jax.nn.one_hot(batch['template_aatype'], 22, axis=-1) num_templates = batch['template_aatype'].shape[0] all_chi_angles = [] all_chi_masks = [] for i in range(num_templates): atom_pos = geometry.Vec3Array.from_array(batch['template_all_atom_positions'][i, :, :, :]) (template_chi_angles, template_chi_mask) = all_atom_multimer.compute_chi_angles(atom_pos, batch['template_all_atom_mask'][i, :, :], batch['template_aatype'][i, :]) all_chi_angles.append(template_chi_angles) all_chi_masks.append(template_chi_mask) chi_angles = jnp.stack(all_chi_angles, axis=0) chi_mask = jnp.stack(all_chi_masks, axis=0) template_features = jnp.concatenate([aatype_one_hot, jnp.sin(chi_angles) * chi_mask, jnp.cos(chi_angles) * chi_mask, chi_mask], axis=-1) template_mask = chi_mask[:, :, 0] if gc.bfloat16: template_features = template_features.astype(jnp.bfloat16) template_mask = template_mask.astype(jnp.bfloat16) template_activations = common_modules.Linear(c.msa_channel, initializer='relu', name='template_single_embedding')(template_features) template_activations = jax.nn.relu(template_activations) template_activations = common_modules.Linear(c.msa_channel, initializer='relu', name='template_projection')(template_activations) (template_features, template_masks) = (template_activations, template_mask) evoformer_input['msa'] = jnp.concatenate([evoformer_input['msa'], template_features], axis=0) evoformer_masks['msa'] = jnp.concatenate([evoformer_masks['msa'], template_masks], axis=0) evoformer_iteration = modules.EvoformerIteration(c.evoformer, gc, is_extra_msa=False, name='evoformer_iteration') def evoformer_fn(x): (act, safe_key) = x (safe_key, safe_subkey) = safe_key.split() evoformer_output = evoformer_iteration(activations=act, masks=evoformer_masks, is_training=is_training, safe_key=safe_subkey) return (evoformer_output, safe_key) if gc.use_remat: evoformer_fn = hk.remat(evoformer_fn) (safe_key, safe_subkey) = safe_key.split() evoformer_stack = layer_stack.layer_stack(c.evoformer_num_block)(evoformer_fn) def run_evoformer(evoformer_input): (evoformer_output, _) = evoformer_stack((evoformer_input, safe_subkey)) return evoformer_output (evoformer_output, _) = evoformer_stack((evoformer_input, safe_subkey)) evoformer_output = evoformer_output msa_activations = evoformer_output['msa'] pair_activations = evoformer_output['pair'] single_activations = common_modules.Linear(c.seq_channel, name='single_activations')(msa_activations[0]) output.update({'single': single_activations, 'pair': pair_activations, 'msa': msa_activations[:num_msa_sequences, :, :], 'msa_first_row': msa_activations[0]}) if not gc.bfloat16_output: for (k, v) in output.items(): if v.dtype == jnp.bfloat16: output[k] = v.astype(jnp.float32) return output
alphafold
positive
def _below_min(spec):
    """Generates values below spec's min.

    Args:
      spec: An instance of `TensorSpec`.

    Yields:
      A sequence of tuples of `(value, index)` where `index` is an indexer
      into `value` where the element has been set below the spec's min.
    """
    if not spec.HasField('min'):
        return
    np_type = tensor_utils.data_type_to_np_type(spec.dtype)
    min_type_value = tensor_spec_utils.np_range_info(np_type).min
    minimum = tensor_spec_utils.bounds(spec).min
    for index in np.ndindex(*spec.shape):
        min_index_value = minimum if np.isscalar(minimum) else minimum[index]
        if min_type_value < min_index_value:
            <DeepExtract>
            if _is_numeric_type(spec.dtype):
                value = tensor_spec_utils.bounds(spec).min
            else:
                value = tensor_utils.data_type_to_np_type(spec.dtype).type()
            shape = np.asarray(spec.shape)
            shape[shape < 0] = 1
            value = np.full(shape=shape, fill_value=value, dtype=dtype)
            </DeepExtract>
            value[index] = min_type_value
            yield (value, index)
def _below_min(spec):
    """Generates values below spec's min.

    Args:
      spec: An instance of `TensorSpec`.

    Yields:
      A sequence of tuples of `(value, index)` where `index` is an indexer
      into `value` where the element has been set below the spec's min.
    """
    if not spec.HasField('min'):
        return
    np_type = tensor_utils.data_type_to_np_type(spec.dtype)
    min_type_value = tensor_spec_utils.np_range_info(np_type).min
    minimum = tensor_spec_utils.bounds(spec).min
    for index in np.ndindex(*spec.shape):
        min_index_value = minimum if np.isscalar(minimum) else minimum[index]
        if min_type_value < min_index_value:
            if _is_numeric_type(spec.dtype):
                value = tensor_spec_utils.bounds(spec).min
            else:
                value = tensor_utils.data_type_to_np_type(spec.dtype).type()
            shape = np.asarray(spec.shape)
            shape[shape < 0] = 1
            value = np.full(shape=shape, fill_value=value, dtype=dtype)
            value[index] = min_type_value
            yield (value, index)
dm_env_rpc
positive
def status_unit(unit): <DeepExtract> conf = self.load_unit_conf(unit) if conf is not None: conf = conf conf = self.default_unit_conf(unit) </DeepExtract> result = '%s - %s' % (unit, self.get_description_from(conf)) loaded = conf.loaded() if loaded: filename = str(conf.filename()) <DeepExtract> unit_file = strE(conf.filename()) if self.is_sysv_file(unit_file): state = self.is_enabled_sysv(unit_file) if state: enabled = 'enabled' enabled = 'disabled' enabled = self.get_enabled_from(conf) </DeepExtract> result += '\n Loaded: {loaded} ({filename}, {enabled})'.format(**locals()) for path in conf.overrides(): result += '\n Drop-In: {path}'.format(**locals()) else: result += '\n Loaded: failed' return (3, result) <DeepExtract> if conf.name().endswith('.service'): active = self.get_active_service_from(conf) elif conf.name().endswith('.socket'): service_unit = self.get_socket_service_from(conf) service_conf = self.load_unit_conf(service_unit) active = self.get_active_service_from(service_conf) elif conf.name().endswith('.target'): active = self.get_active_target_from(conf) else: logg.debug('is-active not implemented for unit type: %s', conf.name()) active = 'unknown' </DeepExtract> <DeepExtract> if not conf: substate = None pid_file = self.pid_file_from(conf) if pid_file: if not os.path.exists(pid_file): substate = 'dead' status_file = self.get_status_file_from(conf) if self.getsize(status_file): state = self.get_status_from(conf, 'ActiveState', '') if state: if state in ['active']: substate = self.get_status_from(conf, 'SubState', 'running') else: substate = self.get_status_from(conf, 'SubState', 'dead') pid = self.read_mainpid_from(conf) if DEBUG_STATUS: logg.debug("pid_file '%s' => PID %s", pid_file or status_file, strE(pid)) if pid: if not pid_exists(pid) or pid_zombie(pid): substate = 'failed' substate = 'running' else: substate = 'dead' </DeepExtract> result += '\n Active: {} ({})'.format(active, substate) if active == 'active': return (0, result) else: return (3, result)
def status_unit(unit): conf = self.load_unit_conf(unit) if conf is not None: conf = conf conf = self.default_unit_conf(unit) result = '%s - %s' % (unit, self.get_description_from(conf)) loaded = conf.loaded() if loaded: filename = str(conf.filename()) unit_file = strE(conf.filename()) if self.is_sysv_file(unit_file): state = self.is_enabled_sysv(unit_file) if state: enabled = 'enabled' enabled = 'disabled' enabled = self.get_enabled_from(conf) result += '\n Loaded: {loaded} ({filename}, {enabled})'.format(**locals()) for path in conf.overrides(): result += '\n Drop-In: {path}'.format(**locals()) else: result += '\n Loaded: failed' return (3, result) if conf.name().endswith('.service'): active = self.get_active_service_from(conf) elif conf.name().endswith('.socket'): service_unit = self.get_socket_service_from(conf) service_conf = self.load_unit_conf(service_unit) active = self.get_active_service_from(service_conf) elif conf.name().endswith('.target'): active = self.get_active_target_from(conf) else: logg.debug('is-active not implemented for unit type: %s', conf.name()) active = 'unknown' if not conf: substate = None pid_file = self.pid_file_from(conf) if pid_file: if not os.path.exists(pid_file): substate = 'dead' status_file = self.get_status_file_from(conf) if self.getsize(status_file): state = self.get_status_from(conf, 'ActiveState', '') if state: if state in ['active']: substate = self.get_status_from(conf, 'SubState', 'running') else: substate = self.get_status_from(conf, 'SubState', 'dead') pid = self.read_mainpid_from(conf) if DEBUG_STATUS: logg.debug("pid_file '%s' => PID %s", pid_file or status_file, strE(pid)) if pid: if not pid_exists(pid) or pid_zombie(pid): substate = 'failed' substate = 'running' else: substate = 'dead' result += '\n Active: {} ({})'.format(active, substate) if active == 'active': return (0, result) else: return (3, result)
deployment
positive
def sample_all_generator(self, batch_size, include_env_infos=False): if not self.can_sample(): return (start_indices, weights, steps, observations_im, observations_vec, goals, actions, rewards, dones, env_infos) = ([], [], [], [], [], [], [], [], [], []) for start_index in range(len(self) - 1): <DeepExtract> start_index = start_index % self._size (start_index + self._N + 1) % self._curr_size = (start_index + self._N + 1) % self._curr_size % self._size if start_index <= (start_index + self._N + 1) % self._curr_size: indices = np.arange(start_index, (start_index + self._N + 1) % self._curr_size) elif start_index > (start_index + self._N + 1) % self._curr_size: indices = np.arange(start_index - self._size, (start_index + self._N + 1) % self._curr_size) </DeepExtract> steps_i = self._steps[indices] <DeepExtract> indices = self._get_indices(start_index - self._obs_history_len + 1, (start_index + 1) % self._size) observations_im = self._observations_im[indices] observations_vec = self._observations_vec[indices] dones = self._dones[indices] encountered_done = False for i in range(len(dones) - 2, -1, -1): encountered_done = encountered_done or dones[i] if encountered_done: observations_im[i, ...] = 0.0 observations_vec[i, ...] = 0.0 (obs_im_i, obs_vec_i) = (observations_im, observations_vec) </DeepExtract> observations_im_i = np.vstack([obs_im_i, self._observations_im[indices[1:]]]) observations_vec_i = np.vstack([obs_vec_i, self._observations_vec[indices[1:]]]) goals_i = self._goals[indices] actions_i = self._actions[indices] rewards_i = self._rewards[indices] dones_i = self._dones[indices] env_infos_i = self._env_infos[indices] if include_env_infos else [None] * len(dones_i) if dones_i[0]: continue start_indices.append(start_index) weights.append(1.0) steps.append(np.expand_dims(steps_i, 0)) observations_im.append(np.expand_dims(observations_im_i, 0)) observations_vec.append(np.expand_dims(observations_vec_i, 0)) goals.append(np.expand_dims(goals_i, 0)) actions.append(np.expand_dims(actions_i, 0)) rewards.append(np.expand_dims(rewards_i, 0)) dones.append(np.expand_dims(dones_i, 0)) env_infos.append(np.expand_dims(env_infos_i, 0)) if len(steps) >= batch_size: yield (np.asarray(start_indices), np.asarray(weights), np.vstack(steps), (np.vstack(observations_im), np.vstack(observations_vec)), np.vstack(goals), np.vstack(actions), np.vstack(rewards), np.vstack(dones), np.vstack(env_infos)) (start_indices, weights, steps, observations_im, observations_vec, goals, actions, rewards, dones, env_infos) = ([], [], [], [], [], [], [], [], [], []) if len(start_indices) > 0: yield (np.asarray(start_indices), np.asarray(weights), np.vstack(steps), (np.vstack(observations_im), np.vstack(observations_vec)), np.vstack(goals), np.vstack(actions), np.vstack(rewards), np.vstack(dones), np.vstack(env_infos))
def sample_all_generator(self, batch_size, include_env_infos=False): if not self.can_sample(): return (start_indices, weights, steps, observations_im, observations_vec, goals, actions, rewards, dones, env_infos) = ([], [], [], [], [], [], [], [], [], []) for start_index in range(len(self) - 1): start_index = start_index % self._size (start_index + self._N + 1) % self._curr_size = (start_index + self._N + 1) % self._curr_size % self._size if start_index <= (start_index + self._N + 1) % self._curr_size: indices = np.arange(start_index, (start_index + self._N + 1) % self._curr_size) elif start_index > (start_index + self._N + 1) % self._curr_size: indices = np.arange(start_index - self._size, (start_index + self._N + 1) % self._curr_size) steps_i = self._steps[indices] indices = self._get_indices(start_index - self._obs_history_len + 1, (start_index + 1) % self._size) observations_im = self._observations_im[indices] observations_vec = self._observations_vec[indices] dones = self._dones[indices] encountered_done = False for i in range(len(dones) - 2, -1, -1): encountered_done = encountered_done or dones[i] if encountered_done: observations_im[i, ...] = 0.0 observations_vec[i, ...] = 0.0 (obs_im_i, obs_vec_i) = (observations_im, observations_vec) observations_im_i = np.vstack([obs_im_i, self._observations_im[indices[1:]]]) observations_vec_i = np.vstack([obs_vec_i, self._observations_vec[indices[1:]]]) goals_i = self._goals[indices] actions_i = self._actions[indices] rewards_i = self._rewards[indices] dones_i = self._dones[indices] env_infos_i = self._env_infos[indices] if include_env_infos else [None] * len(dones_i) if dones_i[0]: continue start_indices.append(start_index) weights.append(1.0) steps.append(np.expand_dims(steps_i, 0)) observations_im.append(np.expand_dims(observations_im_i, 0)) observations_vec.append(np.expand_dims(observations_vec_i, 0)) goals.append(np.expand_dims(goals_i, 0)) actions.append(np.expand_dims(actions_i, 0)) rewards.append(np.expand_dims(rewards_i, 0)) dones.append(np.expand_dims(dones_i, 0)) env_infos.append(np.expand_dims(env_infos_i, 0)) if len(steps) >= batch_size: yield (np.asarray(start_indices), np.asarray(weights), np.vstack(steps), (np.vstack(observations_im), np.vstack(observations_vec)), np.vstack(goals), np.vstack(actions), np.vstack(rewards), np.vstack(dones), np.vstack(env_infos)) (start_indices, weights, steps, observations_im, observations_vec, goals, actions, rewards, dones, env_infos) = ([], [], [], [], [], [], [], [], [], []) if len(start_indices) > 0: yield (np.asarray(start_indices), np.asarray(weights), np.vstack(steps), (np.vstack(observations_im), np.vstack(observations_vec)), np.vstack(goals), np.vstack(actions), np.vstack(rewards), np.vstack(dones), np.vstack(env_infos))
CAPs
positive
def __init__(self, redis_connector, host): <DeepExtract> bp = Blueprint('api', __name__) def publish_message(job, request_method, body=None, target='all'): message_uuid = str(uuid.uuid4()) formatted_message = json.dumps({'job': job, 'target': target, 'body': body, 'request_method': request_method, 'message_uuid': message_uuid}) self.redis_connector.message_connection.publish(self.redis_connector.servers_channel, formatted_message) self.logger.info('Published {}'.format(formatted_message)) self.bp = message_uuid def gather_response(target_job, message_uuid, response_number_target=0): if not response_number_target: response_number_target = int(self.redis_connector.message_connection.pubsub_numsub(self.redis_connector.servers_channel)[0][1]) if target_job == 'bundle': countdown = 120 elif target_job == 'status': countdown = 15 else: countdown = 5 for i in range(0, int(countdown / self.interval)): response_num = len(list(self.server_responses.get(message_uuid, {}).keys())) if response_num == response_number_target: break else: time.sleep(self.interval) message = self.redis_connector.pubsub.get_message() if message and type(message.get('data')) == bytes: server_response = json.loads(message.get('data')) self.logger.info(server_response) response_message_uuid = server_response.get('message_uuid') if response_message_uuid: if response_message_uuid not in self.server_responses: self.server_responses[response_message_uuid] = {} self.server_responses[response_message_uuid][server_response['host']] = server_response['response'] self.bp = self.server_responses.get(message_uuid, {}) @bp.route('/index', methods=['GET']) def index(): home_page = '*** Eventgen Controller ***\nHost: {0}\nConnected Servers: {1}\nYou are running Eventgen Controller.\n' host = self.host self.bp = home_page.format(host, self.redis_connector.get_registered_servers()) @bp.route('/status', methods=['GET'], defaults={'target': 'all'}) @bp.route('/status/<string:target>', methods=['GET']) def http_status(target): try: message_uuid = publish_message('status', request.method, target=target) self.bp = Response(json.dumps(gather_response('status', message_uuid=message_uuid, response_number_target=0 if target == 'all' else 1)), mimetype='application/json', status=200) except Exception as e: self.logger.error(e) self.bp = Response(INTERNAL_ERROR_RESPONSE, mimetype='application/json', status=500) @bp.route('/conf', methods=['GET', 'POST', 'PUT'], defaults={'target': 'all'}) @bp.route('/conf/<string:target>', methods=['GET', 'POST', 'PUT']) def http_conf(target): try: body = None if request.method == 'GET' else request.get_json(force=True) message_uuid = publish_message('conf', request.method, body=body, target=target) self.bp = Response(json.dumps(gather_response('conf', message_uuid=message_uuid, response_number_target=0 if target == 'all' else 1)), mimetype='application/json', status=200) except Exception as e: self.logger.error(e) self.bp = Response(INTERNAL_ERROR_RESPONSE, mimetype='application/json', status=500) @bp.route('/bundle', methods=['POST'], defaults={'target': 'all'}) @bp.route('/bundle/<string:target>', methods=['POST']) def http_bundle(target): try: message_uuid = publish_message('bundle', request.method, body=request.get_json(force=True), target=target) self.bp = Response(json.dumps(gather_response('bundle', message_uuid=message_uuid, response_number_target=0 if target == 'all' else 1)), mimetype='application/json', status=200) except Exception as e: self.logger.error(e) self.bp = Response(INTERNAL_ERROR_RESPONSE, 
mimetype='application/json', status=500) @bp.route('/setup', methods=['POST'], defaults={'target': 'all'}) @bp.route('/setup/<string:target>', methods=['POST']) def http_setup(target): try: message_uuid = publish_message('setup', request.method, body=request.get_json(force=True), target=target) self.bp = Response(json.dumps(gather_response('setup', message_uuid=message_uuid, response_number_target=0 if target == 'all' else 1)), mimetype='application/json', status=200) except Exception as e: self.logger.error(e) self.bp = Response(INTERNAL_ERROR_RESPONSE, mimetype='application/json', status=500) @bp.route('/volume', methods=['GET', 'POST'], defaults={'target': 'all'}) @bp.route('/volume/<string:target>', methods=['GET', 'POST']) def http_volume(target): try: body = None if request.method == 'GET' else request.get_json(force=True) message_uuid = publish_message('volume', request.method, body=body, target=target) self.bp = Response(json.dumps(gather_response('volume', message_uuid=message_uuid, response_number_target=0 if target == 'all' else 1)), mimetype='application/json', status=200) except Exception as e: self.logger.error(e) self.bp = Response(INTERNAL_ERROR_RESPONSE, mimetype='application/json', status=500) @bp.route('/start', methods=['POST'], defaults={'target': 'all'}) @bp.route('/start/<string:target>', methods=['POST']) def http_start(target): try: message_uuid = publish_message('start', request.method, target=target) self.bp = Response(json.dumps(gather_response('start', message_uuid=message_uuid, response_number_target=0 if target == 'all' else 1)), mimetype='application/json', status=200) except Exception as e: self.logger.error(e) self.bp = Response(INTERNAL_ERROR_RESPONSE, mimetype='application/json', status=500) @bp.route('/stop', methods=['POST'], defaults={'target': 'all'}) @bp.route('/stop/<string:target>', methods=['POST']) def http_stop(target): try: message_uuid = publish_message('stop', request.method, target=target) self.bp = Response(json.dumps(gather_response('stop', message_uuid=message_uuid, response_number_target=0 if target == 'all' else 1)), mimetype='application/json', status=200) except Exception as e: self.logger.error(e) self.bp = Response(INTERNAL_ERROR_RESPONSE, mimetype='application/json', status=500) @bp.route('/restart', methods=['POST'], defaults={'target': 'all'}) @bp.route('/restart/<string:target>', methods=['POST']) def http_restart(target): try: message_uuid = publish_message('restart', request.method, target=target) self.bp = Response(json.dumps(gather_response('restart', message_uuid=message_uuid, response_number_target=0 if target == 'all' else 1)), mimetype='application/json', status=200) except Exception as e: self.logger.error(e) self.bp = Response(INTERNAL_ERROR_RESPONSE, mimetype='application/json', status=500) @bp.route('/reset', methods=['POST'], defaults={'target': 'all'}) @bp.route('/reset/<string:target>', methods=['POST']) def http_reset(target): try: message_uuid = publish_message('reset', request.method, target=target) self.bp = Response(json.dumps(gather_response('reset', message_uuid=message_uuid, response_number_target=0 if target == 'all' else 1)), mimetype='application/json', status=200) except Exception as e: self.logger.error(e) self.bp = Response(INTERNAL_ERROR_RESPONSE, mimetype='application/json', status=500) @bp.route('/healthcheck', methods=['GET'], defaults={'target': 'all'}) @bp.route('/healthcheck/<string:target>', methods=['GET']) def http_healthcheck(target): try: self.redis_connector.pubsub.check_health() except 
Exception as e: self.logger.info('Connection to Redis failed: {}, re-registering'.format(str(e))) try: self.redis_connector.register_myself(hostname=self.host, role='controller') except Exception as connection_error: self.logger.error(connection_error) self.bp = Response(INTERNAL_ERROR_RESPONSE, mimetype='application/json', status=500) try: message_uuid = publish_message('healthcheck', request.method, target=target) self.bp = Response(json.dumps(gather_response('healthcheck', message_uuid=message_uuid, response_number_target=0 if target == 'all' else 1)), mimetype='application/json', status=200) except Exception as e: self.logger.error(e) self.bp = Response(INTERNAL_ERROR_RESPONSE, mimetype='application/json', status=500) self.bp = bp </DeepExtract> self.redis_connector = redis_connector self.host = host self.logger = logging.getLogger('eventgen_controller') self.logger.info('Initialized the EventgenControllerAPI Blueprint') self.interval = 0.001 self.server_responses = {}
def __init__(self, redis_connector, host): bp = Blueprint('api', __name__) def publish_message(job, request_method, body=None, target='all'): message_uuid = str(uuid.uuid4()) formatted_message = json.dumps({'job': job, 'target': target, 'body': body, 'request_method': request_method, 'message_uuid': message_uuid}) self.redis_connector.message_connection.publish(self.redis_connector.servers_channel, formatted_message) self.logger.info('Published {}'.format(formatted_message)) self.bp = message_uuid def gather_response(target_job, message_uuid, response_number_target=0): if not response_number_target: response_number_target = int(self.redis_connector.message_connection.pubsub_numsub(self.redis_connector.servers_channel)[0][1]) if target_job == 'bundle': countdown = 120 elif target_job == 'status': countdown = 15 else: countdown = 5 for i in range(0, int(countdown / self.interval)): response_num = len(list(self.server_responses.get(message_uuid, {}).keys())) if response_num == response_number_target: break else: time.sleep(self.interval) message = self.redis_connector.pubsub.get_message() if message and type(message.get('data')) == bytes: server_response = json.loads(message.get('data')) self.logger.info(server_response) response_message_uuid = server_response.get('message_uuid') if response_message_uuid: if response_message_uuid not in self.server_responses: self.server_responses[response_message_uuid] = {} self.server_responses[response_message_uuid][server_response['host']] = server_response['response'] self.bp = self.server_responses.get(message_uuid, {}) @bp.route('/index', methods=['GET']) def index(): home_page = '*** Eventgen Controller ***\nHost: {0}\nConnected Servers: {1}\nYou are running Eventgen Controller.\n' host = self.host self.bp = home_page.format(host, self.redis_connector.get_registered_servers()) @bp.route('/status', methods=['GET'], defaults={'target': 'all'}) @bp.route('/status/<string:target>', methods=['GET']) def http_status(target): try: message_uuid = publish_message('status', request.method, target=target) self.bp = Response(json.dumps(gather_response('status', message_uuid=message_uuid, response_number_target=0 if target == 'all' else 1)), mimetype='application/json', status=200) except Exception as e: self.logger.error(e) self.bp = Response(INTERNAL_ERROR_RESPONSE, mimetype='application/json', status=500) @bp.route('/conf', methods=['GET', 'POST', 'PUT'], defaults={'target': 'all'}) @bp.route('/conf/<string:target>', methods=['GET', 'POST', 'PUT']) def http_conf(target): try: body = None if request.method == 'GET' else request.get_json(force=True) message_uuid = publish_message('conf', request.method, body=body, target=target) self.bp = Response(json.dumps(gather_response('conf', message_uuid=message_uuid, response_number_target=0 if target == 'all' else 1)), mimetype='application/json', status=200) except Exception as e: self.logger.error(e) self.bp = Response(INTERNAL_ERROR_RESPONSE, mimetype='application/json', status=500) @bp.route('/bundle', methods=['POST'], defaults={'target': 'all'}) @bp.route('/bundle/<string:target>', methods=['POST']) def http_bundle(target): try: message_uuid = publish_message('bundle', request.method, body=request.get_json(force=True), target=target) self.bp = Response(json.dumps(gather_response('bundle', message_uuid=message_uuid, response_number_target=0 if target == 'all' else 1)), mimetype='application/json', status=200) except Exception as e: self.logger.error(e) self.bp = Response(INTERNAL_ERROR_RESPONSE, 
mimetype='application/json', status=500) @bp.route('/setup', methods=['POST'], defaults={'target': 'all'}) @bp.route('/setup/<string:target>', methods=['POST']) def http_setup(target): try: message_uuid = publish_message('setup', request.method, body=request.get_json(force=True), target=target) self.bp = Response(json.dumps(gather_response('setup', message_uuid=message_uuid, response_number_target=0 if target == 'all' else 1)), mimetype='application/json', status=200) except Exception as e: self.logger.error(e) self.bp = Response(INTERNAL_ERROR_RESPONSE, mimetype='application/json', status=500) @bp.route('/volume', methods=['GET', 'POST'], defaults={'target': 'all'}) @bp.route('/volume/<string:target>', methods=['GET', 'POST']) def http_volume(target): try: body = None if request.method == 'GET' else request.get_json(force=True) message_uuid = publish_message('volume', request.method, body=body, target=target) self.bp = Response(json.dumps(gather_response('volume', message_uuid=message_uuid, response_number_target=0 if target == 'all' else 1)), mimetype='application/json', status=200) except Exception as e: self.logger.error(e) self.bp = Response(INTERNAL_ERROR_RESPONSE, mimetype='application/json', status=500) @bp.route('/start', methods=['POST'], defaults={'target': 'all'}) @bp.route('/start/<string:target>', methods=['POST']) def http_start(target): try: message_uuid = publish_message('start', request.method, target=target) self.bp = Response(json.dumps(gather_response('start', message_uuid=message_uuid, response_number_target=0 if target == 'all' else 1)), mimetype='application/json', status=200) except Exception as e: self.logger.error(e) self.bp = Response(INTERNAL_ERROR_RESPONSE, mimetype='application/json', status=500) @bp.route('/stop', methods=['POST'], defaults={'target': 'all'}) @bp.route('/stop/<string:target>', methods=['POST']) def http_stop(target): try: message_uuid = publish_message('stop', request.method, target=target) self.bp = Response(json.dumps(gather_response('stop', message_uuid=message_uuid, response_number_target=0 if target == 'all' else 1)), mimetype='application/json', status=200) except Exception as e: self.logger.error(e) self.bp = Response(INTERNAL_ERROR_RESPONSE, mimetype='application/json', status=500) @bp.route('/restart', methods=['POST'], defaults={'target': 'all'}) @bp.route('/restart/<string:target>', methods=['POST']) def http_restart(target): try: message_uuid = publish_message('restart', request.method, target=target) self.bp = Response(json.dumps(gather_response('restart', message_uuid=message_uuid, response_number_target=0 if target == 'all' else 1)), mimetype='application/json', status=200) except Exception as e: self.logger.error(e) self.bp = Response(INTERNAL_ERROR_RESPONSE, mimetype='application/json', status=500) @bp.route('/reset', methods=['POST'], defaults={'target': 'all'}) @bp.route('/reset/<string:target>', methods=['POST']) def http_reset(target): try: message_uuid = publish_message('reset', request.method, target=target) self.bp = Response(json.dumps(gather_response('reset', message_uuid=message_uuid, response_number_target=0 if target == 'all' else 1)), mimetype='application/json', status=200) except Exception as e: self.logger.error(e) self.bp = Response(INTERNAL_ERROR_RESPONSE, mimetype='application/json', status=500) @bp.route('/healthcheck', methods=['GET'], defaults={'target': 'all'}) @bp.route('/healthcheck/<string:target>', methods=['GET']) def http_healthcheck(target): try: self.redis_connector.pubsub.check_health() except 
Exception as e: self.logger.info('Connection to Redis failed: {}, re-registering'.format(str(e))) try: self.redis_connector.register_myself(hostname=self.host, role='controller') except Exception as connection_error: self.logger.error(connection_error) self.bp = Response(INTERNAL_ERROR_RESPONSE, mimetype='application/json', status=500) try: message_uuid = publish_message('healthcheck', request.method, target=target) self.bp = Response(json.dumps(gather_response('healthcheck', message_uuid=message_uuid, response_number_target=0 if target == 'all' else 1)), mimetype='application/json', status=200) except Exception as e: self.logger.error(e) self.bp = Response(INTERNAL_ERROR_RESPONSE, mimetype='application/json', status=500) self.bp = bp self.redis_connector = redis_connector self.host = host self.logger = logging.getLogger('eventgen_controller') self.logger.info('Initialized the EventgenControllerAPI Blueprint') self.interval = 0.001 self.server_responses = {}
eventgen
positive
def next(self): <DeepExtract> lexpos = self.lexpos lexlen = self.lexlen lexignore = self.lexignore lexdata = self.lexdata while lexpos < lexlen: if lexdata[lexpos] in lexignore: lexpos += 1 continue for (lexre, lexindexfunc) in self.lexre: m = lexre.match(lexdata, lexpos) if not m: continue tok = LexToken() tok.value = m.group() tok.lineno = self.lineno tok.lexpos = lexpos i = m.lastindex (func, tok.type) = lexindexfunc[i] if not func: if tok.type: self.lexpos = m.end() t = tok else: lexpos = m.end() break lexpos = m.end() tok.lexer = self self.lexmatch = m self.lexpos = lexpos newtok = func(tok) if not newtok: lexpos = self.lexpos lexignore = self.lexignore break if not self.lexoptimize: if newtok.type not in self.lextokens_all: raise LexError("%s:%d: Rule '%s' returned an unknown token type '%s'" % (func.__code__.co_filename, func.__code__.co_firstlineno, func.__name__, newtok.type), lexdata[lexpos:]) t = newtok else: if lexdata[lexpos] in self.lexliterals: tok = LexToken() tok.value = lexdata[lexpos] tok.lineno = self.lineno tok.type = tok.value tok.lexpos = lexpos self.lexpos = lexpos + 1 t = tok if self.lexerrorf: tok = LexToken() tok.value = self.lexdata[lexpos:] tok.lineno = self.lineno tok.type = 'error' tok.lexer = self tok.lexpos = lexpos self.lexpos = lexpos newtok = self.lexerrorf(tok) if lexpos == self.lexpos: raise LexError("Scanning error. Illegal character '%s'" % lexdata[lexpos], lexdata[lexpos:]) lexpos = self.lexpos if not newtok: continue t = newtok self.lexpos = lexpos raise LexError("Illegal character '%s' at index %d" % (lexdata[lexpos], lexpos), lexdata[lexpos:]) if self.lexeoff: tok = LexToken() tok.type = 'eof' tok.value = '' tok.lineno = self.lineno tok.lexpos = lexpos tok.lexer = self self.lexpos = lexpos newtok = self.lexeoff(tok) t = newtok self.lexpos = lexpos + 1 if self.lexdata is None: raise RuntimeError('No input string given with input()') t = None </DeepExtract> if t is None: raise StopIteration return t
def next(self): lexpos = self.lexpos lexlen = self.lexlen lexignore = self.lexignore lexdata = self.lexdata while lexpos < lexlen: if lexdata[lexpos] in lexignore: lexpos += 1 continue for (lexre, lexindexfunc) in self.lexre: m = lexre.match(lexdata, lexpos) if not m: continue tok = LexToken() tok.value = m.group() tok.lineno = self.lineno tok.lexpos = lexpos i = m.lastindex (func, tok.type) = lexindexfunc[i] if not func: if tok.type: self.lexpos = m.end() t = tok else: lexpos = m.end() break lexpos = m.end() tok.lexer = self self.lexmatch = m self.lexpos = lexpos newtok = func(tok) if not newtok: lexpos = self.lexpos lexignore = self.lexignore break if not self.lexoptimize: if newtok.type not in self.lextokens_all: raise LexError("%s:%d: Rule '%s' returned an unknown token type '%s'" % (func.__code__.co_filename, func.__code__.co_firstlineno, func.__name__, newtok.type), lexdata[lexpos:]) t = newtok else: if lexdata[lexpos] in self.lexliterals: tok = LexToken() tok.value = lexdata[lexpos] tok.lineno = self.lineno tok.type = tok.value tok.lexpos = lexpos self.lexpos = lexpos + 1 t = tok if self.lexerrorf: tok = LexToken() tok.value = self.lexdata[lexpos:] tok.lineno = self.lineno tok.type = 'error' tok.lexer = self tok.lexpos = lexpos self.lexpos = lexpos newtok = self.lexerrorf(tok) if lexpos == self.lexpos: raise LexError("Scanning error. Illegal character '%s'" % lexdata[lexpos], lexdata[lexpos:]) lexpos = self.lexpos if not newtok: continue t = newtok self.lexpos = lexpos raise LexError("Illegal character '%s' at index %d" % (lexdata[lexpos], lexpos), lexdata[lexpos:]) if self.lexeoff: tok = LexToken() tok.type = 'eof' tok.value = '' tok.lineno = self.lineno tok.lexpos = lexpos tok.lexer = self self.lexpos = lexpos newtok = self.lexeoff(tok) t = newtok self.lexpos = lexpos + 1 if self.lexdata is None: raise RuntimeError('No input string given with input()') t = None if t is None: raise StopIteration return t
booleannet
positive
def test_bad_sample_good_sample():
    """One bad sample should not prevent good samples from being processed."""
    <DeepExtract>
    _dir = os.path.dirname(os.path.abspath(__file__))
    _good_tflite_model = os.path.join(_dir, 'mobilenet_ssd_v2_coco_quant_postprocess.tflite')
    _good_edgetpu_model = os.path.join(_dir, 'mobilenet_ssd_v2_coco_quant_postprocess_edgetpu.tflite')
    _good_labels = os.path.join(_dir, 'coco_labels.txt')
    config = {'model': {'tflite': _good_tflite_model, 'edgetpu': _good_edgetpu_model}, 'labels': _good_labels, 'top_k': 3, 'confidence_threshold': 0.8}
    config = config
    </DeepExtract>
    result = 'nothing passed to me'

    def sample_callback(image=None, inference_result=None, **kwargs):
        nonlocal result
        result = inference_result
    object_detector = ObjectDetector(**config)
    output = _OutPipeElement(sample_callback=sample_callback)
    object_detector.connect_to_next_element(output)
    object_detector.receive_next_sample(image=None)
    assert result == 'nothing passed to me'
    <DeepExtract>
    assert 'person.jpg'
    _dir = os.path.dirname(os.path.abspath(__file__))
    image_file = os.path.join(_dir, 'person.jpg')
    img = Image.open(image_file)
    img = img
    </DeepExtract>
    object_detector.receive_next_sample(image=img)
    assert result
    assert len(result) == 1
    category = result[0]['label']
    confidence = result[0]['confidence']
    (x0, y0) = (result[0]['box']['xmin'], result[0]['box']['ymin'])
    (x1, y1) = (result[0]['box']['xmax'], result[0]['box']['ymax'])
    assert category == 'person'
    assert confidence > 0.9
    assert x0 > 0 and x0 < x1
    assert y0 > 0 and y0 < y1
def test_bad_sample_good_sample():
    """One bad sample should not prevent good samples from being processed."""
    _dir = os.path.dirname(os.path.abspath(__file__))
    _good_tflite_model = os.path.join(_dir, 'mobilenet_ssd_v2_coco_quant_postprocess.tflite')
    _good_edgetpu_model = os.path.join(_dir, 'mobilenet_ssd_v2_coco_quant_postprocess_edgetpu.tflite')
    _good_labels = os.path.join(_dir, 'coco_labels.txt')
    config = {'model': {'tflite': _good_tflite_model, 'edgetpu': _good_edgetpu_model}, 'labels': _good_labels, 'top_k': 3, 'confidence_threshold': 0.8}
    config = config
    result = 'nothing passed to me'

    def sample_callback(image=None, inference_result=None, **kwargs):
        nonlocal result
        result = inference_result
    object_detector = ObjectDetector(**config)
    output = _OutPipeElement(sample_callback=sample_callback)
    object_detector.connect_to_next_element(output)
    object_detector.receive_next_sample(image=None)
    assert result == 'nothing passed to me'
    assert 'person.jpg'
    _dir = os.path.dirname(os.path.abspath(__file__))
    image_file = os.path.join(_dir, 'person.jpg')
    img = Image.open(image_file)
    img = img
    object_detector.receive_next_sample(image=img)
    assert result
    assert len(result) == 1
    category = result[0]['label']
    confidence = result[0]['confidence']
    (x0, y0) = (result[0]['box']['xmin'], result[0]['box']['ymin'])
    (x1, y1) = (result[0]['box']['xmax'], result[0]['box']['ymax'])
    assert category == 'person'
    assert confidence > 0.9
    assert x0 > 0 and x0 < x1
    assert y0 > 0 and y0 < y1
ambianic-edge
positive
def vecs_robust_normalize(v: Vecs, epsilon: float=1e-08) -> Vecs:
    """Normalizes vectors 'v'.

    Args:
      v: vectors to be normalized.
      epsilon: small regularizer added to squared norm before taking square root.

    Returns:
      normalized vectors
    """
    <DeepExtract>
    norms = jnp.sqrt(jnp.square(v.x) + jnp.square(v.y) + jnp.square(v.z) + epsilon)
    </DeepExtract>
    return Vecs(v.x / norms, v.y / norms, v.z / norms)
def vecs_robust_normalize(v: Vecs, epsilon: float=1e-08) -> Vecs:
    """Normalizes vectors 'v'.

    Args:
      v: vectors to be normalized.
      epsilon: small regularizer added to squared norm before taking square root.

    Returns:
      normalized vectors
    """
    norms = jnp.sqrt(jnp.square(v.x) + jnp.square(v.y) + jnp.square(v.z) + epsilon)
    return Vecs(v.x / norms, v.y / norms, v.z / norms)
alphafold
positive
def test_random_rotate(self, time, max_cells, crop_size, batch_size, seed, track_length, val_size, test_size):
    <DeepExtract>
    X = {}
    y = {}
    X['appearances'] = tf.random.uniform([time, max_cells, crop_size, crop_size, 1], 0, 1)
    X['centroids'] = tf.random.uniform([time, max_cells, 2], 0, 512)
    X['morphologies'] = tf.sparse.from_dense(tf.random.uniform([time, max_cells, 3], 0, 1))
    X['adj_matrices'] = tf.sparse.from_dense(tf.random.uniform([time, max_cells, max_cells], 0, 1))
    y['temporal_adj_matrices'] = tf.sparse.from_dense(tf.random.uniform([time - 1, max_cells, max_cells, 3], 0, 1))
    (X, y) = (X, y)
    </DeepExtract>
    X_apps = X['appearances']
    X_cents = X['centroids']
    (rotated_X, y) = tracking.random_rotate(X, y, 180)
    self.assertEqual(rotated_X['appearances'].shape, X_apps.shape)
    self.assertEqual(rotated_X['centroids'].shape, X_cents.shape)
    X_apps = rotated_X['appearances']
    X_cents = rotated_X['centroids']
    (output_X, y) = tracking.random_rotate(rotated_X, y, 0)
    self.assertAllEqual(output_X['appearances'], X_apps)
    self.assertAllEqual(output_X['centroids'], X_cents)
def test_random_rotate(self, time, max_cells, crop_size, batch_size, seed, track_length, val_size, test_size):
    X = {}
    y = {}
    X['appearances'] = tf.random.uniform([time, max_cells, crop_size, crop_size, 1], 0, 1)
    X['centroids'] = tf.random.uniform([time, max_cells, 2], 0, 512)
    X['morphologies'] = tf.sparse.from_dense(tf.random.uniform([time, max_cells, 3], 0, 1))
    X['adj_matrices'] = tf.sparse.from_dense(tf.random.uniform([time, max_cells, max_cells], 0, 1))
    y['temporal_adj_matrices'] = tf.sparse.from_dense(tf.random.uniform([time - 1, max_cells, max_cells, 3], 0, 1))
    (X, y) = (X, y)
    X_apps = X['appearances']
    X_cents = X['centroids']
    (rotated_X, y) = tracking.random_rotate(X, y, 180)
    self.assertEqual(rotated_X['appearances'].shape, X_apps.shape)
    self.assertEqual(rotated_X['centroids'].shape, X_cents.shape)
    X_apps = rotated_X['appearances']
    X_cents = rotated_X['centroids']
    (output_X, y) = tracking.random_rotate(rotated_X, y, 0)
    self.assertAllEqual(output_X['appearances'], X_apps)
    self.assertAllEqual(output_X['centroids'], X_cents)
deepcell-tf
positive
def _get_value(self):
    if self._value is NotImplemented:
        <DeepExtract>
        self.stream.seek(self.pos)
        self._value = self.subcon._parse(self.stream, self.context)
        </DeepExtract>
    return self._value
def _get_value(self):
    if self._value is NotImplemented:
        self.stream.seek(self.pos)
        self._value = self.subcon._parse(self.stream, self.context)
    return self._value
ARMV8_Simulator
positive
def check_update(repo, current_version, prereleases=False, alfred_version=None):
    """Check whether a newer release is available on GitHub.

    Args:
        repo (unicode): ``username/repo`` for workflow's GitHub repo
        current_version (unicode): the currently installed version of
            the workflow. :ref:`Semantic versioning <semver>` is required.
        prereleases (bool): Whether to include pre-releases.
        alfred_version (unicode): version of currently-running Alfred.
            if empty, defaults to ``$alfred_version`` environment variable.

    Returns:
        bool: ``True`` if an update is available, else ``False``

    If an update is available, its version number and download URL will
    be cached.
    """
    key = '__workflow_latest_version'
    no_update = {'available': False, 'download': None, 'version': None}
    current = Version(current_version)
    <DeepExtract>
    url = build_api_url(repo)

    def _fetch():
        wf().logger.info('retrieving releases for %r ...', repo)
        r = web.get(url)
        r.raise_for_status()
        dls = r.content
    key = 'github-releases-' + repo.replace('/', '-')
    js = wf().cached_data(key, _fetch, max_age=60)
    dls = Download.from_releases(js)
    </DeepExtract>
    if not len(dls):
        wf().logger.warning('no valid downloads for %s', repo)
        wf().cache_data(key, no_update)
        return False
    wf().logger.info('%d download(s) for %s', len(dls), repo)
    <DeepExtract>
    alfred_version = alfred_version or os.getenv('alfred_version')
    version = None
    if alfred_version:
        version = Version(alfred_version)
    dls.sort(reverse=True)
    for dl in dls:
        if dl.prerelease and (not prereleases):
            wf().logger.debug('ignored prerelease: %s', dl.version)
            continue
        if version and dl.alfred_version > version:
            wf().logger.debug('ignored incompatible (%s > %s): %s', dl.alfred_version, version, dl.filename)
            continue
        wf().logger.debug('latest version: %s (%s)', dl.version, dl.filename)
        dl = dl
    dl = None
    </DeepExtract>
    if not dl:
        wf().logger.warning('no compatible downloads for %s', repo)
        wf().cache_data(key, no_update)
        return False
    wf().logger.debug('latest=%r, installed=%r', dl.version, current)
    if dl.version > current:
        wf().cache_data(key, {'version': str(dl.version), 'download': dl.dict, 'available': True})
        return True
    wf().cache_data(key, no_update)
    return False
def check_update(repo, current_version, prereleases=False, alfred_version=None):
    """Check whether a newer release is available on GitHub.

    Args:
        repo (unicode): ``username/repo`` for workflow's GitHub repo
        current_version (unicode): the currently installed version of
            the workflow. :ref:`Semantic versioning <semver>` is required.
        prereleases (bool): Whether to include pre-releases.
        alfred_version (unicode): version of currently-running Alfred.
            if empty, defaults to ``$alfred_version`` environment variable.

    Returns:
        bool: ``True`` if an update is available, else ``False``

    If an update is available, its version number and download URL will
    be cached.
    """
    key = '__workflow_latest_version'
    no_update = {'available': False, 'download': None, 'version': None}
    current = Version(current_version)
    url = build_api_url(repo)

    def _fetch():
        wf().logger.info('retrieving releases for %r ...', repo)
        r = web.get(url)
        r.raise_for_status()
        dls = r.content
    key = 'github-releases-' + repo.replace('/', '-')
    js = wf().cached_data(key, _fetch, max_age=60)
    dls = Download.from_releases(js)
    if not len(dls):
        wf().logger.warning('no valid downloads for %s', repo)
        wf().cache_data(key, no_update)
        return False
    wf().logger.info('%d download(s) for %s', len(dls), repo)
    alfred_version = alfred_version or os.getenv('alfred_version')
    version = None
    if alfred_version:
        version = Version(alfred_version)
    dls.sort(reverse=True)
    for dl in dls:
        if dl.prerelease and (not prereleases):
            wf().logger.debug('ignored prerelease: %s', dl.version)
            continue
        if version and dl.alfred_version > version:
            wf().logger.debug('ignored incompatible (%s > %s): %s', dl.alfred_version, version, dl.filename)
            continue
        wf().logger.debug('latest version: %s (%s)', dl.version, dl.filename)
        dl = dl
    dl = None
    if not dl:
        wf().logger.warning('no compatible downloads for %s', repo)
        wf().cache_data(key, no_update)
        return False
    wf().logger.debug('latest=%r, installed=%r', dl.version, current)
    if dl.version > current:
        wf().cache_data(key, {'version': str(dl.version), 'download': dl.dict, 'available': True})
        return True
    wf().cache_data(key, no_update)
    return False
alfred-pocket
positive
def compute_changed_segments(name: str, src_segments: List[Segment], dst_segments: List[Segment], src_duration: float, dst_duration: float, speed_factor: float, **kwargs) -> Tuple[List[Segment], List[Segment]]: """ This function performs the logic of computing the new matching segments based on the old ones, for the set of transforms that temporally change the video. Returns the lists of new src segments & dst segments, respectively. """ (new_src_segments, new_dst_segments) = ([], []) <DeepExtract> transition = kwargs.get('transition') if transition: td = transition.duration td = 0.0 </DeepExtract> for (src_segment, dst_segment) in zip(src_segments, dst_segments): if name == 'insert_in_background': offset = kwargs['offset_factor'] * kwargs['background_video_duration'] transition_before = int(kwargs['transition_before']) transition_after = int(kwargs['transition_after']) new_src_segments.append(src_segment.delta(transition_before * td / 2, -transition_after * td / 2)) new_dst_segments.append(Segment(dst_segment.start + offset - transition_before * td / 2, dst_segment.end + offset - transition_before * td - transition_after * td / 2)) elif name == 'insert_in_background_multiple': <DeepExtract> n = len(kwargs['src_segment_starts']) assert n == len(kwargs['src_segment_ends']), 'Source segment starts and ends lists must have equal length.' assert n == len(kwargs['bkg_insertion_points']), 'Source segment starts and background insertion points lists must have equal length.' assert n == len(kwargs['src_ids']), 'Source segment starts and source ids lists must have equal length.' if n == 0: return dst_cum_dur = 0.0 offset = td / 2.0 prev_bkg = 0.0 for (src_start, src_end, src_id, bkg_pt) in zip(kwargs['src_segment_starts'], kwargs['src_segment_ends'], kwargs['src_ids'], kwargs['bkg_insertion_points']): crop_start = src_start + offset crop_end = src_end - offset dst_start = dst_cum_dur + (bkg_pt - prev_bkg) - offset src_segment = Segment(start=crop_start, end=crop_end, src_id=src_id) dst_segment = Segment(start=dst_start, end=dst_start + (crop_end - crop_start)) new_src_segments.append(src_segment) new_dst_segments.append(dst_segment) dst_cum_dur = dst_segment.end - offset prev_bkg = bkg_pt </DeepExtract> elif name == 'replace_with_background': clip_start = kwargs['starting_background_duration'] duration = kwargs['source_duration'] <DeepExtract> if clip_start >= dst_segment.end or clip_start + duration <= dst_segment.start: return new_start = (dst_segment.start - clip_start) * speed_factor (src_start, src_end, src_id) = src_segment if new_start < 0: src_start = src_segment.start - new_start new_start = 0 new_end = min(dst_segment.end - clip_start, clip_start + duration - clip_start) if clip_start + duration < dst_segment.end: src_end = src_segment.end - (dst_segment.end - clip_start + duration) * speed_factor new_src_segments.append(Segment(src_start, src_end, src_id)) new_dst_segments.append(Segment(new_start + clip_start, new_end + clip_start)) </DeepExtract> elif name == 'change_video_speed': crt_factor = kwargs['factor'] global_factor = crt_factor * speed_factor new_src_segments.append(src_segment) new_dst_segments.append(Segment(dst_segment.start / global_factor, dst_segment.end / global_factor)) elif name == 'concat': src_index = kwargs['src_video_path_index'] num_videos = len(kwargs['video_paths']) transition_offset_start = td / 2 if src_index > 0 else 0.0 transition_offset_end = td / 2 if src_index < num_videos - 1 else 0.0 new_src_segments.append(src_segment.delta(transition_offset_start, 
-transition_offset_end)) offset = sum((float(helpers.get_video_info(vp)['duration']) - td for vp in kwargs['video_paths'][:kwargs['src_video_path_index']])) new_dst_segments.append(Segment(dst_segment.start + offset + transition_offset_start, dst_segment.end + offset - transition_offset_end)) elif name == 'loop': new_src_segments.append(src_segment) new_dst_segments.append(dst_segment) for l_idx in range(kwargs['num_loops']): new_src_segments.append(src_segment) new_dst_segments.append(Segment(dst_segment.start + (l_idx + 1) * src_duration, dst_segment.end + (l_idx + 1) * src_duration)) elif name == 'time_crop': crop_start = kwargs['offset_factor'] * src_duration crop_end = crop_start + kwargs['duration_factor'] * src_duration <DeepExtract> if crop_start >= dst_segment.end or crop_end <= dst_segment.start: return new_start = (dst_segment.start - crop_start) * speed_factor (src_start, src_end, src_id) = src_segment if new_start < 0: src_start = src_segment.start - new_start new_start = 0 new_end = min(dst_segment.end - crop_start, crop_end - crop_start) if crop_end < dst_segment.end: src_end = src_segment.end - (dst_segment.end - crop_end) * speed_factor new_src_segments.append(Segment(src_start, src_end, src_id)) new_dst_segments.append(Segment(new_start + end_dst_offset, new_end + end_dst_offset)) </DeepExtract> elif name == 'time_decimate': <DeepExtract> start_offset = src_duration * kwargs['start_offset_factor'] on_segment = src_duration * kwargs['on_factor'] off_segment = on_segment * kwargs['off_factor'] n = int((src_duration - start_offset) / (on_segment + off_segment)) dst_offset = 0 for i in range(n): crop_start = start_offset + i * on_segment + i * off_segment + (i > 0) * td / 2.0 crop_end = start_offset + (i + 1) * on_segment + i * off_segment - (i < n - 1) * td / 2 crop_end = min(src_duration, crop_end) if crop_start > src_duration: break compute_time_crop_segments(src_segment, dst_segment, speed_factor, crop_start, crop_end, new_src_segments, new_dst_segments, end_dst_offset=dst_offset) dst_offset = new_dst_segments[-1].end </DeepExtract> elif name == 'trim': crop_start = kwargs['start'] or 0.0 crop_end = kwargs['end'] or src_duration <DeepExtract> if crop_start >= dst_segment.end or crop_end <= dst_segment.start: return new_start = (dst_segment.start - crop_start) * speed_factor (src_start, src_end, src_id) = src_segment if new_start < 0: src_start = src_segment.start - new_start new_start = 0 new_end = min(dst_segment.end - crop_start, crop_end - crop_start) if crop_end < dst_segment.end: src_end = src_segment.end - (dst_segment.end - crop_end) * speed_factor new_src_segments.append(Segment(src_start, src_end, src_id)) new_dst_segments.append(Segment(new_start + end_dst_offset, new_end + end_dst_offset)) </DeepExtract> elif name == 'replace_with_color_frames': offset = kwargs['offset_factor'] * src_duration duration = kwargs['duration_factor'] * src_duration <DeepExtract> if 0.0 >= dst_segment.end or offset <= dst_segment.start: return new_start = (dst_segment.start - 0.0) * speed_factor (src_start, src_end, src_id) = src_segment if new_start < 0: src_start = src_segment.start - new_start new_start = 0 new_end = min(dst_segment.end - 0.0, offset - 0.0) if offset < dst_segment.end: src_end = src_segment.end - (dst_segment.end - offset) * speed_factor new_src_segments.append(Segment(src_start, src_end, src_id)) new_dst_segments.append(Segment(new_start + end_dst_offset, new_end + end_dst_offset)) </DeepExtract> <DeepExtract> if offset + duration >= dst_segment.end or 
dst_duration <= dst_segment.start: return new_start = (dst_segment.start - offset + duration) * speed_factor (src_start, src_end, src_id) = src_segment if new_start < 0: src_start = src_segment.start - new_start new_start = 0 new_end = min(dst_segment.end - offset + duration, dst_duration - offset + duration) if dst_duration < dst_segment.end: src_end = src_segment.end - (dst_segment.end - dst_duration) * speed_factor new_src_segments.append(Segment(src_start, src_end, src_id)) new_dst_segments.append(Segment(new_start + end_dst_offset, new_end + end_dst_offset)) </DeepExtract> return (new_src_segments, new_dst_segments)
def compute_changed_segments(name: str, src_segments: List[Segment], dst_segments: List[Segment], src_duration: float, dst_duration: float, speed_factor: float, **kwargs) -> Tuple[List[Segment], List[Segment]]: """ This function performs the logic of computing the new matching segments based on the old ones, for the set of transforms that temporally change the video. Returns the lists of new src segments & dst segments, respectively. """ (new_src_segments, new_dst_segments) = ([], []) transition = kwargs.get('transition') if transition: td = transition.duration td = 0.0 for (src_segment, dst_segment) in zip(src_segments, dst_segments): if name == 'insert_in_background': offset = kwargs['offset_factor'] * kwargs['background_video_duration'] transition_before = int(kwargs['transition_before']) transition_after = int(kwargs['transition_after']) new_src_segments.append(src_segment.delta(transition_before * td / 2, -transition_after * td / 2)) new_dst_segments.append(Segment(dst_segment.start + offset - transition_before * td / 2, dst_segment.end + offset - transition_before * td - transition_after * td / 2)) elif name == 'insert_in_background_multiple': n = len(kwargs['src_segment_starts']) assert n == len(kwargs['src_segment_ends']), 'Source segment starts and ends lists must have equal length.' assert n == len(kwargs['bkg_insertion_points']), 'Source segment starts and background insertion points lists must have equal length.' assert n == len(kwargs['src_ids']), 'Source segment starts and source ids lists must have equal length.' if n == 0: return dst_cum_dur = 0.0 offset = td / 2.0 prev_bkg = 0.0 for (src_start, src_end, src_id, bkg_pt) in zip(kwargs['src_segment_starts'], kwargs['src_segment_ends'], kwargs['src_ids'], kwargs['bkg_insertion_points']): crop_start = src_start + offset crop_end = src_end - offset dst_start = dst_cum_dur + (bkg_pt - prev_bkg) - offset src_segment = Segment(start=crop_start, end=crop_end, src_id=src_id) dst_segment = Segment(start=dst_start, end=dst_start + (crop_end - crop_start)) new_src_segments.append(src_segment) new_dst_segments.append(dst_segment) dst_cum_dur = dst_segment.end - offset prev_bkg = bkg_pt elif name == 'replace_with_background': clip_start = kwargs['starting_background_duration'] duration = kwargs['source_duration'] if clip_start >= dst_segment.end or clip_start + duration <= dst_segment.start: return new_start = (dst_segment.start - clip_start) * speed_factor (src_start, src_end, src_id) = src_segment if new_start < 0: src_start = src_segment.start - new_start new_start = 0 new_end = min(dst_segment.end - clip_start, clip_start + duration - clip_start) if clip_start + duration < dst_segment.end: src_end = src_segment.end - (dst_segment.end - clip_start + duration) * speed_factor new_src_segments.append(Segment(src_start, src_end, src_id)) new_dst_segments.append(Segment(new_start + clip_start, new_end + clip_start)) elif name == 'change_video_speed': crt_factor = kwargs['factor'] global_factor = crt_factor * speed_factor new_src_segments.append(src_segment) new_dst_segments.append(Segment(dst_segment.start / global_factor, dst_segment.end / global_factor)) elif name == 'concat': src_index = kwargs['src_video_path_index'] num_videos = len(kwargs['video_paths']) transition_offset_start = td / 2 if src_index > 0 else 0.0 transition_offset_end = td / 2 if src_index < num_videos - 1 else 0.0 new_src_segments.append(src_segment.delta(transition_offset_start, -transition_offset_end)) offset = sum((float(helpers.get_video_info(vp)['duration']) - td 
for vp in kwargs['video_paths'][:kwargs['src_video_path_index']])) new_dst_segments.append(Segment(dst_segment.start + offset + transition_offset_start, dst_segment.end + offset - transition_offset_end)) elif name == 'loop': new_src_segments.append(src_segment) new_dst_segments.append(dst_segment) for l_idx in range(kwargs['num_loops']): new_src_segments.append(src_segment) new_dst_segments.append(Segment(dst_segment.start + (l_idx + 1) * src_duration, dst_segment.end + (l_idx + 1) * src_duration)) elif name == 'time_crop': crop_start = kwargs['offset_factor'] * src_duration crop_end = crop_start + kwargs['duration_factor'] * src_duration if crop_start >= dst_segment.end or crop_end <= dst_segment.start: return new_start = (dst_segment.start - crop_start) * speed_factor (src_start, src_end, src_id) = src_segment if new_start < 0: src_start = src_segment.start - new_start new_start = 0 new_end = min(dst_segment.end - crop_start, crop_end - crop_start) if crop_end < dst_segment.end: src_end = src_segment.end - (dst_segment.end - crop_end) * speed_factor new_src_segments.append(Segment(src_start, src_end, src_id)) new_dst_segments.append(Segment(new_start + end_dst_offset, new_end + end_dst_offset)) elif name == 'time_decimate': start_offset = src_duration * kwargs['start_offset_factor'] on_segment = src_duration * kwargs['on_factor'] off_segment = on_segment * kwargs['off_factor'] n = int((src_duration - start_offset) / (on_segment + off_segment)) dst_offset = 0 for i in range(n): crop_start = start_offset + i * on_segment + i * off_segment + (i > 0) * td / 2.0 crop_end = start_offset + (i + 1) * on_segment + i * off_segment - (i < n - 1) * td / 2 crop_end = min(src_duration, crop_end) if crop_start > src_duration: break compute_time_crop_segments(src_segment, dst_segment, speed_factor, crop_start, crop_end, new_src_segments, new_dst_segments, end_dst_offset=dst_offset) dst_offset = new_dst_segments[-1].end elif name == 'trim': crop_start = kwargs['start'] or 0.0 crop_end = kwargs['end'] or src_duration if crop_start >= dst_segment.end or crop_end <= dst_segment.start: return new_start = (dst_segment.start - crop_start) * speed_factor (src_start, src_end, src_id) = src_segment if new_start < 0: src_start = src_segment.start - new_start new_start = 0 new_end = min(dst_segment.end - crop_start, crop_end - crop_start) if crop_end < dst_segment.end: src_end = src_segment.end - (dst_segment.end - crop_end) * speed_factor new_src_segments.append(Segment(src_start, src_end, src_id)) new_dst_segments.append(Segment(new_start + end_dst_offset, new_end + end_dst_offset)) elif name == 'replace_with_color_frames': offset = kwargs['offset_factor'] * src_duration duration = kwargs['duration_factor'] * src_duration if 0.0 >= dst_segment.end or offset <= dst_segment.start: return new_start = (dst_segment.start - 0.0) * speed_factor (src_start, src_end, src_id) = src_segment if new_start < 0: src_start = src_segment.start - new_start new_start = 0 new_end = min(dst_segment.end - 0.0, offset - 0.0) if offset < dst_segment.end: src_end = src_segment.end - (dst_segment.end - offset) * speed_factor new_src_segments.append(Segment(src_start, src_end, src_id)) new_dst_segments.append(Segment(new_start + end_dst_offset, new_end + end_dst_offset)) if offset + duration >= dst_segment.end or dst_duration <= dst_segment.start: return new_start = (dst_segment.start - offset + duration) * speed_factor (src_start, src_end, src_id) = src_segment if new_start < 0: src_start = src_segment.start - new_start new_start = 0 
new_end = min(dst_segment.end - offset + duration, dst_duration - offset + duration) if dst_duration < dst_segment.end: src_end = src_segment.end - (dst_segment.end - dst_duration) * speed_factor new_src_segments.append(Segment(src_start, src_end, src_id)) new_dst_segments.append(Segment(new_start + end_dst_offset, new_end + end_dst_offset)) return (new_src_segments, new_dst_segments)
AugLy
positive
def handle_block(self, block): <DeepExtract> assert block.proof < POW_TARGET, 'Insufficient Proof-of-Work' assert block.prev_id == self.blocks[-1].id </DeepExtract> <DeepExtract> assert len(block.txns[0].tx_ins) == len(block.txns[0].tx_outs) == 1 assert block.txns[0].tx_outs[0].amount == BLOCK_SUBSIDY </DeepExtract> for tx in block.txns[1:]: <DeepExtract> in_sum = 0 out_sum = 0 for (index, tx_in) in enumerate(tx.tx_ins): assert tx_in.outpoint in self.utxo_set tx_out = self.utxo_set[tx_in.outpoint] public_key = tx_out.public_key tx.verify_input(index, public_key) amount = tx_out.amount in_sum += amount for tx_out in tx.tx_outs: out_sum += tx_out.amount assert in_sum == out_sum </DeepExtract> for tx in block.txns: <DeepExtract> if not tx.is_coinbase: for tx_in in tx.tx_ins: del self.utxo_set[tx_in.outpoint] for tx_out in tx.tx_outs: self.utxo_set[tx_out.outpoint] = tx_out if tx in self.mempool: self.mempool.remove(tx) </DeepExtract> self.blocks.append(block) logger.info(f'Block accepted: height={len(self.blocks) - 1}') for peer in self.peers: <DeepExtract> message = prepare_message('blocks', [block]) with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: s.connect(peer) s.sendall(message) if response: return read_message(s) </DeepExtract>
def handle_block(self, block): assert block.proof < POW_TARGET, 'Insufficient Proof-of-Work' assert block.prev_id == self.blocks[-1].id assert len(block.txns[0].tx_ins) == len(block.txns[0].tx_outs) == 1 assert block.txns[0].tx_outs[0].amount == BLOCK_SUBSIDY for tx in block.txns[1:]: in_sum = 0 out_sum = 0 for (index, tx_in) in enumerate(tx.tx_ins): assert tx_in.outpoint in self.utxo_set tx_out = self.utxo_set[tx_in.outpoint] public_key = tx_out.public_key tx.verify_input(index, public_key) amount = tx_out.amount in_sum += amount for tx_out in tx.tx_outs: out_sum += tx_out.amount assert in_sum == out_sum for tx in block.txns: if not tx.is_coinbase: for tx_in in tx.tx_ins: del self.utxo_set[tx_in.outpoint] for tx_out in tx.tx_outs: self.utxo_set[tx_out.outpoint] = tx_out if tx in self.mempool: self.mempool.remove(tx) self.blocks.append(block) logger.info(f'Block accepted: height={len(self.blocks) - 1}') for peer in self.peers: message = prepare_message('blocks', [block]) with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: s.connect(peer) s.sendall(message) if response: return read_message(s) </DeepExtract>
digital-cash
positive
def plotResidues(self, name=None, format='png'): <DeepExtract> lInput = self.mTrend.mSignalFrame lOutput = self.forecast(lInput, self.mTimeInfo.mHorizon) df = lOutput </DeepExtract> lTime = self.mTimeInfo.mTime lPrefix = self.mSignal + '_' lPrefix2 = str(self.mOriginalSignal) + '_' if name is not None: tsplot.decomp_plot(df, lTime, self.mSignal, lPrefix + 'Trend', lPrefix + 'Trend_residue', name=name + '_trend', format=format) tsplot.decomp_plot(df, lTime, lPrefix + 'Trend_residue', lPrefix + 'Cycle', lPrefix + 'Cycle_residue', name=name + '_cycle', format=format) tsplot.decomp_plot(df, lTime, lPrefix + 'Cycle_residue', lPrefix + 'AR', lPrefix + 'AR_residue', name=name + '_AR', format=format) tsplot.decomp_plot(df, lTime, self.mSignal, lPrefix + 'TransformedForecast', lPrefix + 'TransformedResidue', name=name + '_transformed_forecast', format=format) tsplot.decomp_plot(df, lTime, self.mOriginalSignal, lPrefix2 + 'Forecast', lPrefix2 + 'Residue', name=name + '_forecast', format=format) else: tsplot.decomp_plot(df, lTime, self.mSignal, lPrefix + 'Trend', lPrefix + 'Trend_residue') tsplot.decomp_plot(df, lTime, lPrefix + 'Trend_residue', lPrefix + 'Cycle', lPrefix + 'Cycle_residue') tsplot.decomp_plot(df, lTime, lPrefix + 'Cycle_residue', lPrefix + 'AR', lPrefix + 'AR_residue') tsplot.decomp_plot(df, lTime, self.mSignal, lPrefix + 'TransformedForecast', lPrefix + 'TransformedResidue') tsplot.decomp_plot(df, lTime, self.mOriginalSignal, lPrefix2 + 'Forecast', lPrefix2 + 'Residue')
def plotResidues(self, name=None, format='png'): lInput = self.mTrend.mSignalFrame lOutput = self.forecast(lInput, self.mTimeInfo.mHorizon) df = lOutput lTime = self.mTimeInfo.mTime lPrefix = self.mSignal + '_' lPrefix2 = str(self.mOriginalSignal) + '_' if name is not None: tsplot.decomp_plot(df, lTime, self.mSignal, lPrefix + 'Trend', lPrefix + 'Trend_residue', name=name + '_trend', format=format) tsplot.decomp_plot(df, lTime, lPrefix + 'Trend_residue', lPrefix + 'Cycle', lPrefix + 'Cycle_residue', name=name + '_cycle', format=format) tsplot.decomp_plot(df, lTime, lPrefix + 'Cycle_residue', lPrefix + 'AR', lPrefix + 'AR_residue', name=name + '_AR', format=format) tsplot.decomp_plot(df, lTime, self.mSignal, lPrefix + 'TransformedForecast', lPrefix + 'TransformedResidue', name=name + '_transformed_forecast', format=format) tsplot.decomp_plot(df, lTime, self.mOriginalSignal, lPrefix2 + 'Forecast', lPrefix2 + 'Residue', name=name + '_forecast', format=format) else: tsplot.decomp_plot(df, lTime, self.mSignal, lPrefix + 'Trend', lPrefix + 'Trend_residue') tsplot.decomp_plot(df, lTime, lPrefix + 'Trend_residue', lPrefix + 'Cycle', lPrefix + 'Cycle_residue') tsplot.decomp_plot(df, lTime, lPrefix + 'Cycle_residue', lPrefix + 'AR', lPrefix + 'AR_residue') tsplot.decomp_plot(df, lTime, self.mSignal, lPrefix + 'TransformedForecast', lPrefix + 'TransformedResidue') tsplot.decomp_plot(df, lTime, self.mOriginalSignal, lPrefix2 + 'Forecast', lPrefix2 + 'Residue')
atspy
positive
def _generate_cluster_and_check(self): <DeepExtract> current_cluster = edl_cluster.load_from_etcd(self._etcd, timeout=15) resource_pods = edl_resource_pods.load_from_etcd(self._etcd, timeout=15) if len(resource_pods) <= 0: raise exceptions.EdlTableError('resource pods key={}:[]'.format(self._etcd.get_full_path(constants.ETCD_POD_RESOURCE, self._pod_id))) if current_cluster is None: new_cluster = self._generate_cluster_from_resource(resource_pods) (current_cluster, new_cluster) = (None, new_cluster) current_ids = current_cluster.get_pods_ids_set() resource_ids = set(resource_pods.keys()) (all_inited, all_running, all_succeed, all_failed) = edl_status.load_pods_status_from_etcd(self._etcd, timeout=15) disappeared = current_ids - resource_ids - all_inited - all_running - all_succeed - all_failed failed = current_ids & all_failed if len(disappeared) > 0 or len(failed) > 0: logger.warning('find disappeard pods:{} failed_pods:{}'.format(disappeared, failed)) (current_cluster, new_cluster) = (current_cluster, self._generate_cluster_from_resource(resource_pods)) succeed = current_ids & all_succeed if len(succeed) > 0: logger.debug('find succeed pods:{}'.format(succeed)) new_cluster = copy.copy(current_cluster) (current_cluster, new_cluster) = (current_cluster, new_cluster) running = current_ids & all_running inited = current_ids & all_inited if len(inited) > 0 and current_cluster.get_pods_nranks() < self._job_env.max_nodes: train_status = edl_train_status.load_from_etcd(self._etcd, timeout=30) if train_status == edl_train_status.TrainStatus.INITIAL or train_status == edl_train_status.TrainStatus.RUNNING: logger.info('find running pods:{} and init pods{}'.format(inited, running)) self._append_inited_pods(current_cluster, resource_pods, new_cluster) (current_cluster, new_cluster) = (current_cluster, new_cluster) if len(succeed) > 0: logger.debug('find succeed pods:{}'.format(succeed)) new_cluster = copy.copy(current_cluster) (current_cluster, new_cluster) = (current_cluster, new_cluster) </DeepExtract> if new_cluster.get_pods_nranks() < self._job_env.min_nodes: message = 'new cluster pods size:{} ids:{} wait job_env range:[{}:{}]'.format(new_cluster.get_pods_nranks(), new_cluster.get_pods_ids_set(), self._job_env.min_nodes, self._job_env.max_nodes) raise exceptions.EdlGenerateClusterError(message) if current_cluster is None or current_cluster.stage != new_cluster.stage: logger.info('current_cluster:{} to new_cluster:{}'.format(current_cluster, new_cluster)) <DeepExtract> leader_key = self._etcd.get_full_path(constants.ETCD_POD_RANK, constants.ETCD_POD_LEADER) cluster_key = self._etcd.get_full_path(constants.ETCD_CLUSTER, constants.ETCD_CLUSTER) etcd = self._etcd._etcd (status, _) = etcd.transaction(compare=[etcd.transactions.value(leader_key) == self._pod_id], success=[etcd.transactions.put(cluster_key, new_cluster.to_json())], failure=[]) message = 'pod_id:{} leader_id:{} _set_cluster_if_leader status:{}'.format(self._pod_id, leader_pod.get_pod_leader_id(self._etcd, timeout=15), status) if not status: raise exceptions.EdlEtcdIOError(message) return status </DeepExtract>
def _generate_cluster_and_check(self): current_cluster = edl_cluster.load_from_etcd(self._etcd, timeout=15) resource_pods = edl_resource_pods.load_from_etcd(self._etcd, timeout=15) if len(resource_pods) <= 0: raise exceptions.EdlTableError('resource pods key={}:[]'.format(self._etcd.get_full_path(constants.ETCD_POD_RESOURCE, self._pod_id))) if current_cluster is None: new_cluster = self._generate_cluster_from_resource(resource_pods) (current_cluster, new_cluster) = (None, new_cluster) current_ids = current_cluster.get_pods_ids_set() resource_ids = set(resource_pods.keys()) (all_inited, all_running, all_succeed, all_failed) = edl_status.load_pods_status_from_etcd(self._etcd, timeout=15) disappeared = current_ids - resource_ids - all_inited - all_running - all_succeed - all_failed failed = current_ids & all_failed if len(disappeared) > 0 or len(failed) > 0: logger.warning('find disappeard pods:{} failed_pods:{}'.format(disappeared, failed)) (current_cluster, new_cluster) = (current_cluster, self._generate_cluster_from_resource(resource_pods)) succeed = current_ids & all_succeed if len(succeed) > 0: logger.debug('find succeed pods:{}'.format(succeed)) new_cluster = copy.copy(current_cluster) (current_cluster, new_cluster) = (current_cluster, new_cluster) running = current_ids & all_running inited = current_ids & all_inited if len(inited) > 0 and current_cluster.get_pods_nranks() < self._job_env.max_nodes: train_status = edl_train_status.load_from_etcd(self._etcd, timeout=30) if train_status == edl_train_status.TrainStatus.INITIAL or train_status == edl_train_status.TrainStatus.RUNNING: logger.info('find running pods:{} and init pods{}'.format(inited, running)) self._append_inited_pods(current_cluster, resource_pods, new_cluster) (current_cluster, new_cluster) = (current_cluster, new_cluster) if len(succeed) > 0: logger.debug('find succeed pods:{}'.format(succeed)) new_cluster = copy.copy(current_cluster) (current_cluster, new_cluster) = (current_cluster, new_cluster) if new_cluster.get_pods_nranks() < self._job_env.min_nodes: message = 'new cluster pods size:{} ids:{} wait job_env range:[{}:{}]'.format(new_cluster.get_pods_nranks(), new_cluster.get_pods_ids_set(), self._job_env.min_nodes, self._job_env.max_nodes) raise exceptions.EdlGenerateClusterError(message) if current_cluster is None or current_cluster.stage != new_cluster.stage: logger.info('current_cluster:{} to new_cluster:{}'.format(current_cluster, new_cluster)) leader_key = self._etcd.get_full_path(constants.ETCD_POD_RANK, constants.ETCD_POD_LEADER) cluster_key = self._etcd.get_full_path(constants.ETCD_CLUSTER, constants.ETCD_CLUSTER) etcd = self._etcd._etcd (status, _) = etcd.transaction(compare=[etcd.transactions.value(leader_key) == self._pod_id], success=[etcd.transactions.put(cluster_key, new_cluster.to_json())], failure=[]) message = 'pod_id:{} leader_id:{} _set_cluster_if_leader status:{}'.format(self._pod_id, leader_pod.get_pod_leader_id(self._etcd, timeout=15), status) if not status: raise exceptions.EdlEtcdIOError(message) return status </DeepExtract>
edl
positive
def __init__(self): QtWidgets.QMainWindow.__init__(self) Ui_MainWindow.__init__(self) <DeepExtract> self.setObjectName('MainWindow') self.resize(500, 350) self.setMinimumSize(QtCore.QSize(500, 350)) self.centralwidget = QtWidgets.QWidget(self) self.centralwidget.setObjectName('centralwidget') self.verticalLayout = QtWidgets.QVBoxLayout(self.centralwidget) self.verticalLayout.setObjectName('verticalLayout') self.groupBox_3 = QtWidgets.QGroupBox(self.centralwidget) self.groupBox_3.setTitle('') self.groupBox_3.setObjectName('groupBox_3') self.horizontalLayout_4 = QtWidgets.QHBoxLayout(self.groupBox_3) self.horizontalLayout_4.setObjectName('horizontalLayout_4') self.logo_label = QtWidgets.QLabel(self.groupBox_3) self.logo_label.setText('') self.logo_label.setObjectName('logo_label') self.horizontalLayout_4.addWidget(self.logo_label) self.label_2 = QtWidgets.QLabel(self.groupBox_3) font = QtGui.QFont() font.setBold(True) font.setWeight(75) self.label_2.setFont(font) self.label_2.setObjectName('label_2') self.horizontalLayout_4.addWidget(self.label_2) spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum) self.horizontalLayout_4.addItem(spacerItem) self.email_label = QtWidgets.QLabel(self.groupBox_3) self.email_label.setObjectName('email_label') self.horizontalLayout_4.addWidget(self.email_label) self.verticalLayout.addWidget(self.groupBox_3) self.groupBox = QtWidgets.QGroupBox(self.centralwidget) self.groupBox.setTitle('') self.groupBox.setObjectName('groupBox') self.horizontalLayout_2 = QtWidgets.QHBoxLayout(self.groupBox) self.horizontalLayout_2.setObjectName('horizontalLayout_2') self.label = QtWidgets.QLabel(self.groupBox) font = QtGui.QFont() font.setBold(False) font.setWeight(50) self.label.setFont(font) self.label.setObjectName('label') self.horizontalLayout_2.addWidget(self.label) self.verticalLayout.addWidget(self.groupBox) self.groupBox_2 = QtWidgets.QGroupBox(self.centralwidget) self.groupBox_2.setTitle('') self.groupBox_2.setObjectName('groupBox_2') self.horizontalLayout_3 = QtWidgets.QHBoxLayout(self.groupBox_2) self.horizontalLayout_3.setObjectName('horizontalLayout_3') self.verticalLayout_2 = QtWidgets.QVBoxLayout() self.verticalLayout_2.setObjectName('verticalLayout_2') self.pushButton = QtWidgets.QPushButton(self.groupBox_2) self.pushButton.setMinimumSize(QtCore.QSize(150, 0)) self.pushButton.setObjectName('pushButton') self.verticalLayout_2.addWidget(self.pushButton) self.pushButton_2 = QtWidgets.QPushButton(self.groupBox_2) self.pushButton_2.setMinimumSize(QtCore.QSize(150, 0)) self.pushButton_2.setObjectName('pushButton_2') self.verticalLayout_2.addWidget(self.pushButton_2) self.horizontalLayout_3.addLayout(self.verticalLayout_2) spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum) self.horizontalLayout_3.addItem(spacerItem1) self.computer_label = QtWidgets.QLabel(self.groupBox_2) self.computer_label.setText('') self.computer_label.setObjectName('computer_label') self.horizontalLayout_3.addWidget(self.computer_label) self.arrow_label = QtWidgets.QLabel(self.groupBox_2) self.arrow_label.setMinimumSize(QtCore.QSize(64, 0)) self.arrow_label.setText('') self.arrow_label.setAlignment(QtCore.Qt.AlignCenter) self.arrow_label.setObjectName('arrow_label') self.horizontalLayout_3.addWidget(self.arrow_label) self.cloud_label = QtWidgets.QLabel(self.groupBox_2) self.cloud_label.setMinimumSize(QtCore.QSize(64, 0)) self.cloud_label.setText('') 
self.cloud_label.setObjectName('cloud_label') self.horizontalLayout_3.addWidget(self.cloud_label) self.verticalLayout.addWidget(self.groupBox_2) self.groupBox_4 = QtWidgets.QGroupBox(self.centralwidget) self.groupBox_4.setTitle('') self.groupBox_4.setObjectName('groupBox_4') self.horizontalLayout = QtWidgets.QHBoxLayout(self.groupBox_4) self.horizontalLayout.setObjectName('horizontalLayout') self.verticalLayout_3 = QtWidgets.QVBoxLayout() self.verticalLayout_3.setObjectName('verticalLayout_3') spacerItem2 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding) self.verticalLayout_3.addItem(spacerItem2) self.horizontalLayout_8 = QtWidgets.QHBoxLayout() self.horizontalLayout_8.setObjectName('horizontalLayout_8') self.label_3 = QtWidgets.QLabel(self.groupBox_4) self.label_3.setMinimumSize(QtCore.QSize(150, 0)) font = QtGui.QFont() font.setBold(True) font.setWeight(75) self.label_3.setFont(font) self.label_3.setAlignment(QtCore.Qt.AlignLeading | QtCore.Qt.AlignLeft | QtCore.Qt.AlignVCenter) self.label_3.setObjectName('label_3') self.horizontalLayout_8.addWidget(self.label_3) self.label_7 = QtWidgets.QLabel(self.groupBox_4) self.label_7.setObjectName('label_7') self.horizontalLayout_8.addWidget(self.label_7) self.verticalLayout_3.addLayout(self.horizontalLayout_8) self.horizontalLayout_9 = QtWidgets.QHBoxLayout() self.horizontalLayout_9.setObjectName('horizontalLayout_9') self.label_4 = QtWidgets.QLabel(self.groupBox_4) self.label_4.setMinimumSize(QtCore.QSize(150, 0)) font = QtGui.QFont() font.setBold(True) font.setWeight(75) self.label_4.setFont(font) self.label_4.setAlignment(QtCore.Qt.AlignLeading | QtCore.Qt.AlignLeft | QtCore.Qt.AlignVCenter) self.label_4.setObjectName('label_4') self.horizontalLayout_9.addWidget(self.label_4) self.label_8 = QtWidgets.QLabel(self.groupBox_4) self.label_8.setObjectName('label_8') self.horizontalLayout_9.addWidget(self.label_8) self.verticalLayout_3.addLayout(self.horizontalLayout_9) self.horizontalLayout_7 = QtWidgets.QHBoxLayout() self.horizontalLayout_7.setObjectName('horizontalLayout_7') self.label_5 = QtWidgets.QLabel(self.groupBox_4) self.label_5.setMinimumSize(QtCore.QSize(150, 0)) font = QtGui.QFont() font.setBold(True) font.setWeight(75) self.label_5.setFont(font) self.label_5.setAlignment(QtCore.Qt.AlignLeading | QtCore.Qt.AlignLeft | QtCore.Qt.AlignVCenter) self.label_5.setObjectName('label_5') self.horizontalLayout_7.addWidget(self.label_5) self.label_9 = QtWidgets.QLabel(self.groupBox_4) self.label_9.setObjectName('label_9') self.horizontalLayout_7.addWidget(self.label_9) self.verticalLayout_3.addLayout(self.horizontalLayout_7) spacerItem3 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding) self.verticalLayout_3.addItem(spacerItem3) self.horizontalLayout.addLayout(self.verticalLayout_3) spacerItem4 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum) self.horizontalLayout.addItem(spacerItem4) self.verticalLayout_4 = QtWidgets.QVBoxLayout() self.verticalLayout_4.setObjectName('verticalLayout_4') spacerItem5 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding) self.verticalLayout_4.addItem(spacerItem5) self.pushButton_3 = QtWidgets.QPushButton(self.groupBox_4) self.pushButton_3.setMinimumSize(QtCore.QSize(150, 0)) self.pushButton_3.setObjectName('pushButton_3') self.verticalLayout_4.addWidget(self.pushButton_3) self.label_10 = QtWidgets.QLabel(self.groupBox_4) 
self.label_10.setTextFormat(QtCore.Qt.RichText) self.label_10.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing | QtCore.Qt.AlignVCenter) self.label_10.setObjectName('label_10') self.verticalLayout_4.addWidget(self.label_10) self.label_6 = QtWidgets.QLabel(self.groupBox_4) self.label_6.setTextFormat(QtCore.Qt.RichText) self.label_6.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing | QtCore.Qt.AlignVCenter) self.label_6.setObjectName('label_6') self.verticalLayout_4.addWidget(self.label_6) spacerItem6 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding) self.verticalLayout_4.addItem(spacerItem6) self.horizontalLayout.addLayout(self.verticalLayout_4) self.verticalLayout.addWidget(self.groupBox_4) self.setCentralWidget(self.centralwidget) self.retranslateUi(self) QtCore.QMetaObject.connectSlotsByName(self) </DeepExtract> pixmap = QtGui.QPixmap('img/logo.png') pixmap = pixmap.scaled(20, 20) self.logo_label.setPixmap(pixmap) pixmap = QtGui.QPixmap('img/computer.png') pixmap = pixmap.scaled(32, 32) self.computer_label.setPixmap(pixmap) pixmap = QtGui.QPixmap('img/arrow.png') pixmap = pixmap.scaled(20, 20) self.arrow_label.setPixmap(pixmap) pixmap = QtGui.QPixmap('img/cloud.png') pixmap = pixmap.scaled(32, 32) self.cloud_label.setPixmap(pixmap)
def __init__(self): QtWidgets.QMainWindow.__init__(self) Ui_MainWindow.__init__(self) self.setObjectName('MainWindow') self.resize(500, 350) self.setMinimumSize(QtCore.QSize(500, 350)) self.centralwidget = QtWidgets.QWidget(self) self.centralwidget.setObjectName('centralwidget') self.verticalLayout = QtWidgets.QVBoxLayout(self.centralwidget) self.verticalLayout.setObjectName('verticalLayout') self.groupBox_3 = QtWidgets.QGroupBox(self.centralwidget) self.groupBox_3.setTitle('') self.groupBox_3.setObjectName('groupBox_3') self.horizontalLayout_4 = QtWidgets.QHBoxLayout(self.groupBox_3) self.horizontalLayout_4.setObjectName('horizontalLayout_4') self.logo_label = QtWidgets.QLabel(self.groupBox_3) self.logo_label.setText('') self.logo_label.setObjectName('logo_label') self.horizontalLayout_4.addWidget(self.logo_label) self.label_2 = QtWidgets.QLabel(self.groupBox_3) font = QtGui.QFont() font.setBold(True) font.setWeight(75) self.label_2.setFont(font) self.label_2.setObjectName('label_2') self.horizontalLayout_4.addWidget(self.label_2) spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum) self.horizontalLayout_4.addItem(spacerItem) self.email_label = QtWidgets.QLabel(self.groupBox_3) self.email_label.setObjectName('email_label') self.horizontalLayout_4.addWidget(self.email_label) self.verticalLayout.addWidget(self.groupBox_3) self.groupBox = QtWidgets.QGroupBox(self.centralwidget) self.groupBox.setTitle('') self.groupBox.setObjectName('groupBox') self.horizontalLayout_2 = QtWidgets.QHBoxLayout(self.groupBox) self.horizontalLayout_2.setObjectName('horizontalLayout_2') self.label = QtWidgets.QLabel(self.groupBox) font = QtGui.QFont() font.setBold(False) font.setWeight(50) self.label.setFont(font) self.label.setObjectName('label') self.horizontalLayout_2.addWidget(self.label) self.verticalLayout.addWidget(self.groupBox) self.groupBox_2 = QtWidgets.QGroupBox(self.centralwidget) self.groupBox_2.setTitle('') self.groupBox_2.setObjectName('groupBox_2') self.horizontalLayout_3 = QtWidgets.QHBoxLayout(self.groupBox_2) self.horizontalLayout_3.setObjectName('horizontalLayout_3') self.verticalLayout_2 = QtWidgets.QVBoxLayout() self.verticalLayout_2.setObjectName('verticalLayout_2') self.pushButton = QtWidgets.QPushButton(self.groupBox_2) self.pushButton.setMinimumSize(QtCore.QSize(150, 0)) self.pushButton.setObjectName('pushButton') self.verticalLayout_2.addWidget(self.pushButton) self.pushButton_2 = QtWidgets.QPushButton(self.groupBox_2) self.pushButton_2.setMinimumSize(QtCore.QSize(150, 0)) self.pushButton_2.setObjectName('pushButton_2') self.verticalLayout_2.addWidget(self.pushButton_2) self.horizontalLayout_3.addLayout(self.verticalLayout_2) spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum) self.horizontalLayout_3.addItem(spacerItem1) self.computer_label = QtWidgets.QLabel(self.groupBox_2) self.computer_label.setText('') self.computer_label.setObjectName('computer_label') self.horizontalLayout_3.addWidget(self.computer_label) self.arrow_label = QtWidgets.QLabel(self.groupBox_2) self.arrow_label.setMinimumSize(QtCore.QSize(64, 0)) self.arrow_label.setText('') self.arrow_label.setAlignment(QtCore.Qt.AlignCenter) self.arrow_label.setObjectName('arrow_label') self.horizontalLayout_3.addWidget(self.arrow_label) self.cloud_label = QtWidgets.QLabel(self.groupBox_2) self.cloud_label.setMinimumSize(QtCore.QSize(64, 0)) self.cloud_label.setText('') self.cloud_label.setObjectName('cloud_label') 
self.horizontalLayout_3.addWidget(self.cloud_label) self.verticalLayout.addWidget(self.groupBox_2) self.groupBox_4 = QtWidgets.QGroupBox(self.centralwidget) self.groupBox_4.setTitle('') self.groupBox_4.setObjectName('groupBox_4') self.horizontalLayout = QtWidgets.QHBoxLayout(self.groupBox_4) self.horizontalLayout.setObjectName('horizontalLayout') self.verticalLayout_3 = QtWidgets.QVBoxLayout() self.verticalLayout_3.setObjectName('verticalLayout_3') spacerItem2 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding) self.verticalLayout_3.addItem(spacerItem2) self.horizontalLayout_8 = QtWidgets.QHBoxLayout() self.horizontalLayout_8.setObjectName('horizontalLayout_8') self.label_3 = QtWidgets.QLabel(self.groupBox_4) self.label_3.setMinimumSize(QtCore.QSize(150, 0)) font = QtGui.QFont() font.setBold(True) font.setWeight(75) self.label_3.setFont(font) self.label_3.setAlignment(QtCore.Qt.AlignLeading | QtCore.Qt.AlignLeft | QtCore.Qt.AlignVCenter) self.label_3.setObjectName('label_3') self.horizontalLayout_8.addWidget(self.label_3) self.label_7 = QtWidgets.QLabel(self.groupBox_4) self.label_7.setObjectName('label_7') self.horizontalLayout_8.addWidget(self.label_7) self.verticalLayout_3.addLayout(self.horizontalLayout_8) self.horizontalLayout_9 = QtWidgets.QHBoxLayout() self.horizontalLayout_9.setObjectName('horizontalLayout_9') self.label_4 = QtWidgets.QLabel(self.groupBox_4) self.label_4.setMinimumSize(QtCore.QSize(150, 0)) font = QtGui.QFont() font.setBold(True) font.setWeight(75) self.label_4.setFont(font) self.label_4.setAlignment(QtCore.Qt.AlignLeading | QtCore.Qt.AlignLeft | QtCore.Qt.AlignVCenter) self.label_4.setObjectName('label_4') self.horizontalLayout_9.addWidget(self.label_4) self.label_8 = QtWidgets.QLabel(self.groupBox_4) self.label_8.setObjectName('label_8') self.horizontalLayout_9.addWidget(self.label_8) self.verticalLayout_3.addLayout(self.horizontalLayout_9) self.horizontalLayout_7 = QtWidgets.QHBoxLayout() self.horizontalLayout_7.setObjectName('horizontalLayout_7') self.label_5 = QtWidgets.QLabel(self.groupBox_4) self.label_5.setMinimumSize(QtCore.QSize(150, 0)) font = QtGui.QFont() font.setBold(True) font.setWeight(75) self.label_5.setFont(font) self.label_5.setAlignment(QtCore.Qt.AlignLeading | QtCore.Qt.AlignLeft | QtCore.Qt.AlignVCenter) self.label_5.setObjectName('label_5') self.horizontalLayout_7.addWidget(self.label_5) self.label_9 = QtWidgets.QLabel(self.groupBox_4) self.label_9.setObjectName('label_9') self.horizontalLayout_7.addWidget(self.label_9) self.verticalLayout_3.addLayout(self.horizontalLayout_7) spacerItem3 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding) self.verticalLayout_3.addItem(spacerItem3) self.horizontalLayout.addLayout(self.verticalLayout_3) spacerItem4 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum) self.horizontalLayout.addItem(spacerItem4) self.verticalLayout_4 = QtWidgets.QVBoxLayout() self.verticalLayout_4.setObjectName('verticalLayout_4') spacerItem5 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding) self.verticalLayout_4.addItem(spacerItem5) self.pushButton_3 = QtWidgets.QPushButton(self.groupBox_4) self.pushButton_3.setMinimumSize(QtCore.QSize(150, 0)) self.pushButton_3.setObjectName('pushButton_3') self.verticalLayout_4.addWidget(self.pushButton_3) self.label_10 = QtWidgets.QLabel(self.groupBox_4) self.label_10.setTextFormat(QtCore.Qt.RichText) 
self.label_10.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing | QtCore.Qt.AlignVCenter) self.label_10.setObjectName('label_10') self.verticalLayout_4.addWidget(self.label_10) self.label_6 = QtWidgets.QLabel(self.groupBox_4) self.label_6.setTextFormat(QtCore.Qt.RichText) self.label_6.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing | QtCore.Qt.AlignVCenter) self.label_6.setObjectName('label_6') self.verticalLayout_4.addWidget(self.label_6) spacerItem6 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding) self.verticalLayout_4.addItem(spacerItem6) self.horizontalLayout.addLayout(self.verticalLayout_4) self.verticalLayout.addWidget(self.groupBox_4) self.setCentralWidget(self.centralwidget) self.retranslateUi(self) QtCore.QMetaObject.connectSlotsByName(self) pixmap = QtGui.QPixmap('img/logo.png') pixmap = pixmap.scaled(20, 20) self.logo_label.setPixmap(pixmap) pixmap = QtGui.QPixmap('img/computer.png') pixmap = pixmap.scaled(32, 32) self.computer_label.setPixmap(pixmap) pixmap = QtGui.QPixmap('img/arrow.png') pixmap = pixmap.scaled(20, 20) self.arrow_label.setPixmap(pixmap) pixmap = QtGui.QPixmap('img/cloud.png') pixmap = pixmap.scaled(32, 32) self.cloud_label.setPixmap(pixmap)
BlobBackup
positive
@mock.patch.object(civis.parallel, 'civis') @mock.patch('civis.futures.time.sleep', side_effect=lambda x: None) def test_result_exception(m_sleep, mock_civis): callback = mock.MagicMock() exc = ZeroDivisionError() <DeepExtract> cnt = {'err': 0} def mock_civis_to_file(file_id, buf, client=None): if cnt['err'] < max_n_err: cnt['err'] += 1 raise exc else: buf.write(pickle.dumps(exc)) mock_civis.io.civis_to_file.side_effect = mock_civis_to_file </DeepExtract> mock_client = create_client_mock_for_container_tests(1, 2, state='failed', run_outputs=mock.MagicMock()) fut = ContainerFuture(1, 2, client=mock_client) res = civis.parallel._CivisBackendResult(fut, callback) with pytest.raises(ZeroDivisionError): res.get() assert callback.call_count == 0
@mock.patch.object(civis.parallel, 'civis') @mock.patch('civis.futures.time.sleep', side_effect=lambda x: None) def test_result_exception(m_sleep, mock_civis): callback = mock.MagicMock() exc = ZeroDivisionError() cnt = {'err': 0} def mock_civis_to_file(file_id, buf, client=None): if cnt['err'] < max_n_err: cnt['err'] += 1 raise exc else: buf.write(pickle.dumps(exc)) mock_civis.io.civis_to_file.side_effect = mock_civis_to_file mock_client = create_client_mock_for_container_tests(1, 2, state='failed', run_outputs=mock.MagicMock()) fut = ContainerFuture(1, 2, client=mock_client) res = civis.parallel._CivisBackendResult(fut, callback) with pytest.raises(ZeroDivisionError): res.get() assert callback.call_count == 0
civis-python
positive
def export_records(log, dm_records_rfa, dm_records_tfa, output_dir, plot_options): figure_dir = '{}/figures'.format(output_dir) <DeepExtract> try: os.makedirs(figure_dir) except OSError as exc: if exc.errno == errno.EEXIST and os.path.isdir(figure_dir): pass else: err_quit('{}. Aborting!'.format(exc)) </DeepExtract> log(1, "[Info] Saving figures to directory '{}'".format(figure_dir)) dm_dir = '{}/dm'.format(output_dir) <DeepExtract> try: os.makedirs(dm_dir) except OSError as exc: if exc.errno == errno.EEXIST and os.path.isdir(dm_dir): pass else: err_quit('{}. Aborting!'.format(exc)) </DeepExtract> log(1, "[Info] Saving dm files to directory '{}'".format(dm_dir)) def _export_records(records, prefix): if len(records) > 0: <DeepExtract> dc_dict = {} for (activity, records) in records.items(): fa_array = [e[1] for e in records] fn_array = [e[2] for e in records] threshold = [e[0] for e in records] dc = DataContainer(fa_array, fn_array, threshold, label=activity) dc.line_options['color'] = None dc_dict[activity] = dc dc_dict = dc_dict </DeepExtract> for (activity, dc) in dc_dict.items(): dc.activity = activity dc.fa_label = prefix dc.fn_label = 'PMISS' <DeepExtract> dc.dump('{}/{}'.format(dm_dir, '{}_{}.dm'.format(prefix, activity))) </DeepExtract> log(1, '[Info] Plotting {} DET curve for {}'.format(prefix, activity)) plot_options['title'] = activity <DeepExtract> if type(dc) is {}.values().__class__: dc = list(dc) if isinstance(dc, DataContainer): dc = [dc] rd = Render(plot_type='det') fig = rd.plot(dc, display=False, plot_options=plot_options) fig.savefig('{}/{}'.format(figure_dir, 'DET_{}_{}.png'.format(prefix, activity))) rd.close_fig(fig) </DeepExtract> mean_label = '{}_mean_byfa'.format(prefix) xscale = plot_options['xscale'] if 'xscale' in plot_options else 'linear' xmin = plot_options['xlim'][0] if 'xlim' in plot_options else 0 dc_agg = DataContainer.aggregate(dc_dict.values(), output_label=mean_label, average_resolution=500, xscale=xscale, xmin=xmin) dc_agg.activity = 'AGGREGATED' dc_agg.fa_label = prefix dc_agg.fn_label = 'PMISS' <DeepExtract> dc_agg.dump('{}/{}'.format(dm_dir, '{}.dm'.format(mean_label))) </DeepExtract> log(1, '[Info] Plotting mean {} curve for {} activities'.format(prefix, len(dc_dict.values()))) <DeepExtract> if type(dc_agg) is {}.values().__class__: dc_agg = list(dc_agg) if isinstance(dc_agg, DataContainer): dc_agg = [dc_agg] rd = Render(plot_type='det') fig = rd.plot(dc_agg, display=False, plot_options=plot_options) fig.savefig('{}/{}'.format(figure_dir, 'DET_{}.png'.format(mean_label))) rd.close_fig(fig) </DeepExtract> log(1, '[Info] Plotting combined {} DET curves'.format(prefix)) plot_options['title'] = 'All Activities' <DeepExtract> if type(dc_dict.values()) is {}.values().__class__: dc_dict.values() = list(dc_dict.values()) if isinstance(dc_dict.values(), DataContainer): dc_dict.values() = [dc_dict.values()] rd = Render(plot_type='det') fig = rd.plot(dc_dict.values(), display=False, plot_options=plot_options) fig.savefig('{}/{}'.format(figure_dir, 'DET_{}_{}.png'.format(prefix, 'COMBINED'))) rd.close_fig(fig) </DeepExtract> plot_options['title'] = 'All Activities and Aggregate' <DeepExtract> if type(list(dc_dict.values()) + [dc_agg]) is {}.values().__class__: list(dc_dict.values()) + [dc_agg] = list(list(dc_dict.values()) + [dc_agg]) if isinstance(list(dc_dict.values()) + [dc_agg], DataContainer): list(dc_dict.values()) + [dc_agg] = [list(dc_dict.values()) + [dc_agg]] rd = Render(plot_type='det') fig = rd.plot(list(dc_dict.values()) + [dc_agg], 
display=False, plot_options=plot_options) fig.savefig('{}/{}'.format(figure_dir, 'DET_{}_{}.png'.format(prefix, 'COMBINEDAGG'))) rd.close_fig(fig) </DeepExtract> <DeepExtract> if len(dm_records_rfa) > 0: dc_dict = records_to_dm(dm_records_rfa) for (activity, dc) in dc_dict.items(): dc.activity = activity dc.fa_label = 'RFA' dc.fn_label = 'PMISS' save_dm(dc, dm_dir, '{}_{}.dm'.format('RFA', activity)) log(1, '[Info] Plotting {} DET curve for {}'.format('RFA', activity)) plot_options['title'] = activity save_DET(dc, figure_dir, 'DET_{}_{}.png'.format('RFA', activity), plot_options) mean_label = '{}_mean_byfa'.format('RFA') xscale = plot_options['xscale'] if 'xscale' in plot_options else 'linear' xmin = plot_options['xlim'][0] if 'xlim' in plot_options else 0 dc_agg = DataContainer.aggregate(dc_dict.values(), output_label=mean_label, average_resolution=500, xscale=xscale, xmin=xmin) dc_agg.activity = 'AGGREGATED' dc_agg.fa_label = 'RFA' dc_agg.fn_label = 'PMISS' save_dm(dc_agg, dm_dir, '{}.dm'.format(mean_label)) log(1, '[Info] Plotting mean {} curve for {} activities'.format('RFA', len(dc_dict.values()))) save_DET(dc_agg, figure_dir, 'DET_{}.png'.format(mean_label), plot_options) log(1, '[Info] Plotting combined {} DET curves'.format('RFA')) plot_options['title'] = 'All Activities' save_DET(dc_dict.values(), figure_dir, 'DET_{}_{}.png'.format('RFA', 'COMBINED'), plot_options) plot_options['title'] = 'All Activities and Aggregate' save_DET(list(dc_dict.values()) + [dc_agg], figure_dir, 'DET_{}_{}.png'.format('RFA', 'COMBINEDAGG'), plot_options) </DeepExtract> <DeepExtract> if len(dm_records_tfa) > 0: dc_dict = records_to_dm(dm_records_tfa) for (activity, dc) in dc_dict.items(): dc.activity = activity dc.fa_label = 'TFA' dc.fn_label = 'PMISS' save_dm(dc, dm_dir, '{}_{}.dm'.format('TFA', activity)) log(1, '[Info] Plotting {} DET curve for {}'.format('TFA', activity)) plot_options['title'] = activity save_DET(dc, figure_dir, 'DET_{}_{}.png'.format('TFA', activity), plot_options) mean_label = '{}_mean_byfa'.format('TFA') xscale = plot_options['xscale'] if 'xscale' in plot_options else 'linear' xmin = plot_options['xlim'][0] if 'xlim' in plot_options else 0 dc_agg = DataContainer.aggregate(dc_dict.values(), output_label=mean_label, average_resolution=500, xscale=xscale, xmin=xmin) dc_agg.activity = 'AGGREGATED' dc_agg.fa_label = 'TFA' dc_agg.fn_label = 'PMISS' save_dm(dc_agg, dm_dir, '{}.dm'.format(mean_label)) log(1, '[Info] Plotting mean {} curve for {} activities'.format('TFA', len(dc_dict.values()))) save_DET(dc_agg, figure_dir, 'DET_{}.png'.format(mean_label), plot_options) log(1, '[Info] Plotting combined {} DET curves'.format('TFA')) plot_options['title'] = 'All Activities' save_DET(dc_dict.values(), figure_dir, 'DET_{}_{}.png'.format('TFA', 'COMBINED'), plot_options) plot_options['title'] = 'All Activities and Aggregate' save_DET(list(dc_dict.values()) + [dc_agg], figure_dir, 'DET_{}_{}.png'.format('TFA', 'COMBINEDAGG'), plot_options) </DeepExtract>
def export_records(log, dm_records_rfa, dm_records_tfa, output_dir, plot_options): figure_dir = '{}/figures'.format(output_dir) try: os.makedirs(figure_dir) except OSError as exc: if exc.errno == errno.EEXIST and os.path.isdir(figure_dir): pass else: err_quit('{}. Aborting!'.format(exc)) log(1, "[Info] Saving figures to directory '{}'".format(figure_dir)) dm_dir = '{}/dm'.format(output_dir) try: os.makedirs(dm_dir) except OSError as exc: if exc.errno == errno.EEXIST and os.path.isdir(dm_dir): pass else: err_quit('{}. Aborting!'.format(exc)) log(1, "[Info] Saving dm files to directory '{}'".format(dm_dir)) def _export_records(records, prefix): if len(records) > 0: dc_dict = {} for (activity, records) in records.items(): fa_array = [e[1] for e in records] fn_array = [e[2] for e in records] threshold = [e[0] for e in records] dc = DataContainer(fa_array, fn_array, threshold, label=activity) dc.line_options['color'] = None dc_dict[activity] = dc dc_dict = dc_dict for (activity, dc) in dc_dict.items(): dc.activity = activity dc.fa_label = prefix dc.fn_label = 'PMISS' dc.dump('{}/{}'.format(dm_dir, '{}_{}.dm'.format(prefix, activity))) log(1, '[Info] Plotting {} DET curve for {}'.format(prefix, activity)) plot_options['title'] = activity if type(dc) is {}.values().__class__: dc = list(dc) if isinstance(dc, DataContainer): dc = [dc] rd = Render(plot_type='det') fig = rd.plot(dc, display=False, plot_options=plot_options) fig.savefig('{}/{}'.format(figure_dir, 'DET_{}_{}.png'.format(prefix, activity))) rd.close_fig(fig) mean_label = '{}_mean_byfa'.format(prefix) xscale = plot_options['xscale'] if 'xscale' in plot_options else 'linear' xmin = plot_options['xlim'][0] if 'xlim' in plot_options else 0 dc_agg = DataContainer.aggregate(dc_dict.values(), output_label=mean_label, average_resolution=500, xscale=xscale, xmin=xmin) dc_agg.activity = 'AGGREGATED' dc_agg.fa_label = prefix dc_agg.fn_label = 'PMISS' dc_agg.dump('{}/{}'.format(dm_dir, '{}.dm'.format(mean_label))) log(1, '[Info] Plotting mean {} curve for {} activities'.format(prefix, len(dc_dict.values()))) if type(dc_agg) is {}.values().__class__: dc_agg = list(dc_agg) if isinstance(dc_agg, DataContainer): dc_agg = [dc_agg] rd = Render(plot_type='det') fig = rd.plot(dc_agg, display=False, plot_options=plot_options) fig.savefig('{}/{}'.format(figure_dir, 'DET_{}.png'.format(mean_label))) rd.close_fig(fig) log(1, '[Info] Plotting combined {} DET curves'.format(prefix)) plot_options['title'] = 'All Activities' if type(dc_dict.values()) is {}.values().__class__: dc_dict.values() = list(dc_dict.values()) if isinstance(dc_dict.values(), DataContainer): dc_dict.values() = [dc_dict.values()] rd = Render(plot_type='det') fig = rd.plot(dc_dict.values(), display=False, plot_options=plot_options) fig.savefig('{}/{}'.format(figure_dir, 'DET_{}_{}.png'.format(prefix, 'COMBINED'))) rd.close_fig(fig) plot_options['title'] = 'All Activities and Aggregate' if type(list(dc_dict.values()) + [dc_agg]) is {}.values().__class__: list(dc_dict.values()) + [dc_agg] = list(list(dc_dict.values()) + [dc_agg]) if isinstance(list(dc_dict.values()) + [dc_agg], DataContainer): list(dc_dict.values()) + [dc_agg] = [list(dc_dict.values()) + [dc_agg]] rd = Render(plot_type='det') fig = rd.plot(list(dc_dict.values()) + [dc_agg], display=False, plot_options=plot_options) fig.savefig('{}/{}'.format(figure_dir, 'DET_{}_{}.png'.format(prefix, 'COMBINEDAGG'))) rd.close_fig(fig) if len(dm_records_rfa) > 0: dc_dict = records_to_dm(dm_records_rfa) for (activity, dc) in dc_dict.items(): 
dc.activity = activity dc.fa_label = 'RFA' dc.fn_label = 'PMISS' save_dm(dc, dm_dir, '{}_{}.dm'.format('RFA', activity)) log(1, '[Info] Plotting {} DET curve for {}'.format('RFA', activity)) plot_options['title'] = activity save_DET(dc, figure_dir, 'DET_{}_{}.png'.format('RFA', activity), plot_options) mean_label = '{}_mean_byfa'.format('RFA') xscale = plot_options['xscale'] if 'xscale' in plot_options else 'linear' xmin = plot_options['xlim'][0] if 'xlim' in plot_options else 0 dc_agg = DataContainer.aggregate(dc_dict.values(), output_label=mean_label, average_resolution=500, xscale=xscale, xmin=xmin) dc_agg.activity = 'AGGREGATED' dc_agg.fa_label = 'RFA' dc_agg.fn_label = 'PMISS' save_dm(dc_agg, dm_dir, '{}.dm'.format(mean_label)) log(1, '[Info] Plotting mean {} curve for {} activities'.format('RFA', len(dc_dict.values()))) save_DET(dc_agg, figure_dir, 'DET_{}.png'.format(mean_label), plot_options) log(1, '[Info] Plotting combined {} DET curves'.format('RFA')) plot_options['title'] = 'All Activities' save_DET(dc_dict.values(), figure_dir, 'DET_{}_{}.png'.format('RFA', 'COMBINED'), plot_options) plot_options['title'] = 'All Activities and Aggregate' save_DET(list(dc_dict.values()) + [dc_agg], figure_dir, 'DET_{}_{}.png'.format('RFA', 'COMBINEDAGG'), plot_options) if len(dm_records_tfa) > 0: dc_dict = records_to_dm(dm_records_tfa) for (activity, dc) in dc_dict.items(): dc.activity = activity dc.fa_label = 'TFA' dc.fn_label = 'PMISS' save_dm(dc, dm_dir, '{}_{}.dm'.format('TFA', activity)) log(1, '[Info] Plotting {} DET curve for {}'.format('TFA', activity)) plot_options['title'] = activity save_DET(dc, figure_dir, 'DET_{}_{}.png'.format('TFA', activity), plot_options) mean_label = '{}_mean_byfa'.format('TFA') xscale = plot_options['xscale'] if 'xscale' in plot_options else 'linear' xmin = plot_options['xlim'][0] if 'xlim' in plot_options else 0 dc_agg = DataContainer.aggregate(dc_dict.values(), output_label=mean_label, average_resolution=500, xscale=xscale, xmin=xmin) dc_agg.activity = 'AGGREGATED' dc_agg.fa_label = 'TFA' dc_agg.fn_label = 'PMISS' save_dm(dc_agg, dm_dir, '{}.dm'.format(mean_label)) log(1, '[Info] Plotting mean {} curve for {} activities'.format('TFA', len(dc_dict.values()))) save_DET(dc_agg, figure_dir, 'DET_{}.png'.format(mean_label), plot_options) log(1, '[Info] Plotting combined {} DET curves'.format('TFA')) plot_options['title'] = 'All Activities' save_DET(dc_dict.values(), figure_dir, 'DET_{}_{}.png'.format('TFA', 'COMBINED'), plot_options) plot_options['title'] = 'All Activities and Aggregate' save_DET(list(dc_dict.values()) + [dc_agg], figure_dir, 'DET_{}_{}.png'.format('TFA', 'COMBINEDAGG'), plot_options) </DeepExtract>
ActEV_Scorer
positive
def signed_angle_between(v1, v2): """ Returns the angle in radians between vectors 'v1' and 'v2':: Taken from https://stackoverflow.com/questions/2827393/angles-between-two-n-dimensional-vectors-in-python/13849249#13849249. """ <DeepExtract> v1_u = v1 / np.linalg.norm(v1) </DeepExtract> <DeepExtract> v2_u = v2 / np.linalg.norm(v2) </DeepExtract> minor = np.linalg.det(np.stack((v1_u[-2:], v2_u[-2:]))) angle = np.arccos(np.clip(np.dot(v1_u, v2_u), -1.0, 1.0)) if round(angle, 8) == 0: return angle if minor == 0: raise NotImplementedError('Too odd vectors =(') return np.sign(minor) * angle
def signed_angle_between(v1, v2): """ Returns the angle in radians between vectors 'v1' and 'v2':: Taken from https://stackoverflow.com/questions/2827393/angles-between-two-n-dimensional-vectors-in-python/13849249#13849249. """ v1_u = v1 / np.linalg.norm(v1) v2_u = v2 / np.linalg.norm(v2) minor = np.linalg.det(np.stack((v1_u[-2:], v2_u[-2:]))) angle = np.arccos(np.clip(np.dot(v1_u, v2_u), -1.0, 1.0)) if round(angle, 8) == 0: return angle if minor == 0: raise NotImplementedError('Too odd vectors =(') return np.sign(minor) * angle
cordial-sync
positive
def __init__(self, vocab_file, do_lower_case=True): <DeepExtract> vocab = collections.OrderedDict() index = 0 with tf.gfile.GFile(vocab_file, 'r') as reader: while True: token = convert_to_unicode(reader.readline()) if not token: break token = token.strip() vocab[token] = index index += 1 self.vocab = vocab </DeepExtract> self.inv_vocab = {v: k for (k, v) in self.vocab.items()} self.basic_tokenizer = BasicTokenizer(do_lower_case=do_lower_case) self.wordpiece_tokenizer = WordpieceTokenizer(vocab=self.vocab)
def __init__(self, vocab_file, do_lower_case=True): vocab = collections.OrderedDict() index = 0 with tf.gfile.GFile(vocab_file, 'r') as reader: while True: token = convert_to_unicode(reader.readline()) if not token: break token = token.strip() vocab[token] = index index += 1 self.vocab = vocab self.inv_vocab = {v: k for (k, v) in self.vocab.items()} self.basic_tokenizer = BasicTokenizer(do_lower_case=do_lower_case) self.wordpiece_tokenizer = WordpieceTokenizer(vocab=self.vocab)
bluebert
positive