Columns (all string-valued; min – max value lengths):

  code              75 – 104k
  code_sememe       47 – 309k
  token_type        215 – 214k
  code_dependency   75 – 155k
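Each record below pairs a raw Python snippet (`code`) with three derived views: an AST-style rendering (`code_sememe`), a lexical rendering (`token_type`), and the source re-emitted with control-dependency markers (`code_dependency`). The dump does not include the generator for these columns, so the following is only a hedged, minimal sketch (the `render` helper is hypothetical, not the dataset's tooling) of how a bracketed AST view in the spirit of `code_sememe`, with `function[...]`, `name[...]`, `constant[...]`, and `call[...]` nodes, could be derived with the standard `ast` module:

```python
# Hypothetical sketch only: the dataset's real pipeline is not shown in this
# dump. This derives a bracketed AST rendering in the spirit of `code_sememe`
# using only the standard library.
import ast

def render(node: ast.AST) -> str:
    if isinstance(node, ast.Module):
        return ' '.join(render(stmt) for stmt in node.body)
    if isinstance(node, ast.FunctionDef):
        args = ', '.join(a.arg for a in node.args.args)
        body = ' '.join(render(stmt) for stmt in node.body)
        return f'def function[{node.name}, parameter[{args}]]: {body}'
    if isinstance(node, ast.Expr):          # e.g. a docstring statement
        return render(node.value)
    if isinstance(node, ast.Return):
        return f'return[{render(node.value) if node.value else "None"}]'
    if isinstance(node, ast.Constant):
        return f'constant[{node.value}]'
    if isinstance(node, ast.Name):
        return f'name[{node.id}]'
    if isinstance(node, ast.Attribute):     # self.clients -> name[self].clients
        return f'{render(node.value)}.{node.attr}'
    if isinstance(node, ast.Call):
        args = ', '.join(render(a) for a in node.args)
        return f'call[{render(node.func)}, parameter[{args}]]'
    return type(node).__name__              # fallback for node types not handled here

src = 'def greet(name):\n    """Say hello."""\n    return len(name)'
print(render(ast.parse(src)))
# def function[greet, parameter[name]]: constant[Say hello.] return[call[name[len], parameter[name[name]]]]
```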
def disconnected(self, client): """Call this method when a client disconnected.""" if client not in self.clients: # already disconnected. return self.clients.remove(client) self._log_disconnected(client) self._close(client)
def function[disconnected, parameter[self, client]]: constant[Call this method when a client disconnected.] if compare[name[client] <ast.NotIn object at 0x7da2590d7190> name[self].clients] begin[:] return[None] call[name[self].clients.remove, parameter[name[client]]] call[name[self]._log_disconnected, parameter[name[client]]] call[name[self]._close, parameter[name[client]]]
keyword[def] identifier[disconnected] ( identifier[self] , identifier[client] ): literal[string] keyword[if] identifier[client] keyword[not] keyword[in] identifier[self] . identifier[clients] : keyword[return] identifier[self] . identifier[clients] . identifier[remove] ( identifier[client] ) identifier[self] . identifier[_log_disconnected] ( identifier[client] ) identifier[self] . identifier[_close] ( identifier[client] )
def disconnected(self, client): """Call this method when a client disconnected.""" if client not in self.clients: # already disconnected. return # depends on [control=['if'], data=[]] self.clients.remove(client) self._log_disconnected(client) self._close(client)
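For orientation after this first record: the `token_type` field reads like a lexer-level view, with keywords, identifiers, and literal kinds tagged. A hedged sketch of how such a view could be approximated with the standard `tokenize` module follows; the `token_view` helper and its exact spacing are assumptions, not the dataset's own tooling.

```python
# Hypothetical sketch, not the dataset's generator: approximate the
# `token_type` column by classifying tokens with the standard library.
import io
import keyword
import tokenize

def token_view(source: str) -> str:
    out = []
    for tok in tokenize.generate_tokens(io.StringIO(source).readline):
        if tok.type == tokenize.NAME:
            kind = 'keyword' if keyword.iskeyword(tok.string) else 'identifier'
            out.append(f'{kind}[{tok.string}]')
        elif tok.type == tokenize.STRING:
            out.append('literal[string]')
        elif tok.type == tokenize.NUMBER:
            out.append('literal[int]')   # the dump tags all numbers literal[int]
        elif tok.type == tokenize.OP:
            out.append(tok.string)
    return ' '.join(out)

print(token_view('def f(client):\n    return client is not None\n'))
# keyword[def] identifier[f] ( identifier[client] ) : keyword[return] identifier[client] keyword[is] keyword[not] keyword[None]
```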
def set_restriction(self, command, user, event_types): """ Adds restriction for given `command`. :param command: command on which the restriction should be set. :type command: str :param user: username for which the restriction applies. :type user: str :param event_types: types of events for which the command is allowed. :type event_types: list """ self.commands_rights[command][user.lower()] = event_types if command not in self.triggers: self.triggers[command] = [EVT_PUBLIC, EVT_PRIVATE, EVT_NOTICE] if not hasattr(self, command): setattr(self, command, lambda msg: self.handle_rights(msg))
def function[set_restriction, parameter[self, command, user, event_types]]: constant[ Adds restriction for given `command`. :param command: command on which the restriction should be set. :type command: str :param user: username for which the restriction applies. :type user: str :param event_types: types of events for which the command is allowed. :type event_types: list ] call[call[name[self].commands_rights][name[command]]][call[name[user].lower, parameter[]]] assign[=] name[event_types] if compare[name[command] <ast.NotIn object at 0x7da2590d7190> name[self].triggers] begin[:] call[name[self].triggers][name[command]] assign[=] list[[<ast.Name object at 0x7da20c6ab310>, <ast.Name object at 0x7da20c6abd00>, <ast.Name object at 0x7da20c6a84f0>]] if <ast.UnaryOp object at 0x7da20c6a9630> begin[:] call[name[setattr], parameter[name[self], name[command], <ast.Lambda object at 0x7da20c6a8580>]]
keyword[def] identifier[set_restriction] ( identifier[self] , identifier[command] , identifier[user] , identifier[event_types] ): literal[string] identifier[self] . identifier[commands_rights] [ identifier[command] ][ identifier[user] . identifier[lower] ()]= identifier[event_types] keyword[if] identifier[command] keyword[not] keyword[in] identifier[self] . identifier[triggers] : identifier[self] . identifier[triggers] [ identifier[command] ]=[ identifier[EVT_PUBLIC] , identifier[EVT_PRIVATE] , identifier[EVT_NOTICE] ] keyword[if] keyword[not] identifier[hasattr] ( identifier[self] , identifier[command] ): identifier[setattr] ( identifier[self] , identifier[command] , keyword[lambda] identifier[msg] : identifier[self] . identifier[handle_rights] ( identifier[msg] ))
def set_restriction(self, command, user, event_types): """ Adds restriction for given `command`. :param command: command on which the restriction should be set. :type command: str :param user: username for which the restriction applies. :type user: str :param event_types: types of events for which the command is allowed. :type event_types: list """ self.commands_rights[command][user.lower()] = event_types if command not in self.triggers: self.triggers[command] = [EVT_PUBLIC, EVT_PRIVATE, EVT_NOTICE] # depends on [control=['if'], data=['command']] if not hasattr(self, command): setattr(self, command, lambda msg: self.handle_rights(msg)) # depends on [control=['if'], data=[]]
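The `code_dependency` field appears to be the source re-rendered (single-quoted strings, normalized layout) with `# depends on [control=[...], data=[...]]` markers appended after guarded blocks. The exact rule behind the `data=[...]` lists is not recoverable from this dump alone, so the sketch below is loosely related and clearly hypothetical: it only collects the bare names each guard's test expression reads, which is the kind of information those lists seem to encode.

```python
# Hypothetical sketch: this does NOT reproduce the dataset's data=[...] rule
# (which is not documented here); it only shows how names read by each
# control-flow guard can be collected from the AST.
import ast

def guard_names(source: str) -> None:
    """Print, for each if/while, the bare names its test expression reads."""
    for node in ast.walk(ast.parse(source)):
        if isinstance(node, (ast.If, ast.While)):
            names = sorted({n.id for n in ast.walk(node.test)
                            if isinstance(n, ast.Name)})
            kind = 'if' if isinstance(node, ast.If) else 'while'
            print(f"line {node.lineno}: control=['{kind}'], test reads {names}")

guard_names('if command not in self.triggers:\n    pass\n')
# line 1: control=['if'], test reads ['command', 'self']
```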
def use_active_sequence_rule_enabler_view(self): """Pass through to provider SequenceRuleEnablerLookupSession.use_active_sequence_rule_enabler_view""" self._operable_views['sequence_rule_enabler'] = ACTIVE # self._get_provider_session('sequence_rule_enabler_lookup_session') # To make sure the session is tracked for session in self._get_provider_sessions(): try: session.use_active_sequence_rule_enabler_view() except AttributeError: pass
def function[use_active_sequence_rule_enabler_view, parameter[self]]: constant[Pass through to provider SequenceRuleEnablerLookupSession.use_active_sequence_rule_enabler_view] call[name[self]._operable_views][constant[sequence_rule_enabler]] assign[=] name[ACTIVE] for taget[name[session]] in starred[call[name[self]._get_provider_sessions, parameter[]]] begin[:] <ast.Try object at 0x7da204961d80>
keyword[def] identifier[use_active_sequence_rule_enabler_view] ( identifier[self] ): literal[string] identifier[self] . identifier[_operable_views] [ literal[string] ]= identifier[ACTIVE] keyword[for] identifier[session] keyword[in] identifier[self] . identifier[_get_provider_sessions] (): keyword[try] : identifier[session] . identifier[use_active_sequence_rule_enabler_view] () keyword[except] identifier[AttributeError] : keyword[pass]
def use_active_sequence_rule_enabler_view(self): """Pass through to provider SequenceRuleEnablerLookupSession.use_active_sequence_rule_enabler_view""" self._operable_views['sequence_rule_enabler'] = ACTIVE # self._get_provider_session('sequence_rule_enabler_lookup_session') # To make sure the session is tracked for session in self._get_provider_sessions(): try: session.use_active_sequence_rule_enabler_view() # depends on [control=['try'], data=[]] except AttributeError: pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['session']]
def validate_model(cursor, model): """Validates the model using a series of checks on bits of the data.""" # Check the license is one valid for publication. _validate_license(model) _validate_roles(model) # Other required metadata includes: title, summary required_metadata = ('title', 'summary',) for metadata_key in required_metadata: if model.metadata.get(metadata_key) in [None, '', []]: raise exceptions.MissingRequiredMetadata(metadata_key) # Ensure that derived-from values are either None # or point at a live record in the archive. _validate_derived_from(cursor, model) # FIXME Valid language code? # Are the given 'subjects' _validate_subjects(cursor, model)
def function[validate_model, parameter[cursor, model]]: constant[Validates the model using a series of checks on bits of the data.] call[name[_validate_license], parameter[name[model]]] call[name[_validate_roles], parameter[name[model]]] variable[required_metadata] assign[=] tuple[[<ast.Constant object at 0x7da1b00d91b0>, <ast.Constant object at 0x7da1b00db310>]] for taget[name[metadata_key]] in starred[name[required_metadata]] begin[:] if compare[call[name[model].metadata.get, parameter[name[metadata_key]]] in list[[<ast.Constant object at 0x7da1b00b5c90>, <ast.Constant object at 0x7da1b00b5450>, <ast.List object at 0x7da1b00b6140>]]] begin[:] <ast.Raise object at 0x7da1b00b7880> call[name[_validate_derived_from], parameter[name[cursor], name[model]]] call[name[_validate_subjects], parameter[name[cursor], name[model]]]
keyword[def] identifier[validate_model] ( identifier[cursor] , identifier[model] ): literal[string] identifier[_validate_license] ( identifier[model] ) identifier[_validate_roles] ( identifier[model] ) identifier[required_metadata] =( literal[string] , literal[string] ,) keyword[for] identifier[metadata_key] keyword[in] identifier[required_metadata] : keyword[if] identifier[model] . identifier[metadata] . identifier[get] ( identifier[metadata_key] ) keyword[in] [ keyword[None] , literal[string] ,[]]: keyword[raise] identifier[exceptions] . identifier[MissingRequiredMetadata] ( identifier[metadata_key] ) identifier[_validate_derived_from] ( identifier[cursor] , identifier[model] ) identifier[_validate_subjects] ( identifier[cursor] , identifier[model] )
def validate_model(cursor, model): """Validates the model using a series of checks on bits of the data.""" # Check the license is one valid for publication. _validate_license(model) _validate_roles(model) # Other required metadata includes: title, summary required_metadata = ('title', 'summary') for metadata_key in required_metadata: if model.metadata.get(metadata_key) in [None, '', []]: raise exceptions.MissingRequiredMetadata(metadata_key) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['metadata_key']] # Ensure that derived-from values are either None # or point at a live record in the archive. _validate_derived_from(cursor, model) # FIXME Valid language code? # Are the given 'subjects' _validate_subjects(cursor, model)
def get_template_id(template_name, auth, url): """ Helper function takes str input of folder name and returns str numerical id of the folder. :param folder_name: str name of the folder :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass :return: str numerical id of the folder :rtype: str >>> from pyhpeimc.auth import * >>> from pyhpeimc.plat.icc import * >>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin") >>> file_id = get_template_id('CW7SNMP.cfg', auth.creds, auth.url) >>> assert type(file_id) is str """ object_list = get_cfg_template(auth=auth, url=url) for object in object_list: if object['confFileName'] == template_name: return object['confFileId'] return "template not found"
def function[get_template_id, parameter[template_name, auth, url]]: constant[ Helper function takes str input of folder name and returns str numerical id of the folder. :param folder_name: str name of the folder :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass :return: str numerical id of the folder :rtype: str >>> from pyhpeimc.auth import * >>> from pyhpeimc.plat.icc import * >>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin") >>> file_id = get_template_id('CW7SNMP.cfg', auth.creds, auth.url) >>> assert type(file_id) is str ] variable[object_list] assign[=] call[name[get_cfg_template], parameter[]] for taget[name[object]] in starred[name[object_list]] begin[:] if compare[call[name[object]][constant[confFileName]] equal[==] name[template_name]] begin[:] return[call[name[object]][constant[confFileId]]] return[constant[template not found]]
keyword[def] identifier[get_template_id] ( identifier[template_name] , identifier[auth] , identifier[url] ): literal[string] identifier[object_list] = identifier[get_cfg_template] ( identifier[auth] = identifier[auth] , identifier[url] = identifier[url] ) keyword[for] identifier[object] keyword[in] identifier[object_list] : keyword[if] identifier[object] [ literal[string] ]== identifier[template_name] : keyword[return] identifier[object] [ literal[string] ] keyword[return] literal[string]
def get_template_id(template_name, auth, url): """ Helper function takes str input of folder name and returns str numerical id of the folder. :param folder_name: str name of the folder :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass :return: str numerical id of the folder :rtype: str >>> from pyhpeimc.auth import * >>> from pyhpeimc.plat.icc import * >>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin") >>> file_id = get_template_id('CW7SNMP.cfg', auth.creds, auth.url) >>> assert type(file_id) is str """ object_list = get_cfg_template(auth=auth, url=url) for object in object_list: if object['confFileName'] == template_name: return object['confFileId'] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['object']] return 'template not found'
def StoreResults(self, responses): """Stores the responses.""" client_id = responses.request.client_id if responses.success: logging.info("Client %s has a file %s.", client_id, self.args.filename) else: logging.info("Client %s has no file %s.", client_id, self.args.filename) self.MarkClientDone(client_id)
def function[StoreResults, parameter[self, responses]]: constant[Stores the responses.] variable[client_id] assign[=] name[responses].request.client_id if name[responses].success begin[:] call[name[logging].info, parameter[constant[Client %s has a file %s.], name[client_id], name[self].args.filename]] call[name[self].MarkClientDone, parameter[name[client_id]]]
keyword[def] identifier[StoreResults] ( identifier[self] , identifier[responses] ): literal[string] identifier[client_id] = identifier[responses] . identifier[request] . identifier[client_id] keyword[if] identifier[responses] . identifier[success] : identifier[logging] . identifier[info] ( literal[string] , identifier[client_id] , identifier[self] . identifier[args] . identifier[filename] ) keyword[else] : identifier[logging] . identifier[info] ( literal[string] , identifier[client_id] , identifier[self] . identifier[args] . identifier[filename] ) identifier[self] . identifier[MarkClientDone] ( identifier[client_id] )
def StoreResults(self, responses): """Stores the responses.""" client_id = responses.request.client_id if responses.success: logging.info('Client %s has a file %s.', client_id, self.args.filename) # depends on [control=['if'], data=[]] else: logging.info('Client %s has no file %s.', client_id, self.args.filename) self.MarkClientDone(client_id)
def get(self, request, *args, **kwargs): """ Return a :class:`.django.http.JsonResponse`. Example:: { 'results': [ { 'text': "foo", 'id': 123 } ], 'more': true } """ self.widget = self.get_widget_or_404() self.term = kwargs.get('term', request.GET.get('term', '')) self.object_list = self.get_queryset() context = self.get_context_data() return JsonResponse({ 'results': [ { 'text': self.widget.label_from_instance(obj), 'id': obj.pk, } for obj in context['object_list'] ], 'more': context['page_obj'].has_next() })
def function[get, parameter[self, request]]: constant[ Return a :class:`.django.http.JsonResponse`. Example:: { 'results': [ { 'text': "foo", 'id': 123 } ], 'more': true } ] name[self].widget assign[=] call[name[self].get_widget_or_404, parameter[]] name[self].term assign[=] call[name[kwargs].get, parameter[constant[term], call[name[request].GET.get, parameter[constant[term], constant[]]]]] name[self].object_list assign[=] call[name[self].get_queryset, parameter[]] variable[context] assign[=] call[name[self].get_context_data, parameter[]] return[call[name[JsonResponse], parameter[dictionary[[<ast.Constant object at 0x7da1b074a410>, <ast.Constant object at 0x7da1b074b5b0>], [<ast.ListComp object at 0x7da1b074a590>, <ast.Call object at 0x7da1b074b400>]]]]]
keyword[def] identifier[get] ( identifier[self] , identifier[request] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[self] . identifier[widget] = identifier[self] . identifier[get_widget_or_404] () identifier[self] . identifier[term] = identifier[kwargs] . identifier[get] ( literal[string] , identifier[request] . identifier[GET] . identifier[get] ( literal[string] , literal[string] )) identifier[self] . identifier[object_list] = identifier[self] . identifier[get_queryset] () identifier[context] = identifier[self] . identifier[get_context_data] () keyword[return] identifier[JsonResponse] ({ literal[string] :[ { literal[string] : identifier[self] . identifier[widget] . identifier[label_from_instance] ( identifier[obj] ), literal[string] : identifier[obj] . identifier[pk] , } keyword[for] identifier[obj] keyword[in] identifier[context] [ literal[string] ] ], literal[string] : identifier[context] [ literal[string] ]. identifier[has_next] () })
def get(self, request, *args, **kwargs): """ Return a :class:`.django.http.JsonResponse`. Example:: { 'results': [ { 'text': "foo", 'id': 123 } ], 'more': true } """ self.widget = self.get_widget_or_404() self.term = kwargs.get('term', request.GET.get('term', '')) self.object_list = self.get_queryset() context = self.get_context_data() return JsonResponse({'results': [{'text': self.widget.label_from_instance(obj), 'id': obj.pk} for obj in context['object_list']], 'more': context['page_obj'].has_next()})
def send_rally_point(self, i): '''send rally points from fenceloader''' p = self.rallyloader.rally_point(i) p.target_system = self.target_system p.target_component = self.target_component self.master.mav.send(p)
def function[send_rally_point, parameter[self, i]]: constant[send rally points from fenceloader] variable[p] assign[=] call[name[self].rallyloader.rally_point, parameter[name[i]]] name[p].target_system assign[=] name[self].target_system name[p].target_component assign[=] name[self].target_component call[name[self].master.mav.send, parameter[name[p]]]
keyword[def] identifier[send_rally_point] ( identifier[self] , identifier[i] ): literal[string] identifier[p] = identifier[self] . identifier[rallyloader] . identifier[rally_point] ( identifier[i] ) identifier[p] . identifier[target_system] = identifier[self] . identifier[target_system] identifier[p] . identifier[target_component] = identifier[self] . identifier[target_component] identifier[self] . identifier[master] . identifier[mav] . identifier[send] ( identifier[p] )
def send_rally_point(self, i): """send rally points from fenceloader""" p = self.rallyloader.rally_point(i) p.target_system = self.target_system p.target_component = self.target_component self.master.mav.send(p)
def reconnect(): """ reconnect signals """ post_save.connect(node_created_handler, sender=Node) node_status_changed.connect(node_status_changed_handler) pre_delete.connect(node_deleted_handler, sender=Node)
def function[reconnect, parameter[]]: constant[ reconnect signals ] call[name[post_save].connect, parameter[name[node_created_handler]]] call[name[node_status_changed].connect, parameter[name[node_status_changed_handler]]] call[name[pre_delete].connect, parameter[name[node_deleted_handler]]]
keyword[def] identifier[reconnect] (): literal[string] identifier[post_save] . identifier[connect] ( identifier[node_created_handler] , identifier[sender] = identifier[Node] ) identifier[node_status_changed] . identifier[connect] ( identifier[node_status_changed_handler] ) identifier[pre_delete] . identifier[connect] ( identifier[node_deleted_handler] , identifier[sender] = identifier[Node] )
def reconnect(): """ reconnect signals """ post_save.connect(node_created_handler, sender=Node) node_status_changed.connect(node_status_changed_handler) pre_delete.connect(node_deleted_handler, sender=Node)
def time_sp(self): """ Writing specifies the amount of time the motor will run when using the `run-timed` command. Reading returns the current value. Units are in milliseconds. """ self._time_sp, value = self.get_attr_int(self._time_sp, 'time_sp') return value
def function[time_sp, parameter[self]]: constant[ Writing specifies the amount of time the motor will run when using the `run-timed` command. Reading returns the current value. Units are in milliseconds. ] <ast.Tuple object at 0x7da1b1646e60> assign[=] call[name[self].get_attr_int, parameter[name[self]._time_sp, constant[time_sp]]] return[name[value]]
keyword[def] identifier[time_sp] ( identifier[self] ): literal[string] identifier[self] . identifier[_time_sp] , identifier[value] = identifier[self] . identifier[get_attr_int] ( identifier[self] . identifier[_time_sp] , literal[string] ) keyword[return] identifier[value]
def time_sp(self): """ Writing specifies the amount of time the motor will run when using the `run-timed` command. Reading returns the current value. Units are in milliseconds. """ (self._time_sp, value) = self.get_attr_int(self._time_sp, 'time_sp') return value
def add_path(self, nodes, t=None): """Add a path at time t. Parameters ---------- nodes : iterable container A container of nodes. t : snapshot id (default=None) See Also -------- add_path, add_cycle Examples -------- >>> G = dn.DynGraph() >>> G.add_path([0,1,2,3], t=0) """ nlist = list(nodes) interaction = zip(nlist[:-1], nlist[1:]) self.add_interactions_from(interaction, t)
def function[add_path, parameter[self, nodes, t]]: constant[Add a path at time t. Parameters ---------- nodes : iterable container A container of nodes. t : snapshot id (default=None) See Also -------- add_path, add_cycle Examples -------- >>> G = dn.DynGraph() >>> G.add_path([0,1,2,3], t=0) ] variable[nlist] assign[=] call[name[list], parameter[name[nodes]]] variable[interaction] assign[=] call[name[zip], parameter[call[name[nlist]][<ast.Slice object at 0x7da1b0569720>], call[name[nlist]][<ast.Slice object at 0x7da1b0569630>]]] call[name[self].add_interactions_from, parameter[name[interaction], name[t]]]
keyword[def] identifier[add_path] ( identifier[self] , identifier[nodes] , identifier[t] = keyword[None] ): literal[string] identifier[nlist] = identifier[list] ( identifier[nodes] ) identifier[interaction] = identifier[zip] ( identifier[nlist] [:- literal[int] ], identifier[nlist] [ literal[int] :]) identifier[self] . identifier[add_interactions_from] ( identifier[interaction] , identifier[t] )
def add_path(self, nodes, t=None): """Add a path at time t. Parameters ---------- nodes : iterable container A container of nodes. t : snapshot id (default=None) See Also -------- add_path, add_cycle Examples -------- >>> G = dn.DynGraph() >>> G.add_path([0,1,2,3], t=0) """ nlist = list(nodes) interaction = zip(nlist[:-1], nlist[1:]) self.add_interactions_from(interaction, t)
def _iter_names(self): """ Generate a key/value pair for each name in this table. The key is a (platform_id, name_id) 2-tuple and the value is the unicode text corresponding to that key. """ table_format, count, strings_offset = self._table_header table_bytes = self._table_bytes for idx in range(count): platform_id, name_id, name = self._read_name( table_bytes, idx, strings_offset ) if name is None: continue yield ((platform_id, name_id), name)
def function[_iter_names, parameter[self]]: constant[ Generate a key/value pair for each name in this table. The key is a (platform_id, name_id) 2-tuple and the value is the unicode text corresponding to that key. ] <ast.Tuple object at 0x7da20c991e40> assign[=] name[self]._table_header variable[table_bytes] assign[=] name[self]._table_bytes for taget[name[idx]] in starred[call[name[range], parameter[name[count]]]] begin[:] <ast.Tuple object at 0x7da20c993c40> assign[=] call[name[self]._read_name, parameter[name[table_bytes], name[idx], name[strings_offset]]] if compare[name[name] is constant[None]] begin[:] continue <ast.Yield object at 0x7da20c990c70>
keyword[def] identifier[_iter_names] ( identifier[self] ): literal[string] identifier[table_format] , identifier[count] , identifier[strings_offset] = identifier[self] . identifier[_table_header] identifier[table_bytes] = identifier[self] . identifier[_table_bytes] keyword[for] identifier[idx] keyword[in] identifier[range] ( identifier[count] ): identifier[platform_id] , identifier[name_id] , identifier[name] = identifier[self] . identifier[_read_name] ( identifier[table_bytes] , identifier[idx] , identifier[strings_offset] ) keyword[if] identifier[name] keyword[is] keyword[None] : keyword[continue] keyword[yield] (( identifier[platform_id] , identifier[name_id] ), identifier[name] )
def _iter_names(self): """ Generate a key/value pair for each name in this table. The key is a (platform_id, name_id) 2-tuple and the value is the unicode text corresponding to that key. """ (table_format, count, strings_offset) = self._table_header table_bytes = self._table_bytes for idx in range(count): (platform_id, name_id, name) = self._read_name(table_bytes, idx, strings_offset) if name is None: continue # depends on [control=['if'], data=[]] yield ((platform_id, name_id), name) # depends on [control=['for'], data=['idx']]
def generate_minion_id(): ''' Return only first element of the hostname from all possible list. :return: ''' try: ret = salt.utils.stringutils.to_unicode(_generate_minion_id().first()) except TypeError: ret = None return ret or 'localhost'
def function[generate_minion_id, parameter[]]: constant[ Return only first element of the hostname from all possible list. :return: ] <ast.Try object at 0x7da1b2089270> return[<ast.BoolOp object at 0x7da1b208aec0>]
keyword[def] identifier[generate_minion_id] (): literal[string] keyword[try] : identifier[ret] = identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_unicode] ( identifier[_generate_minion_id] (). identifier[first] ()) keyword[except] identifier[TypeError] : identifier[ret] = keyword[None] keyword[return] identifier[ret] keyword[or] literal[string]
def generate_minion_id(): """ Return only first element of the hostname from all possible list. :return: """ try: ret = salt.utils.stringutils.to_unicode(_generate_minion_id().first()) # depends on [control=['try'], data=[]] except TypeError: ret = None # depends on [control=['except'], data=[]] return ret or 'localhost'
def get_next_name(old, fmt='%i'): """Return the next name that numerically follows `old`""" nums = re.findall('\d+', old) if not nums: raise ValueError("Could not get the next name because the old name " "has no numbers in it") num0 = nums[-1] num1 = str(int(num0) + 1) return old[::-1].replace(num0[::-1], num1[::-1], 1)[::-1]
def function[get_next_name, parameter[old, fmt]]: constant[Return the next name that numerically follows `old`] variable[nums] assign[=] call[name[re].findall, parameter[constant[\d+], name[old]]] if <ast.UnaryOp object at 0x7da1b2372140> begin[:] <ast.Raise object at 0x7da1b2372530> variable[num0] assign[=] call[name[nums]][<ast.UnaryOp object at 0x7da2054a7610>] variable[num1] assign[=] call[name[str], parameter[binary_operation[call[name[int], parameter[name[num0]]] + constant[1]]]] return[call[call[call[name[old]][<ast.Slice object at 0x7da2054a7910>].replace, parameter[call[name[num0]][<ast.Slice object at 0x7da2054a54e0>], call[name[num1]][<ast.Slice object at 0x7da2054a6b30>], constant[1]]]][<ast.Slice object at 0x7da2054a5b70>]]
keyword[def] identifier[get_next_name] ( identifier[old] , identifier[fmt] = literal[string] ): literal[string] identifier[nums] = identifier[re] . identifier[findall] ( literal[string] , identifier[old] ) keyword[if] keyword[not] identifier[nums] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] ) identifier[num0] = identifier[nums] [- literal[int] ] identifier[num1] = identifier[str] ( identifier[int] ( identifier[num0] )+ literal[int] ) keyword[return] identifier[old] [::- literal[int] ]. identifier[replace] ( identifier[num0] [::- literal[int] ], identifier[num1] [::- literal[int] ], literal[int] )[::- literal[int] ]
def get_next_name(old, fmt='%i'): """Return the next name that numerically follows `old`""" nums = re.findall('\\d+', old) if not nums: raise ValueError('Could not get the next name because the old name has no numbers in it') # depends on [control=['if'], data=[]] num0 = nums[-1] num1 = str(int(num0) + 1) return old[::-1].replace(num0[::-1], num1[::-1], 1)[::-1]
def where_earliest(cls, user_id): """ Get earilest session by created_at timestamp """ return cls.query.filter_by(user_id=user_id)\ .order_by(cls.created_at.asc()).first()
def function[where_earliest, parameter[cls, user_id]]: constant[ Get earilest session by created_at timestamp ] return[call[call[call[name[cls].query.filter_by, parameter[]].order_by, parameter[call[name[cls].created_at.asc, parameter[]]]].first, parameter[]]]
keyword[def] identifier[where_earliest] ( identifier[cls] , identifier[user_id] ): literal[string] keyword[return] identifier[cls] . identifier[query] . identifier[filter_by] ( identifier[user_id] = identifier[user_id] ). identifier[order_by] ( identifier[cls] . identifier[created_at] . identifier[asc] ()). identifier[first] ()
def where_earliest(cls, user_id): """ Get earilest session by created_at timestamp """ return cls.query.filter_by(user_id=user_id).order_by(cls.created_at.asc()).first()
def get_brightness(self, refresh=False): """Get dimmer brightness. Refresh data from Vera if refresh is True, otherwise use local cache. Refresh is only needed if you're not using subscriptions. Converts the Vera level property for dimmable lights from a percentage to the 0 - 255 scale used by HA. """ if refresh: self.refresh() brightness = 0 percent = self.level if percent > 0: brightness = round(percent * 2.55) return int(brightness)
def function[get_brightness, parameter[self, refresh]]: constant[Get dimmer brightness. Refresh data from Vera if refresh is True, otherwise use local cache. Refresh is only needed if you're not using subscriptions. Converts the Vera level property for dimmable lights from a percentage to the 0 - 255 scale used by HA. ] if name[refresh] begin[:] call[name[self].refresh, parameter[]] variable[brightness] assign[=] constant[0] variable[percent] assign[=] name[self].level if compare[name[percent] greater[>] constant[0]] begin[:] variable[brightness] assign[=] call[name[round], parameter[binary_operation[name[percent] * constant[2.55]]]] return[call[name[int], parameter[name[brightness]]]]
keyword[def] identifier[get_brightness] ( identifier[self] , identifier[refresh] = keyword[False] ): literal[string] keyword[if] identifier[refresh] : identifier[self] . identifier[refresh] () identifier[brightness] = literal[int] identifier[percent] = identifier[self] . identifier[level] keyword[if] identifier[percent] > literal[int] : identifier[brightness] = identifier[round] ( identifier[percent] * literal[int] ) keyword[return] identifier[int] ( identifier[brightness] )
def get_brightness(self, refresh=False): """Get dimmer brightness. Refresh data from Vera if refresh is True, otherwise use local cache. Refresh is only needed if you're not using subscriptions. Converts the Vera level property for dimmable lights from a percentage to the 0 - 255 scale used by HA. """ if refresh: self.refresh() # depends on [control=['if'], data=[]] brightness = 0 percent = self.level if percent > 0: brightness = round(percent * 2.55) # depends on [control=['if'], data=['percent']] return int(brightness)
def vb_get_network_addresses(machine_name=None, machine=None, wait_for_pattern=None): ''' TODO distinguish between private and public addresses A valid machine_name or a machine is needed to make this work! !!! Guest prerequisite: GuestAddition !!! Thanks to Shrikant Havale for the StackOverflow answer http://stackoverflow.com/a/29335390 More information on guest properties: https://www.virtualbox.org/manual/ch04.html#guestadd-guestprops @param machine_name: @type machine_name: str @param machine: @type machine: IMachine @return: All the IPv4 addresses we could get @rtype: str[] ''' if machine_name: machine = vb_get_box().findMachine(machine_name) ip_addresses = [] log.debug("checking for power on:") if machine.state == _virtualboxManager.constants.MachineState_Running: log.debug("got power on:") #wait on an arbitrary named property #for instance use a dhcp client script to set a property via VBoxControl guestproperty set dhcp_done 1 if wait_for_pattern and not machine.getGuestPropertyValue(wait_for_pattern): log.debug("waiting for pattern:%s:", wait_for_pattern) return None _total_slots = machine.getGuestPropertyValue('/VirtualBox/GuestInfo/Net/Count') #upon dhcp the net count drops to 0 and it takes some seconds for it to be set again if not _total_slots: log.debug("waiting for net count:%s:", wait_for_pattern) return None try: total_slots = int(_total_slots) for i in range(total_slots): try: address = machine.getGuestPropertyValue('/VirtualBox/GuestInfo/Net/{0}/V4/IP'.format(i)) if address: ip_addresses.append(address) except Exception as e: log.debug(e.message) except ValueError as e: log.debug(e.message) return None log.debug("returning ip_addresses:%s:", ip_addresses) return ip_addresses
def function[vb_get_network_addresses, parameter[machine_name, machine, wait_for_pattern]]: constant[ TODO distinguish between private and public addresses A valid machine_name or a machine is needed to make this work! !!! Guest prerequisite: GuestAddition !!! Thanks to Shrikant Havale for the StackOverflow answer http://stackoverflow.com/a/29335390 More information on guest properties: https://www.virtualbox.org/manual/ch04.html#guestadd-guestprops @param machine_name: @type machine_name: str @param machine: @type machine: IMachine @return: All the IPv4 addresses we could get @rtype: str[] ] if name[machine_name] begin[:] variable[machine] assign[=] call[call[name[vb_get_box], parameter[]].findMachine, parameter[name[machine_name]]] variable[ip_addresses] assign[=] list[[]] call[name[log].debug, parameter[constant[checking for power on:]]] if compare[name[machine].state equal[==] name[_virtualboxManager].constants.MachineState_Running] begin[:] call[name[log].debug, parameter[constant[got power on:]]] if <ast.BoolOp object at 0x7da1b1c49a20> begin[:] call[name[log].debug, parameter[constant[waiting for pattern:%s:], name[wait_for_pattern]]] return[constant[None]] variable[_total_slots] assign[=] call[name[machine].getGuestPropertyValue, parameter[constant[/VirtualBox/GuestInfo/Net/Count]]] if <ast.UnaryOp object at 0x7da1b1c64a90> begin[:] call[name[log].debug, parameter[constant[waiting for net count:%s:], name[wait_for_pattern]]] return[constant[None]] <ast.Try object at 0x7da1b1c679d0> call[name[log].debug, parameter[constant[returning ip_addresses:%s:], name[ip_addresses]]] return[name[ip_addresses]]
keyword[def] identifier[vb_get_network_addresses] ( identifier[machine_name] = keyword[None] , identifier[machine] = keyword[None] , identifier[wait_for_pattern] = keyword[None] ): literal[string] keyword[if] identifier[machine_name] : identifier[machine] = identifier[vb_get_box] (). identifier[findMachine] ( identifier[machine_name] ) identifier[ip_addresses] =[] identifier[log] . identifier[debug] ( literal[string] ) keyword[if] identifier[machine] . identifier[state] == identifier[_virtualboxManager] . identifier[constants] . identifier[MachineState_Running] : identifier[log] . identifier[debug] ( literal[string] ) keyword[if] identifier[wait_for_pattern] keyword[and] keyword[not] identifier[machine] . identifier[getGuestPropertyValue] ( identifier[wait_for_pattern] ): identifier[log] . identifier[debug] ( literal[string] , identifier[wait_for_pattern] ) keyword[return] keyword[None] identifier[_total_slots] = identifier[machine] . identifier[getGuestPropertyValue] ( literal[string] ) keyword[if] keyword[not] identifier[_total_slots] : identifier[log] . identifier[debug] ( literal[string] , identifier[wait_for_pattern] ) keyword[return] keyword[None] keyword[try] : identifier[total_slots] = identifier[int] ( identifier[_total_slots] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[total_slots] ): keyword[try] : identifier[address] = identifier[machine] . identifier[getGuestPropertyValue] ( literal[string] . identifier[format] ( identifier[i] )) keyword[if] identifier[address] : identifier[ip_addresses] . identifier[append] ( identifier[address] ) keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[log] . identifier[debug] ( identifier[e] . identifier[message] ) keyword[except] identifier[ValueError] keyword[as] identifier[e] : identifier[log] . identifier[debug] ( identifier[e] . identifier[message] ) keyword[return] keyword[None] identifier[log] . identifier[debug] ( literal[string] , identifier[ip_addresses] ) keyword[return] identifier[ip_addresses]
def vb_get_network_addresses(machine_name=None, machine=None, wait_for_pattern=None): """ TODO distinguish between private and public addresses A valid machine_name or a machine is needed to make this work! !!! Guest prerequisite: GuestAddition !!! Thanks to Shrikant Havale for the StackOverflow answer http://stackoverflow.com/a/29335390 More information on guest properties: https://www.virtualbox.org/manual/ch04.html#guestadd-guestprops @param machine_name: @type machine_name: str @param machine: @type machine: IMachine @return: All the IPv4 addresses we could get @rtype: str[] """ if machine_name: machine = vb_get_box().findMachine(machine_name) # depends on [control=['if'], data=[]] ip_addresses = [] log.debug('checking for power on:') if machine.state == _virtualboxManager.constants.MachineState_Running: log.debug('got power on:') #wait on an arbitrary named property #for instance use a dhcp client script to set a property via VBoxControl guestproperty set dhcp_done 1 if wait_for_pattern and (not machine.getGuestPropertyValue(wait_for_pattern)): log.debug('waiting for pattern:%s:', wait_for_pattern) return None # depends on [control=['if'], data=[]] _total_slots = machine.getGuestPropertyValue('/VirtualBox/GuestInfo/Net/Count') #upon dhcp the net count drops to 0 and it takes some seconds for it to be set again if not _total_slots: log.debug('waiting for net count:%s:', wait_for_pattern) return None # depends on [control=['if'], data=[]] try: total_slots = int(_total_slots) for i in range(total_slots): try: address = machine.getGuestPropertyValue('/VirtualBox/GuestInfo/Net/{0}/V4/IP'.format(i)) if address: ip_addresses.append(address) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except Exception as e: log.debug(e.message) # depends on [control=['except'], data=['e']] # depends on [control=['for'], data=['i']] # depends on [control=['try'], data=[]] except ValueError as e: log.debug(e.message) return None # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=[]] log.debug('returning ip_addresses:%s:', ip_addresses) return ip_addresses
def tile_to_quadkey(tile, level): """Transform tile coordinates to a quadkey""" tile_x = tile[0] tile_y = tile[1] quadkey = "" for i in xrange(level): bit = level - i digit = ord('0') mask = 1 << (bit - 1) # if (bit - 1) > 0 else 1 >> (bit - 1) if (tile_x & mask) is not 0: digit += 1 if (tile_y & mask) is not 0: digit += 2 quadkey += chr(digit) return quadkey
def function[tile_to_quadkey, parameter[tile, level]]: constant[Transform tile coordinates to a quadkey] variable[tile_x] assign[=] call[name[tile]][constant[0]] variable[tile_y] assign[=] call[name[tile]][constant[1]] variable[quadkey] assign[=] constant[] for taget[name[i]] in starred[call[name[xrange], parameter[name[level]]]] begin[:] variable[bit] assign[=] binary_operation[name[level] - name[i]] variable[digit] assign[=] call[name[ord], parameter[constant[0]]] variable[mask] assign[=] binary_operation[constant[1] <ast.LShift object at 0x7da2590d69e0> binary_operation[name[bit] - constant[1]]] if compare[binary_operation[name[tile_x] <ast.BitAnd object at 0x7da2590d6b60> name[mask]] is_not constant[0]] begin[:] <ast.AugAssign object at 0x7da1b254f820> if compare[binary_operation[name[tile_y] <ast.BitAnd object at 0x7da2590d6b60> name[mask]] is_not constant[0]] begin[:] <ast.AugAssign object at 0x7da1b254f370> <ast.AugAssign object at 0x7da1b254ea40> return[name[quadkey]]
keyword[def] identifier[tile_to_quadkey] ( identifier[tile] , identifier[level] ): literal[string] identifier[tile_x] = identifier[tile] [ literal[int] ] identifier[tile_y] = identifier[tile] [ literal[int] ] identifier[quadkey] = literal[string] keyword[for] identifier[i] keyword[in] identifier[xrange] ( identifier[level] ): identifier[bit] = identifier[level] - identifier[i] identifier[digit] = identifier[ord] ( literal[string] ) identifier[mask] = literal[int] <<( identifier[bit] - literal[int] ) keyword[if] ( identifier[tile_x] & identifier[mask] ) keyword[is] keyword[not] literal[int] : identifier[digit] += literal[int] keyword[if] ( identifier[tile_y] & identifier[mask] ) keyword[is] keyword[not] literal[int] : identifier[digit] += literal[int] identifier[quadkey] += identifier[chr] ( identifier[digit] ) keyword[return] identifier[quadkey]
def tile_to_quadkey(tile, level): """Transform tile coordinates to a quadkey""" tile_x = tile[0] tile_y = tile[1] quadkey = '' for i in xrange(level): bit = level - i digit = ord('0') mask = 1 << bit - 1 # if (bit - 1) > 0 else 1 >> (bit - 1) if tile_x & mask is not 0: digit += 1 # depends on [control=['if'], data=[]] if tile_y & mask is not 0: digit += 2 # depends on [control=['if'], data=[]] quadkey += chr(digit) # depends on [control=['for'], data=['i']] return quadkey
def parse_bind(bind): """Parses a connection string and creates SQL trace metadata""" if isinstance(bind, Connection): engine = bind.engine else: engine = bind m = re.match(r"Engine\((.*?)\)", str(engine)) if m is not None: u = urlparse(m.group(1)) # Add Scheme to uses_netloc or // will be missing from url. uses_netloc.append(u.scheme) safe_url = "" if u.password is None: safe_url = u.geturl() else: # Strip password from URL host_info = u.netloc.rpartition('@')[-1] parts = u._replace(netloc='{}@{}'.format(u.username, host_info)) safe_url = parts.geturl() sql = {} sql['database_type'] = u.scheme sql['url'] = safe_url if u.username is not None: sql['user'] = "{}".format(u.username) return sql
def function[parse_bind, parameter[bind]]: constant[Parses a connection string and creates SQL trace metadata] if call[name[isinstance], parameter[name[bind], name[Connection]]] begin[:] variable[engine] assign[=] name[bind].engine variable[m] assign[=] call[name[re].match, parameter[constant[Engine\((.*?)\)], call[name[str], parameter[name[engine]]]]] if compare[name[m] is_not constant[None]] begin[:] variable[u] assign[=] call[name[urlparse], parameter[call[name[m].group, parameter[constant[1]]]]] call[name[uses_netloc].append, parameter[name[u].scheme]] variable[safe_url] assign[=] constant[] if compare[name[u].password is constant[None]] begin[:] variable[safe_url] assign[=] call[name[u].geturl, parameter[]] variable[sql] assign[=] dictionary[[], []] call[name[sql]][constant[database_type]] assign[=] name[u].scheme call[name[sql]][constant[url]] assign[=] name[safe_url] if compare[name[u].username is_not constant[None]] begin[:] call[name[sql]][constant[user]] assign[=] call[constant[{}].format, parameter[name[u].username]] return[name[sql]]
keyword[def] identifier[parse_bind] ( identifier[bind] ): literal[string] keyword[if] identifier[isinstance] ( identifier[bind] , identifier[Connection] ): identifier[engine] = identifier[bind] . identifier[engine] keyword[else] : identifier[engine] = identifier[bind] identifier[m] = identifier[re] . identifier[match] ( literal[string] , identifier[str] ( identifier[engine] )) keyword[if] identifier[m] keyword[is] keyword[not] keyword[None] : identifier[u] = identifier[urlparse] ( identifier[m] . identifier[group] ( literal[int] )) identifier[uses_netloc] . identifier[append] ( identifier[u] . identifier[scheme] ) identifier[safe_url] = literal[string] keyword[if] identifier[u] . identifier[password] keyword[is] keyword[None] : identifier[safe_url] = identifier[u] . identifier[geturl] () keyword[else] : identifier[host_info] = identifier[u] . identifier[netloc] . identifier[rpartition] ( literal[string] )[- literal[int] ] identifier[parts] = identifier[u] . identifier[_replace] ( identifier[netloc] = literal[string] . identifier[format] ( identifier[u] . identifier[username] , identifier[host_info] )) identifier[safe_url] = identifier[parts] . identifier[geturl] () identifier[sql] ={} identifier[sql] [ literal[string] ]= identifier[u] . identifier[scheme] identifier[sql] [ literal[string] ]= identifier[safe_url] keyword[if] identifier[u] . identifier[username] keyword[is] keyword[not] keyword[None] : identifier[sql] [ literal[string] ]= literal[string] . identifier[format] ( identifier[u] . identifier[username] ) keyword[return] identifier[sql]
def parse_bind(bind): """Parses a connection string and creates SQL trace metadata""" if isinstance(bind, Connection): engine = bind.engine # depends on [control=['if'], data=[]] else: engine = bind m = re.match('Engine\\((.*?)\\)', str(engine)) if m is not None: u = urlparse(m.group(1)) # Add Scheme to uses_netloc or // will be missing from url. uses_netloc.append(u.scheme) safe_url = '' if u.password is None: safe_url = u.geturl() # depends on [control=['if'], data=[]] else: # Strip password from URL host_info = u.netloc.rpartition('@')[-1] parts = u._replace(netloc='{}@{}'.format(u.username, host_info)) safe_url = parts.geturl() sql = {} sql['database_type'] = u.scheme sql['url'] = safe_url if u.username is not None: sql['user'] = '{}'.format(u.username) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['m']] return sql
def process_placeholder_image(self): """ Process the field's placeholder image. Ensures the placeholder image has been saved to the same storage class as the field in a top level folder with a name specified by settings.VERSATILEIMAGEFIELD_SETTINGS['placeholder_directory_name'] This should be called by the VersatileImageFileDescriptor __get__. If self.placeholder_image_name is already set it just returns right away. """ if self.placeholder_image_name: return placeholder_image_name = None placeholder_image = self.placeholder_image if placeholder_image: if isinstance(placeholder_image, OnStoragePlaceholderImage): name = placeholder_image.path else: name = placeholder_image.image_data.name placeholder_image_name = os.path.join( VERSATILEIMAGEFIELD_PLACEHOLDER_DIRNAME, name ) if not self.storage.exists(placeholder_image_name): self.storage.save( placeholder_image_name, placeholder_image.image_data ) self.placeholder_image_name = placeholder_image_name
def function[process_placeholder_image, parameter[self]]: constant[ Process the field's placeholder image. Ensures the placeholder image has been saved to the same storage class as the field in a top level folder with a name specified by settings.VERSATILEIMAGEFIELD_SETTINGS['placeholder_directory_name'] This should be called by the VersatileImageFileDescriptor __get__. If self.placeholder_image_name is already set it just returns right away. ] if name[self].placeholder_image_name begin[:] return[None] variable[placeholder_image_name] assign[=] constant[None] variable[placeholder_image] assign[=] name[self].placeholder_image if name[placeholder_image] begin[:] if call[name[isinstance], parameter[name[placeholder_image], name[OnStoragePlaceholderImage]]] begin[:] variable[name] assign[=] name[placeholder_image].path variable[placeholder_image_name] assign[=] call[name[os].path.join, parameter[name[VERSATILEIMAGEFIELD_PLACEHOLDER_DIRNAME], name[name]]] if <ast.UnaryOp object at 0x7da1b26ae3e0> begin[:] call[name[self].storage.save, parameter[name[placeholder_image_name], name[placeholder_image].image_data]] name[self].placeholder_image_name assign[=] name[placeholder_image_name]
keyword[def] identifier[process_placeholder_image] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[placeholder_image_name] : keyword[return] identifier[placeholder_image_name] = keyword[None] identifier[placeholder_image] = identifier[self] . identifier[placeholder_image] keyword[if] identifier[placeholder_image] : keyword[if] identifier[isinstance] ( identifier[placeholder_image] , identifier[OnStoragePlaceholderImage] ): identifier[name] = identifier[placeholder_image] . identifier[path] keyword[else] : identifier[name] = identifier[placeholder_image] . identifier[image_data] . identifier[name] identifier[placeholder_image_name] = identifier[os] . identifier[path] . identifier[join] ( identifier[VERSATILEIMAGEFIELD_PLACEHOLDER_DIRNAME] , identifier[name] ) keyword[if] keyword[not] identifier[self] . identifier[storage] . identifier[exists] ( identifier[placeholder_image_name] ): identifier[self] . identifier[storage] . identifier[save] ( identifier[placeholder_image_name] , identifier[placeholder_image] . identifier[image_data] ) identifier[self] . identifier[placeholder_image_name] = identifier[placeholder_image_name]
def process_placeholder_image(self): """ Process the field's placeholder image. Ensures the placeholder image has been saved to the same storage class as the field in a top level folder with a name specified by settings.VERSATILEIMAGEFIELD_SETTINGS['placeholder_directory_name'] This should be called by the VersatileImageFileDescriptor __get__. If self.placeholder_image_name is already set it just returns right away. """ if self.placeholder_image_name: return # depends on [control=['if'], data=[]] placeholder_image_name = None placeholder_image = self.placeholder_image if placeholder_image: if isinstance(placeholder_image, OnStoragePlaceholderImage): name = placeholder_image.path # depends on [control=['if'], data=[]] else: name = placeholder_image.image_data.name placeholder_image_name = os.path.join(VERSATILEIMAGEFIELD_PLACEHOLDER_DIRNAME, name) if not self.storage.exists(placeholder_image_name): self.storage.save(placeholder_image_name, placeholder_image.image_data) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] self.placeholder_image_name = placeholder_image_name
def compute_err_score(true_positives, n_ref, n_est): """Compute error score metrics. Parameters ---------- true_positives : np.ndarray Array containing the number of true positives at each time point. n_ref : np.ndarray Array containing the number of reference frequencies at each time point. n_est : np.ndarray Array containing the number of estimate frequencies at each time point. Returns ------- e_sub : float Substitution error e_miss : float Miss error e_fa : float False alarm error e_tot : float Total error """ n_ref_sum = float(n_ref.sum()) if n_ref_sum == 0: warnings.warn("Reference frequencies are all empty.") return 0., 0., 0., 0. # Substitution error e_sub = (np.min([n_ref, n_est], axis=0) - true_positives).sum()/n_ref_sum # compute the max of (n_ref - n_est) and 0 e_miss_numerator = n_ref - n_est e_miss_numerator[e_miss_numerator < 0] = 0 # Miss error e_miss = e_miss_numerator.sum()/n_ref_sum # compute the max of (n_est - n_ref) and 0 e_fa_numerator = n_est - n_ref e_fa_numerator[e_fa_numerator < 0] = 0 # False alarm error e_fa = e_fa_numerator.sum()/n_ref_sum # total error e_tot = (np.max([n_ref, n_est], axis=0) - true_positives).sum()/n_ref_sum return e_sub, e_miss, e_fa, e_tot
def function[compute_err_score, parameter[true_positives, n_ref, n_est]]: constant[Compute error score metrics. Parameters ---------- true_positives : np.ndarray Array containing the number of true positives at each time point. n_ref : np.ndarray Array containing the number of reference frequencies at each time point. n_est : np.ndarray Array containing the number of estimate frequencies at each time point. Returns ------- e_sub : float Substitution error e_miss : float Miss error e_fa : float False alarm error e_tot : float Total error ] variable[n_ref_sum] assign[=] call[name[float], parameter[call[name[n_ref].sum, parameter[]]]] if compare[name[n_ref_sum] equal[==] constant[0]] begin[:] call[name[warnings].warn, parameter[constant[Reference frequencies are all empty.]]] return[tuple[[<ast.Constant object at 0x7da1b0ff2440>, <ast.Constant object at 0x7da1b0ff3490>, <ast.Constant object at 0x7da1b0ff3e50>, <ast.Constant object at 0x7da1b0ff1a50>]]] variable[e_sub] assign[=] binary_operation[call[binary_operation[call[name[np].min, parameter[list[[<ast.Name object at 0x7da1b0ff31c0>, <ast.Name object at 0x7da1b0ff0f10>]]]] - name[true_positives]].sum, parameter[]] / name[n_ref_sum]] variable[e_miss_numerator] assign[=] binary_operation[name[n_ref] - name[n_est]] call[name[e_miss_numerator]][compare[name[e_miss_numerator] less[<] constant[0]]] assign[=] constant[0] variable[e_miss] assign[=] binary_operation[call[name[e_miss_numerator].sum, parameter[]] / name[n_ref_sum]] variable[e_fa_numerator] assign[=] binary_operation[name[n_est] - name[n_ref]] call[name[e_fa_numerator]][compare[name[e_fa_numerator] less[<] constant[0]]] assign[=] constant[0] variable[e_fa] assign[=] binary_operation[call[name[e_fa_numerator].sum, parameter[]] / name[n_ref_sum]] variable[e_tot] assign[=] binary_operation[call[binary_operation[call[name[np].max, parameter[list[[<ast.Name object at 0x7da1b0fcce80>, <ast.Name object at 0x7da1b0fcebc0>]]]] - name[true_positives]].sum, parameter[]] / name[n_ref_sum]] return[tuple[[<ast.Name object at 0x7da1b0fcd030>, <ast.Name object at 0x7da1b0fce950>, <ast.Name object at 0x7da1b0fcc190>, <ast.Name object at 0x7da1b0fcf8b0>]]]
keyword[def] identifier[compute_err_score] ( identifier[true_positives] , identifier[n_ref] , identifier[n_est] ): literal[string] identifier[n_ref_sum] = identifier[float] ( identifier[n_ref] . identifier[sum] ()) keyword[if] identifier[n_ref_sum] == literal[int] : identifier[warnings] . identifier[warn] ( literal[string] ) keyword[return] literal[int] , literal[int] , literal[int] , literal[int] identifier[e_sub] =( identifier[np] . identifier[min] ([ identifier[n_ref] , identifier[n_est] ], identifier[axis] = literal[int] )- identifier[true_positives] ). identifier[sum] ()/ identifier[n_ref_sum] identifier[e_miss_numerator] = identifier[n_ref] - identifier[n_est] identifier[e_miss_numerator] [ identifier[e_miss_numerator] < literal[int] ]= literal[int] identifier[e_miss] = identifier[e_miss_numerator] . identifier[sum] ()/ identifier[n_ref_sum] identifier[e_fa_numerator] = identifier[n_est] - identifier[n_ref] identifier[e_fa_numerator] [ identifier[e_fa_numerator] < literal[int] ]= literal[int] identifier[e_fa] = identifier[e_fa_numerator] . identifier[sum] ()/ identifier[n_ref_sum] identifier[e_tot] =( identifier[np] . identifier[max] ([ identifier[n_ref] , identifier[n_est] ], identifier[axis] = literal[int] )- identifier[true_positives] ). identifier[sum] ()/ identifier[n_ref_sum] keyword[return] identifier[e_sub] , identifier[e_miss] , identifier[e_fa] , identifier[e_tot]
def compute_err_score(true_positives, n_ref, n_est): """Compute error score metrics. Parameters ---------- true_positives : np.ndarray Array containing the number of true positives at each time point. n_ref : np.ndarray Array containing the number of reference frequencies at each time point. n_est : np.ndarray Array containing the number of estimate frequencies at each time point. Returns ------- e_sub : float Substitution error e_miss : float Miss error e_fa : float False alarm error e_tot : float Total error """ n_ref_sum = float(n_ref.sum()) if n_ref_sum == 0: warnings.warn('Reference frequencies are all empty.') return (0.0, 0.0, 0.0, 0.0) # depends on [control=['if'], data=[]] # Substitution error e_sub = (np.min([n_ref, n_est], axis=0) - true_positives).sum() / n_ref_sum # compute the max of (n_ref - n_est) and 0 e_miss_numerator = n_ref - n_est e_miss_numerator[e_miss_numerator < 0] = 0 # Miss error e_miss = e_miss_numerator.sum() / n_ref_sum # compute the max of (n_est - n_ref) and 0 e_fa_numerator = n_est - n_ref e_fa_numerator[e_fa_numerator < 0] = 0 # False alarm error e_fa = e_fa_numerator.sum() / n_ref_sum # total error e_tot = (np.max([n_ref, n_est], axis=0) - true_positives).sum() / n_ref_sum return (e_sub, e_miss, e_fa, e_tot)
def setCheckedDetails(self, checked): """Sets which components are checked :param checked: dictionary of stimtype:list<attribute names> for which components and their attributes should be checked :type checked: dict """ layout = self.layout() for i in range(layout.count()): w = layout.itemAt(i).widget() if w.stimType in checked: w.setChecked(checked[w.stimType])
def function[setCheckedDetails, parameter[self, checked]]: constant[Sets which components are checked :param checked: dictionary of stimtype:list<attribute names> for which components and their attributes should be checked :type checked: dict ] variable[layout] assign[=] call[name[self].layout, parameter[]] for taget[name[i]] in starred[call[name[range], parameter[call[name[layout].count, parameter[]]]]] begin[:] variable[w] assign[=] call[call[name[layout].itemAt, parameter[name[i]]].widget, parameter[]] if compare[name[w].stimType in name[checked]] begin[:] call[name[w].setChecked, parameter[call[name[checked]][name[w].stimType]]]
keyword[def] identifier[setCheckedDetails] ( identifier[self] , identifier[checked] ): literal[string] identifier[layout] = identifier[self] . identifier[layout] () keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[layout] . identifier[count] ()): identifier[w] = identifier[layout] . identifier[itemAt] ( identifier[i] ). identifier[widget] () keyword[if] identifier[w] . identifier[stimType] keyword[in] identifier[checked] : identifier[w] . identifier[setChecked] ( identifier[checked] [ identifier[w] . identifier[stimType] ])
def setCheckedDetails(self, checked): """Sets which components are checked :param checked: dictionary of stimtype:list<attribute names> for which components and their attributes should be checked :type checked: dict """ layout = self.layout() for i in range(layout.count()): w = layout.itemAt(i).widget() if w.stimType in checked: w.setChecked(checked[w.stimType]) # depends on [control=['if'], data=['checked']] # depends on [control=['for'], data=['i']]
def name(self): """Class name.""" return ffi.string(lib.EnvGetDefclassName(self._env, self._cls)).decode()
def function[name, parameter[self]]: constant[Class name.] return[call[call[name[ffi].string, parameter[call[name[lib].EnvGetDefclassName, parameter[name[self]._env, name[self]._cls]]]].decode, parameter[]]]
keyword[def] identifier[name] ( identifier[self] ): literal[string] keyword[return] identifier[ffi] . identifier[string] ( identifier[lib] . identifier[EnvGetDefclassName] ( identifier[self] . identifier[_env] , identifier[self] . identifier[_cls] )). identifier[decode] ()
def name(self): """Class name.""" return ffi.string(lib.EnvGetDefclassName(self._env, self._cls)).decode()
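The property above is a thin cffi wrapper around EnvGetDefclassName; the bytes-to-str pattern it relies on can be checked in isolation (a sketch assuming only that cffi is installed):

import cffi

ffi = cffi.FFI()
buf = ffi.new("char[]", b"MY-CLASS")
# ffi.string() reads up to the NUL terminator and returns bytes;
# .decode() then yields a Python str, mirroring the property body.
assert ffi.string(buf).decode() == "MY-CLASS"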
def registerWebAdaptor(self, webAdaptorURL, machineName, machineIP, isAdminEnabled, description, httpPort, httpsPort): """ You can use this operation to register the ArcGIS Web Adaptor from your ArcGIS Server. By registering the Web Adaptor with the server, you are telling the server to trust requests (including security credentials) that have been submitted through this Web Adaptor. Inputs: webAdaptorURL - The URL of the web adaptor through which ArcGIS resources will be accessed. machineName - The machine name on which the web adaptor is installed. machineIP - The local IP address of the machine on which the web adaptor is installed. isAdminEnabled - A boolean flag to indicate if administrative access is allowed through the web adaptor. The default is false. description - An optional description for the web adaptor. httpPort - An optional parameter to indicate the HTTP port of the web adaptor. If this parameter is not provided, it is derived from the URL. httpsPort - An optional parameter to indicate the HTTPS port of the web adaptor. If this parameter is not provided, it is derived from the URL. """ url = self._url + "/webadaptors/register" params = { "f" : "json", "webAdaptorURL" : webAdaptorURL, "machineName" : machineName, "machineIP" : machineIP, "isAdminEnabled" : isAdminEnabled, "description" : description, "httpPort" : httpPort, "httpsPort" : httpsPort } return self._post(url=url, param_dict=params, securityHandler=self._securityHandler, proxy_port=self._proxy_port, proxy_url=self._proxy_url)
def function[registerWebAdaptor, parameter[self, webAdaptorURL, machineName, machineIP, isAdminEnabled, description, httpPort, httpsPort]]: constant[ You can use this operation to register the ArcGIS Web Adaptor from your ArcGIS Server. By registering the Web Adaptor with the server, you are telling the server to trust requests (including security credentials) that have been submitted through this Web Adaptor. Inputs: webAdaptorURL - The URL of the web adaptor through which ArcGIS resources will be accessed. machineName - The machine name on which the web adaptor is installed. machineIP - The local IP address of the machine on which the web adaptor is installed. isAdminEnabled - A boolean flag to indicate if administrative access is allowed through the web adaptor. The default is false. description - An optional description for the web adaptor. httpPort - An optional parameter to indicate the HTTP port of the web adaptor. If this parameter is not provided, it is derived from the URL. httpsPort - An optional parameter to indicate the HTTPS port of the web adaptor. If this parameter is not provided, it is derived from the URL. ] variable[url] assign[=] binary_operation[name[self]._url + constant[/webadaptors/register]] variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b124e7a0>, <ast.Constant object at 0x7da1b124d3f0>, <ast.Constant object at 0x7da1b124d720>, <ast.Constant object at 0x7da1b124d360>, <ast.Constant object at 0x7da1b124ff70>, <ast.Constant object at 0x7da1b124fac0>, <ast.Constant object at 0x7da1b124c700>, <ast.Constant object at 0x7da1b124c580>], [<ast.Constant object at 0x7da1b124c250>, <ast.Name object at 0x7da1b124c280>, <ast.Name object at 0x7da1b124c130>, <ast.Name object at 0x7da1b124c310>, <ast.Name object at 0x7da1b124d540>, <ast.Name object at 0x7da1b124f610>, <ast.Name object at 0x7da1b124ec80>, <ast.Name object at 0x7da1b124f070>]] return[call[name[self]._post, parameter[]]]
keyword[def] identifier[registerWebAdaptor] ( identifier[self] , identifier[webAdaptorURL] , identifier[machineName] , identifier[machineIP] , identifier[isAdminEnabled] , identifier[description] , identifier[httpPort] , identifier[httpsPort] ): literal[string] identifier[url] = identifier[self] . identifier[_url] + literal[string] identifier[params] ={ literal[string] : literal[string] , literal[string] : identifier[webAdaptorURL] , literal[string] : identifier[machineName] , literal[string] : identifier[machineIP] , literal[string] : identifier[isAdminEnabled] , literal[string] : identifier[description] , literal[string] : identifier[httpPort] , literal[string] : identifier[httpsPort] } keyword[return] identifier[self] . identifier[_post] ( identifier[url] = identifier[url] , identifier[param_dict] = identifier[params] , identifier[securityHandler] = identifier[self] . identifier[_securityHandler] , identifier[proxy_port] = identifier[self] . identifier[_proxy_port] , identifier[proxy_url] = identifier[self] . identifier[_proxy_url] )
def registerWebAdaptor(self, webAdaptorURL, machineName, machineIP, isAdminEnabled, description, httpPort, httpsPort): """ You can use this operation to register the ArcGIS Web Adaptor from your ArcGIS Server. By registering the Web Adaptor with the server, you are telling the server to trust requests (including security credentials) that have been submitted through this Web Adaptor. Inputs: webAdaptorURL - The URL of the web adaptor through which ArcGIS resources will be accessed. machineName - The machine name on which the web adaptor is installed. machineIP - The local IP address of the machine on which the web adaptor is installed. isAdminEnabled - A boolean flag to indicate if administrative access is allowed through the web adaptor. The default is false. description - An optional description for the web adaptor. httpPort - An optional parameter to indicate the HTTP port of the web adaptor. If this parameter is not provided, it is derived from the URL. httpsPort - An optional parameter to indicate the HTTPS port of the web adaptor. If this parameter is not provided, it is derived from the URL. """ url = self._url + '/webadaptors/register' params = {'f': 'json', 'webAdaptorURL': webAdaptorURL, 'machineName': machineName, 'machineIP': machineIP, 'isAdminEnabled': isAdminEnabled, 'description': description, 'httpPort': httpPort, 'httpsPort': httpsPort} return self._post(url=url, param_dict=params, securityHandler=self._securityHandler, proxy_port=self._proxy_port, proxy_url=self._proxy_url)
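A hypothetical invocation of the method above; machine_admin stands in for whatever object exposes registerWebAdaptor, and every argument value is a placeholder rather than a value from the original source:

resp = machine_admin.registerWebAdaptor(
    webAdaptorURL="https://web.example.com/arcgis",
    machineName="web.example.com",
    machineIP="10.0.0.5",
    isAdminEnabled=False,
    description="Public-facing web adaptor",
    httpPort=80,
    httpsPort=443)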
def method_name_exist(self, meth_name):
    """Check if there is already a meth_name method in the current class.

    It is useful, before allowing a method to be renamed, to check
    that the name does not already exist.
    """
    methods = self.current_class.get_methods()
    for m in methods:
        if m.name == meth_name:
            return True
    return False
def function[method_name_exist, parameter[self, meth_name]]: constant[Check if there is already a meth_name method in the current class It is useful before allowing to rename a method to check name does not already exist. ] variable[methods] assign[=] call[name[self].current_class.get_methods, parameter[]] for taget[name[m]] in starred[name[methods]] begin[:] if compare[name[m].name equal[==] name[meth_name]] begin[:] return[constant[True]] return[constant[False]]
keyword[def] identifier[method_name_exist] ( identifier[self] , identifier[meth_name] ): literal[string] identifier[methods] = identifier[self] . identifier[current_class] . identifier[get_methods] () keyword[for] identifier[m] keyword[in] identifier[methods] : keyword[if] identifier[m] . identifier[name] == identifier[meth_name] : keyword[return] keyword[True] keyword[return] keyword[False]
def method_name_exist(self, meth_name):
    """Check if there is already a meth_name method in the current class.

    It is useful, before allowing a method to be renamed, to check
    that the name does not already exist.
    """
    methods = self.current_class.get_methods()
    for m in methods:
        if m.name == meth_name:
            return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['m']]
    return False
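The linear scan above can be collapsed into any(), which short-circuits on the first match just like the explicit loop; a behaviour-preserving alternative under the same current_class API:

def method_name_exist(self, meth_name):
    """Check if there is already a meth_name method in the current class."""
    return any(m.name == meth_name for m in self.current_class.get_methods())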
def plot_point(self, x, y, visible=True, color='black', size=5): """ Places a single point on the grid :param x: the x coordinate :param y: the y coordinate :param visible: True if the individual point should be visible :param color: the color of the point :param size: the point size in pixels :return: The absolute coordinates as a tuple """ xp = (self.px_x * (x - self.x_min)) / self.x_tick yp = (self.px_y * (self.y_max - y)) / self.y_tick coord = 50 + xp, 50 + yp if visible: # divide down to an appropriate size size = int(size/2) if int(size/2) > 1 else 1 x, y = coord self.canvas.create_oval( x-size, y-size, x+size, y+size, fill=color ) return coord
def function[plot_point, parameter[self, x, y, visible, color, size]]: constant[ Places a single point on the grid :param x: the x coordinate :param y: the y coordinate :param visible: True if the individual point should be visible :param color: the color of the point :param size: the point size in pixels :return: The absolute coordinates as a tuple ] variable[xp] assign[=] binary_operation[binary_operation[name[self].px_x * binary_operation[name[x] - name[self].x_min]] / name[self].x_tick] variable[yp] assign[=] binary_operation[binary_operation[name[self].px_y * binary_operation[name[self].y_max - name[y]]] / name[self].y_tick] variable[coord] assign[=] tuple[[<ast.BinOp object at 0x7da1b0f38850>, <ast.BinOp object at 0x7da1b0f387f0>]] if name[visible] begin[:] variable[size] assign[=] <ast.IfExp object at 0x7da1b0f398d0> <ast.Tuple object at 0x7da1b0f3b6d0> assign[=] name[coord] call[name[self].canvas.create_oval, parameter[binary_operation[name[x] - name[size]], binary_operation[name[y] - name[size]], binary_operation[name[x] + name[size]], binary_operation[name[y] + name[size]]]] return[name[coord]]
keyword[def] identifier[plot_point] ( identifier[self] , identifier[x] , identifier[y] , identifier[visible] = keyword[True] , identifier[color] = literal[string] , identifier[size] = literal[int] ): literal[string] identifier[xp] =( identifier[self] . identifier[px_x] *( identifier[x] - identifier[self] . identifier[x_min] ))/ identifier[self] . identifier[x_tick] identifier[yp] =( identifier[self] . identifier[px_y] *( identifier[self] . identifier[y_max] - identifier[y] ))/ identifier[self] . identifier[y_tick] identifier[coord] = literal[int] + identifier[xp] , literal[int] + identifier[yp] keyword[if] identifier[visible] : identifier[size] = identifier[int] ( identifier[size] / literal[int] ) keyword[if] identifier[int] ( identifier[size] / literal[int] )> literal[int] keyword[else] literal[int] identifier[x] , identifier[y] = identifier[coord] identifier[self] . identifier[canvas] . identifier[create_oval] ( identifier[x] - identifier[size] , identifier[y] - identifier[size] , identifier[x] + identifier[size] , identifier[y] + identifier[size] , identifier[fill] = identifier[color] ) keyword[return] identifier[coord]
def plot_point(self, x, y, visible=True, color='black', size=5): """ Places a single point on the grid :param x: the x coordinate :param y: the y coordinate :param visible: True if the individual point should be visible :param color: the color of the point :param size: the point size in pixels :return: The absolute coordinates as a tuple """ xp = self.px_x * (x - self.x_min) / self.x_tick yp = self.px_y * (self.y_max - y) / self.y_tick coord = (50 + xp, 50 + yp) if visible: # divide down to an appropriate size size = int(size / 2) if int(size / 2) > 1 else 1 (x, y) = coord self.canvas.create_oval(x - size, y - size, x + size, y + size, fill=color) # depends on [control=['if'], data=[]] return coord
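The data-to-canvas mapping can be exercised without a Tk canvas; a sketch with assumed grid parameters (40 px per tick, plus the 50 px margin hard-coded above):

# Assumed grid parameters, for illustration only.
px_x, px_y = 40, 40        # pixels per tick along each axis
x_min, y_max = 0, 10       # data-space bounds
x_tick = y_tick = 1        # data units per tick

def to_canvas(x, y):
    xp = px_x * (x - x_min) / x_tick
    yp = px_y * (y_max - y) / y_tick   # canvas y grows downward
    return 50 + xp, 50 + yp

assert to_canvas(0, 10) == (50, 50)        # top-left corner of the grid
assert to_canvas(5, 5) == (250.0, 250.0)   # grid centre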
def approximate_size(size, a_kilobyte_is_1024_bytes=True): ''' Humansize.py from Dive into Python3 Mark Pilgrim - http://www.diveintopython3.net/ Copyright (c) 2009, Mark Pilgrim, All rights reserved. Convert a file size to human-readable form. Keyword arguments: size -- file size in bytes a_kilobyte_is_1024_bytes -- if True (default), use multiples of 1024 if False, use multiples of 1000 Returns: string ''' size = float(size) if size < 0: raise ValueError('number must be non-negative') multiple = 1024 if a_kilobyte_is_1024_bytes else 1000 for suffix in SUFFIXES[multiple]: size /= multiple if size < multiple: return '{0:.1f}{1}'.format(size, suffix) raise ValueError('number too large')
def function[approximate_size, parameter[size, a_kilobyte_is_1024_bytes]]: constant[ Humansize.py from Dive into Python3 Mark Pilgrim - http://www.diveintopython3.net/ Copyright (c) 2009, Mark Pilgrim, All rights reserved. Convert a file size to human-readable form. Keyword arguments: size -- file size in bytes a_kilobyte_is_1024_bytes -- if True (default), use multiples of 1024 if False, use multiples of 1000 Returns: string ] variable[size] assign[=] call[name[float], parameter[name[size]]] if compare[name[size] less[<] constant[0]] begin[:] <ast.Raise object at 0x7da20c7cb0d0> variable[multiple] assign[=] <ast.IfExp object at 0x7da20c7ca530> for taget[name[suffix]] in starred[call[name[SUFFIXES]][name[multiple]]] begin[:] <ast.AugAssign object at 0x7da20c7cb940> if compare[name[size] less[<] name[multiple]] begin[:] return[call[constant[{0:.1f}{1}].format, parameter[name[size], name[suffix]]]] <ast.Raise object at 0x7da20c7ca050>
keyword[def] identifier[approximate_size] ( identifier[size] , identifier[a_kilobyte_is_1024_bytes] = keyword[True] ): literal[string] identifier[size] = identifier[float] ( identifier[size] ) keyword[if] identifier[size] < literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[multiple] = literal[int] keyword[if] identifier[a_kilobyte_is_1024_bytes] keyword[else] literal[int] keyword[for] identifier[suffix] keyword[in] identifier[SUFFIXES] [ identifier[multiple] ]: identifier[size] /= identifier[multiple] keyword[if] identifier[size] < identifier[multiple] : keyword[return] literal[string] . identifier[format] ( identifier[size] , identifier[suffix] ) keyword[raise] identifier[ValueError] ( literal[string] )
def approximate_size(size, a_kilobyte_is_1024_bytes=True): """ Humansize.py from Dive into Python3 Mark Pilgrim - http://www.diveintopython3.net/ Copyright (c) 2009, Mark Pilgrim, All rights reserved. Convert a file size to human-readable form. Keyword arguments: size -- file size in bytes a_kilobyte_is_1024_bytes -- if True (default), use multiples of 1024 if False, use multiples of 1000 Returns: string """ size = float(size) if size < 0: raise ValueError('number must be non-negative') # depends on [control=['if'], data=[]] multiple = 1024 if a_kilobyte_is_1024_bytes else 1000 for suffix in SUFFIXES[multiple]: size /= multiple if size < multiple: return '{0:.1f}{1}'.format(size, suffix) # depends on [control=['if'], data=['size']] # depends on [control=['for'], data=['suffix']] raise ValueError('number too large')
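approximate_size refers to a module-level SUFFIXES table that the snippet does not show; a plausible definition (matching the original humansize.py) together with the documented example values:

SUFFIXES = {1000: ['KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB'],
            1024: ['KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB']}

print(approximate_size(1000000000000, False))  # 1.0TB
print(approximate_size(1000000000000))         # 931.3GiB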
def update_pypsa_storage(pypsa, storages, storages_lines):
    """
    Adds storages and their lines to pypsa representation of the edisgo
    graph.

    This function affects the following attributes of the pypsa network:
    components ('StorageUnit'), storage_units, storage_units_t (p_set,
    q_set), buses, lines

    Parameters
    -----------
    pypsa : :pypsa:`pypsa.Network<network>`
    storages : :obj:`list`
        List with storages of type :class:`~.grid.components.Storage` to
        add to pypsa network.
    storages_lines : :obj:`list`
        List with lines of type :class:`~.grid.components.Line` that connect
        storages to the grid.

    """
    bus = {'name': [], 'v_nom': [], 'x': [], 'y': []}
    line = {'name': [], 'bus0': [], 'bus1': [], 'type': [],
            'x': [], 'r': [], 's_nom': [], 'length': []}
    storage = {
        'name': [], 'bus': [], 'p_nom': [], 'state_of_charge_initial': [],
        'efficiency_store': [], 'efficiency_dispatch': [],
        'standing_loss': []}

    for s in storages:
        bus_name = '_'.join(['Bus', repr(s)])

        storage['name'].append(repr(s))
        storage['bus'].append(bus_name)
        storage['p_nom'].append(s.nominal_power / 1e3)
        storage['state_of_charge_initial'].append(s.soc_initial)
        storage['efficiency_store'].append(s.efficiency_in)
        storage['efficiency_dispatch'].append(s.efficiency_out)
        storage['standing_loss'].append(s.standing_loss)

        bus['name'].append(bus_name)
        bus['v_nom'].append(s.grid.voltage_nom)
        bus['x'].append(s.geom.x)
        bus['y'].append(s.geom.y)

    omega = 2 * pi * 50

    for l in storages_lines:
        line['name'].append(repr(l))

        adj_nodes = l.grid.graph.nodes_from_line(l)
        if isinstance(l.grid, LVGrid):
            if isinstance(adj_nodes[0], LVStation):
                line['bus0'].append(
                    '_'.join(['Bus', adj_nodes[0].__repr__(side='lv')]))
            else:
                line['bus0'].append('_'.join(['Bus', repr(adj_nodes[0])]))
            if isinstance(adj_nodes[1], LVStation):
                line['bus1'].append(
                    '_'.join(['Bus', adj_nodes[1].__repr__(side='lv')]))
            else:
                line['bus1'].append('_'.join(['Bus', repr(adj_nodes[1])]))
        else:
            if isinstance(adj_nodes[0], LVStation):
                line['bus0'].append(
                    '_'.join(['Bus', adj_nodes[0].__repr__(side='mv')]))
            elif isinstance(adj_nodes[0], MVStation):
                line['bus0'].append(
                    '_'.join(['Bus', adj_nodes[0].__repr__(side='lv')]))
            else:
                line['bus0'].append('_'.join(['Bus', repr(adj_nodes[0])]))
            if isinstance(adj_nodes[1], LVStation):
                line['bus1'].append(
                    '_'.join(['Bus', adj_nodes[1].__repr__(side='mv')]))
            elif isinstance(adj_nodes[1], MVStation):
                line['bus1'].append(
                    '_'.join(['Bus', adj_nodes[1].__repr__(side='lv')]))
            else:
                line['bus1'].append('_'.join(['Bus', repr(adj_nodes[1])]))

        line['type'].append("")
        line['x'].append(l.type['L'] * omega / 1e3 * l.length)
        line['r'].append(l.type['R'] * l.length)
        line['s_nom'].append(
            sqrt(3) * l.type['I_max_th'] * l.type['U_n'] / 1e3)
        line['length'].append(l.length)

    # import new components to pypsa
    pypsa.import_components_from_dataframe(
        pd.DataFrame(bus).set_index('name'), 'Bus')
    pypsa.import_components_from_dataframe(
        pd.DataFrame(storage).set_index('name'), 'StorageUnit')
    pypsa.import_components_from_dataframe(
        pd.DataFrame(line).set_index('name'), 'Line')

    # import time series of storages and buses to pypsa
    timeseries_storage_p = pd.DataFrame()
    timeseries_storage_q = pd.DataFrame()
    for s in storages:
        timeseries_storage_p[repr(s)] = s.pypsa_timeseries('p').loc[
            pypsa.storage_units_t.p_set.index]
        timeseries_storage_q[repr(s)] = s.pypsa_timeseries('q').loc[
            pypsa.storage_units_t.q_set.index]

    import_series_from_dataframe(pypsa, timeseries_storage_p,
                                 'StorageUnit', 'p_set')
    import_series_from_dataframe(pypsa, timeseries_storage_q,
                                 'StorageUnit', 'q_set')
def function[update_pypsa_storage, parameter[pypsa, storages, storages_lines]]: constant[ Adds storages and their lines to pypsa representation of the edisgo graph. This function effects the following attributes of the pypsa network: components ('StorageUnit'), storage_units, storage_units_t (p_set, q_set), buses, lines Parameters ----------- pypsa : :pypsa:`pypsa.Network<network>` storages : :obj:`list` List with storages of type :class:`~.grid.components.Storage` to add to pypsa network. storages_lines : :obj:`list` List with lines of type :class:`~.grid.components.Line` that connect storages to the grid. ] variable[bus] assign[=] dictionary[[<ast.Constant object at 0x7da1b03fb070>, <ast.Constant object at 0x7da1b03fafe0>, <ast.Constant object at 0x7da1b03fb010>, <ast.Constant object at 0x7da1b03fb340>], [<ast.List object at 0x7da1b03fb8b0>, <ast.List object at 0x7da1b0337bb0>, <ast.List object at 0x7da1b0335030>, <ast.List object at 0x7da1b0337a60>]] variable[line] assign[=] dictionary[[<ast.Constant object at 0x7da1b0337eb0>, <ast.Constant object at 0x7da1b0337850>, <ast.Constant object at 0x7da1b03369e0>, <ast.Constant object at 0x7da1b0334ac0>, <ast.Constant object at 0x7da1b0335e40>, <ast.Constant object at 0x7da1b0335420>, <ast.Constant object at 0x7da1b054a560>, <ast.Constant object at 0x7da1b054ae90>], [<ast.List object at 0x7da1b0548550>, <ast.List object at 0x7da1b0548b80>, <ast.List object at 0x7da1b054bf70>, <ast.List object at 0x7da1b0549870>, <ast.List object at 0x7da1b0548310>, <ast.List object at 0x7da1b0549fc0>, <ast.List object at 0x7da1b054b5e0>, <ast.List object at 0x7da1b05487c0>]] variable[storage] assign[=] dictionary[[<ast.Constant object at 0x7da1b0548370>, <ast.Constant object at 0x7da1b0549390>, <ast.Constant object at 0x7da1b0549480>, <ast.Constant object at 0x7da1b0549b70>, <ast.Constant object at 0x7da1b0549e10>, <ast.Constant object at 0x7da1b054b610>, <ast.Constant object at 0x7da1b0549cf0>], [<ast.List object at 0x7da1b0548eb0>, <ast.List object at 0x7da1b054a650>, <ast.List object at 0x7da1b0549f30>, <ast.List object at 0x7da1b054a140>, <ast.List object at 0x7da1b054a080>, <ast.List object at 0x7da1b054ab90>, <ast.List object at 0x7da1b054a410>]] for taget[name[s]] in starred[name[storages]] begin[:] variable[bus_name] assign[=] call[constant[_].join, parameter[list[[<ast.Constant object at 0x7da1b054b400>, <ast.Call object at 0x7da1b0549120>]]]] call[call[name[storage]][constant[name]].append, parameter[call[name[repr], parameter[name[s]]]]] call[call[name[storage]][constant[bus]].append, parameter[name[bus_name]]] call[call[name[storage]][constant[p_nom]].append, parameter[binary_operation[name[s].nominal_power / constant[1000.0]]]] call[call[name[storage]][constant[state_of_charge_initial]].append, parameter[name[s].soc_initial]] call[call[name[storage]][constant[efficiency_store]].append, parameter[name[s].efficiency_in]] call[call[name[storage]][constant[efficiency_dispatch]].append, parameter[name[s].efficiency_out]] call[call[name[storage]][constant[standing_loss]].append, parameter[name[s].standing_loss]] call[call[name[bus]][constant[name]].append, parameter[name[bus_name]]] call[call[name[bus]][constant[v_nom]].append, parameter[name[s].grid.voltage_nom]] call[call[name[bus]][constant[x]].append, parameter[name[s].geom.x]] call[call[name[bus]][constant[y]].append, parameter[name[s].geom.y]] variable[omega] assign[=] binary_operation[binary_operation[constant[2] * name[pi]] * constant[50]] for taget[name[l]] in starred[name[storages_lines]] 
begin[:] call[call[name[line]][constant[name]].append, parameter[call[name[repr], parameter[name[l]]]]] variable[adj_nodes] assign[=] call[name[l].grid.graph.nodes_from_line, parameter[name[l]]] if call[name[isinstance], parameter[name[l].grid, name[LVGrid]]] begin[:] if call[name[isinstance], parameter[call[name[adj_nodes]][constant[0]], name[LVStation]]] begin[:] call[call[name[line]][constant[bus0]].append, parameter[call[constant[_].join, parameter[list[[<ast.Constant object at 0x7da1b03529e0>, <ast.Call object at 0x7da1b0352a10>]]]]]] if call[name[isinstance], parameter[call[name[adj_nodes]][constant[1]], name[LVStation]]] begin[:] call[call[name[line]][constant[bus1]].append, parameter[call[constant[_].join, parameter[list[[<ast.Constant object at 0x7da1b03db160>, <ast.Call object at 0x7da1b03db640>]]]]]] call[call[name[line]][constant[type]].append, parameter[constant[]]] call[call[name[line]][constant[x]].append, parameter[binary_operation[binary_operation[binary_operation[call[name[l].type][constant[L]] * name[omega]] / constant[1000.0]] * name[l].length]]] call[call[name[line]][constant[r]].append, parameter[binary_operation[call[name[l].type][constant[R]] * name[l].length]]] call[call[name[line]][constant[s_nom]].append, parameter[binary_operation[binary_operation[binary_operation[call[name[sqrt], parameter[constant[3]]] * call[name[l].type][constant[I_max_th]]] * call[name[l].type][constant[U_n]]] / constant[1000.0]]]] call[call[name[line]][constant[length]].append, parameter[name[l].length]] call[name[pypsa].import_components_from_dataframe, parameter[call[call[name[pd].DataFrame, parameter[name[bus]]].set_index, parameter[constant[name]]], constant[Bus]]] call[name[pypsa].import_components_from_dataframe, parameter[call[call[name[pd].DataFrame, parameter[name[storage]]].set_index, parameter[constant[name]]], constant[StorageUnit]]] call[name[pypsa].import_components_from_dataframe, parameter[call[call[name[pd].DataFrame, parameter[name[line]]].set_index, parameter[constant[name]]], constant[Line]]] variable[timeseries_storage_p] assign[=] call[name[pd].DataFrame, parameter[]] variable[timeseries_storage_q] assign[=] call[name[pd].DataFrame, parameter[]] for taget[name[s]] in starred[name[storages]] begin[:] call[name[timeseries_storage_p]][call[name[repr], parameter[name[s]]]] assign[=] call[call[name[s].pypsa_timeseries, parameter[constant[p]]].loc][name[pypsa].storage_units_t.p_set.index] call[name[timeseries_storage_q]][call[name[repr], parameter[name[s]]]] assign[=] call[call[name[s].pypsa_timeseries, parameter[constant[q]]].loc][name[pypsa].storage_units_t.q_set.index] call[name[import_series_from_dataframe], parameter[name[pypsa], name[timeseries_storage_p], constant[StorageUnit], constant[p_set]]] call[name[import_series_from_dataframe], parameter[name[pypsa], name[timeseries_storage_q], constant[StorageUnit], constant[q_set]]]
keyword[def] identifier[update_pypsa_storage] ( identifier[pypsa] , identifier[storages] , identifier[storages_lines] ): literal[string] identifier[bus] ={ literal[string] :[], literal[string] :[], literal[string] :[], literal[string] :[]} identifier[line] ={ literal[string] :[], literal[string] :[], literal[string] :[], literal[string] :[], literal[string] :[], literal[string] :[], literal[string] :[], literal[string] :[]} identifier[storage] ={ literal[string] :[], literal[string] :[], literal[string] :[], literal[string] :[], literal[string] :[], literal[string] :[], literal[string] :[]} keyword[for] identifier[s] keyword[in] identifier[storages] : identifier[bus_name] = literal[string] . identifier[join] ([ literal[string] , identifier[repr] ( identifier[s] )]) identifier[storage] [ literal[string] ]. identifier[append] ( identifier[repr] ( identifier[s] )) identifier[storage] [ literal[string] ]. identifier[append] ( identifier[bus_name] ) identifier[storage] [ literal[string] ]. identifier[append] ( identifier[s] . identifier[nominal_power] / literal[int] ) identifier[storage] [ literal[string] ]. identifier[append] ( identifier[s] . identifier[soc_initial] ) identifier[storage] [ literal[string] ]. identifier[append] ( identifier[s] . identifier[efficiency_in] ) identifier[storage] [ literal[string] ]. identifier[append] ( identifier[s] . identifier[efficiency_out] ) identifier[storage] [ literal[string] ]. identifier[append] ( identifier[s] . identifier[standing_loss] ) identifier[bus] [ literal[string] ]. identifier[append] ( identifier[bus_name] ) identifier[bus] [ literal[string] ]. identifier[append] ( identifier[s] . identifier[grid] . identifier[voltage_nom] ) identifier[bus] [ literal[string] ]. identifier[append] ( identifier[s] . identifier[geom] . identifier[x] ) identifier[bus] [ literal[string] ]. identifier[append] ( identifier[s] . identifier[geom] . identifier[y] ) identifier[omega] = literal[int] * identifier[pi] * literal[int] keyword[for] identifier[l] keyword[in] identifier[storages_lines] : identifier[line] [ literal[string] ]. identifier[append] ( identifier[repr] ( identifier[l] )) identifier[adj_nodes] = identifier[l] . identifier[grid] . identifier[graph] . identifier[nodes_from_line] ( identifier[l] ) keyword[if] identifier[isinstance] ( identifier[l] . identifier[grid] , identifier[LVGrid] ): keyword[if] identifier[isinstance] ( identifier[adj_nodes] [ literal[int] ], identifier[LVStation] ): identifier[line] [ literal[string] ]. identifier[append] ( literal[string] . identifier[join] ([ literal[string] , identifier[adj_nodes] [ literal[int] ]. identifier[__repr__] ( identifier[side] = literal[string] )])) keyword[else] : identifier[line] [ literal[string] ]. identifier[append] ( literal[string] . identifier[join] ([ literal[string] , identifier[repr] ( identifier[adj_nodes] [ literal[int] ])])) keyword[if] identifier[isinstance] ( identifier[adj_nodes] [ literal[int] ], identifier[LVStation] ): identifier[line] [ literal[string] ]. identifier[append] ( literal[string] . identifier[join] ([ literal[string] , identifier[adj_nodes] [ literal[int] ]. identifier[__repr__] ( identifier[side] = literal[string] )])) keyword[else] : identifier[line] [ literal[string] ]. identifier[append] ( literal[string] . identifier[join] ([ literal[string] , identifier[repr] ( identifier[adj_nodes] [ literal[int] ])])) keyword[else] : keyword[if] identifier[isinstance] ( identifier[adj_nodes] [ literal[int] ], identifier[LVStation] ): identifier[line] [ literal[string] ]. 
identifier[append] ( literal[string] . identifier[join] ([ literal[string] , identifier[adj_nodes] [ literal[int] ]. identifier[__repr__] ( identifier[side] = literal[string] )])) keyword[elif] identifier[isinstance] ( identifier[adj_nodes] [ literal[int] ], identifier[MVStation] ): identifier[line] [ literal[string] ]. identifier[append] ( literal[string] . identifier[join] ([ literal[string] , identifier[adj_nodes] [ literal[int] ]. identifier[__repr__] ( identifier[side] = literal[string] )])) keyword[else] : identifier[line] [ literal[string] ]. identifier[append] ( literal[string] . identifier[join] ([ literal[string] , identifier[repr] ( identifier[adj_nodes] [ literal[int] ])])) keyword[if] identifier[isinstance] ( identifier[adj_nodes] [ literal[int] ], identifier[LVStation] ): identifier[line] [ literal[string] ]. identifier[append] ( literal[string] . identifier[join] ([ literal[string] , identifier[adj_nodes] [ literal[int] ]. identifier[__repr__] ( identifier[side] = literal[string] )])) keyword[elif] identifier[isinstance] ( identifier[adj_nodes] [ literal[int] ], identifier[MVStation] ): identifier[line] [ literal[string] ]. identifier[append] ( literal[string] . identifier[join] ([ literal[string] , identifier[adj_nodes] [ literal[int] ]. identifier[__repr__] ( identifier[side] = literal[string] )])) keyword[else] : identifier[line] [ literal[string] ]. identifier[append] ( literal[string] . identifier[join] ([ literal[string] , identifier[repr] ( identifier[adj_nodes] [ literal[int] ])])) identifier[line] [ literal[string] ]. identifier[append] ( literal[string] ) identifier[line] [ literal[string] ]. identifier[append] ( identifier[l] . identifier[type] [ literal[string] ]* identifier[omega] / literal[int] * identifier[l] . identifier[length] ) identifier[line] [ literal[string] ]. identifier[append] ( identifier[l] . identifier[type] [ literal[string] ]* identifier[l] . identifier[length] ) identifier[line] [ literal[string] ]. identifier[append] ( identifier[sqrt] ( literal[int] )* identifier[l] . identifier[type] [ literal[string] ]* identifier[l] . identifier[type] [ literal[string] ]/ literal[int] ) identifier[line] [ literal[string] ]. identifier[append] ( identifier[l] . identifier[length] ) identifier[pypsa] . identifier[import_components_from_dataframe] ( identifier[pd] . identifier[DataFrame] ( identifier[bus] ). identifier[set_index] ( literal[string] ), literal[string] ) identifier[pypsa] . identifier[import_components_from_dataframe] ( identifier[pd] . identifier[DataFrame] ( identifier[storage] ). identifier[set_index] ( literal[string] ), literal[string] ) identifier[pypsa] . identifier[import_components_from_dataframe] ( identifier[pd] . identifier[DataFrame] ( identifier[line] ). identifier[set_index] ( literal[string] ), literal[string] ) identifier[timeseries_storage_p] = identifier[pd] . identifier[DataFrame] () identifier[timeseries_storage_q] = identifier[pd] . identifier[DataFrame] () keyword[for] identifier[s] keyword[in] identifier[storages] : identifier[timeseries_storage_p] [ identifier[repr] ( identifier[s] )]= identifier[s] . identifier[pypsa_timeseries] ( literal[string] ). identifier[loc] [ identifier[pypsa] . identifier[storage_units_t] . identifier[p_set] . identifier[index] ] identifier[timeseries_storage_q] [ identifier[repr] ( identifier[s] )]= identifier[s] . identifier[pypsa_timeseries] ( literal[string] ). identifier[loc] [ identifier[pypsa] . identifier[storage_units_t] . identifier[q_set] . 
identifier[index] ] identifier[import_series_from_dataframe] ( identifier[pypsa] , identifier[timeseries_storage_p] , literal[string] , literal[string] ) identifier[import_series_from_dataframe] ( identifier[pypsa] , identifier[timeseries_storage_q] , literal[string] , literal[string] )
def update_pypsa_storage(pypsa, storages, storages_lines): """ Adds storages and their lines to pypsa representation of the edisgo graph. This function effects the following attributes of the pypsa network: components ('StorageUnit'), storage_units, storage_units_t (p_set, q_set), buses, lines Parameters ----------- pypsa : :pypsa:`pypsa.Network<network>` storages : :obj:`list` List with storages of type :class:`~.grid.components.Storage` to add to pypsa network. storages_lines : :obj:`list` List with lines of type :class:`~.grid.components.Line` that connect storages to the grid. """ bus = {'name': [], 'v_nom': [], 'x': [], 'y': []} line = {'name': [], 'bus0': [], 'bus1': [], 'type': [], 'x': [], 'r': [], 's_nom': [], 'length': []} storage = {'name': [], 'bus': [], 'p_nom': [], 'state_of_charge_initial': [], 'efficiency_store': [], 'efficiency_dispatch': [], 'standing_loss': []} for s in storages: bus_name = '_'.join(['Bus', repr(s)]) storage['name'].append(repr(s)) storage['bus'].append(bus_name) storage['p_nom'].append(s.nominal_power / 1000.0) storage['state_of_charge_initial'].append(s.soc_initial) storage['efficiency_store'].append(s.efficiency_in) storage['efficiency_dispatch'].append(s.efficiency_out) storage['standing_loss'].append(s.standing_loss) bus['name'].append(bus_name) bus['v_nom'].append(s.grid.voltage_nom) bus['x'].append(s.geom.x) bus['y'].append(s.geom.y) # depends on [control=['for'], data=['s']] omega = 2 * pi * 50 for l in storages_lines: line['name'].append(repr(l)) adj_nodes = l.grid.graph.nodes_from_line(l) if isinstance(l.grid, LVGrid): if isinstance(adj_nodes[0], LVStation): line['bus0'].append('_'.join(['Bus', adj_nodes[0].__repr__(side='lv')])) # depends on [control=['if'], data=[]] else: line['bus0'].append('_'.join(['Bus', repr(adj_nodes[0])])) if isinstance(adj_nodes[1], LVStation): line['bus1'].append('_'.join(['Bus', adj_nodes[1].__repr__(side='lv')])) # depends on [control=['if'], data=[]] else: line['bus1'].append('_'.join(['Bus', repr(adj_nodes[1])])) # depends on [control=['if'], data=[]] else: if isinstance(adj_nodes[0], LVStation): line['bus0'].append('_'.join(['Bus', adj_nodes[0].__repr__(side='mv')])) # depends on [control=['if'], data=[]] elif isinstance(adj_nodes[0], MVStation): line['bus0'].append('_'.join(['Bus', adj_nodes[0].__repr__(side='lv')])) # depends on [control=['if'], data=[]] else: line['bus0'].append('_'.join(['Bus', repr(adj_nodes[0])])) if isinstance(adj_nodes[1], LVStation): line['bus1'].append('_'.join(['Bus', adj_nodes[1].__repr__(side='mv')])) # depends on [control=['if'], data=[]] elif isinstance(adj_nodes[1], MVStation): line['bus1'].append('_'.join(['Bus', adj_nodes[1].__repr__(side='lv')])) # depends on [control=['if'], data=[]] else: line['bus1'].append('_'.join(['Bus', repr(adj_nodes[1])])) line['type'].append('') line['x'].append(l.type['L'] * omega / 1000.0 * l.length) line['r'].append(l.type['R'] * l.length) line['s_nom'].append(sqrt(3) * l.type['I_max_th'] * l.type['U_n'] / 1000.0) line['length'].append(l.length) # depends on [control=['for'], data=['l']] # import new components to pypsa pypsa.import_components_from_dataframe(pd.DataFrame(bus).set_index('name'), 'Bus') pypsa.import_components_from_dataframe(pd.DataFrame(storage).set_index('name'), 'StorageUnit') pypsa.import_components_from_dataframe(pd.DataFrame(line).set_index('name'), 'Line') # import time series of storages and buses to pypsa timeseries_storage_p = pd.DataFrame() timeseries_storage_q = pd.DataFrame() for s in storages: 
timeseries_storage_p[repr(s)] = s.pypsa_timeseries('p').loc[pypsa.storage_units_t.p_set.index] timeseries_storage_q[repr(s)] = s.pypsa_timeseries('q').loc[pypsa.storage_units_t.q_set.index] # depends on [control=['for'], data=['s']] import_series_from_dataframe(pypsa, timeseries_storage_p, 'StorageUnit', 'p_set') import_series_from_dataframe(pypsa, timeseries_storage_q, 'StorageUnit', 'q_set')
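The component-import pattern used throughout the function, reduced to a single bus; a sketch assuming a pypsa.Network that exposes the same import_components_from_dataframe method called above (the bus values are placeholders):

import pandas as pd
import pypsa

network = pypsa.Network()
# Records are accumulated as dicts of lists, then handed over as a
# DataFrame indexed by component name, mirroring the 'bus' dict above.
bus = {'name': ['Bus_Storage_1'], 'v_nom': [20.0], 'x': [10.1], 'y': [52.3]}
network.import_components_from_dataframe(
    pd.DataFrame(bus).set_index('name'), 'Bus')
print(network.buses)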
def leave_group(self, group_jid): """ Leaves a specific group :param group_jid: The JID of the group to leave """ log.info("[+] Leaving group {}".format(group_jid)) return self._send_xmpp_element(group_adminship.LeaveGroupRequest(group_jid))
def function[leave_group, parameter[self, group_jid]]: constant[ Leaves a specific group :param group_jid: The JID of the group to leave ] call[name[log].info, parameter[call[constant[[+] Leaving group {}].format, parameter[name[group_jid]]]]] return[call[name[self]._send_xmpp_element, parameter[call[name[group_adminship].LeaveGroupRequest, parameter[name[group_jid]]]]]]
keyword[def] identifier[leave_group] ( identifier[self] , identifier[group_jid] ): literal[string] identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[group_jid] )) keyword[return] identifier[self] . identifier[_send_xmpp_element] ( identifier[group_adminship] . identifier[LeaveGroupRequest] ( identifier[group_jid] ))
def leave_group(self, group_jid): """ Leaves a specific group :param group_jid: The JID of the group to leave """ log.info('[+] Leaving group {}'.format(group_jid)) return self._send_xmpp_element(group_adminship.LeaveGroupRequest(group_jid))
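A hypothetical call; client stands in for the messaging-client object that exposes leave_group, and the group JID is a made-up placeholder:

client.leave_group('1100123456789_g@groups.kik.com')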
def add(self, data_bytes): '''Feed ASCII string or bytes to the signature function''' try: if isinstance(data_bytes, basestring): # Python 2.7 compatibility data_bytes = map(ord, data_bytes) except NameError: if isinstance(data_bytes, str): # This branch will be taken on Python 3 data_bytes = map(ord, data_bytes) for b in data_bytes: self._crc ^= (b << 56) & Signature.MASK64 for _ in range(8): if self._crc & (1 << 63): self._crc = ((self._crc << 1) & Signature.MASK64) ^ Signature.POLY else: self._crc <<= 1
def function[add, parameter[self, data_bytes]]: constant[Feed ASCII string or bytes to the signature function] <ast.Try object at 0x7da18bc715a0> for taget[name[b]] in starred[name[data_bytes]] begin[:] <ast.AugAssign object at 0x7da204620d00> for taget[name[_]] in starred[call[name[range], parameter[constant[8]]]] begin[:] if binary_operation[name[self]._crc <ast.BitAnd object at 0x7da2590d6b60> binary_operation[constant[1] <ast.LShift object at 0x7da2590d69e0> constant[63]]] begin[:] name[self]._crc assign[=] binary_operation[binary_operation[binary_operation[name[self]._crc <ast.LShift object at 0x7da2590d69e0> constant[1]] <ast.BitAnd object at 0x7da2590d6b60> name[Signature].MASK64] <ast.BitXor object at 0x7da2590d6b00> name[Signature].POLY]
keyword[def] identifier[add] ( identifier[self] , identifier[data_bytes] ): literal[string] keyword[try] : keyword[if] identifier[isinstance] ( identifier[data_bytes] , identifier[basestring] ): identifier[data_bytes] = identifier[map] ( identifier[ord] , identifier[data_bytes] ) keyword[except] identifier[NameError] : keyword[if] identifier[isinstance] ( identifier[data_bytes] , identifier[str] ): identifier[data_bytes] = identifier[map] ( identifier[ord] , identifier[data_bytes] ) keyword[for] identifier[b] keyword[in] identifier[data_bytes] : identifier[self] . identifier[_crc] ^=( identifier[b] << literal[int] )& identifier[Signature] . identifier[MASK64] keyword[for] identifier[_] keyword[in] identifier[range] ( literal[int] ): keyword[if] identifier[self] . identifier[_crc] &( literal[int] << literal[int] ): identifier[self] . identifier[_crc] =(( identifier[self] . identifier[_crc] << literal[int] )& identifier[Signature] . identifier[MASK64] )^ identifier[Signature] . identifier[POLY] keyword[else] : identifier[self] . identifier[_crc] <<= literal[int]
def add(self, data_bytes): """Feed ASCII string or bytes to the signature function""" try: if isinstance(data_bytes, basestring): # Python 2.7 compatibility data_bytes = map(ord, data_bytes) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except NameError: if isinstance(data_bytes, str): # This branch will be taken on Python 3 data_bytes = map(ord, data_bytes) # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]] for b in data_bytes: self._crc ^= b << 56 & Signature.MASK64 for _ in range(8): if self._crc & 1 << 63: self._crc = self._crc << 1 & Signature.MASK64 ^ Signature.POLY # depends on [control=['if'], data=[]] else: self._crc <<= 1 # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['b']]
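A minimal host class for the add method above, with assumed CRC-64 constants (the ECMA-182 polynomial; the actual POLY/MASK64 in the source class may differ):

class Signature:
    MASK64 = 0xFFFFFFFFFFFFFFFF
    POLY = 0x42F0E1EBA9EA3693   # assumed; ECMA-182 CRC-64 polynomial

    def __init__(self):
        self._crc = 0

    add = add  # bind the function defined above as a method

sig = Signature()
sig.add('hello')      # str path: characters mapped through ord()
sig.add(b'\x01\x02')  # bytes path: iterated directly as ints
print(hex(sig._crc))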
def set_version(self, version):
    """
    Set the version number of the certificate. Note that the
    version value is zero-based, e.g. a value of 0 is V1.

    :param version: The version number of the certificate.
    :type version: :py:class:`int`

    :return: ``None``
    """
    if not isinstance(version, int):
        raise TypeError("version must be an integer")

    _lib.X509_set_version(self._x509, version)
def function[set_version, parameter[self, version]]: constant[ Set the version number of the certificate. Note that the version value is zero-based, eg. a value of 0 is V1. :param version: The version number of the certificate. :type version: :py:class:`int` :return: ``None`` ] if <ast.UnaryOp object at 0x7da1b025b7f0> begin[:] <ast.Raise object at 0x7da1b025a050> call[name[_lib].X509_set_version, parameter[name[self]._x509, name[version]]]
keyword[def] identifier[set_version] ( identifier[self] , identifier[version] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[version] , identifier[int] ): keyword[raise] identifier[TypeError] ( literal[string] ) identifier[_lib] . identifier[X509_set_version] ( identifier[self] . identifier[_x509] , identifier[version] )
def set_version(self, version):
    """
    Set the version number of the certificate. Note that the
    version value is zero-based, e.g. a value of 0 is V1.

    :param version: The version number of the certificate.
    :type version: :py:class:`int`

    :return: ``None``
    """
    if not isinstance(version, int):
        raise TypeError('version must be an integer') # depends on [control=['if'], data=[]]
    _lib.X509_set_version(self._x509, version)
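Usage through pyOpenSSL, which this wrapper appears to come from; version 2 selects an X.509 v3 certificate because the field is zero-based:

from OpenSSL import crypto

cert = crypto.X509()
cert.set_version(2)            # zero-based: 2 means X.509 v3
assert cert.get_version() == 2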
def main(): """Builds a yaml file""" parser = argparse.ArgumentParser(description='Compose a yaml file.') parser.add_argument( 'root', type=argparse.FileType('r'), help='The root yaml file to compose.' ) args = parser.parse_args() result = yaml.load(args.root, Loader=ComposeLoader) print(yaml.dump(result))
def function[main, parameter[]]: constant[Builds a yaml file] variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]] call[name[parser].add_argument, parameter[constant[root]]] variable[args] assign[=] call[name[parser].parse_args, parameter[]] variable[result] assign[=] call[name[yaml].load, parameter[name[args].root]] call[name[print], parameter[call[name[yaml].dump, parameter[name[result]]]]]
keyword[def] identifier[main] (): literal[string] identifier[parser] = identifier[argparse] . identifier[ArgumentParser] ( identifier[description] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[argparse] . identifier[FileType] ( literal[string] ), identifier[help] = literal[string] ) identifier[args] = identifier[parser] . identifier[parse_args] () identifier[result] = identifier[yaml] . identifier[load] ( identifier[args] . identifier[root] , identifier[Loader] = identifier[ComposeLoader] ) identifier[print] ( identifier[yaml] . identifier[dump] ( identifier[result] ))
def main(): """Builds a yaml file""" parser = argparse.ArgumentParser(description='Compose a yaml file.') parser.add_argument('root', type=argparse.FileType('r'), help='The root yaml file to compose.') args = parser.parse_args() result = yaml.load(args.root, Loader=ComposeLoader) print(yaml.dump(result))
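ComposeLoader is referenced but not defined in the snippet; a plausible minimal version built on PyYAML's add_constructor hook, assuming composition happens through an !include tag (the project's real loader may well differ):

import yaml

class ComposeLoader(yaml.SafeLoader):
    """Loader that splices other yaml files in via an !include tag."""

def _include(loader, node):
    # Resolve the tagged scalar as a file path and load it recursively,
    # so included files may themselves contain !include tags.
    with open(loader.construct_scalar(node)) as f:
        return yaml.load(f, Loader=ComposeLoader)

ComposeLoader.add_constructor('!include', _include)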
def get_largest_component(G, strongly=False): """ Return a subgraph of the largest weakly or strongly connected component from a directed graph. Parameters ---------- G : networkx multidigraph strongly : bool if True, return the largest strongly instead of weakly connected component Returns ------- G : networkx multidigraph the largest connected component subgraph from the original graph """ start_time = time.time() original_len = len(list(G.nodes())) if strongly: # if the graph is not connected retain only the largest strongly connected component if not nx.is_strongly_connected(G): # get all the strongly connected components in graph then identify the largest sccs = nx.strongly_connected_components(G) largest_scc = max(sccs, key=len) G = induce_subgraph(G, largest_scc) msg = ('Graph was not connected, retained only the largest strongly ' 'connected component ({:,} of {:,} total nodes) in {:.2f} seconds') log(msg.format(len(list(G.nodes())), original_len, time.time()-start_time)) else: # if the graph is not connected retain only the largest weakly connected component if not nx.is_weakly_connected(G): # get all the weakly connected components in graph then identify the largest wccs = nx.weakly_connected_components(G) largest_wcc = max(wccs, key=len) G = induce_subgraph(G, largest_wcc) msg = ('Graph was not connected, retained only the largest weakly ' 'connected component ({:,} of {:,} total nodes) in {:.2f} seconds') log(msg.format(len(list(G.nodes())), original_len, time.time()-start_time)) return G
def function[get_largest_component, parameter[G, strongly]]: constant[ Return a subgraph of the largest weakly or strongly connected component from a directed graph. Parameters ---------- G : networkx multidigraph strongly : bool if True, return the largest strongly instead of weakly connected component Returns ------- G : networkx multidigraph the largest connected component subgraph from the original graph ] variable[start_time] assign[=] call[name[time].time, parameter[]] variable[original_len] assign[=] call[name[len], parameter[call[name[list], parameter[call[name[G].nodes, parameter[]]]]]] if name[strongly] begin[:] if <ast.UnaryOp object at 0x7da1b216fa60> begin[:] variable[sccs] assign[=] call[name[nx].strongly_connected_components, parameter[name[G]]] variable[largest_scc] assign[=] call[name[max], parameter[name[sccs]]] variable[G] assign[=] call[name[induce_subgraph], parameter[name[G], name[largest_scc]]] variable[msg] assign[=] constant[Graph was not connected, retained only the largest strongly connected component ({:,} of {:,} total nodes) in {:.2f} seconds] call[name[log], parameter[call[name[msg].format, parameter[call[name[len], parameter[call[name[list], parameter[call[name[G].nodes, parameter[]]]]]], name[original_len], binary_operation[call[name[time].time, parameter[]] - name[start_time]]]]]] return[name[G]]
keyword[def] identifier[get_largest_component] ( identifier[G] , identifier[strongly] = keyword[False] ): literal[string] identifier[start_time] = identifier[time] . identifier[time] () identifier[original_len] = identifier[len] ( identifier[list] ( identifier[G] . identifier[nodes] ())) keyword[if] identifier[strongly] : keyword[if] keyword[not] identifier[nx] . identifier[is_strongly_connected] ( identifier[G] ): identifier[sccs] = identifier[nx] . identifier[strongly_connected_components] ( identifier[G] ) identifier[largest_scc] = identifier[max] ( identifier[sccs] , identifier[key] = identifier[len] ) identifier[G] = identifier[induce_subgraph] ( identifier[G] , identifier[largest_scc] ) identifier[msg] =( literal[string] literal[string] ) identifier[log] ( identifier[msg] . identifier[format] ( identifier[len] ( identifier[list] ( identifier[G] . identifier[nodes] ())), identifier[original_len] , identifier[time] . identifier[time] ()- identifier[start_time] )) keyword[else] : keyword[if] keyword[not] identifier[nx] . identifier[is_weakly_connected] ( identifier[G] ): identifier[wccs] = identifier[nx] . identifier[weakly_connected_components] ( identifier[G] ) identifier[largest_wcc] = identifier[max] ( identifier[wccs] , identifier[key] = identifier[len] ) identifier[G] = identifier[induce_subgraph] ( identifier[G] , identifier[largest_wcc] ) identifier[msg] =( literal[string] literal[string] ) identifier[log] ( identifier[msg] . identifier[format] ( identifier[len] ( identifier[list] ( identifier[G] . identifier[nodes] ())), identifier[original_len] , identifier[time] . identifier[time] ()- identifier[start_time] )) keyword[return] identifier[G]
def get_largest_component(G, strongly=False): """ Return a subgraph of the largest weakly or strongly connected component from a directed graph. Parameters ---------- G : networkx multidigraph strongly : bool if True, return the largest strongly instead of weakly connected component Returns ------- G : networkx multidigraph the largest connected component subgraph from the original graph """ start_time = time.time() original_len = len(list(G.nodes())) if strongly: # if the graph is not connected retain only the largest strongly connected component if not nx.is_strongly_connected(G): # get all the strongly connected components in graph then identify the largest sccs = nx.strongly_connected_components(G) largest_scc = max(sccs, key=len) G = induce_subgraph(G, largest_scc) msg = 'Graph was not connected, retained only the largest strongly connected component ({:,} of {:,} total nodes) in {:.2f} seconds' log(msg.format(len(list(G.nodes())), original_len, time.time() - start_time)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # if the graph is not connected retain only the largest weakly connected component elif not nx.is_weakly_connected(G): # get all the weakly connected components in graph then identify the largest wccs = nx.weakly_connected_components(G) largest_wcc = max(wccs, key=len) G = induce_subgraph(G, largest_wcc) msg = 'Graph was not connected, retained only the largest weakly connected component ({:,} of {:,} total nodes) in {:.2f} seconds' log(msg.format(len(list(G.nodes())), original_len, time.time() - start_time)) # depends on [control=['if'], data=[]] return G
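The weak/strong distinction on a toy digraph: two 2-cycles joined by a one-way edge are weakly connected as a whole, but the largest strongly connected component is a single cycle (needs only networkx):

import networkx as nx

G = nx.MultiDiGraph()
G.add_edges_from([(1, 2), (2, 1), (2, 3), (3, 4), (4, 3)])
# All four nodes form one weakly connected component...
print(len(max(nx.weakly_connected_components(G), key=len)))    # 4
# ...but each 2-cycle is its own strongly connected component.
print(len(max(nx.strongly_connected_components(G), key=len)))  # 2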
def print_boolean_net(self, out_file=None): """Return a Boolean network from the assembled graph. See https://github.com/ialbert/booleannet for details about the format used to encode the Boolean rules. Parameters ---------- out_file : Optional[str] A file name in which the Boolean network is saved. Returns ------- full_str : str The string representing the Boolean network. """ init_str = '' for node_key in self.graph.nodes(): node_name = self.graph.node[node_key]['name'] init_str += '%s = False\n' % node_name rule_str = '' for node_key in self.graph.nodes(): node_name = self.graph.node[node_key]['name'] in_edges = self.graph.in_edges(node_key) if not in_edges: continue parents = [e[0] for e in in_edges] polarities = [self.graph.edge[e[0]][node_key]['polarity'] for e in in_edges] pos_parents = [par for par, pol in zip(parents, polarities) if pol == 'positive'] neg_parents = [par for par, pol in zip(parents, polarities) if pol == 'negative'] rhs_pos_parts = [] for par in pos_parents: rhs_pos_parts.append(self.graph.node[par]['name']) rhs_pos_str = ' or '.join(rhs_pos_parts) rhs_neg_parts = [] for par in neg_parents: rhs_neg_parts.append(self.graph.node[par]['name']) rhs_neg_str = ' or '.join(rhs_neg_parts) if rhs_pos_str: if rhs_neg_str: rhs_str = '(' + rhs_pos_str + \ ') and not (' + rhs_neg_str + ')' else: rhs_str = rhs_pos_str else: rhs_str = 'not (' + rhs_neg_str + ')' node_eq = '%s* = %s\n' % (node_name, rhs_str) rule_str += node_eq full_str = init_str + '\n' + rule_str if out_file is not None: with open(out_file, 'wt') as fh: fh.write(full_str) return full_str
def function[print_boolean_net, parameter[self, out_file]]: constant[Return a Boolean network from the assembled graph. See https://github.com/ialbert/booleannet for details about the format used to encode the Boolean rules. Parameters ---------- out_file : Optional[str] A file name in which the Boolean network is saved. Returns ------- full_str : str The string representing the Boolean network. ] variable[init_str] assign[=] constant[] for taget[name[node_key]] in starred[call[name[self].graph.nodes, parameter[]]] begin[:] variable[node_name] assign[=] call[call[name[self].graph.node][name[node_key]]][constant[name]] <ast.AugAssign object at 0x7da18f00c250> variable[rule_str] assign[=] constant[] for taget[name[node_key]] in starred[call[name[self].graph.nodes, parameter[]]] begin[:] variable[node_name] assign[=] call[call[name[self].graph.node][name[node_key]]][constant[name]] variable[in_edges] assign[=] call[name[self].graph.in_edges, parameter[name[node_key]]] if <ast.UnaryOp object at 0x7da18f00e500> begin[:] continue variable[parents] assign[=] <ast.ListComp object at 0x7da18f00dea0> variable[polarities] assign[=] <ast.ListComp object at 0x7da18f00fbe0> variable[pos_parents] assign[=] <ast.ListComp object at 0x7da18f00e290> variable[neg_parents] assign[=] <ast.ListComp object at 0x7da18f00d7b0> variable[rhs_pos_parts] assign[=] list[[]] for taget[name[par]] in starred[name[pos_parents]] begin[:] call[name[rhs_pos_parts].append, parameter[call[call[name[self].graph.node][name[par]]][constant[name]]]] variable[rhs_pos_str] assign[=] call[constant[ or ].join, parameter[name[rhs_pos_parts]]] variable[rhs_neg_parts] assign[=] list[[]] for taget[name[par]] in starred[name[neg_parents]] begin[:] call[name[rhs_neg_parts].append, parameter[call[call[name[self].graph.node][name[par]]][constant[name]]]] variable[rhs_neg_str] assign[=] call[constant[ or ].join, parameter[name[rhs_neg_parts]]] if name[rhs_pos_str] begin[:] if name[rhs_neg_str] begin[:] variable[rhs_str] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[constant[(] + name[rhs_pos_str]] + constant[) and not (]] + name[rhs_neg_str]] + constant[)]] variable[node_eq] assign[=] binary_operation[constant[%s* = %s ] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f00f7f0>, <ast.Name object at 0x7da18f00ec50>]]] <ast.AugAssign object at 0x7da18f00e200> variable[full_str] assign[=] binary_operation[binary_operation[name[init_str] + constant[ ]] + name[rule_str]] if compare[name[out_file] is_not constant[None]] begin[:] with call[name[open], parameter[name[out_file], constant[wt]]] begin[:] call[name[fh].write, parameter[name[full_str]]] return[name[full_str]]
keyword[def] identifier[print_boolean_net] ( identifier[self] , identifier[out_file] = keyword[None] ): literal[string] identifier[init_str] = literal[string] keyword[for] identifier[node_key] keyword[in] identifier[self] . identifier[graph] . identifier[nodes] (): identifier[node_name] = identifier[self] . identifier[graph] . identifier[node] [ identifier[node_key] ][ literal[string] ] identifier[init_str] += literal[string] % identifier[node_name] identifier[rule_str] = literal[string] keyword[for] identifier[node_key] keyword[in] identifier[self] . identifier[graph] . identifier[nodes] (): identifier[node_name] = identifier[self] . identifier[graph] . identifier[node] [ identifier[node_key] ][ literal[string] ] identifier[in_edges] = identifier[self] . identifier[graph] . identifier[in_edges] ( identifier[node_key] ) keyword[if] keyword[not] identifier[in_edges] : keyword[continue] identifier[parents] =[ identifier[e] [ literal[int] ] keyword[for] identifier[e] keyword[in] identifier[in_edges] ] identifier[polarities] =[ identifier[self] . identifier[graph] . identifier[edge] [ identifier[e] [ literal[int] ]][ identifier[node_key] ][ literal[string] ] keyword[for] identifier[e] keyword[in] identifier[in_edges] ] identifier[pos_parents] =[ identifier[par] keyword[for] identifier[par] , identifier[pol] keyword[in] identifier[zip] ( identifier[parents] , identifier[polarities] ) keyword[if] identifier[pol] == literal[string] ] identifier[neg_parents] =[ identifier[par] keyword[for] identifier[par] , identifier[pol] keyword[in] identifier[zip] ( identifier[parents] , identifier[polarities] ) keyword[if] identifier[pol] == literal[string] ] identifier[rhs_pos_parts] =[] keyword[for] identifier[par] keyword[in] identifier[pos_parents] : identifier[rhs_pos_parts] . identifier[append] ( identifier[self] . identifier[graph] . identifier[node] [ identifier[par] ][ literal[string] ]) identifier[rhs_pos_str] = literal[string] . identifier[join] ( identifier[rhs_pos_parts] ) identifier[rhs_neg_parts] =[] keyword[for] identifier[par] keyword[in] identifier[neg_parents] : identifier[rhs_neg_parts] . identifier[append] ( identifier[self] . identifier[graph] . identifier[node] [ identifier[par] ][ literal[string] ]) identifier[rhs_neg_str] = literal[string] . identifier[join] ( identifier[rhs_neg_parts] ) keyword[if] identifier[rhs_pos_str] : keyword[if] identifier[rhs_neg_str] : identifier[rhs_str] = literal[string] + identifier[rhs_pos_str] + literal[string] + identifier[rhs_neg_str] + literal[string] keyword[else] : identifier[rhs_str] = identifier[rhs_pos_str] keyword[else] : identifier[rhs_str] = literal[string] + identifier[rhs_neg_str] + literal[string] identifier[node_eq] = literal[string] %( identifier[node_name] , identifier[rhs_str] ) identifier[rule_str] += identifier[node_eq] identifier[full_str] = identifier[init_str] + literal[string] + identifier[rule_str] keyword[if] identifier[out_file] keyword[is] keyword[not] keyword[None] : keyword[with] identifier[open] ( identifier[out_file] , literal[string] ) keyword[as] identifier[fh] : identifier[fh] . identifier[write] ( identifier[full_str] ) keyword[return] identifier[full_str]
def print_boolean_net(self, out_file=None): """Return a Boolean network from the assembled graph. See https://github.com/ialbert/booleannet for details about the format used to encode the Boolean rules. Parameters ---------- out_file : Optional[str] A file name in which the Boolean network is saved. Returns ------- full_str : str The string representing the Boolean network. """ init_str = '' for node_key in self.graph.nodes(): node_name = self.graph.node[node_key]['name'] init_str += '%s = False\n' % node_name # depends on [control=['for'], data=['node_key']] rule_str = '' for node_key in self.graph.nodes(): node_name = self.graph.node[node_key]['name'] in_edges = self.graph.in_edges(node_key) if not in_edges: continue # depends on [control=['if'], data=[]] parents = [e[0] for e in in_edges] polarities = [self.graph.edge[e[0]][node_key]['polarity'] for e in in_edges] pos_parents = [par for (par, pol) in zip(parents, polarities) if pol == 'positive'] neg_parents = [par for (par, pol) in zip(parents, polarities) if pol == 'negative'] rhs_pos_parts = [] for par in pos_parents: rhs_pos_parts.append(self.graph.node[par]['name']) # depends on [control=['for'], data=['par']] rhs_pos_str = ' or '.join(rhs_pos_parts) rhs_neg_parts = [] for par in neg_parents: rhs_neg_parts.append(self.graph.node[par]['name']) # depends on [control=['for'], data=['par']] rhs_neg_str = ' or '.join(rhs_neg_parts) if rhs_pos_str: if rhs_neg_str: rhs_str = '(' + rhs_pos_str + ') and not (' + rhs_neg_str + ')' # depends on [control=['if'], data=[]] else: rhs_str = rhs_pos_str # depends on [control=['if'], data=[]] else: rhs_str = 'not (' + rhs_neg_str + ')' node_eq = '%s* = %s\n' % (node_name, rhs_str) rule_str += node_eq # depends on [control=['for'], data=['node_key']] full_str = init_str + '\n' + rule_str if out_file is not None: with open(out_file, 'wt') as fh: fh.write(full_str) # depends on [control=['with'], data=['fh']] # depends on [control=['if'], data=['out_file']] return full_str
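A minimal sketch (editor's addition) of the rule-assembly step in print_boolean_net, assuming plain lists of regulator names instead of the assembler's graph; the node names are hypothetical. The operator precedence (OR within each polarity group, AND NOT between groups) mirrors the rhs_str construction above.

def boolean_rule(node, pos_parents, neg_parents):
    # OR together each polarity group, then gate activators by absent inhibitors
    pos = ' or '.join(pos_parents)
    neg = ' or '.join(neg_parents)
    if pos and neg:
        rhs = '(' + pos + ') and not (' + neg + ')'
    elif pos:
        rhs = pos
    else:
        rhs = 'not (' + neg + ')'
    return '%s* = %s' % (node, rhs)

# One activator, two inhibitors (made-up names):
print(boolean_rule('MAPK1', ['MAP2K1'], ['DUSP1', 'DUSP6']))
# MAPK1* = (MAP2K1) and not (DUSP1 or DUSP6)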
def _getAnnotationAnalysis(self, varFile): """ Assembles metadata within the VCF header into a GA4GH Analysis object. :return: protocol.Analysis """ header = varFile.header analysis = protocol.Analysis() formats = header.formats.items() infos = header.info.items() filters = header.filters.items() for prefix, content in [("FORMAT", formats), ("INFO", infos), ("FILTER", filters)]: for contentKey, value in content: key = "{0}.{1}".format(prefix, value.name) if key not in analysis.attributes.attr: analysis.attributes.attr[key].Clear() if value.description is not None: analysis.attributes.attr[ key].values.add().string_value = value.description analysis.created = self._creationTime analysis.updated = self._updatedTime for r in header.records: # Don't add a key to info if there's nothing in the value if r.value is not None: if r.key not in analysis.attributes.attr: analysis.attributes.attr[r.key].Clear() analysis.attributes.attr[r.key] \ .values.add().string_value = str(r.value) if r.key == "created" or r.key == "fileDate": # TODO handle more date formats try: if '-' in r.value: fmtStr = "%Y-%m-%d" else: fmtStr = "%Y%m%d" analysis.created = datetime.datetime.strptime( r.value, fmtStr).isoformat() + "Z" except ValueError: # is there a logger we should tell? # print("INFO: Could not parse variant annotation time") pass # analysis.create_date_time remains datetime.now() if r.key == "software": analysis.software.append(r.value) if r.key == "name": analysis.name = r.value if r.key == "description": analysis.description = r.value analysis.id = str(datamodel.VariantAnnotationSetAnalysisCompoundId( self._compoundId, "analysis")) return analysis
def function[_getAnnotationAnalysis, parameter[self, varFile]]: constant[ Assembles metadata within the VCF header into a GA4GH Analysis object. :return: protocol.Analysis ] variable[header] assign[=] name[varFile].header variable[analysis] assign[=] call[name[protocol].Analysis, parameter[]] variable[formats] assign[=] call[name[header].formats.items, parameter[]] variable[infos] assign[=] call[name[header].info.items, parameter[]] variable[filters] assign[=] call[name[header].filters.items, parameter[]] for taget[tuple[[<ast.Name object at 0x7da18bcc8df0>, <ast.Name object at 0x7da18bcc87f0>]]] in starred[list[[<ast.Tuple object at 0x7da18bcc9c60>, <ast.Tuple object at 0x7da18bccae60>, <ast.Tuple object at 0x7da18bcc8460>]]] begin[:] for taget[tuple[[<ast.Name object at 0x7da18bccba30>, <ast.Name object at 0x7da18bcca290>]]] in starred[name[content]] begin[:] variable[key] assign[=] call[constant[{0}.{1}].format, parameter[name[prefix], name[value].name]] if compare[name[key] <ast.NotIn object at 0x7da2590d7190> name[analysis].attributes.attr] begin[:] call[call[name[analysis].attributes.attr][name[key]].Clear, parameter[]] if compare[name[value].description is_not constant[None]] begin[:] call[call[name[analysis].attributes.attr][name[key]].values.add, parameter[]].string_value assign[=] name[value].description name[analysis].created assign[=] name[self]._creationTime name[analysis].updated assign[=] name[self]._updatedTime for taget[name[r]] in starred[name[header].records] begin[:] if compare[name[r].value is_not constant[None]] begin[:] if compare[name[r].key <ast.NotIn object at 0x7da2590d7190> name[analysis].attributes.attr] begin[:] call[call[name[analysis].attributes.attr][name[r].key].Clear, parameter[]] call[call[name[analysis].attributes.attr][name[r].key].values.add, parameter[]].string_value assign[=] call[name[str], parameter[name[r].value]] if <ast.BoolOp object at 0x7da204564130> begin[:] <ast.Try object at 0x7da2045668c0> if compare[name[r].key equal[==] constant[software]] begin[:] call[name[analysis].software.append, parameter[name[r].value]] if compare[name[r].key equal[==] constant[name]] begin[:] name[analysis].name assign[=] name[r].value if compare[name[r].key equal[==] constant[description]] begin[:] name[analysis].description assign[=] name[r].value name[analysis].id assign[=] call[name[str], parameter[call[name[datamodel].VariantAnnotationSetAnalysisCompoundId, parameter[name[self]._compoundId, constant[analysis]]]]] return[name[analysis]]
keyword[def] identifier[_getAnnotationAnalysis] ( identifier[self] , identifier[varFile] ): literal[string] identifier[header] = identifier[varFile] . identifier[header] identifier[analysis] = identifier[protocol] . identifier[Analysis] () identifier[formats] = identifier[header] . identifier[formats] . identifier[items] () identifier[infos] = identifier[header] . identifier[info] . identifier[items] () identifier[filters] = identifier[header] . identifier[filters] . identifier[items] () keyword[for] identifier[prefix] , identifier[content] keyword[in] [( literal[string] , identifier[formats] ),( literal[string] , identifier[infos] ), ( literal[string] , identifier[filters] )]: keyword[for] identifier[contentKey] , identifier[value] keyword[in] identifier[content] : identifier[key] = literal[string] . identifier[format] ( identifier[prefix] , identifier[value] . identifier[name] ) keyword[if] identifier[key] keyword[not] keyword[in] identifier[analysis] . identifier[attributes] . identifier[attr] : identifier[analysis] . identifier[attributes] . identifier[attr] [ identifier[key] ]. identifier[Clear] () keyword[if] identifier[value] . identifier[description] keyword[is] keyword[not] keyword[None] : identifier[analysis] . identifier[attributes] . identifier[attr] [ identifier[key] ]. identifier[values] . identifier[add] (). identifier[string_value] = identifier[value] . identifier[description] identifier[analysis] . identifier[created] = identifier[self] . identifier[_creationTime] identifier[analysis] . identifier[updated] = identifier[self] . identifier[_updatedTime] keyword[for] identifier[r] keyword[in] identifier[header] . identifier[records] : keyword[if] identifier[r] . identifier[value] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[r] . identifier[key] keyword[not] keyword[in] identifier[analysis] . identifier[attributes] . identifier[attr] : identifier[analysis] . identifier[attributes] . identifier[attr] [ identifier[r] . identifier[key] ]. identifier[Clear] () identifier[analysis] . identifier[attributes] . identifier[attr] [ identifier[r] . identifier[key] ]. identifier[values] . identifier[add] (). identifier[string_value] = identifier[str] ( identifier[r] . identifier[value] ) keyword[if] identifier[r] . identifier[key] == literal[string] keyword[or] identifier[r] . identifier[key] == literal[string] : keyword[try] : keyword[if] literal[string] keyword[in] identifier[r] . identifier[value] : identifier[fmtStr] = literal[string] keyword[else] : identifier[fmtStr] = literal[string] identifier[analysis] . identifier[created] = identifier[datetime] . identifier[datetime] . identifier[strptime] ( identifier[r] . identifier[value] , identifier[fmtStr] ). identifier[isoformat] ()+ literal[string] keyword[except] identifier[ValueError] : keyword[pass] keyword[if] identifier[r] . identifier[key] == literal[string] : identifier[analysis] . identifier[software] . identifier[append] ( identifier[r] . identifier[value] ) keyword[if] identifier[r] . identifier[key] == literal[string] : identifier[analysis] . identifier[name] = identifier[r] . identifier[value] keyword[if] identifier[r] . identifier[key] == literal[string] : identifier[analysis] . identifier[description] = identifier[r] . identifier[value] identifier[analysis] . identifier[id] = identifier[str] ( identifier[datamodel] . identifier[VariantAnnotationSetAnalysisCompoundId] ( identifier[self] . identifier[_compoundId] , literal[string] )) keyword[return] identifier[analysis]
def _getAnnotationAnalysis(self, varFile): """ Assembles metadata within the VCF header into a GA4GH Analysis object. :return: protocol.Analysis """ header = varFile.header analysis = protocol.Analysis() formats = header.formats.items() infos = header.info.items() filters = header.filters.items() for (prefix, content) in [('FORMAT', formats), ('INFO', infos), ('FILTER', filters)]: for (contentKey, value) in content: key = '{0}.{1}'.format(prefix, value.name) if key not in analysis.attributes.attr: analysis.attributes.attr[key].Clear() # depends on [control=['if'], data=['key']] if value.description is not None: analysis.attributes.attr[key].values.add().string_value = value.description # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] analysis.created = self._creationTime analysis.updated = self._updatedTime for r in header.records: # Don't add a key to info if there's nothing in the value if r.value is not None: if r.key not in analysis.attributes.attr: analysis.attributes.attr[r.key].Clear() # depends on [control=['if'], data=[]] analysis.attributes.attr[r.key].values.add().string_value = str(r.value) # depends on [control=['if'], data=[]] if r.key == 'created' or r.key == 'fileDate': # TODO handle more date formats try: if '-' in r.value: fmtStr = '%Y-%m-%d' # depends on [control=['if'], data=[]] else: fmtStr = '%Y%m%d' analysis.created = datetime.datetime.strptime(r.value, fmtStr).isoformat() + 'Z' # depends on [control=['try'], data=[]] except ValueError: # is there a logger we should tell? # print("INFO: Could not parse variant annotation time") pass # analysis.create_date_time remains datetime.now() # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] if r.key == 'software': analysis.software.append(r.value) # depends on [control=['if'], data=[]] if r.key == 'name': analysis.name = r.value # depends on [control=['if'], data=[]] if r.key == 'description': analysis.description = r.value # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['r']] analysis.id = str(datamodel.VariantAnnotationSetAnalysisCompoundId(self._compoundId, 'analysis')) return analysis
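The date-format fallback used in _getAnnotationAnalysis, isolated as a sketch (editor's addition): VCF headers may carry either an ISO-style "2016-09-01" or a compact "20160901" value, so the strptime format is chosen by the presence of '-'. The sample values are illustrative only.

import datetime

def parse_vcf_date(value):
    fmt = "%Y-%m-%d" if '-' in value else "%Y%m%d"
    try:
        return datetime.datetime.strptime(value, fmt).isoformat() + "Z"
    except ValueError:
        return None  # unparseable dates are silently skipped, as above

print(parse_vcf_date("2016-09-01"))  # 2016-09-01T00:00:00Z
print(parse_vcf_date("20160901"))    # 2016-09-01T00:00:00Z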
def fork_processes(num_processes: Optional[int], max_restarts: int = None) -> int: """Starts multiple worker processes. If ``num_processes`` is None or <= 0, we detect the number of cores available on this machine and fork that number of child processes. If ``num_processes`` is given and > 0, we fork that specific number of sub-processes. Since we use processes and not threads, there is no shared memory between any server code. Note that multiple processes are not compatible with the autoreload module (or the ``autoreload=True`` option to `tornado.web.Application` which defaults to True when ``debug=True``). When using multiple processes, no IOLoops can be created or referenced until after the call to ``fork_processes``. In each child process, ``fork_processes`` returns its *task id*, a number between 0 and ``num_processes``. Processes that exit abnormally (due to a signal or non-zero exit status) are restarted with the same id (up to ``max_restarts`` times). In the parent process, ``fork_processes`` returns None if all child processes have exited normally, but will otherwise only exit by throwing an exception. max_restarts defaults to 100. Availability: Unix """ if max_restarts is None: max_restarts = 100 global _task_id assert _task_id is None if num_processes is None or num_processes <= 0: num_processes = cpu_count() gen_log.info("Starting %d processes", num_processes) children = {} def start_child(i: int) -> Optional[int]: pid = os.fork() if pid == 0: # child process _reseed_random() global _task_id _task_id = i return i else: children[pid] = i return None for i in range(num_processes): id = start_child(i) if id is not None: return id num_restarts = 0 while children: try: pid, status = os.wait() except OSError as e: if errno_from_exception(e) == errno.EINTR: continue raise if pid not in children: continue id = children.pop(pid) if os.WIFSIGNALED(status): gen_log.warning( "child %d (pid %d) killed by signal %d, restarting", id, pid, os.WTERMSIG(status), ) elif os.WEXITSTATUS(status) != 0: gen_log.warning( "child %d (pid %d) exited with status %d, restarting", id, pid, os.WEXITSTATUS(status), ) else: gen_log.info("child %d (pid %d) exited normally", id, pid) continue num_restarts += 1 if num_restarts > max_restarts: raise RuntimeError("Too many child restarts, giving up") new_id = start_child(id) if new_id is not None: return new_id # All child processes exited cleanly, so exit the master process # instead of just returning to right after the call to # fork_processes (which will probably just start up another IOLoop # unless the caller checks the return value). sys.exit(0)
def function[fork_processes, parameter[num_processes, max_restarts]]: constant[Starts multiple worker processes. If ``num_processes`` is None or <= 0, we detect the number of cores available on this machine and fork that number of child processes. If ``num_processes`` is given and > 0, we fork that specific number of sub-processes. Since we use processes and not threads, there is no shared memory between any server code. Note that multiple processes are not compatible with the autoreload module (or the ``autoreload=True`` option to `tornado.web.Application` which defaults to True when ``debug=True``). When using multiple processes, no IOLoops can be created or referenced until after the call to ``fork_processes``. In each child process, ``fork_processes`` returns its *task id*, a number between 0 and ``num_processes``. Processes that exit abnormally (due to a signal or non-zero exit status) are restarted with the same id (up to ``max_restarts`` times). In the parent process, ``fork_processes`` returns None if all child processes have exited normally, but will otherwise only exit by throwing an exception. max_restarts defaults to 100. Availability: Unix ] if compare[name[max_restarts] is constant[None]] begin[:] variable[max_restarts] assign[=] constant[100] <ast.Global object at 0x7da1b1f75000> assert[compare[name[_task_id] is constant[None]]] if <ast.BoolOp object at 0x7da1b1f74100> begin[:] variable[num_processes] assign[=] call[name[cpu_count], parameter[]] call[name[gen_log].info, parameter[constant[Starting %d processes], name[num_processes]]] variable[children] assign[=] dictionary[[], []] def function[start_child, parameter[i]]: variable[pid] assign[=] call[name[os].fork, parameter[]] if compare[name[pid] equal[==] constant[0]] begin[:] call[name[_reseed_random], parameter[]] <ast.Global object at 0x7da1b1f74400> variable[_task_id] assign[=] name[i] return[name[i]] for taget[name[i]] in starred[call[name[range], parameter[name[num_processes]]]] begin[:] variable[id] assign[=] call[name[start_child], parameter[name[i]]] if compare[name[id] is_not constant[None]] begin[:] return[name[id]] variable[num_restarts] assign[=] constant[0] while name[children] begin[:] <ast.Try object at 0x7da1b1fe51b0> if compare[name[pid] <ast.NotIn object at 0x7da2590d7190> name[children]] begin[:] continue variable[id] assign[=] call[name[children].pop, parameter[name[pid]]] if call[name[os].WIFSIGNALED, parameter[name[status]]] begin[:] call[name[gen_log].warning, parameter[constant[child %d (pid %d) killed by signal %d, restarting], name[id], name[pid], call[name[os].WTERMSIG, parameter[name[status]]]]] <ast.AugAssign object at 0x7da1b20c8340> if compare[name[num_restarts] greater[>] name[max_restarts]] begin[:] <ast.Raise object at 0x7da1b20cb250> variable[new_id] assign[=] call[name[start_child], parameter[name[id]]] if compare[name[new_id] is_not constant[None]] begin[:] return[name[new_id]] call[name[sys].exit, parameter[constant[0]]]
keyword[def] identifier[fork_processes] ( identifier[num_processes] : identifier[Optional] [ identifier[int] ], identifier[max_restarts] : identifier[int] = keyword[None] )-> identifier[int] : literal[string] keyword[if] identifier[max_restarts] keyword[is] keyword[None] : identifier[max_restarts] = literal[int] keyword[global] identifier[_task_id] keyword[assert] identifier[_task_id] keyword[is] keyword[None] keyword[if] identifier[num_processes] keyword[is] keyword[None] keyword[or] identifier[num_processes] <= literal[int] : identifier[num_processes] = identifier[cpu_count] () identifier[gen_log] . identifier[info] ( literal[string] , identifier[num_processes] ) identifier[children] ={} keyword[def] identifier[start_child] ( identifier[i] : identifier[int] )-> identifier[Optional] [ identifier[int] ]: identifier[pid] = identifier[os] . identifier[fork] () keyword[if] identifier[pid] == literal[int] : identifier[_reseed_random] () keyword[global] identifier[_task_id] identifier[_task_id] = identifier[i] keyword[return] identifier[i] keyword[else] : identifier[children] [ identifier[pid] ]= identifier[i] keyword[return] keyword[None] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[num_processes] ): identifier[id] = identifier[start_child] ( identifier[i] ) keyword[if] identifier[id] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[id] identifier[num_restarts] = literal[int] keyword[while] identifier[children] : keyword[try] : identifier[pid] , identifier[status] = identifier[os] . identifier[wait] () keyword[except] identifier[OSError] keyword[as] identifier[e] : keyword[if] identifier[errno_from_exception] ( identifier[e] )== identifier[errno] . identifier[EINTR] : keyword[continue] keyword[raise] keyword[if] identifier[pid] keyword[not] keyword[in] identifier[children] : keyword[continue] identifier[id] = identifier[children] . identifier[pop] ( identifier[pid] ) keyword[if] identifier[os] . identifier[WIFSIGNALED] ( identifier[status] ): identifier[gen_log] . identifier[warning] ( literal[string] , identifier[id] , identifier[pid] , identifier[os] . identifier[WTERMSIG] ( identifier[status] ), ) keyword[elif] identifier[os] . identifier[WEXITSTATUS] ( identifier[status] )!= literal[int] : identifier[gen_log] . identifier[warning] ( literal[string] , identifier[id] , identifier[pid] , identifier[os] . identifier[WEXITSTATUS] ( identifier[status] ), ) keyword[else] : identifier[gen_log] . identifier[info] ( literal[string] , identifier[id] , identifier[pid] ) keyword[continue] identifier[num_restarts] += literal[int] keyword[if] identifier[num_restarts] > identifier[max_restarts] : keyword[raise] identifier[RuntimeError] ( literal[string] ) identifier[new_id] = identifier[start_child] ( identifier[id] ) keyword[if] identifier[new_id] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[new_id] identifier[sys] . identifier[exit] ( literal[int] )
def fork_processes(num_processes: Optional[int], max_restarts: int=None) -> int: """Starts multiple worker processes. If ``num_processes`` is None or <= 0, we detect the number of cores available on this machine and fork that number of child processes. If ``num_processes`` is given and > 0, we fork that specific number of sub-processes. Since we use processes and not threads, there is no shared memory between any server code. Note that multiple processes are not compatible with the autoreload module (or the ``autoreload=True`` option to `tornado.web.Application` which defaults to True when ``debug=True``). When using multiple processes, no IOLoops can be created or referenced until after the call to ``fork_processes``. In each child process, ``fork_processes`` returns its *task id*, a number between 0 and ``num_processes``. Processes that exit abnormally (due to a signal or non-zero exit status) are restarted with the same id (up to ``max_restarts`` times). In the parent process, ``fork_processes`` returns None if all child processes have exited normally, but will otherwise only exit by throwing an exception. max_restarts defaults to 100. Availability: Unix """ if max_restarts is None: max_restarts = 100 # depends on [control=['if'], data=['max_restarts']] global _task_id assert _task_id is None if num_processes is None or num_processes <= 0: num_processes = cpu_count() # depends on [control=['if'], data=[]] gen_log.info('Starting %d processes', num_processes) children = {} def start_child(i: int) -> Optional[int]: pid = os.fork() if pid == 0: # child process _reseed_random() global _task_id _task_id = i return i # depends on [control=['if'], data=[]] else: children[pid] = i return None for i in range(num_processes): id = start_child(i) if id is not None: return id # depends on [control=['if'], data=['id']] # depends on [control=['for'], data=['i']] num_restarts = 0 while children: try: (pid, status) = os.wait() # depends on [control=['try'], data=[]] except OSError as e: if errno_from_exception(e) == errno.EINTR: continue # depends on [control=['if'], data=[]] raise # depends on [control=['except'], data=['e']] if pid not in children: continue # depends on [control=['if'], data=[]] id = children.pop(pid) if os.WIFSIGNALED(status): gen_log.warning('child %d (pid %d) killed by signal %d, restarting', id, pid, os.WTERMSIG(status)) # depends on [control=['if'], data=[]] elif os.WEXITSTATUS(status) != 0: gen_log.warning('child %d (pid %d) exited with status %d, restarting', id, pid, os.WEXITSTATUS(status)) # depends on [control=['if'], data=[]] else: gen_log.info('child %d (pid %d) exited normally', id, pid) continue num_restarts += 1 if num_restarts > max_restarts: raise RuntimeError('Too many child restarts, giving up') # depends on [control=['if'], data=[]] new_id = start_child(id) if new_id is not None: return new_id # depends on [control=['if'], data=['new_id']] # depends on [control=['while'], data=[]] # All child processes exited cleanly, so exit the master process # instead of just returning to right after the call to # fork_processes (which will probably just start up another IOLoop # unless the caller checks the return value). sys.exit(0)
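A hedged usage sketch (editor's addition) of the function above, which ships as tornado.process.fork_processes. The port and empty route table are placeholders; the sockets must be bound in the parent before forking so all children share them, and this only works on Unix because it relies on os.fork.

import tornado.httpserver
import tornado.ioloop
import tornado.netutil
import tornado.process
import tornado.web

if __name__ == "__main__":
    sockets = tornado.netutil.bind_sockets(8888)  # bind before forking
    tornado.process.fork_processes(0)             # 0 -> one child per CPU core
    # each child reaches this point with its own IOLoop
    server = tornado.httpserver.HTTPServer(tornado.web.Application([]))
    server.add_sockets(sockets)
    tornado.ioloop.IOLoop.current().start()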
def argument_search(self): """ Uses the command line arguments to fill the search function and call it. """ arguments, _ = self.argparser.parse_known_args() return self.search(**vars(arguments))
def function[argument_search, parameter[self]]: constant[ Uses the command line arguments to fill the search function and call it. ] <ast.Tuple object at 0x7da1afe78bb0> assign[=] call[name[self].argparser.parse_known_args, parameter[]] return[call[name[self].search, parameter[]]]
keyword[def] identifier[argument_search] ( identifier[self] ): literal[string] identifier[arguments] , identifier[_] = identifier[self] . identifier[argparser] . identifier[parse_known_args] () keyword[return] identifier[self] . identifier[search] (** identifier[vars] ( identifier[arguments] ))
def argument_search(self): """ Uses the command line arguments to fill the search function and call it. """ (arguments, _) = self.argparser.parse_known_args() return self.search(**vars(arguments))
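The forwarding pattern used by argument_search, shown in isolation (editor's sketch): parse_known_args tolerates unrecognized flags, and vars() turns the Namespace into a kwargs dict for the search function. The argument names here are made up.

import argparse

def search(query=None, limit=10):
    return 'searching %r, limit %d' % (query, limit)

argparser = argparse.ArgumentParser()
argparser.add_argument('--query')
argparser.add_argument('--limit', type=int, default=10)
# unknown flags land in the second return value and are ignored
arguments, _ = argparser.parse_known_args(['--query', 'cats', '--other-flag'])
print(search(**vars(arguments)))  # searching 'cats', limit 10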
def get_update_api(self, resource):
    """
    Generates the meta descriptor for the resource update api.
    """
    update_api = {
        'path': '/%s/{id}/' % resource.get_api_name(),
        'description': 'Operations on %s' % resource.model.__name__,
        'operations': [
            {
                'httpMethod': 'PUT',
                'nickname': 'update%ss' % resource.model.__name__,
                'summary': 'Update %ss' % resource.model.__name__,
                'parameters': [
                    {
                        'paramType': 'path',
                        'name': 'id',
                        'description': '%s id' % (resource.model.__name__),
                        'dataType': 'int',
                        'required': True,
                        'allowMultiple': False,
                    },
                    {
                        'description': '%s object' % (resource.model.__name__),
                        'paramType': 'body',
                        'required': True,
                        'allowMultiple': False,
                        'dataType': resource.model.__name__
                    }
                ]
            }
        ]
    }
    return update_api
def function[get_update_api, parameter[self, resource]]: constant[ Generates the meta descriptor for the resource listing api. ] variable[update_api] assign[=] dictionary[[<ast.Constant object at 0x7da1b1f9dba0>, <ast.Constant object at 0x7da1b1f9cd00>, <ast.Constant object at 0x7da1b1f9e020>], [<ast.BinOp object at 0x7da1b1f9eda0>, <ast.BinOp object at 0x7da1b1f9f970>, <ast.List object at 0x7da1b1f9ce20>]] return[name[update_api]]
keyword[def] identifier[get_update_api] ( identifier[self] , identifier[resource] ): literal[string] identifier[update_api] ={ literal[string] : literal[string] % identifier[resource] . identifier[get_api_name] (), literal[string] : literal[string] % identifier[resource] . identifier[model] . identifier[__name__] , literal[string] :[ { literal[string] : literal[string] , literal[string] : literal[string] % identifier[resource] . identifier[model] . identifier[__name__] , literal[string] : literal[string] % identifier[resource] . identifier[model] . identifier[__name__] , literal[string] :[ { literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] %( identifier[resource] . identifier[model] . identifier[__name__] ), literal[string] : literal[string] , literal[string] : keyword[True] , literal[string] : keyword[False] , }, { literal[string] : literal[string] %( identifier[resource] . identifier[model] . identifier[__name__] ), literal[string] : literal[string] , literal[string] : keyword[True] , literal[string] : keyword[False] , literal[string] : identifier[resource] . identifier[model] . identifier[__name__] } ] } ] } keyword[return] identifier[update_api]
def get_update_api(self, resource):
    """
    Generates the meta descriptor for the resource update api.
    """
    update_api = {'path': '/%s/{id}/' % resource.get_api_name(), 'description': 'Operations on %s' % resource.model.__name__, 'operations': [{'httpMethod': 'PUT', 'nickname': 'update%ss' % resource.model.__name__, 'summary': 'Update %ss' % resource.model.__name__, 'parameters': [{'paramType': 'path', 'name': 'id', 'description': '%s id' % resource.model.__name__, 'dataType': 'int', 'required': True, 'allowMultiple': False}, {'description': '%s object' % resource.model.__name__, 'paramType': 'body', 'required': True, 'allowMultiple': False, 'dataType': resource.model.__name__}]}]}
    return update_api
def template(self): """ Create a rules file in ipset --restore format """ s = Template(self._IPSET_TEMPLATE) return s.substitute(sets='\n'.join(self.sets), date=datetime.today())
def function[template, parameter[self]]: constant[ Create a rules file in ipset --restore format ] variable[s] assign[=] call[name[Template], parameter[name[self]._IPSET_TEMPLATE]] return[call[name[s].substitute, parameter[]]]
keyword[def] identifier[template] ( identifier[self] ): literal[string] identifier[s] = identifier[Template] ( identifier[self] . identifier[_IPSET_TEMPLATE] ) keyword[return] identifier[s] . identifier[substitute] ( identifier[sets] = literal[string] . identifier[join] ( identifier[self] . identifier[sets] ), identifier[date] = identifier[datetime] . identifier[today] ())
def template(self): """ Create a rules file in ipset --restore format """ s = Template(self._IPSET_TEMPLATE) return s.substitute(sets='\n'.join(self.sets), date=datetime.today())
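A minimal demonstration (editor's addition) of the Template substitution used by template() above; the template text here is an assumption standing in for _IPSET_TEMPLATE. string.Template replaces $-prefixed placeholders via substitute().

from datetime import datetime
from string import Template

_IPSET_TEMPLATE = "# ipset rules generated $date\n$sets\nCOMMIT\n"  # made-up template
sets = ['create blacklist hash:ip', 'add blacklist 203.0.113.7']
s = Template(_IPSET_TEMPLATE)
print(s.substitute(sets='\n'.join(sets), date=datetime.today()))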
def summary(self, input_size=None, hashsummary=False): """ Print a model summary """ if input_size is None: print(self) print("-" * 120) number = sum(p.numel() for p in self.model.parameters()) print("Number of model parameters: {:,}".format(number)) print("-" * 120) else: summary(self, input_size) if hashsummary: for idx, hashvalue in enumerate(self.hashsummary()): print(f"{idx}: {hashvalue}")
def function[summary, parameter[self, input_size, hashsummary]]: constant[ Print a model summary ] if compare[name[input_size] is constant[None]] begin[:] call[name[print], parameter[name[self]]] call[name[print], parameter[binary_operation[constant[-] * constant[120]]]] variable[number] assign[=] call[name[sum], parameter[<ast.GeneratorExp object at 0x7da18bcc9ae0>]] call[name[print], parameter[call[constant[Number of model parameters: {:,}].format, parameter[name[number]]]]] call[name[print], parameter[binary_operation[constant[-] * constant[120]]]] if name[hashsummary] begin[:] for taget[tuple[[<ast.Name object at 0x7da18bcc85e0>, <ast.Name object at 0x7da18bccb490>]]] in starred[call[name[enumerate], parameter[call[name[self].hashsummary, parameter[]]]]] begin[:] call[name[print], parameter[<ast.JoinedStr object at 0x7da18bccab60>]]
keyword[def] identifier[summary] ( identifier[self] , identifier[input_size] = keyword[None] , identifier[hashsummary] = keyword[False] ): literal[string] keyword[if] identifier[input_size] keyword[is] keyword[None] : identifier[print] ( identifier[self] ) identifier[print] ( literal[string] * literal[int] ) identifier[number] = identifier[sum] ( identifier[p] . identifier[numel] () keyword[for] identifier[p] keyword[in] identifier[self] . identifier[model] . identifier[parameters] ()) identifier[print] ( literal[string] . identifier[format] ( identifier[number] )) identifier[print] ( literal[string] * literal[int] ) keyword[else] : identifier[summary] ( identifier[self] , identifier[input_size] ) keyword[if] identifier[hashsummary] : keyword[for] identifier[idx] , identifier[hashvalue] keyword[in] identifier[enumerate] ( identifier[self] . identifier[hashsummary] ()): identifier[print] ( literal[string] )
def summary(self, input_size=None, hashsummary=False): """ Print a model summary """ if input_size is None: print(self) print('-' * 120) number = sum((p.numel() for p in self.model.parameters())) print('Number of model parameters: {:,}'.format(number)) print('-' * 120) # depends on [control=['if'], data=[]] else: summary(self, input_size) if hashsummary: for (idx, hashvalue) in enumerate(self.hashsummary()): print(f'{idx}: {hashvalue}') # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
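The parameter count used by summary(), isolated as a sketch (editor's addition): numel() returns the element count of each tensor, so summing over parameters() gives the total weight count. Requires torch; the Linear layer is just an example.

import torch.nn as nn

model = nn.Linear(10, 2)  # weight 2x10 plus bias 2 -> 22 parameters
number = sum(p.numel() for p in model.parameters())
print("Number of model parameters: {:,}".format(number))  # 22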
def start(self) -> pd.Timestamp: """Returns the minimum timestamp value of the DataFrame.""" start = self.data.timestamp.min() self.data = self.data.assign(start=start) return start
def function[start, parameter[self]]: constant[Returns the minimum timestamp value of the DataFrame.] variable[start] assign[=] call[name[self].data.timestamp.min, parameter[]] name[self].data assign[=] call[name[self].data.assign, parameter[]] return[name[start]]
keyword[def] identifier[start] ( identifier[self] )-> identifier[pd] . identifier[Timestamp] : literal[string] identifier[start] = identifier[self] . identifier[data] . identifier[timestamp] . identifier[min] () identifier[self] . identifier[data] = identifier[self] . identifier[data] . identifier[assign] ( identifier[start] = identifier[start] ) keyword[return] identifier[start]
def start(self) -> pd.Timestamp: """Returns the minimum timestamp value of the DataFrame.""" start = self.data.timestamp.min() self.data = self.data.assign(start=start) return start
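What the assign call in start() does, shown on made-up data (editor's sketch): a scalar passed to DataFrame.assign is broadcast into a constant column, so every row carries the minimum timestamp.

import pandas as pd

data = pd.DataFrame({'timestamp': pd.to_datetime(
    ['2021-01-03', '2021-01-01', '2021-01-02'])})
start = data.timestamp.min()
data = data.assign(start=start)  # scalar broadcast to all rows
print(data)  # every row now has start == 2021-01-01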
def _dt_array_cmp(cls, op): """ Wrap comparison operations to convert datetime-like to datetime64 """ opname = '__{name}__'.format(name=op.__name__) nat_result = opname == '__ne__' def wrapper(self, other): if isinstance(other, (ABCDataFrame, ABCSeries, ABCIndexClass)): return NotImplemented other = lib.item_from_zerodim(other) if isinstance(other, (datetime, np.datetime64, str)): if isinstance(other, (datetime, np.datetime64)): # GH#18435 strings get a pass from tzawareness compat self._assert_tzawareness_compat(other) try: other = _to_M8(other, tz=self.tz) except ValueError: # string that cannot be parsed to Timestamp return ops.invalid_comparison(self, other, op) result = op(self.asi8, other.view('i8')) if isna(other): result.fill(nat_result) elif lib.is_scalar(other) or np.ndim(other) == 0: return ops.invalid_comparison(self, other, op) elif len(other) != len(self): raise ValueError("Lengths must match") else: if isinstance(other, list): try: other = type(self)._from_sequence(other) except ValueError: other = np.array(other, dtype=np.object_) elif not isinstance(other, (np.ndarray, ABCIndexClass, ABCSeries, DatetimeArray)): # Following Timestamp convention, __eq__ is all-False # and __ne__ is all True, others raise TypeError. return ops.invalid_comparison(self, other, op) if is_object_dtype(other): # We have to use _comp_method_OBJECT_ARRAY instead of numpy # comparison otherwise it would fail to raise when # comparing tz-aware and tz-naive with np.errstate(all='ignore'): result = ops._comp_method_OBJECT_ARRAY(op, self.astype(object), other) o_mask = isna(other) elif not (is_datetime64_dtype(other) or is_datetime64tz_dtype(other)): # e.g. is_timedelta64_dtype(other) return ops.invalid_comparison(self, other, op) else: self._assert_tzawareness_compat(other) if isinstance(other, (ABCIndexClass, ABCSeries)): other = other.array if (is_datetime64_dtype(other) and not is_datetime64_ns_dtype(other) or not hasattr(other, 'asi8')): # e.g. other.dtype == 'datetime64[s]' # or an object-dtype ndarray other = type(self)._from_sequence(other) result = op(self.view('i8'), other.view('i8')) o_mask = other._isnan result = com.values_from_object(result) if o_mask.any(): result[o_mask] = nat_result if self._hasnans: result[self._isnan] = nat_result return result return compat.set_function_name(wrapper, opname, cls)
def function[_dt_array_cmp, parameter[cls, op]]: constant[ Wrap comparison operations to convert datetime-like to datetime64 ] variable[opname] assign[=] call[constant[__{name}__].format, parameter[]] variable[nat_result] assign[=] compare[name[opname] equal[==] constant[__ne__]] def function[wrapper, parameter[self, other]]: if call[name[isinstance], parameter[name[other], tuple[[<ast.Name object at 0x7da1b1eb4af0>, <ast.Name object at 0x7da1b1eb4190>, <ast.Name object at 0x7da1b1eb42e0>]]]] begin[:] return[name[NotImplemented]] variable[other] assign[=] call[name[lib].item_from_zerodim, parameter[name[other]]] if call[name[isinstance], parameter[name[other], tuple[[<ast.Name object at 0x7da1b1eb48b0>, <ast.Attribute object at 0x7da1b1eb4a90>, <ast.Name object at 0x7da1b1eb4700>]]]] begin[:] if call[name[isinstance], parameter[name[other], tuple[[<ast.Name object at 0x7da1b1eb43a0>, <ast.Attribute object at 0x7da1b1eb40d0>]]]] begin[:] call[name[self]._assert_tzawareness_compat, parameter[name[other]]] <ast.Try object at 0x7da1b1eb48e0> variable[result] assign[=] call[name[op], parameter[name[self].asi8, call[name[other].view, parameter[constant[i8]]]]] if call[name[isna], parameter[name[other]]] begin[:] call[name[result].fill, parameter[name[nat_result]]] if name[self]._hasnans begin[:] call[name[result]][name[self]._isnan] assign[=] name[nat_result] return[name[result]] return[call[name[compat].set_function_name, parameter[name[wrapper], name[opname], name[cls]]]]
keyword[def] identifier[_dt_array_cmp] ( identifier[cls] , identifier[op] ): literal[string] identifier[opname] = literal[string] . identifier[format] ( identifier[name] = identifier[op] . identifier[__name__] ) identifier[nat_result] = identifier[opname] == literal[string] keyword[def] identifier[wrapper] ( identifier[self] , identifier[other] ): keyword[if] identifier[isinstance] ( identifier[other] ,( identifier[ABCDataFrame] , identifier[ABCSeries] , identifier[ABCIndexClass] )): keyword[return] identifier[NotImplemented] identifier[other] = identifier[lib] . identifier[item_from_zerodim] ( identifier[other] ) keyword[if] identifier[isinstance] ( identifier[other] ,( identifier[datetime] , identifier[np] . identifier[datetime64] , identifier[str] )): keyword[if] identifier[isinstance] ( identifier[other] ,( identifier[datetime] , identifier[np] . identifier[datetime64] )): identifier[self] . identifier[_assert_tzawareness_compat] ( identifier[other] ) keyword[try] : identifier[other] = identifier[_to_M8] ( identifier[other] , identifier[tz] = identifier[self] . identifier[tz] ) keyword[except] identifier[ValueError] : keyword[return] identifier[ops] . identifier[invalid_comparison] ( identifier[self] , identifier[other] , identifier[op] ) identifier[result] = identifier[op] ( identifier[self] . identifier[asi8] , identifier[other] . identifier[view] ( literal[string] )) keyword[if] identifier[isna] ( identifier[other] ): identifier[result] . identifier[fill] ( identifier[nat_result] ) keyword[elif] identifier[lib] . identifier[is_scalar] ( identifier[other] ) keyword[or] identifier[np] . identifier[ndim] ( identifier[other] )== literal[int] : keyword[return] identifier[ops] . identifier[invalid_comparison] ( identifier[self] , identifier[other] , identifier[op] ) keyword[elif] identifier[len] ( identifier[other] )!= identifier[len] ( identifier[self] ): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[else] : keyword[if] identifier[isinstance] ( identifier[other] , identifier[list] ): keyword[try] : identifier[other] = identifier[type] ( identifier[self] ). identifier[_from_sequence] ( identifier[other] ) keyword[except] identifier[ValueError] : identifier[other] = identifier[np] . identifier[array] ( identifier[other] , identifier[dtype] = identifier[np] . identifier[object_] ) keyword[elif] keyword[not] identifier[isinstance] ( identifier[other] ,( identifier[np] . identifier[ndarray] , identifier[ABCIndexClass] , identifier[ABCSeries] , identifier[DatetimeArray] )): keyword[return] identifier[ops] . identifier[invalid_comparison] ( identifier[self] , identifier[other] , identifier[op] ) keyword[if] identifier[is_object_dtype] ( identifier[other] ): keyword[with] identifier[np] . identifier[errstate] ( identifier[all] = literal[string] ): identifier[result] = identifier[ops] . identifier[_comp_method_OBJECT_ARRAY] ( identifier[op] , identifier[self] . identifier[astype] ( identifier[object] ), identifier[other] ) identifier[o_mask] = identifier[isna] ( identifier[other] ) keyword[elif] keyword[not] ( identifier[is_datetime64_dtype] ( identifier[other] ) keyword[or] identifier[is_datetime64tz_dtype] ( identifier[other] )): keyword[return] identifier[ops] . identifier[invalid_comparison] ( identifier[self] , identifier[other] , identifier[op] ) keyword[else] : identifier[self] . identifier[_assert_tzawareness_compat] ( identifier[other] ) keyword[if] identifier[isinstance] ( identifier[other] ,( identifier[ABCIndexClass] , identifier[ABCSeries] )): identifier[other] = identifier[other] . identifier[array] keyword[if] ( identifier[is_datetime64_dtype] ( identifier[other] ) keyword[and] keyword[not] identifier[is_datetime64_ns_dtype] ( identifier[other] ) keyword[or] keyword[not] identifier[hasattr] ( identifier[other] , literal[string] )): identifier[other] = identifier[type] ( identifier[self] ). identifier[_from_sequence] ( identifier[other] ) identifier[result] = identifier[op] ( identifier[self] . identifier[view] ( literal[string] ), identifier[other] . identifier[view] ( literal[string] )) identifier[o_mask] = identifier[other] . identifier[_isnan] identifier[result] = identifier[com] . identifier[values_from_object] ( identifier[result] ) keyword[if] identifier[o_mask] . identifier[any] (): identifier[result] [ identifier[o_mask] ]= identifier[nat_result] keyword[if] identifier[self] . identifier[_hasnans] : identifier[result] [ identifier[self] . identifier[_isnan] ]= identifier[nat_result] keyword[return] identifier[result] keyword[return] identifier[compat] . identifier[set_function_name] ( identifier[wrapper] , identifier[opname] , identifier[cls] )
def _dt_array_cmp(cls, op): """ Wrap comparison operations to convert datetime-like to datetime64 """ opname = '__{name}__'.format(name=op.__name__) nat_result = opname == '__ne__' def wrapper(self, other): if isinstance(other, (ABCDataFrame, ABCSeries, ABCIndexClass)): return NotImplemented # depends on [control=['if'], data=[]] other = lib.item_from_zerodim(other) if isinstance(other, (datetime, np.datetime64, str)): if isinstance(other, (datetime, np.datetime64)): # GH#18435 strings get a pass from tzawareness compat self._assert_tzawareness_compat(other) # depends on [control=['if'], data=[]] try: other = _to_M8(other, tz=self.tz) # depends on [control=['try'], data=[]] except ValueError: # string that cannot be parsed to Timestamp return ops.invalid_comparison(self, other, op) # depends on [control=['except'], data=[]] result = op(self.asi8, other.view('i8')) if isna(other): result.fill(nat_result) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif lib.is_scalar(other) or np.ndim(other) == 0: return ops.invalid_comparison(self, other, op) # depends on [control=['if'], data=[]] elif len(other) != len(self): raise ValueError('Lengths must match') # depends on [control=['if'], data=[]] else: if isinstance(other, list): try: other = type(self)._from_sequence(other) # depends on [control=['try'], data=[]] except ValueError: other = np.array(other, dtype=np.object_) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] elif not isinstance(other, (np.ndarray, ABCIndexClass, ABCSeries, DatetimeArray)): # Following Timestamp convention, __eq__ is all-False # and __ne__ is all True, others raise TypeError. return ops.invalid_comparison(self, other, op) # depends on [control=['if'], data=[]] if is_object_dtype(other): # We have to use _comp_method_OBJECT_ARRAY instead of numpy # comparison otherwise it would fail to raise when # comparing tz-aware and tz-naive with np.errstate(all='ignore'): result = ops._comp_method_OBJECT_ARRAY(op, self.astype(object), other) # depends on [control=['with'], data=[]] o_mask = isna(other) # depends on [control=['if'], data=[]] elif not (is_datetime64_dtype(other) or is_datetime64tz_dtype(other)): # e.g. is_timedelta64_dtype(other) return ops.invalid_comparison(self, other, op) # depends on [control=['if'], data=[]] else: self._assert_tzawareness_compat(other) if isinstance(other, (ABCIndexClass, ABCSeries)): other = other.array # depends on [control=['if'], data=[]] if is_datetime64_dtype(other) and (not is_datetime64_ns_dtype(other)) or not hasattr(other, 'asi8'): # e.g. other.dtype == 'datetime64[s]' # or an object-dtype ndarray other = type(self)._from_sequence(other) # depends on [control=['if'], data=[]] result = op(self.view('i8'), other.view('i8')) o_mask = other._isnan result = com.values_from_object(result) if o_mask.any(): result[o_mask] = nat_result # depends on [control=['if'], data=[]] if self._hasnans: result[self._isnan] = nat_result # depends on [control=['if'], data=[]] return result return compat.set_function_name(wrapper, opname, cls)
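A stripped-down sketch (editor's addition) of the factory pattern behind _dt_array_cmp, with the pandas-specific NaT and timezone handling omitted: one function builds each rich comparison and names the wrapper after the operator it wraps.

import operator

def _make_cmp(op):
    opname = '__{name}__'.format(name=op.__name__)
    def wrapper(self, other):
        return op(self.value, other.value)
    wrapper.__name__ = opname
    return wrapper

class Scalar:
    def __init__(self, value):
        self.value = value

# attach __eq__, __ne__, __lt__, __le__, __gt__, __ge__ generated by the factory
for _op in (operator.eq, operator.ne, operator.lt,
            operator.le, operator.gt, operator.ge):
    setattr(Scalar, '__{}__'.format(_op.__name__), _make_cmp(_op))

print(Scalar(1) < Scalar(2))   # True
print(Scalar(3) == Scalar(3))  # True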
def publish(self, channel_id):
    """ publish: publishes tree to Kolibri
        Args:
            channel_id (str): channel's id on Kolibri Studio
        Returns: None
    """
    payload = {
        "channel_id": channel_id,
    }
    response = config.SESSION.post(config.publish_channel_url(), data=json.dumps(payload))
    response.raise_for_status()
def function[publish, parameter[self, channel_id]]: constant[ publish: publishes tree to Kolibri Args: channel_id (str): channel's id on Kolibri Studio Returns: None ] variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da2046222c0>], [<ast.Name object at 0x7da204623b50>]] variable[response] assign[=] call[name[config].SESSION.post, parameter[call[name[config].publish_channel_url, parameter[]]]] call[name[response].raise_for_status, parameter[]]
keyword[def] identifier[publish] ( identifier[self] , identifier[channel_id] ): literal[string] identifier[payload] ={ literal[string] : identifier[channel_id] , } identifier[response] = identifier[config] . identifier[SESSION] . identifier[post] ( identifier[config] . identifier[publish_channel_url] (), identifier[data] = identifier[json] . identifier[dumps] ( identifier[payload] )) identifier[response] . identifier[raise_for_status] ()
def publish(self, channel_id): """ publish: publishes tree to Kolibri Args: channel_id (str): channel's id on Kolibri Studio Returns: None """ payload = {'channel_id': channel_id} response = config.SESSION.post(config.publish_channel_url(), data=json.dumps(payload)) response.raise_for_status()
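The HTTP call made by publish(), isolated with a plain requests session (editor's sketch): the URL is a placeholder for whatever config.publish_channel_url() returns, and the channel id is hypothetical. raise_for_status() turns any 4xx/5xx response into a requests.HTTPError.

import json
import requests

session = requests.Session()
payload = {"channel_id": "abc123"}  # hypothetical channel id
response = session.post("https://example.com/api/internal/publish_channel",
                        data=json.dumps(payload))
response.raise_for_status()  # raises on failure, returns None on 2xx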
def show_clusters(clusters, sample, covariances, means, figure = None, display = True):
    """!
    @brief Draws clusters and, in the case of a two-dimensional dataset, draws their ellipses.

    @param[in] clusters (list): Clusters that were allocated by the algorithm.
    @param[in] sample (list): Dataset that was used for clustering.
    @param[in] covariances (list): Covariances of the clusters.
    @param[in] means (list): Means of the clusters.
    @param[in] figure (figure): If 'None' then a new figure is created, otherwise the specified figure is used for visualization.
    @param[in] display (bool): If 'True' then the figure will be shown by the method, otherwise it should be shown manually using the matplotlib function 'plt.show()'.

    @return (figure) Figure where clusters were drawn.

    """
    visualizer = cluster_visualizer()
    visualizer.append_clusters(clusters, sample)

    if figure is None:
        figure = visualizer.show(display = False)
    else:
        visualizer.show(figure = figure, display = False)

    if len(sample[0]) == 2:
        ema_visualizer.__draw_ellipses(figure, visualizer, clusters, covariances, means)

    if display is True:
        plt.show()

    return figure
def function[show_clusters, parameter[clusters, sample, covariances, means, figure, display]]: constant[! @brief Draws clusters and, in the case of a two-dimensional dataset, draws their ellipses. @param[in] clusters (list): Clusters that were allocated by the algorithm. @param[in] sample (list): Dataset that was used for clustering. @param[in] covariances (list): Covariances of the clusters. @param[in] means (list): Means of the clusters. @param[in] figure (figure): If 'None' then a new figure is created, otherwise the specified figure is used for visualization. @param[in] display (bool): If 'True' then the figure will be shown by the method, otherwise it should be shown manually using the matplotlib function 'plt.show()'. @return (figure) Figure where clusters were drawn. ] variable[visualizer] assign[=] call[name[cluster_visualizer], parameter[]] call[name[visualizer].append_clusters, parameter[name[clusters], name[sample]]] if compare[name[figure] is constant[None]] begin[:] variable[figure] assign[=] call[name[visualizer].show, parameter[]] if compare[call[name[len], parameter[call[name[sample]][constant[0]]]] equal[==] constant[2]] begin[:] call[name[ema_visualizer].__draw_ellipses, parameter[name[figure], name[visualizer], name[clusters], name[covariances], name[means]]] if compare[name[display] is constant[True]] begin[:] call[name[plt].show, parameter[]] return[name[figure]]
keyword[def] identifier[show_clusters] ( identifier[clusters] , identifier[sample] , identifier[covariances] , identifier[means] , identifier[figure] = keyword[None] , identifier[display] = keyword[True] ): literal[string] identifier[visualizer] = identifier[cluster_visualizer] () identifier[visualizer] . identifier[append_clusters] ( identifier[clusters] , identifier[sample] ) keyword[if] identifier[figure] keyword[is] keyword[None] : identifier[figure] = identifier[visualizer] . identifier[show] ( identifier[display] = keyword[False] ) keyword[else] : identifier[visualizer] . identifier[show] ( identifier[figure] = identifier[figure] , identifier[display] = keyword[False] ) keyword[if] identifier[len] ( identifier[sample] [ literal[int] ])== literal[int] : identifier[ema_visualizer] . identifier[__draw_ellipses] ( identifier[figure] , identifier[visualizer] , identifier[clusters] , identifier[covariances] , identifier[means] ) keyword[if] identifier[display] keyword[is] keyword[True] : identifier[plt] . identifier[show] () keyword[return] identifier[figure]
def show_clusters(clusters, sample, covariances, means, figure=None, display=True): """! @brief Draws clusters and, in the case of a two-dimensional dataset, draws their ellipses. @param[in] clusters (list): Clusters that were allocated by the algorithm. @param[in] sample (list): Dataset that was used for clustering. @param[in] covariances (list): Covariances of the clusters. @param[in] means (list): Means of the clusters. @param[in] figure (figure): If 'None' then a new figure is created, otherwise the specified figure is used for visualization. @param[in] display (bool): If 'True' then the figure will be shown by the method, otherwise it should be shown manually using the matplotlib function 'plt.show()'. @return (figure) Figure where clusters were drawn. """ visualizer = cluster_visualizer() visualizer.append_clusters(clusters, sample) if figure is None: figure = visualizer.show(display=False) # depends on [control=['if'], data=['figure']] else: visualizer.show(figure=figure, display=False) if len(sample[0]) == 2: ema_visualizer.__draw_ellipses(figure, visualizer, clusters, covariances, means) # depends on [control=['if'], data=[]] if display is True: plt.show() # depends on [control=['if'], data=[]] return figure
def show(name, root=None): ''' .. versionadded:: 2014.7.0 Show properties of one or more units/jobs or the manager root Enable/disable/mask unit files in the specified root directory CLI Example: salt '*' service.show <service name> ''' ret = {} out = __salt__['cmd.run'](_systemctl_cmd('show', name, root=root), python_shell=False) for line in salt.utils.itertools.split(out, '\n'): comps = line.split('=') name = comps[0] value = '='.join(comps[1:]) if value.startswith('{'): value = value.replace('{', '').replace('}', '') ret[name] = {} for item in value.split(' ; '): comps = item.split('=') ret[name][comps[0].strip()] = comps[1].strip() elif name in ('Before', 'After', 'Wants'): ret[name] = value.split() else: ret[name] = value return ret
def function[show, parameter[name, root]]: constant[ .. versionadded:: 2014.7.0 Show properties of one or more units/jobs or the manager root Enable/disable/mask unit files in the specified root directory CLI Example: salt '*' service.show <service name> ] variable[ret] assign[=] dictionary[[], []] variable[out] assign[=] call[call[name[__salt__]][constant[cmd.run]], parameter[call[name[_systemctl_cmd], parameter[constant[show], name[name]]]]] for taget[name[line]] in starred[call[name[salt].utils.itertools.split, parameter[name[out], constant[ ]]]] begin[:] variable[comps] assign[=] call[name[line].split, parameter[constant[=]]] variable[name] assign[=] call[name[comps]][constant[0]] variable[value] assign[=] call[constant[=].join, parameter[call[name[comps]][<ast.Slice object at 0x7da18f8136a0>]]] if call[name[value].startswith, parameter[constant[{]]] begin[:] variable[value] assign[=] call[call[name[value].replace, parameter[constant[{], constant[]]].replace, parameter[constant[}], constant[]]] call[name[ret]][name[name]] assign[=] dictionary[[], []] for taget[name[item]] in starred[call[name[value].split, parameter[constant[ ; ]]]] begin[:] variable[comps] assign[=] call[name[item].split, parameter[constant[=]]] call[call[name[ret]][name[name]]][call[call[name[comps]][constant[0]].strip, parameter[]]] assign[=] call[call[name[comps]][constant[1]].strip, parameter[]] return[name[ret]]
keyword[def] identifier[show] ( identifier[name] , identifier[root] = keyword[None] ): literal[string] identifier[ret] ={} identifier[out] = identifier[__salt__] [ literal[string] ]( identifier[_systemctl_cmd] ( literal[string] , identifier[name] , identifier[root] = identifier[root] ), identifier[python_shell] = keyword[False] ) keyword[for] identifier[line] keyword[in] identifier[salt] . identifier[utils] . identifier[itertools] . identifier[split] ( identifier[out] , literal[string] ): identifier[comps] = identifier[line] . identifier[split] ( literal[string] ) identifier[name] = identifier[comps] [ literal[int] ] identifier[value] = literal[string] . identifier[join] ( identifier[comps] [ literal[int] :]) keyword[if] identifier[value] . identifier[startswith] ( literal[string] ): identifier[value] = identifier[value] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ) identifier[ret] [ identifier[name] ]={} keyword[for] identifier[item] keyword[in] identifier[value] . identifier[split] ( literal[string] ): identifier[comps] = identifier[item] . identifier[split] ( literal[string] ) identifier[ret] [ identifier[name] ][ identifier[comps] [ literal[int] ]. identifier[strip] ()]= identifier[comps] [ literal[int] ]. identifier[strip] () keyword[elif] identifier[name] keyword[in] ( literal[string] , literal[string] , literal[string] ): identifier[ret] [ identifier[name] ]= identifier[value] . identifier[split] () keyword[else] : identifier[ret] [ identifier[name] ]= identifier[value] keyword[return] identifier[ret]
def show(name, root=None): """ .. versionadded:: 2014.7.0 Show properties of one or more units/jobs or the manager root Enable/disable/mask unit files in the specified root directory CLI Example: salt '*' service.show <service name> """ ret = {} out = __salt__['cmd.run'](_systemctl_cmd('show', name, root=root), python_shell=False) for line in salt.utils.itertools.split(out, '\n'): comps = line.split('=') name = comps[0] value = '='.join(comps[1:]) if value.startswith('{'): value = value.replace('{', '').replace('}', '') ret[name] = {} for item in value.split(' ; '): comps = item.split('=') ret[name][comps[0].strip()] = comps[1].strip() # depends on [control=['for'], data=['item']] # depends on [control=['if'], data=[]] elif name in ('Before', 'After', 'Wants'): ret[name] = value.split() # depends on [control=['if'], data=['name']] else: ret[name] = value # depends on [control=['for'], data=['line']] return ret
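The parsing logic of show() run against a hand-written sample of systemctl output (editor's sketch; the sample is made up). Splitting on '=' and rejoining the tail preserves '=' characters inside values, and brace-wrapped property blocks become nested dicts.

out = ('Description=My service\n'
       'ExecStart={ path=/usr/sbin/foo ; argv[]=/usr/sbin/foo }')
ret = {}
for line in out.split('\n'):
    comps = line.split('=')
    key = comps[0]
    value = '='.join(comps[1:])  # rejoin so '=' inside the value survives
    if value.startswith('{'):
        value = value.replace('{', '').replace('}', '')
        ret[key] = {}
        for item in value.split(' ; '):
            sub = item.split('=')
            ret[key][sub[0].strip()] = sub[1].strip()
    else:
        ret[key] = value
print(ret)
# {'Description': 'My service',
#  'ExecStart': {'path': '/usr/sbin/foo', 'argv[]': '/usr/sbin/foo'}}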
def print_entitlements(opts, data, page_info=None, show_list_info=True): """Print entitlements as a table or output in another format.""" if utils.maybe_print_as_json(opts, data, page_info): return headers = ["Name", "Token", "Created / Updated", "Identifier"] rows = [] for entitlement in sorted(data, key=itemgetter("name")): rows.append( [ click.style( "%(name)s (%(type)s)" % { "name": click.style(entitlement["name"], fg="cyan"), "type": "user" if entitlement["user"] else "token", } ), click.style(entitlement["token"], fg="yellow"), click.style(entitlement["updated_at"], fg="blue"), click.style(entitlement["slug_perm"], fg="green"), ] ) if data: click.echo() utils.pretty_print_table(headers, rows) if not show_list_info: return click.echo() num_results = len(data) list_suffix = "entitlement%s" % ("s" if num_results != 1 else "") utils.pretty_print_list_info(num_results=num_results, suffix=list_suffix)
def function[print_entitlements, parameter[opts, data, page_info, show_list_info]]: constant[Print entitlements as a table or output in another format.] if call[name[utils].maybe_print_as_json, parameter[name[opts], name[data], name[page_info]]] begin[:] return[None] variable[headers] assign[=] list[[<ast.Constant object at 0x7da1b191e650>, <ast.Constant object at 0x7da1b191f430>, <ast.Constant object at 0x7da1b191de40>, <ast.Constant object at 0x7da1b191f280>]] variable[rows] assign[=] list[[]] for taget[name[entitlement]] in starred[call[name[sorted], parameter[name[data]]]] begin[:] call[name[rows].append, parameter[list[[<ast.Call object at 0x7da1b191e4d0>, <ast.Call object at 0x7da1b191ead0>, <ast.Call object at 0x7da1b1a77070>, <ast.Call object at 0x7da1b1a74670>]]]] if name[data] begin[:] call[name[click].echo, parameter[]] call[name[utils].pretty_print_table, parameter[name[headers], name[rows]]] if <ast.UnaryOp object at 0x7da1b1a74ca0> begin[:] return[None] call[name[click].echo, parameter[]] variable[num_results] assign[=] call[name[len], parameter[name[data]]] variable[list_suffix] assign[=] binary_operation[constant[entitlement%s] <ast.Mod object at 0x7da2590d6920> <ast.IfExp object at 0x7da1b1a75f00>] call[name[utils].pretty_print_list_info, parameter[]]
keyword[def] identifier[print_entitlements] ( identifier[opts] , identifier[data] , identifier[page_info] = keyword[None] , identifier[show_list_info] = keyword[True] ): literal[string] keyword[if] identifier[utils] . identifier[maybe_print_as_json] ( identifier[opts] , identifier[data] , identifier[page_info] ): keyword[return] identifier[headers] =[ literal[string] , literal[string] , literal[string] , literal[string] ] identifier[rows] =[] keyword[for] identifier[entitlement] keyword[in] identifier[sorted] ( identifier[data] , identifier[key] = identifier[itemgetter] ( literal[string] )): identifier[rows] . identifier[append] ( [ identifier[click] . identifier[style] ( literal[string] %{ literal[string] : identifier[click] . identifier[style] ( identifier[entitlement] [ literal[string] ], identifier[fg] = literal[string] ), literal[string] : literal[string] keyword[if] identifier[entitlement] [ literal[string] ] keyword[else] literal[string] , } ), identifier[click] . identifier[style] ( identifier[entitlement] [ literal[string] ], identifier[fg] = literal[string] ), identifier[click] . identifier[style] ( identifier[entitlement] [ literal[string] ], identifier[fg] = literal[string] ), identifier[click] . identifier[style] ( identifier[entitlement] [ literal[string] ], identifier[fg] = literal[string] ), ] ) keyword[if] identifier[data] : identifier[click] . identifier[echo] () identifier[utils] . identifier[pretty_print_table] ( identifier[headers] , identifier[rows] ) keyword[if] keyword[not] identifier[show_list_info] : keyword[return] identifier[click] . identifier[echo] () identifier[num_results] = identifier[len] ( identifier[data] ) identifier[list_suffix] = literal[string] %( literal[string] keyword[if] identifier[num_results] != literal[int] keyword[else] literal[string] ) identifier[utils] . identifier[pretty_print_list_info] ( identifier[num_results] = identifier[num_results] , identifier[suffix] = identifier[list_suffix] )
def print_entitlements(opts, data, page_info=None, show_list_info=True): """Print entitlements as a table or output in another format.""" if utils.maybe_print_as_json(opts, data, page_info): return # depends on [control=['if'], data=[]] headers = ['Name', 'Token', 'Created / Updated', 'Identifier'] rows = [] for entitlement in sorted(data, key=itemgetter('name')): rows.append([click.style('%(name)s (%(type)s)' % {'name': click.style(entitlement['name'], fg='cyan'), 'type': 'user' if entitlement['user'] else 'token'}), click.style(entitlement['token'], fg='yellow'), click.style(entitlement['updated_at'], fg='blue'), click.style(entitlement['slug_perm'], fg='green')]) # depends on [control=['for'], data=['entitlement']] if data: click.echo() utils.pretty_print_table(headers, rows) # depends on [control=['if'], data=[]] if not show_list_info: return # depends on [control=['if'], data=[]] click.echo() num_results = len(data) list_suffix = 'entitlement%s' % ('s' if num_results != 1 else '') utils.pretty_print_list_info(num_results=num_results, suffix=list_suffix)
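A standalone sketch of the sort-then-format step above, without the click/utils dependencies; the sample entitlement dicts are invented but mirror the fields the function reads.

from operator import itemgetter

data = [
    {"name": "web", "user": False, "token": "t2", "updated_at": "2019-02-01", "slug_perm": "BBBB"},
    {"name": "ci", "user": True, "token": "t1", "updated_at": "2019-01-01", "slug_perm": "AAAA"},
]
rows = [
    ["%s (%s)" % (e["name"], "user" if e["user"] else "token"),
     e["token"], e["updated_at"], e["slug_perm"]]
    for e in sorted(data, key=itemgetter("name"))
]
print(rows[0])   # ['ci (user)', 't1', '2019-01-01', 'AAAA']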
def MoveCursorToInnerPos(self, x: int = None, y: int = None, ratioX: float = 0.5, ratioY: float = 0.5, simulateMove: bool = True) -> tuple:
    """
    Move cursor to control's internal position, default to center.
    x: int, if < 0, move to self.BoundingRectangle.right + x, if not None, ignore ratioX.
    y: int, if < 0, move to self.BoundingRectangle.bottom + y, if not None, ignore ratioY.
    ratioX: float.
    ratioY: float.
    simulateMove: bool.
    Return tuple, two ints(x,y), the cursor position relative to screen(0,0)
        after moving or None if control's width or height == 0.
    """
    rect = self.BoundingRectangle
    if rect.width() == 0 or rect.height() == 0:
        Logger.ColorfullyWriteLine('<Color=Yellow>Can not move cursor</Color>. {}\'s BoundingRectangle is {}. SearchProperties: {}'.format(
            self.ControlTypeName, rect, self.GetColorfulSearchPropertiesStr()))
        return
    if x is None:
        x = rect.left + int(rect.width() * ratioX)
    else:
        x = (rect.left if x >= 0 else rect.right) + x
    if y is None:
        y = rect.top + int(rect.height() * ratioY)
    else:
        y = (rect.top if y >= 0 else rect.bottom) + y
    if simulateMove and MAX_MOVE_SECOND > 0:
        MoveTo(x, y, waitTime=0)
    else:
        SetCursorPos(x, y)
    return x, y
def function[MoveCursorToInnerPos, parameter[self, x, y, ratioX, ratioY, simulateMove]]: constant[ Move cursor to control's internal position, default to center. x: int, if < 0, move to self.BoundingRectangle.right + x, if not None, ignore ratioX. y: int, if < 0, move to self.BoundingRectangle.bottom + y, if not None, ignore ratioY. ratioX: float. ratioY: float. simulateMove: bool. Return tuple, two ints(x,y), the cursor positon relative to screen(0,0) after moving or None if control's width or height == 0. ] variable[rect] assign[=] name[self].BoundingRectangle if <ast.BoolOp object at 0x7da18dc9aa70> begin[:] call[name[Logger].ColorfullyWriteLine, parameter[call[constant[<Color=Yellow>Can not move curosr</Color>. {}'s BoundingRectangle is {}. SearchProperties: {}].format, parameter[name[self].ControlTypeName, name[rect], call[name[self].GetColorfulSearchPropertiesStr, parameter[]]]]]] return[None] if compare[name[x] is constant[None]] begin[:] variable[x] assign[=] binary_operation[name[rect].left + call[name[int], parameter[binary_operation[call[name[rect].width, parameter[]] * name[ratioX]]]]] if compare[name[y] is constant[None]] begin[:] variable[y] assign[=] binary_operation[name[rect].top + call[name[int], parameter[binary_operation[call[name[rect].height, parameter[]] * name[ratioY]]]]] if <ast.BoolOp object at 0x7da18dc9a8f0> begin[:] call[name[MoveTo], parameter[name[x], name[y]]] return[tuple[[<ast.Name object at 0x7da18dc9bb50>, <ast.Name object at 0x7da18dc9a7a0>]]]
keyword[def] identifier[MoveCursorToInnerPos] ( identifier[self] , identifier[x] : identifier[int] = keyword[None] , identifier[y] : identifier[int] = keyword[None] , identifier[ratioX] : identifier[float] = literal[int] , identifier[ratioY] : identifier[float] = literal[int] , identifier[simulateMove] : identifier[bool] = keyword[True] )-> identifier[tuple] : literal[string] identifier[rect] = identifier[self] . identifier[BoundingRectangle] keyword[if] identifier[rect] . identifier[width] ()== literal[int] keyword[or] identifier[rect] . identifier[height] ()== literal[int] : identifier[Logger] . identifier[ColorfullyWriteLine] ( literal[string] . identifier[format] ( identifier[self] . identifier[ControlTypeName] , identifier[rect] , identifier[self] . identifier[GetColorfulSearchPropertiesStr] ())) keyword[return] keyword[if] identifier[x] keyword[is] keyword[None] : identifier[x] = identifier[rect] . identifier[left] + identifier[int] ( identifier[rect] . identifier[width] ()* identifier[ratioX] ) keyword[else] : identifier[x] =( identifier[rect] . identifier[left] keyword[if] identifier[x] >= literal[int] keyword[else] identifier[rect] . identifier[right] )+ identifier[x] keyword[if] identifier[y] keyword[is] keyword[None] : identifier[y] = identifier[rect] . identifier[top] + identifier[int] ( identifier[rect] . identifier[height] ()* identifier[ratioY] ) keyword[else] : identifier[y] =( identifier[rect] . identifier[top] keyword[if] identifier[y] >= literal[int] keyword[else] identifier[rect] . identifier[bottom] )+ identifier[y] keyword[if] identifier[simulateMove] keyword[and] identifier[MAX_MOVE_SECOND] > literal[int] : identifier[MoveTo] ( identifier[x] , identifier[y] , identifier[waitTime] = literal[int] ) keyword[else] : identifier[SetCursorPos] ( identifier[x] , identifier[y] ) keyword[return] identifier[x] , identifier[y]
def MoveCursorToInnerPos(self, x: int=None, y: int=None, ratioX: float=0.5, ratioY: float=0.5, simulateMove: bool=True) -> tuple:
    """
    Move cursor to control's internal position, default to center.
    x: int, if < 0, move to self.BoundingRectangle.right + x, if not None, ignore ratioX.
    y: int, if < 0, move to self.BoundingRectangle.bottom + y, if not None, ignore ratioY.
    ratioX: float.
    ratioY: float.
    simulateMove: bool.
    Return tuple, two ints(x,y), the cursor position relative to screen(0,0)
        after moving or None if control's width or height == 0.
    """
    rect = self.BoundingRectangle
    if rect.width() == 0 or rect.height() == 0:
        Logger.ColorfullyWriteLine("<Color=Yellow>Can not move cursor</Color>. {}'s BoundingRectangle is {}. SearchProperties: {}".format(self.ControlTypeName, rect, self.GetColorfulSearchPropertiesStr()))
        return # depends on [control=['if'], data=[]]
    if x is None:
        x = rect.left + int(rect.width() * ratioX) # depends on [control=['if'], data=['x']]
    else:
        x = (rect.left if x >= 0 else rect.right) + x
    if y is None:
        y = rect.top + int(rect.height() * ratioY) # depends on [control=['if'], data=['y']]
    else:
        y = (rect.top if y >= 0 else rect.bottom) + y
    if simulateMove and MAX_MOVE_SECOND > 0:
        MoveTo(x, y, waitTime=0) # depends on [control=['if'], data=[]]
    else:
        SetCursorPos(x, y)
    return (x, y)
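A pure-math sketch of the coordinate rules above (ratios by default, negative offsets measured from the right/bottom edge), using a stand-in rectangle instead of a live UIAutomation control; all values are invented.

def inner_pos(left, top, right, bottom, x=None, y=None, ratioX=0.5, ratioY=0.5):
    # Mirrors MoveCursorToInnerPos's resolution of x/y, without moving a cursor.
    if x is None:
        x = left + int((right - left) * ratioX)
    else:
        x = (left if x >= 0 else right) + x
    if y is None:
        y = top + int((bottom - top) * ratioY)
    else:
        y = (top if y >= 0 else bottom) + y
    return x, y

print(inner_pos(100, 100, 300, 200))                 # (200, 150): center
print(inner_pos(100, 100, 300, 200, x=-10, y=-10))   # (290, 190): near bottom-right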
def api_notifications(): """Receive MTurk REST notifications.""" event_type = request.values['Event.1.EventType'] assignment_id = request.values['Event.1.AssignmentId'] # Add the notification to the queue. db.logger.debug('rq: Queueing %s with id: %s for worker_function', event_type, assignment_id) q.enqueue(worker_function, event_type, assignment_id, None) db.logger.debug('rq: Submitted Queue Length: %d (%s)', len(q), ', '.join(q.job_ids)) return success_response(request_type="notification")
def function[api_notifications, parameter[]]: constant[Receive MTurk REST notifications.] variable[event_type] assign[=] call[name[request].values][constant[Event.1.EventType]] variable[assignment_id] assign[=] call[name[request].values][constant[Event.1.AssignmentId]] call[name[db].logger.debug, parameter[constant[rq: Queueing %s with id: %s for worker_function], name[event_type], name[assignment_id]]] call[name[q].enqueue, parameter[name[worker_function], name[event_type], name[assignment_id], constant[None]]] call[name[db].logger.debug, parameter[constant[rq: Submitted Queue Length: %d (%s)], call[name[len], parameter[name[q]]], call[constant[, ].join, parameter[name[q].job_ids]]]] return[call[name[success_response], parameter[]]]
keyword[def] identifier[api_notifications] (): literal[string] identifier[event_type] = identifier[request] . identifier[values] [ literal[string] ] identifier[assignment_id] = identifier[request] . identifier[values] [ literal[string] ] identifier[db] . identifier[logger] . identifier[debug] ( literal[string] , identifier[event_type] , identifier[assignment_id] ) identifier[q] . identifier[enqueue] ( identifier[worker_function] , identifier[event_type] , identifier[assignment_id] , keyword[None] ) identifier[db] . identifier[logger] . identifier[debug] ( literal[string] , identifier[len] ( identifier[q] ), literal[string] . identifier[join] ( identifier[q] . identifier[job_ids] )) keyword[return] identifier[success_response] ( identifier[request_type] = literal[string] )
def api_notifications(): """Receive MTurk REST notifications.""" event_type = request.values['Event.1.EventType'] assignment_id = request.values['Event.1.AssignmentId'] # Add the notification to the queue. db.logger.debug('rq: Queueing %s with id: %s for worker_function', event_type, assignment_id) q.enqueue(worker_function, event_type, assignment_id, None) db.logger.debug('rq: Submitted Queue Length: %d (%s)', len(q), ', '.join(q.job_ids)) return success_response(request_type='notification')
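A hedged sketch of the same enqueue pattern with python-rq; it assumes a Redis server on localhost and a hypothetical tasks.py module exposing worker_function (rq workers must be able to import the enqueued function).

from redis import Redis
from rq import Queue
from tasks import worker_function  # hypothetical importable module

q = Queue(connection=Redis())
q.enqueue(worker_function, "AssignmentSubmitted", "FAKE_ASSIGNMENT_ID", None)
print(len(q), q.job_ids)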
def running(concurrent=False): ''' Return a list of strings that contain state return data if a state function is already running. This function is used to prevent multiple state calls from being run at the same time. CLI Example: .. code-block:: bash salt '*' state.running ''' ret = [] if concurrent: return ret active = __salt__['saltutil.is_running']('state.*') for data in active: err = ( 'The function "{0}" is running as PID {1} and was started at ' '{2} with jid {3}' ).format( data['fun'], data['pid'], salt.utils.jid.jid_to_time(data['jid']), data['jid'], ) ret.append(err) return ret
def function[running, parameter[concurrent]]: constant[ Return a list of strings that contain state return data if a state function is already running. This function is used to prevent multiple state calls from being run at the same time. CLI Example: .. code-block:: bash salt '*' state.running ] variable[ret] assign[=] list[[]] if name[concurrent] begin[:] return[name[ret]] variable[active] assign[=] call[call[name[__salt__]][constant[saltutil.is_running]], parameter[constant[state.*]]] for taget[name[data]] in starred[name[active]] begin[:] variable[err] assign[=] call[constant[The function "{0}" is running as PID {1} and was started at {2} with jid {3}].format, parameter[call[name[data]][constant[fun]], call[name[data]][constant[pid]], call[name[salt].utils.jid.jid_to_time, parameter[call[name[data]][constant[jid]]]], call[name[data]][constant[jid]]]] call[name[ret].append, parameter[name[err]]] return[name[ret]]
keyword[def] identifier[running] ( identifier[concurrent] = keyword[False] ): literal[string] identifier[ret] =[] keyword[if] identifier[concurrent] : keyword[return] identifier[ret] identifier[active] = identifier[__salt__] [ literal[string] ]( literal[string] ) keyword[for] identifier[data] keyword[in] identifier[active] : identifier[err] =( literal[string] literal[string] ). identifier[format] ( identifier[data] [ literal[string] ], identifier[data] [ literal[string] ], identifier[salt] . identifier[utils] . identifier[jid] . identifier[jid_to_time] ( identifier[data] [ literal[string] ]), identifier[data] [ literal[string] ], ) identifier[ret] . identifier[append] ( identifier[err] ) keyword[return] identifier[ret]
def running(concurrent=False): """ Return a list of strings that contain state return data if a state function is already running. This function is used to prevent multiple state calls from being run at the same time. CLI Example: .. code-block:: bash salt '*' state.running """ ret = [] if concurrent: return ret # depends on [control=['if'], data=[]] active = __salt__['saltutil.is_running']('state.*') for data in active: err = 'The function "{0}" is running as PID {1} and was started at {2} with jid {3}'.format(data['fun'], data['pid'], salt.utils.jid.jid_to_time(data['jid']), data['jid']) ret.append(err) # depends on [control=['for'], data=['data']] return ret
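A standalone sketch of the message formatting, with a fabricated saltutil.is_running-style record; the raw jid stands in for salt.utils.jid.jid_to_time, which is not imported here.

data = {"fun": "state.apply", "pid": 4242, "jid": "20190101120000123456"}
err = ('The function "{0}" is running as PID {1} and was started at '
       '{2} with jid {3}').format(data["fun"], data["pid"], data["jid"], data["jid"])
print(err)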
def mag_field_df(MAG, ofs=None): '''calculate magnetic field strength from raw magnetometer (dataflash version)''' mag = Vector3(MAG.MagX, MAG.MagY, MAG.MagZ) offsets = Vector3(MAG.OfsX, MAG.OfsY, MAG.OfsZ) if ofs is not None: mag = (mag - offsets) + Vector3(ofs[0], ofs[1], ofs[2]) return mag.length()
def function[mag_field_df, parameter[MAG, ofs]]: constant[calculate magnetic field strength from raw magnetometer (dataflash version)] variable[mag] assign[=] call[name[Vector3], parameter[name[MAG].MagX, name[MAG].MagY, name[MAG].MagZ]] variable[offsets] assign[=] call[name[Vector3], parameter[name[MAG].OfsX, name[MAG].OfsY, name[MAG].OfsZ]] if compare[name[ofs] is_not constant[None]] begin[:] variable[mag] assign[=] binary_operation[binary_operation[name[mag] - name[offsets]] + call[name[Vector3], parameter[call[name[ofs]][constant[0]], call[name[ofs]][constant[1]], call[name[ofs]][constant[2]]]]] return[call[name[mag].length, parameter[]]]
keyword[def] identifier[mag_field_df] ( identifier[MAG] , identifier[ofs] = keyword[None] ): literal[string] identifier[mag] = identifier[Vector3] ( identifier[MAG] . identifier[MagX] , identifier[MAG] . identifier[MagY] , identifier[MAG] . identifier[MagZ] ) identifier[offsets] = identifier[Vector3] ( identifier[MAG] . identifier[OfsX] , identifier[MAG] . identifier[OfsY] , identifier[MAG] . identifier[OfsZ] ) keyword[if] identifier[ofs] keyword[is] keyword[not] keyword[None] : identifier[mag] =( identifier[mag] - identifier[offsets] )+ identifier[Vector3] ( identifier[ofs] [ literal[int] ], identifier[ofs] [ literal[int] ], identifier[ofs] [ literal[int] ]) keyword[return] identifier[mag] . identifier[length] ()
def mag_field_df(MAG, ofs=None): """calculate magnetic field strength from raw magnetometer (dataflash version)""" mag = Vector3(MAG.MagX, MAG.MagY, MAG.MagZ) offsets = Vector3(MAG.OfsX, MAG.OfsY, MAG.OfsZ) if ofs is not None: mag = mag - offsets + Vector3(ofs[0], ofs[1], ofs[2]) # depends on [control=['if'], data=['ofs']] return mag.length()
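A self-contained re-run of the offset-correction math using plain tuples in place of pymavlink's Vector3; the raw readings and offsets below are invented.

import math

raw = (220.0, -80.0, 410.0)   # MagX, MagY, MagZ
stored = (5.0, -3.0, 12.0)    # OfsX, OfsY, OfsZ recorded in the log
new = (7.0, -1.0, 10.0)       # candidate offsets to evaluate
corrected = [r - s + n for r, s, n in zip(raw, stored, new)]
print(math.sqrt(sum(c * c for c in corrected)))   # field strength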
def oauth_register(form): """Register user if possible. :param form: A form instance. :returns: A :class:`invenio_accounts.models.User` instance. """ if form.validate(): data = form.to_dict() if not data.get('password'): data['password'] = '' user = register_user(**data) if not data['password']: user.password = None _datastore.commit() return user
def function[oauth_register, parameter[form]]: constant[Register user if possible. :param form: A form instance. :returns: A :class:`invenio_accounts.models.User` instance. ] if call[name[form].validate, parameter[]] begin[:] variable[data] assign[=] call[name[form].to_dict, parameter[]] if <ast.UnaryOp object at 0x7da1b25d14e0> begin[:] call[name[data]][constant[password]] assign[=] constant[] variable[user] assign[=] call[name[register_user], parameter[]] if <ast.UnaryOp object at 0x7da1b25d3250> begin[:] name[user].password assign[=] constant[None] call[name[_datastore].commit, parameter[]] return[name[user]]
keyword[def] identifier[oauth_register] ( identifier[form] ): literal[string] keyword[if] identifier[form] . identifier[validate] (): identifier[data] = identifier[form] . identifier[to_dict] () keyword[if] keyword[not] identifier[data] . identifier[get] ( literal[string] ): identifier[data] [ literal[string] ]= literal[string] identifier[user] = identifier[register_user] (** identifier[data] ) keyword[if] keyword[not] identifier[data] [ literal[string] ]: identifier[user] . identifier[password] = keyword[None] identifier[_datastore] . identifier[commit] () keyword[return] identifier[user]
def oauth_register(form): """Register user if possible. :param form: A form instance. :returns: A :class:`invenio_accounts.models.User` instance. """ if form.validate(): data = form.to_dict() if not data.get('password'): data['password'] = '' # depends on [control=['if'], data=[]] user = register_user(**data) if not data['password']: user.password = None # depends on [control=['if'], data=[]] _datastore.commit() return user # depends on [control=['if'], data=[]]
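A runnable sketch of the control flow with local stubs standing in for invenio's form, register_user and datastore (all stub names are hypothetical).

class StubForm:
    def validate(self):
        return True
    def to_dict(self):
        return {"email": "user@example.org"}  # note: no password supplied

def register_user(**data):
    return type("User", (), data)()

form = StubForm()
data = form.to_dict()
if not data.get("password"):
    data["password"] = ""          # account is created with a blank password...
user = register_user(**data)
if not data["password"]:
    user.password = None           # ...then nulled, so only OAuth login works
print(user.email, user.password)   # user@example.org None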
def _message_generator(self): """Iterate over processed messages in the receive queue. :rtype: generator[~uamqp.message.Message] """ self.open() auto_complete = self.auto_complete self.auto_complete = False receiving = True message = None try: while receiving: while receiving and self._received_messages.empty(): receiving = self.do_work() while not self._received_messages.empty(): message = self._received_messages.get() self._received_messages.task_done() yield message self._complete_message(message, auto_complete) finally: self._complete_message(message, auto_complete) self.auto_complete = auto_complete self.close()
def function[_message_generator, parameter[self]]: constant[Iterate over processed messages in the receive queue. :rtype: generator[~uamqp.message.Message] ] call[name[self].open, parameter[]] variable[auto_complete] assign[=] name[self].auto_complete name[self].auto_complete assign[=] constant[False] variable[receiving] assign[=] constant[True] variable[message] assign[=] constant[None] <ast.Try object at 0x7da204962860>
keyword[def] identifier[_message_generator] ( identifier[self] ): literal[string] identifier[self] . identifier[open] () identifier[auto_complete] = identifier[self] . identifier[auto_complete] identifier[self] . identifier[auto_complete] = keyword[False] identifier[receiving] = keyword[True] identifier[message] = keyword[None] keyword[try] : keyword[while] identifier[receiving] : keyword[while] identifier[receiving] keyword[and] identifier[self] . identifier[_received_messages] . identifier[empty] (): identifier[receiving] = identifier[self] . identifier[do_work] () keyword[while] keyword[not] identifier[self] . identifier[_received_messages] . identifier[empty] (): identifier[message] = identifier[self] . identifier[_received_messages] . identifier[get] () identifier[self] . identifier[_received_messages] . identifier[task_done] () keyword[yield] identifier[message] identifier[self] . identifier[_complete_message] ( identifier[message] , identifier[auto_complete] ) keyword[finally] : identifier[self] . identifier[_complete_message] ( identifier[message] , identifier[auto_complete] ) identifier[self] . identifier[auto_complete] = identifier[auto_complete] identifier[self] . identifier[close] ()
def _message_generator(self): """Iterate over processed messages in the receive queue. :rtype: generator[~uamqp.message.Message] """ self.open() auto_complete = self.auto_complete self.auto_complete = False receiving = True message = None try: while receiving: while receiving and self._received_messages.empty(): receiving = self.do_work() # depends on [control=['while'], data=[]] while not self._received_messages.empty(): message = self._received_messages.get() self._received_messages.task_done() yield message self._complete_message(message, auto_complete) # depends on [control=['while'], data=[]] # depends on [control=['while'], data=[]] # depends on [control=['try'], data=[]] finally: self._complete_message(message, auto_complete) self.auto_complete = auto_complete self.close()
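A toy model of the drain loop above: queue.Queue plays the receiver's _received_messages and do_work() "delivers" one invented batch per call until the connection is considered closed.

import queue

received = queue.Queue()
batches = [["m1", "m2"], ["m3"]]

def do_work():
    if not batches:
        return False               # connection closed: stop receiving
    for m in batches.pop(0):
        received.put(m)
    return True

def message_generator():
    receiving = True
    while receiving:
        while receiving and received.empty():
            receiving = do_work()
        while not received.empty():
            msg = received.get()
            received.task_done()
            yield msg

print(list(message_generator()))   # ['m1', 'm2', 'm3']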
def _check_invalid_longitudes(self):
    '''
    Checks for longitudes greater than 180. and wraps them into the
    range -180. to 180.
    '''
    idlon = self.strain.data['longitude'] > 180.
    if np.any(idlon):
        self.strain.data['longitude'][idlon] = \
            self.strain.data['longitude'][idlon] - 360.
def function[_check_invalid_longitudes, parameter[self]]: constant[ Checks to ensure that all longitudes are in the range -180. to 180 ] variable[idlon] assign[=] compare[call[name[self].strain.data][constant[longitude]] greater[>] constant[180.0]] if call[name[np].any, parameter[name[idlon]]] begin[:] call[call[name[self].strain.data][constant[longitude]]][name[idlon]] assign[=] binary_operation[call[call[name[self].strain.data][constant[longitude]]][name[idlon]] - constant[360.0]]
keyword[def] identifier[_check_invalid_longitudes] ( identifier[self] ): literal[string] identifier[idlon] = identifier[self] . identifier[strain] . identifier[data] [ literal[string] ]> literal[int] keyword[if] identifier[np] . identifier[any] ( identifier[idlon] ): identifier[self] . identifier[strain] . identifier[data] [ literal[string] ][ identifier[idlon] ]= identifier[self] . identifier[strain] . identifier[data] [ literal[string] ][ identifier[idlon] ]- literal[int]
def _check_invalid_longitudes(self):
    """
    Checks for longitudes greater than 180. and wraps them into the
    range -180. to 180.
    """
    idlon = self.strain.data['longitude'] > 180.0
    if np.any(idlon):
        self.strain.data['longitude'][idlon] = self.strain.data['longitude'][idlon] - 360.0 # depends on [control=['if'], data=[]]
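A standalone demo of the same boolean-mask wrap on made-up longitudes.

import numpy as np

lon = np.array([120.0, 185.0, 270.0, -30.0])
idlon = lon > 180.0
lon[idlon] = lon[idlon] - 360.0
print(lon)   # [ 120. -175.  -90.  -30.]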
def _deriv_arctan2(y, x): """Derivative of the arctan2 function""" r2 = x*x + y*y df_dy = x / r2 df_dx = -y / r2 return np.hstack([df_dy, df_dx])
def function[_deriv_arctan2, parameter[y, x]]: constant[Derivative of the arctan2 function] variable[r2] assign[=] binary_operation[binary_operation[name[x] * name[x]] + binary_operation[name[y] * name[y]]] variable[df_dy] assign[=] binary_operation[name[x] / name[r2]] variable[df_dx] assign[=] binary_operation[<ast.UnaryOp object at 0x7da20c7ca110> / name[r2]] return[call[name[np].hstack, parameter[list[[<ast.Name object at 0x7da20c7cad70>, <ast.Name object at 0x7da20c7cbc40>]]]]]
keyword[def] identifier[_deriv_arctan2] ( identifier[y] , identifier[x] ): literal[string] identifier[r2] = identifier[x] * identifier[x] + identifier[y] * identifier[y] identifier[df_dy] = identifier[x] / identifier[r2] identifier[df_dx] =- identifier[y] / identifier[r2] keyword[return] identifier[np] . identifier[hstack] ([ identifier[df_dy] , identifier[df_dx] ])
def _deriv_arctan2(y, x): """Derivative of the arctan2 function""" r2 = x * x + y * y df_dy = x / r2 df_dx = -y / r2 return np.hstack([df_dy, df_dx])
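A quick numerical sanity check of the formula against central differences; it assumes _deriv_arctan2 above and numpy (as np) are in scope.

y, x, h = 1.0, 2.0, 1e-6
analytic = _deriv_arctan2(np.array([y]), np.array([x]))
fd_dy = (np.arctan2(y + h, x) - np.arctan2(y - h, x)) / (2 * h)
fd_dx = (np.arctan2(y, x + h) - np.arctan2(y, x - h)) / (2 * h)
print(analytic, fd_dy, fd_dx)   # ~[0.4 -0.2] 0.4 -0.2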
def do_parse(infilename: str, jsonfilename: Optional[str], rdffilename: Optional[str], rdffmt: str, context: Optional[str] = None) -> bool:
    """ Parse the ShExC in infilename and save the results in jsonfilename and/or rdffilename
    :param infilename: name of the file containing the ShExC
    :param jsonfilename: target ShExJ equivalent
    :param rdffilename: target ShExR equivalent
    :param rdffmt: target RDF format
    :param context: @context to use for rdf generation. If None use what is in the file
    :return: true if success
    """
    shexj = parse(FileStream(infilename, encoding="utf-8"))
    if shexj is not None:
        shexj['@context'] = context if context else "http://www.w3.org/ns/shex.jsonld"
        if jsonfilename:
            with open(jsonfilename, 'w') as outfile:
                outfile.write(shexj._as_json_dumps())
        if rdffilename:
            g = Graph().parse(data=shexj._as_json, format="json-ld")
            g.serialize(open(rdffilename, "wb"), format=rdffmt)
        return True
    return False
def function[do_parse, parameter[infilename, jsonfilename, rdffilename, rdffmt, context]]: constant[ Parse the jsg in infilename and save the results in outfilename :param infilename: name of the file containing the ShExC :param jsonfilename: target ShExJ equivalent :param rdffilename: target ShExR equivalent :param rdffmt: target RDF format :param context: @context to use for rdf generation. If None use what is in the file :return: true if success ] variable[shexj] assign[=] call[name[parse], parameter[call[name[FileStream], parameter[name[infilename]]]]] if compare[name[shexj] is_not constant[None]] begin[:] call[name[shexj]][constant[@context]] assign[=] <ast.IfExp object at 0x7da1b2587fd0> if name[jsonfilename] begin[:] with call[name[open], parameter[name[jsonfilename], constant[w]]] begin[:] call[name[outfile].write, parameter[call[name[shexj]._as_json_dumps, parameter[]]]] if name[rdffilename] begin[:] variable[g] assign[=] call[call[name[Graph], parameter[]].parse, parameter[]] call[name[g].serialize, parameter[call[name[open], parameter[name[rdffilename], constant[wb]]]]] return[constant[True]] return[constant[False]]
keyword[def] identifier[do_parse] ( identifier[infilename] : identifier[str] , identifier[jsonfilename] : identifier[Optional] [ identifier[str] ], identifier[rdffilename] : identifier[Optional] [ identifier[str] ], identifier[rdffmt] : identifier[str] , identifier[context] : identifier[Optional] [ identifier[str] ]= keyword[None] )-> identifier[bool] : literal[string] identifier[shexj] = identifier[parse] ( identifier[FileStream] ( identifier[infilename] , identifier[encoding] = literal[string] )) keyword[if] identifier[shexj] keyword[is] keyword[not] keyword[None] : identifier[shexj] [ literal[string] ]= identifier[context] keyword[if] identifier[context] keyword[else] literal[string] keyword[if] identifier[jsonfilename] : keyword[with] identifier[open] ( identifier[jsonfilename] , literal[string] ) keyword[as] identifier[outfile] : identifier[outfile] . identifier[write] ( identifier[shexj] . identifier[_as_json_dumps] ()) keyword[if] identifier[rdffilename] : identifier[g] = identifier[Graph] (). identifier[parse] ( identifier[data] = identifier[shexj] . identifier[_as_json] , identifier[format] = literal[string] ) identifier[g] . identifier[serialize] ( identifier[open] ( identifier[rdffilename] , literal[string] ), identifier[format] = identifier[rdffmt] ) keyword[return] keyword[True] keyword[return] keyword[False]
def do_parse(infilename: str, jsonfilename: Optional[str], rdffilename: Optional[str], rdffmt: str, context: Optional[str]=None) -> bool:
    """ Parse the ShExC in infilename and save the results in jsonfilename and/or rdffilename
    :param infilename: name of the file containing the ShExC
    :param jsonfilename: target ShExJ equivalent
    :param rdffilename: target ShExR equivalent
    :param rdffmt: target RDF format
    :param context: @context to use for rdf generation. If None use what is in the file
    :return: true if success
    """
    shexj = parse(FileStream(infilename, encoding='utf-8'))
    if shexj is not None:
        shexj['@context'] = context if context else 'http://www.w3.org/ns/shex.jsonld'
        if jsonfilename:
            with open(jsonfilename, 'w') as outfile:
                outfile.write(shexj._as_json_dumps()) # depends on [control=['with'], data=['outfile']] # depends on [control=['if'], data=[]]
        if rdffilename:
            g = Graph().parse(data=shexj._as_json, format='json-ld')
            g.serialize(open(rdffilename, 'wb'), format=rdffmt) # depends on [control=['if'], data=[]]
        return True # depends on [control=['if'], data=['shexj']]
    return False
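A standalone sketch of the JSON-LD-to-RDF step used above, assuming an rdflib installation with JSON-LD support; the tiny document below is hand-written, not a parsed ShEx schema.

from rdflib import Graph

doc = '{"@context": {"name": "http://schema.org/name"}, "@id": "http://example.org/x", "name": "example"}'
g = Graph().parse(data=doc, format="json-ld")
for triple in g:
    print(triple)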
def write_c_string( self, value ):
    """ Write a zero terminated (C style) string """
    self.file.write( value )
    self.file.write( b'\0' )
def function[write_c_string, parameter[self, value]]: constant[ Read a zero terminated (C style) string ] call[name[self].file.write, parameter[name[value]]] call[name[self].file.write, parameter[constant[b'\x00']]]
keyword[def] identifier[write_c_string] ( identifier[self] , identifier[value] ): literal[string] identifier[self] . identifier[file] . identifier[write] ( identifier[value] ) identifier[self] . identifier[file] . identifier[write] ( literal[string] )
def write_c_string(self, value):
    """ Write a zero terminated (C style) string """
    self.file.write(value)
    self.file.write(b'\x00')
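A round-trip sketch with io.BytesIO standing in for self.file, showing that the terminator makes the string recoverable by splitting on the NUL byte.

import io

buf = io.BytesIO()
buf.write(b"hello")
buf.write(b"\0")
raw = buf.getvalue()
print(raw, raw.split(b"\0")[0])   # b'hello\x00' b'hello'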
def allowed_transitions(constraint_type: str, labels: Dict[int, str]) -> List[Tuple[int, int]]: """ Given labels and a constraint type, returns the allowed transitions. It will additionally include transitions for the start and end states, which are used by the conditional random field. Parameters ---------- constraint_type : ``str``, required Indicates which constraint to apply. Current choices are "BIO", "IOB1", "BIOUL", and "BMES". labels : ``Dict[int, str]``, required A mapping {label_id -> label}. Most commonly this would be the value from Vocabulary.get_index_to_token_vocabulary() Returns ------- ``List[Tuple[int, int]]`` The allowed transitions (from_label_id, to_label_id). """ num_labels = len(labels) start_tag = num_labels end_tag = num_labels + 1 labels_with_boundaries = list(labels.items()) + [(start_tag, "START"), (end_tag, "END")] allowed = [] for from_label_index, from_label in labels_with_boundaries: if from_label in ("START", "END"): from_tag = from_label from_entity = "" else: from_tag = from_label[0] from_entity = from_label[1:] for to_label_index, to_label in labels_with_boundaries: if to_label in ("START", "END"): to_tag = to_label to_entity = "" else: to_tag = to_label[0] to_entity = to_label[1:] if is_transition_allowed(constraint_type, from_tag, from_entity, to_tag, to_entity): allowed.append((from_label_index, to_label_index)) return allowed
def function[allowed_transitions, parameter[constraint_type, labels]]: constant[ Given labels and a constraint type, returns the allowed transitions. It will additionally include transitions for the start and end states, which are used by the conditional random field. Parameters ---------- constraint_type : ``str``, required Indicates which constraint to apply. Current choices are "BIO", "IOB1", "BIOUL", and "BMES". labels : ``Dict[int, str]``, required A mapping {label_id -> label}. Most commonly this would be the value from Vocabulary.get_index_to_token_vocabulary() Returns ------- ``List[Tuple[int, int]]`` The allowed transitions (from_label_id, to_label_id). ] variable[num_labels] assign[=] call[name[len], parameter[name[labels]]] variable[start_tag] assign[=] name[num_labels] variable[end_tag] assign[=] binary_operation[name[num_labels] + constant[1]] variable[labels_with_boundaries] assign[=] binary_operation[call[name[list], parameter[call[name[labels].items, parameter[]]]] + list[[<ast.Tuple object at 0x7da1b1f95750>, <ast.Tuple object at 0x7da1b1f971f0>]]] variable[allowed] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b1f944c0>, <ast.Name object at 0x7da1b1f96410>]]] in starred[name[labels_with_boundaries]] begin[:] if compare[name[from_label] in tuple[[<ast.Constant object at 0x7da1b1f94580>, <ast.Constant object at 0x7da1b1f97f10>]]] begin[:] variable[from_tag] assign[=] name[from_label] variable[from_entity] assign[=] constant[] for taget[tuple[[<ast.Name object at 0x7da1b1f96500>, <ast.Name object at 0x7da1b1f94dc0>]]] in starred[name[labels_with_boundaries]] begin[:] if compare[name[to_label] in tuple[[<ast.Constant object at 0x7da1b1f961a0>, <ast.Constant object at 0x7da1b1f96e60>]]] begin[:] variable[to_tag] assign[=] name[to_label] variable[to_entity] assign[=] constant[] if call[name[is_transition_allowed], parameter[name[constraint_type], name[from_tag], name[from_entity], name[to_tag], name[to_entity]]] begin[:] call[name[allowed].append, parameter[tuple[[<ast.Name object at 0x7da20c9904c0>, <ast.Name object at 0x7da20c993670>]]]] return[name[allowed]]
keyword[def] identifier[allowed_transitions] ( identifier[constraint_type] : identifier[str] , identifier[labels] : identifier[Dict] [ identifier[int] , identifier[str] ])-> identifier[List] [ identifier[Tuple] [ identifier[int] , identifier[int] ]]: literal[string] identifier[num_labels] = identifier[len] ( identifier[labels] ) identifier[start_tag] = identifier[num_labels] identifier[end_tag] = identifier[num_labels] + literal[int] identifier[labels_with_boundaries] = identifier[list] ( identifier[labels] . identifier[items] ())+[( identifier[start_tag] , literal[string] ),( identifier[end_tag] , literal[string] )] identifier[allowed] =[] keyword[for] identifier[from_label_index] , identifier[from_label] keyword[in] identifier[labels_with_boundaries] : keyword[if] identifier[from_label] keyword[in] ( literal[string] , literal[string] ): identifier[from_tag] = identifier[from_label] identifier[from_entity] = literal[string] keyword[else] : identifier[from_tag] = identifier[from_label] [ literal[int] ] identifier[from_entity] = identifier[from_label] [ literal[int] :] keyword[for] identifier[to_label_index] , identifier[to_label] keyword[in] identifier[labels_with_boundaries] : keyword[if] identifier[to_label] keyword[in] ( literal[string] , literal[string] ): identifier[to_tag] = identifier[to_label] identifier[to_entity] = literal[string] keyword[else] : identifier[to_tag] = identifier[to_label] [ literal[int] ] identifier[to_entity] = identifier[to_label] [ literal[int] :] keyword[if] identifier[is_transition_allowed] ( identifier[constraint_type] , identifier[from_tag] , identifier[from_entity] , identifier[to_tag] , identifier[to_entity] ): identifier[allowed] . identifier[append] (( identifier[from_label_index] , identifier[to_label_index] )) keyword[return] identifier[allowed]
def allowed_transitions(constraint_type: str, labels: Dict[int, str]) -> List[Tuple[int, int]]: """ Given labels and a constraint type, returns the allowed transitions. It will additionally include transitions for the start and end states, which are used by the conditional random field. Parameters ---------- constraint_type : ``str``, required Indicates which constraint to apply. Current choices are "BIO", "IOB1", "BIOUL", and "BMES". labels : ``Dict[int, str]``, required A mapping {label_id -> label}. Most commonly this would be the value from Vocabulary.get_index_to_token_vocabulary() Returns ------- ``List[Tuple[int, int]]`` The allowed transitions (from_label_id, to_label_id). """ num_labels = len(labels) start_tag = num_labels end_tag = num_labels + 1 labels_with_boundaries = list(labels.items()) + [(start_tag, 'START'), (end_tag, 'END')] allowed = [] for (from_label_index, from_label) in labels_with_boundaries: if from_label in ('START', 'END'): from_tag = from_label from_entity = '' # depends on [control=['if'], data=['from_label']] else: from_tag = from_label[0] from_entity = from_label[1:] for (to_label_index, to_label) in labels_with_boundaries: if to_label in ('START', 'END'): to_tag = to_label to_entity = '' # depends on [control=['if'], data=['to_label']] else: to_tag = to_label[0] to_entity = to_label[1:] if is_transition_allowed(constraint_type, from_tag, from_entity, to_tag, to_entity): allowed.append((from_label_index, to_label_index)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] return allowed
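An example run over a tiny BIO label vocabulary; it assumes is_transition_allowed from the same module is in scope. Indices num_labels and num_labels+1 in the output stand for the implicit START and END states.

labels = {0: "B-PER", 1: "I-PER"}   # hypothetical Vocabulary mapping
print(allowed_transitions("BIO", labels))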
def intcomma(value):
    """
    Borrowed from django.contrib.humanize

    Converts an integer to a string containing commas every three digits.
    For example, 3000 becomes '3,000' and 45000 becomes '45,000'.
    """
    orig = str(value)
    new = re.sub(r"^(-?\d+)(\d{3})", r'\g<1>,\g<2>', orig)
    if orig == new:
        return new
    else:
        return intcomma(new)
def function[intcomma, parameter[value]]: constant[ Borrowed from django.contrib.humanize Converts an integer to a string containing commas every three digits. For example, 3000 becomes '3,000' and 45000 becomes '45,000'. ] variable[orig] assign[=] call[name[str], parameter[name[value]]] variable[new] assign[=] call[name[re].sub, parameter[constant[^(-?\d+)(\d{3})], constant[\g<1>,\g<2>], name[orig]]] if compare[name[orig] equal[==] name[new]] begin[:] return[name[new]]
keyword[def] identifier[intcomma] ( identifier[value] ): literal[string] identifier[orig] = identifier[str] ( identifier[value] ) identifier[new] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[orig] ) keyword[if] identifier[orig] == identifier[new] : keyword[return] identifier[new] keyword[else] : keyword[return] identifier[intcomma] ( identifier[new] )
def intcomma(value): """ Borrowed from django.contrib.humanize Converts an integer to a string containing commas every three digits. For example, 3000 becomes '3,000' and 45000 becomes '45,000'. """ orig = str(value) new = re.sub('^(-?\\d+)(\\d{3})', '\\g<1>,\\g<2>', orig) if orig == new: return new # depends on [control=['if'], data=['new']] else: return intcomma(new)
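Illustrative calls; the recursion keeps re-applying the substitution until no run of four or more leading digits remains.

print(intcomma(3000))       # 3,000
print(intcomma(-4500000))   # -4,500,000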
def _do_resolve_index(
    python_bin: str,
    solver: PythonSolver,
    *,
    all_solvers: typing.List[PythonSolver],
    requirements: typing.List[str],
    exclude_packages: set = None,
    transitive: bool = True,
    subgraph_check_api: str = None,
) -> dict:
    """Perform resolution of requirements against the given solver."""
    index_url = solver.release_fetcher.index_url
    source = solver.release_fetcher.source
    packages_seen = set()
    packages = []
    errors = []
    unresolved = []
    unparsed = []
    exclude_packages = exclude_packages or {}
    queue = deque()
    for requirement in requirements:
        _LOGGER.debug("Parsing requirement %r", requirement)
        try:
            dependency = PythonDependencyParser.parse_python(requirement)
        except Exception as exc:
            unparsed.append({"requirement": requirement, "details": str(exc)})
            continue
        if dependency.name in exclude_packages:
            continue
        version_spec = _get_dependency_specification(dependency.spec)
        resolved_versions = _resolve_versions(solver, source, dependency.name, version_spec)
        if not resolved_versions:
            _LOGGER.warning("No versions were resolved for dependency %r in version %r", dependency.name, version_spec)
            unresolved.append({"package_name": dependency.name, "version_spec": version_spec, "index": index_url})
        else:
            for version in resolved_versions:
                entry = (dependency.name, version)
                packages_seen.add(entry)
                queue.append(entry)
    while queue:
        package_name, package_version = queue.pop()
        _LOGGER.info("Using index %r to discover package %r in version %r", index_url, package_name, package_version)
        try:
            with _install_requirement(python_bin, package_name, package_version, index_url):
                package_info = _pipdeptree(python_bin, package_name, warn=True)
        except CommandError as exc:
            _LOGGER.debug(
                "There was an error during package %r in version %r discovery from %r: %s",
                package_name,
                package_version,
                index_url,
                exc,
            )
            errors.append(
                {
                    "package_name": package_name,
                    "index": index_url,
                    "version": package_version,
                    "type": "command_error",
                    "details": exc.to_dict(),
                }
            )
            continue
        if package_info is None:
            errors.append(
                {
                    "package_name": package_name,
                    "index": index_url,
                    "version": package_version,
                    "type": "not_site_package",
                    "details": {
                        "message": "Failed to get information about installed package, probably not site package"
                    },
                }
            )
            continue
        if package_info["package"]["installed_version"] != package_version:
            _LOGGER.warning(
                "Requested to install version %r of package %r, but installed version is %r, error is not fatal",
                package_version,
                package_name,
                package_info["package"]["installed_version"],
            )
        if package_info["package"]["package_name"] != package_name:
            _LOGGER.warning(
                "Requested to install package %r, but installed package name is %r, error is not fatal",
                package_name,
                package_info["package"]["package_name"],
            )
        entry = _create_entry(package_info, source)
        packages.append(entry)
        for dependency in entry["dependencies"]:
            dependency_name, dependency_range = dependency["package_name"], dependency["required_version"]
            dependency["resolved_versions"] = []
            for dep_solver in all_solvers:
                _LOGGER.info(
                    "Resolving dependency versions for %r with range %r from %r",
                    dependency_name,
                    dependency_range,
                    dep_solver.release_fetcher.index_url,
                )
                resolved_versions = _resolve_versions(
                    dep_solver, dep_solver.release_fetcher.source, dependency_name, dependency_range
                )
                _LOGGER.debug(
                    "Resolved versions for package %r with range specifier %r: %s",
                    dependency_name,
                    dependency_range,
                    resolved_versions,
                )
                dependency["resolved_versions"].append(
                    {"versions": resolved_versions, "index": dep_solver.release_fetcher.index_url}
                )
            if not transitive:
                continue
for version in resolved_versions: # Did we check this package already - do not check indexes, we manually insert them. seen_entry = (dependency_name, version) if seen_entry not in packages_seen and ( not subgraph_check_api or ( subgraph_check_api and _should_resolve_subgraph(subgraph_check_api, dependency_name, version, index_url) ) ): _LOGGER.debug( "Adding package %r in version %r for next resolution round", dependency_name, version ) packages_seen.add(seen_entry) queue.append((dependency_name, version)) return {"tree": packages, "errors": errors, "unparsed": unparsed, "unresolved": unresolved}
def function[_do_resolve_index, parameter[python_bin, solver]]:
    constant[Perform resolution of requirements against the given solver.]
    variable[index_url] assign[=] name[solver].release_fetcher.index_url
    variable[source] assign[=] name[solver].release_fetcher.source
    variable[packages_seen] assign[=] call[name[set], parameter[]]
    variable[packages] assign[=] list[[]]
    variable[errors] assign[=] list[[]]
    variable[unresolved] assign[=] list[[]]
    variable[unparsed] assign[=] list[[]]
    variable[exclude_packages] assign[=] <ast.BoolOp object at 0x7da1b1290100>
    variable[queue] assign[=] call[name[deque], parameter[]]
    for taget[name[requirement]] in starred[name[requirements]] begin[:]
        call[name[_LOGGER].debug, parameter[constant[Parsing requirement %r], name[requirement]]]
        <ast.Try object at 0x7da1b1290f10>
        if compare[name[dependency].name in name[exclude_packages]] begin[:]
            continue
        variable[version_spec] assign[=] call[name[_get_dependency_specification], parameter[name[dependency].spec]]
        variable[resolved_versions] assign[=] call[name[_resolve_versions], parameter[name[solver], name[source], name[dependency].name, name[version_spec]]]
        if <ast.UnaryOp object at 0x7da1b12bc850> begin[:]
            call[name[_LOGGER].warning, parameter[constant[No versions were resolved for dependency %r in version %r], name[dependency].name, name[version_spec]]]
            call[name[unresolved].append, parameter[dictionary[[<ast.Constant object at 0x7da1b12be200>, <ast.Constant object at 0x7da1b12bc3a0>, <ast.Constant object at 0x7da1b12be950>], [<ast.Attribute object at 0x7da1b12be140>, <ast.Name object at 0x7da1b12bec50>, <ast.Name object at 0x7da1b12bdcf0>]]]]
    while name[queue] begin[:]
        <ast.Tuple object at 0x7da1b12bd390> assign[=] call[name[queue].pop, parameter[]]
        call[name[_LOGGER].info, parameter[constant[Using index %r to discover package %r in version %r], name[index_url], name[package_name], name[package_version]]]
        <ast.Try object at 0x7da1b12bc310>
        if compare[name[package_info] is constant[None]] begin[:]
            call[name[errors].append, parameter[dictionary[[<ast.Constant object at 0x7da1b12bda20>, <ast.Constant object at 0x7da1b12bcf40>, <ast.Constant object at 0x7da1b12bebf0>, <ast.Constant object at 0x7da1b12bf220>, <ast.Constant object at 0x7da1b12bc340>], [<ast.Name object at 0x7da1b12bf6a0>, <ast.Name object at 0x7da1b12becb0>, <ast.Name object at 0x7da1b12bf250>, <ast.Constant object at 0x7da1b12be5f0>, <ast.Dict object at 0x7da1b12bee90>]]]]
            continue
        if compare[call[call[name[package_info]][constant[package]]][constant[installed_version]] not_equal[!=] name[package_version]] begin[:]
            call[name[_LOGGER].warning, parameter[constant[Requested to install version %r of package %r, but installed version is %r, error is not fatal], name[package_version], name[package_name], call[call[name[package_info]][constant[package]]][constant[installed_version]]]]
        if compare[call[call[name[package_info]][constant[package]]][constant[package_name]] not_equal[!=] name[package_name]] begin[:]
            call[name[_LOGGER].warning, parameter[constant[Requested to install package %r, but installed package name is %r, error is not fatal], name[package_name], call[call[name[package_info]][constant[package]]][constant[package_name]]]]
        variable[entry] assign[=] call[name[_create_entry], parameter[name[package_info], name[source]]]
        call[name[packages].append, parameter[name[entry]]]
        for taget[name[dependency]] in starred[call[name[entry]][constant[dependencies]]] begin[:]
            <ast.Tuple object at 0x7da1b113a470> assign[=] tuple[[<ast.Subscript object at 0x7da1b113b0a0>, <ast.Subscript object at 0x7da1b1139870>]]
            call[name[dependency]][constant[resolved_versions]] assign[=] list[[]]
            for taget[name[dep_solver]] in starred[name[all_solvers]] begin[:]
                call[name[_LOGGER].info, parameter[constant[Resolving dependency versions for %r with range %r from %r], name[dependency_name], name[dependency_range], name[dep_solver].release_fetcher.index_url]]
                variable[resolved_versions] assign[=] call[name[_resolve_versions], parameter[name[dep_solver], name[dep_solver].release_fetcher.source, name[dependency_name], name[dependency_range]]]
                call[name[_LOGGER].debug, parameter[constant[Resolved versions for package %r with range specifier %r: %s], name[dependency_name], name[dependency_range], name[resolved_versions]]]
                call[call[name[dependency]][constant[resolved_versions]].append, parameter[dictionary[[<ast.Constant object at 0x7da1b113b6a0>, <ast.Constant object at 0x7da1b1139c30>], [<ast.Name object at 0x7da1b1139e70>, <ast.Attribute object at 0x7da1b113b8e0>]]]]
                if <ast.UnaryOp object at 0x7da1b1138040> begin[:]
                    continue
                for taget[name[version]] in starred[name[resolved_versions]] begin[:]
                    variable[seen_entry] assign[=] tuple[[<ast.Name object at 0x7da1b11382e0>, <ast.Name object at 0x7da1b11395a0>]]
                    if <ast.BoolOp object at 0x7da1b113b070> begin[:]
                        call[name[_LOGGER].debug, parameter[constant[Adding package %r in version %r for next resolution round], name[dependency_name], name[version]]]
                        call[name[packages_seen].add, parameter[name[seen_entry]]]
                        call[name[queue].append, parameter[tuple[[<ast.Name object at 0x7da1b1139090>, <ast.Name object at 0x7da1b1139960>]]]]
    return[dictionary[[<ast.Constant object at 0x7da1b113b790>, <ast.Constant object at 0x7da1b113ada0>, <ast.Constant object at 0x7da1b113a860>, <ast.Constant object at 0x7da1b11385b0>], [<ast.Name object at 0x7da1b113ae30>, <ast.Name object at 0x7da1b1139540>, <ast.Name object at 0x7da1b11399f0>, <ast.Name object at 0x7da1b11389a0>]]]
keyword[def] identifier[_do_resolve_index] ( identifier[python_bin] : identifier[str] , identifier[solver] : identifier[PythonSolver] , *, identifier[all_solvers] : identifier[typing] . identifier[List] [ identifier[PythonSolver] ], identifier[requirements] : identifier[typing] . identifier[List] [ identifier[str] ], identifier[exclude_packages] : identifier[set] = keyword[None] , identifier[transitive] : identifier[bool] = keyword[True] , identifier[subgraph_check_api] : identifier[str] = keyword[None] , )-> identifier[dict] : literal[string] identifier[index_url] = identifier[solver] . identifier[release_fetcher] . identifier[index_url] identifier[source] = identifier[solver] . identifier[release_fetcher] . identifier[source] identifier[packages_seen] = identifier[set] () identifier[packages] =[] identifier[errors] =[] identifier[unresolved] =[] identifier[unparsed] =[] identifier[exclude_packages] = identifier[exclude_packages] keyword[or] {} identifier[queue] = identifier[deque] () keyword[for] identifier[requirement] keyword[in] identifier[requirements] : identifier[_LOGGER] . identifier[debug] ( literal[string] , identifier[requirement] ) keyword[try] : identifier[dependency] = identifier[PythonDependencyParser] . identifier[parse_python] ( identifier[requirement] ) keyword[except] identifier[Exception] keyword[as] identifier[exc] : identifier[unparsed] . identifier[append] ({ literal[string] : identifier[requirement] , literal[string] : identifier[str] ( identifier[exc] )}) keyword[continue] keyword[if] identifier[dependency] . identifier[name] keyword[in] identifier[exclude_packages] : keyword[continue] identifier[version_spec] = identifier[_get_dependency_specification] ( identifier[dependency] . identifier[spec] ) identifier[resolved_versions] = identifier[_resolve_versions] ( identifier[solver] , identifier[source] , identifier[dependency] . identifier[name] , identifier[version_spec] ) keyword[if] keyword[not] identifier[resolved_versions] : identifier[_LOGGER] . identifier[warning] ( literal[string] , identifier[dependency] . identifier[name] , identifier[version_spec] ) identifier[unresolved] . identifier[append] ({ literal[string] : identifier[dependency] . identifier[name] , literal[string] : identifier[version_spec] , literal[string] : identifier[index_url] }) keyword[else] : keyword[for] identifier[version] keyword[in] identifier[resolved_versions] : identifier[entry] =( identifier[dependency] . identifier[name] , identifier[version] ) identifier[packages_seen] . identifier[add] ( identifier[entry] ) identifier[queue] . identifier[append] ( identifier[entry] ) keyword[while] identifier[queue] : identifier[package_name] , identifier[package_version] = identifier[queue] . identifier[pop] () identifier[_LOGGER] . identifier[info] ( literal[string] , identifier[index_url] , identifier[package_name] , identifier[package_version] ) keyword[try] : keyword[with] identifier[_install_requirement] ( identifier[python_bin] , identifier[package_name] , identifier[package_version] , identifier[index_url] ): identifier[package_info] = identifier[_pipdeptree] ( identifier[python_bin] , identifier[package_name] , identifier[warn] = keyword[True] ) keyword[except] identifier[CommandError] keyword[as] identifier[exc] : identifier[_LOGGER] . identifier[debug] ( literal[string] , identifier[package_name] , identifier[package_version] , identifier[index_url] , identifier[exc] , ) identifier[errors] . identifier[append] ( { literal[string] : identifier[package_name] , literal[string] : identifier[index_url] , literal[string] : identifier[package_version] , literal[string] : literal[string] , literal[string] : identifier[exc] . identifier[to_dict] (), } ) keyword[continue] keyword[if] identifier[package_info] keyword[is] keyword[None] : identifier[errors] . identifier[append] ( { literal[string] : identifier[package_name] , literal[string] : identifier[index_url] , literal[string] : identifier[package_version] , literal[string] : literal[string] , literal[string] :{ literal[string] : literal[string] }, } ) keyword[continue] keyword[if] identifier[package_info] [ literal[string] ][ literal[string] ]!= identifier[package_version] : identifier[_LOGGER] . identifier[warning] ( literal[string] , identifier[package_version] , identifier[package_name] , identifier[package_info] [ literal[string] ][ literal[string] ], ) keyword[if] identifier[package_info] [ literal[string] ][ literal[string] ]!= identifier[package_name] : identifier[_LOGGER] . identifier[warning] ( literal[string] , identifier[package_name] , identifier[package_info] [ literal[string] ][ literal[string] ], ) identifier[entry] = identifier[_create_entry] ( identifier[package_info] , identifier[source] ) identifier[packages] . identifier[append] ( identifier[entry] ) keyword[for] identifier[dependency] keyword[in] identifier[entry] [ literal[string] ]: identifier[dependency_name] , identifier[dependency_range] = identifier[dependency] [ literal[string] ], identifier[dependency] [ literal[string] ] identifier[dependency] [ literal[string] ]=[] keyword[for] identifier[dep_solver] keyword[in] identifier[all_solvers] : identifier[_LOGGER] . identifier[info] ( literal[string] , identifier[dependency_name] , identifier[dependency_range] , identifier[dep_solver] . identifier[release_fetcher] . identifier[index_url] , ) identifier[resolved_versions] = identifier[_resolve_versions] ( identifier[dep_solver] , identifier[dep_solver] . identifier[release_fetcher] . identifier[source] , identifier[dependency_name] , identifier[dependency_range] ) identifier[_LOGGER] . identifier[debug] ( literal[string] , identifier[dependency_name] , identifier[dependency_range] , identifier[resolved_versions] , ) identifier[dependency] [ literal[string] ]. identifier[append] ( { literal[string] : identifier[resolved_versions] , literal[string] : identifier[dep_solver] . identifier[release_fetcher] . identifier[index_url] } ) keyword[if] keyword[not] identifier[transitive] : keyword[continue] keyword[for] identifier[version] keyword[in] identifier[resolved_versions] : identifier[seen_entry] =( identifier[dependency_name] , identifier[version] ) keyword[if] identifier[seen_entry] keyword[not] keyword[in] identifier[packages_seen] keyword[and] ( keyword[not] identifier[subgraph_check_api] keyword[or] ( identifier[subgraph_check_api] keyword[and] identifier[_should_resolve_subgraph] ( identifier[subgraph_check_api] , identifier[dependency_name] , identifier[version] , identifier[index_url] ) ) ): identifier[_LOGGER] . identifier[debug] ( literal[string] , identifier[dependency_name] , identifier[version] ) identifier[packages_seen] . identifier[add] ( identifier[seen_entry] ) identifier[queue] . identifier[append] (( identifier[dependency_name] , identifier[version] )) keyword[return] { literal[string] : identifier[packages] , literal[string] : identifier[errors] , literal[string] : identifier[unparsed] , literal[string] : identifier[unresolved] }
def _do_resolve_index(python_bin: str, solver: PythonSolver, *, all_solvers: typing.List[PythonSolver], requirements: typing.List[str], exclude_packages: set=None, transitive: bool=True, subgraph_check_api: str=None) -> dict: """Perform resolution of requirements against the given solver.""" index_url = solver.release_fetcher.index_url source = solver.release_fetcher.source packages_seen = set() packages = [] errors = [] unresolved = [] unparsed = [] exclude_packages = exclude_packages or {} queue = deque() for requirement in requirements: _LOGGER.debug('Parsing requirement %r', requirement) try: dependency = PythonDependencyParser.parse_python(requirement) # depends on [control=['try'], data=[]] except Exception as exc: unparsed.append({'requirement': requirement, 'details': str(exc)}) continue # depends on [control=['except'], data=['exc']] if dependency.name in exclude_packages: continue # depends on [control=['if'], data=[]] version_spec = _get_dependency_specification(dependency.spec) resolved_versions = _resolve_versions(solver, source, dependency.name, version_spec) if not resolved_versions: _LOGGER.warning('No versions were resolved for dependency %r in version %r', dependency.name, version_spec) unresolved.append({'package_name': dependency.name, 'version_spec': version_spec, 'index': index_url}) # depends on [control=['if'], data=[]] else: for version in resolved_versions: entry = (dependency.name, version) packages_seen.add(entry) queue.append(entry) # depends on [control=['for'], data=['version']] # depends on [control=['for'], data=['requirement']] while queue: (package_name, package_version) = queue.pop() _LOGGER.info('Using index %r to discover package %r in version %r', index_url, package_name, package_version) try: with _install_requirement(python_bin, package_name, package_version, index_url): package_info = _pipdeptree(python_bin, package_name, warn=True) # depends on [control=['with'], data=[]] # depends on [control=['try'], data=[]] except CommandError as exc: _LOGGER.debug('There was an error during package %r in version %r discovery from %r: %s', package_name, package_version, index_url, exc) errors.append({'package_name': package_name, 'index': index_url, 'version': package_version, 'type': 'command_error', 'details': exc.to_dict()}) continue # depends on [control=['except'], data=['exc']] if package_info is None: errors.append({'package_name': package_name, 'index': index_url, 'version': package_version, 'type': 'not_site_package', 'details': {'message': 'Failed to get information about installed package, probably not site package'}}) continue # depends on [control=['if'], data=[]] if package_info['package']['installed_version'] != package_version: _LOGGER.warning('Requested to install version %r of package %r, but installed version is %r, error is not fatal', package_version, package_name, package_info['package']['installed_version']) # depends on [control=['if'], data=['package_version']] if package_info['package']['package_name'] != package_name: _LOGGER.warning('Requested to install package %r, but installed package name is %r, error is not fatal', package_name, package_info['package']['package_name']) # depends on [control=['if'], data=['package_name']] entry = _create_entry(package_info, source) packages.append(entry) for dependency in entry['dependencies']: (dependency_name, dependency_range) = (dependency['package_name'], dependency['required_version']) dependency['resolved_versions'] = [] for dep_solver in all_solvers: _LOGGER.info('Resolving dependency 
versions for %r with range %r from %r', dependency_name, dependency_range, dep_solver.release_fetcher.index_url) resolved_versions = _resolve_versions(dep_solver, dep_solver.release_fetcher.source, dependency_name, dependency_range) _LOGGER.debug('Resolved versions for package %r with range specifier %r: %s', dependency_name, dependency_range, resolved_versions) dependency['resolved_versions'].append({'versions': resolved_versions, 'index': dep_solver.release_fetcher.index_url}) if not transitive: continue # depends on [control=['if'], data=[]] for version in resolved_versions: # Did we check this package already - do not check indexes, we manually insert them. seen_entry = (dependency_name, version) if seen_entry not in packages_seen and (not subgraph_check_api or (subgraph_check_api and _should_resolve_subgraph(subgraph_check_api, dependency_name, version, index_url))): _LOGGER.debug('Adding package %r in version %r for next resolution round', dependency_name, version) packages_seen.add(seen_entry) queue.append((dependency_name, version)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['version']] # depends on [control=['for'], data=['dep_solver']] # depends on [control=['for'], data=['dependency']] # depends on [control=['while'], data=[]] return {'tree': packages, 'errors': errors, 'unparsed': unparsed, 'unresolved': unresolved}
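The resolver above walks the transitive dependency graph with a deque plus a seen-set, so each (package, version) pair is expanded only once. A minimal, self-contained sketch of that traversal pattern — `resolve` is a hypothetical stand-in for the solver calls:

from collections import deque

def walk_dependencies(roots, resolve):
    # resolve(name, version) -> list of (name, version) pairs; stand-in
    # for the per-index version resolution done above
    seen = set(roots)
    queue = deque(roots)
    tree = []
    while queue:
        name, version = queue.pop()
        deps = resolve(name, version)
        tree.append((name, version, deps))
        for dep in deps:
            if dep not in seen:      # expand each pair only once
                seen.add(dep)
                queue.append(dep)
    return tree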
def remove_root(self, model, setter=None):
    ''' Remove a model as root model from this Document.

    Changes to this model may still trigger ``on_change`` callbacks
    on this document, if the model is still referred to by other
    root models.

    Args:
        model (Model) :
            The model to remove as a root of this document.

        setter (ClientSession or ServerSession or None, optional) :
            This is used to prevent "boomerang" updates to Bokeh apps.
            (default: None)

            In the context of a Bokeh server application, incoming updates
            to properties will be annotated with the session that is
            doing the updating. This value is propagated through any
            subsequent change notifications that the update triggers.
            The session can compare the event setter to itself, and
            suppress any updates that originate from itself.

    '''
    if model not in self._roots:
        return # TODO (bev) ValueError?
    self._push_all_models_freeze()
    try:
        self._roots.remove(model)
    finally:
        self._pop_all_models_freeze()
    self._trigger_on_change(RootRemovedEvent(self, model, setter))
def function[remove_root, parameter[self, model, setter]]: constant[ Remove a model as root model from this Document. Changes to this model may still trigger ``on_change`` callbacks on this document, if the model is still referred to by other root models. Args: model (Model) : The model to add as a root of this document. setter (ClientSession or ServerSession or None, optional) : This is used to prevent "boomerang" updates to Bokeh apps. (default: None) In the context of a Bokeh server application, incoming updates to properties will be annotated with the session that is doing the updating. This value is propagated through any subsequent change notifications that the update triggers. The session can compare the event setter to itself, and suppress any updates that originate from itself. ] if compare[name[model] <ast.NotIn object at 0x7da2590d7190> name[self]._roots] begin[:] return[None] call[name[self]._push_all_models_freeze, parameter[]] <ast.Try object at 0x7da204564ac0> call[name[self]._trigger_on_change, parameter[call[name[RootRemovedEvent], parameter[name[self], name[model], name[setter]]]]]
keyword[def] identifier[remove_root] ( identifier[self] , identifier[model] , identifier[setter] = keyword[None] ): literal[string] keyword[if] identifier[model] keyword[not] keyword[in] identifier[self] . identifier[_roots] : keyword[return] identifier[self] . identifier[_push_all_models_freeze] () keyword[try] : identifier[self] . identifier[_roots] . identifier[remove] ( identifier[model] ) keyword[finally] : identifier[self] . identifier[_pop_all_models_freeze] () identifier[self] . identifier[_trigger_on_change] ( identifier[RootRemovedEvent] ( identifier[self] , identifier[model] , identifier[setter] ))
def remove_root(self, model, setter=None): """ Remove a model as root model from this Document. Changes to this model may still trigger ``on_change`` callbacks on this document, if the model is still referred to by other root models. Args: model (Model) : The model to add as a root of this document. setter (ClientSession or ServerSession or None, optional) : This is used to prevent "boomerang" updates to Bokeh apps. (default: None) In the context of a Bokeh server application, incoming updates to properties will be annotated with the session that is doing the updating. This value is propagated through any subsequent change notifications that the update triggers. The session can compare the event setter to itself, and suppress any updates that originate from itself. """ if model not in self._roots: return # TODO (bev) ValueError? # depends on [control=['if'], data=[]] self._push_all_models_freeze() try: self._roots.remove(model) # depends on [control=['try'], data=[]] finally: self._pop_all_models_freeze() self._trigger_on_change(RootRemovedEvent(self, model, setter))
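A hedged usage sketch against Bokeh's public Document API, assuming a standard Bokeh install (`Div` is just a convenient model to use as a root):

from bokeh.document import Document
from bokeh.models import Div

doc = Document()
div = Div(text="hello")
doc.add_root(div)        # counterpart of remove_root
doc.remove_root(div)     # silently a no-op if div were not a root (early return)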
def graph_from_edges(edge_list, node_prefix='', directed=False):
    """Creates a basic graph out of an edge list.

    Each edge is a tuple of the two nodes it connects; the node
    values can be anything: bool, int, float, str.

    If the graph is undirected (the default), only one of the
    symmetric halves of the adjacency matrix needs to be supplied.
    """
    if edge_list is None:
        edge_list = []

    graph_type = "digraph" if directed else "graph"
    with_prefix = functools.partial("{0}{1}".format, node_prefix)

    graph = Dot(graph_type=graph_type)

    for src, dst in edge_list:
        src = with_prefix(src)
        dst = with_prefix(dst)
        graph.add_edge(Edge(src, dst))

    return graph
def function[graph_from_edges, parameter[edge_list, node_prefix, directed]]: constant[Creates a basic graph out of an edge list. The edge list has to be a list of tuples representing the nodes connected by the edge. The values can be anything: bool, int, float, str. If the graph is undirected by default, it is only calculated from one of the symmetric halves of the matrix. ] if compare[name[edge_list] is constant[None]] begin[:] variable[edge_list] assign[=] list[[]] variable[graph_type] assign[=] <ast.IfExp object at 0x7da1b10e7fd0> variable[with_prefix] assign[=] call[name[functools].partial, parameter[constant[{0}{1}].format, name[node_prefix]]] variable[graph] assign[=] call[name[Dot], parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b10e7010>, <ast.Name object at 0x7da1b10e57e0>]]] in starred[name[edge_list]] begin[:] variable[src] assign[=] call[name[with_prefix], parameter[name[src]]] variable[dst] assign[=] call[name[with_prefix], parameter[name[dst]]] call[name[graph].add_edge, parameter[call[name[Edge], parameter[name[src], name[dst]]]]] return[name[graph]]
keyword[def] identifier[graph_from_edges] ( identifier[edge_list] , identifier[node_prefix] = literal[string] , identifier[directed] = keyword[False] ): literal[string] keyword[if] identifier[edge_list] keyword[is] keyword[None] : identifier[edge_list] =[] identifier[graph_type] = literal[string] keyword[if] identifier[directed] keyword[else] literal[string] identifier[with_prefix] = identifier[functools] . identifier[partial] ( literal[string] . identifier[format] , identifier[node_prefix] ) identifier[graph] = identifier[Dot] ( identifier[graph_type] = identifier[graph_type] ) keyword[for] identifier[src] , identifier[dst] keyword[in] identifier[edge_list] : identifier[src] = identifier[with_prefix] ( identifier[src] ) identifier[dst] = identifier[with_prefix] ( identifier[dst] ) identifier[graph] . identifier[add_edge] ( identifier[Edge] ( identifier[src] , identifier[dst] )) keyword[return] identifier[graph]
def graph_from_edges(edge_list, node_prefix='', directed=False): """Creates a basic graph out of an edge list. The edge list has to be a list of tuples representing the nodes connected by the edge. The values can be anything: bool, int, float, str. If the graph is undirected by default, it is only calculated from one of the symmetric halves of the matrix. """ if edge_list is None: edge_list = [] # depends on [control=['if'], data=['edge_list']] graph_type = 'digraph' if directed else 'graph' with_prefix = functools.partial('{0}{1}'.format, node_prefix) graph = Dot(graph_type=graph_type) for (src, dst) in edge_list: src = with_prefix(src) dst = with_prefix(dst) graph.add_edge(Edge(src, dst)) # depends on [control=['for'], data=[]] return graph
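Assuming `Dot` and `Edge` come from pydot, as in this module, a quick usage check:

edges = [(1, 2), (2, 3), (1, 3)]
g = graph_from_edges(edges, node_prefix="n", directed=True)
print(g.to_string())   # roughly: digraph G { n1 -> n2; n2 -> n3; n1 -> n3; }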
def time_remaining_est(self, completed_iter):
    """Estimate the time remaining.

    Parameters:
        completed_iter (int): Number of iterations completed.

    Returns:
        time_string (str): Estimated time remaining, formatted
            as DD:HH:MM:SS.
    """
    if completed_iter:
        t_r_est = ((time.time() - self.t_start) / completed_iter
                   * (self.iter - completed_iter))
    else:
        t_r_est = 0
    date_time = datetime.datetime(1, 1, 1) + datetime.timedelta(seconds=t_r_est)
    time_string = "%02d:%02d:%02d:%02d" % (
        date_time.day - 1, date_time.hour, date_time.minute, date_time.second)
    return time_string
def function[time_remaining_est, parameter[self, completed_iter]]: constant[Estimate the remaining time left. Parameters: completed_iter (int): Number of iterations completed. Returns: est_time: Estimated time remaining. ] if name[completed_iter] begin[:] variable[t_r_est] assign[=] binary_operation[binary_operation[binary_operation[call[name[time].time, parameter[]] - name[self].t_start] / name[completed_iter]] * binary_operation[name[self].iter - name[completed_iter]]] variable[date_time] assign[=] binary_operation[call[name[datetime].datetime, parameter[constant[1], constant[1], constant[1]]] + call[name[datetime].timedelta, parameter[]]] variable[time_string] assign[=] binary_operation[constant[%02d:%02d:%02d:%02d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.BinOp object at 0x7da1b05f8220>, <ast.Attribute object at 0x7da1b05f9120>, <ast.Attribute object at 0x7da1b05f9f30>, <ast.Attribute object at 0x7da1b05fa5c0>]]] return[name[time_string]]
keyword[def] identifier[time_remaining_est] ( identifier[self] , identifier[completed_iter] ): literal[string] keyword[if] identifier[completed_iter] : identifier[t_r_est] =( identifier[time] . identifier[time] ()- identifier[self] . identifier[t_start] )/ identifier[completed_iter] *( identifier[self] . identifier[iter] - identifier[completed_iter] ) keyword[else] : identifier[t_r_est] = literal[int] identifier[date_time] = identifier[datetime] . identifier[datetime] ( literal[int] , literal[int] , literal[int] )+ identifier[datetime] . identifier[timedelta] ( identifier[seconds] = identifier[t_r_est] ) identifier[time_string] = literal[string] %( identifier[date_time] . identifier[day] - literal[int] , identifier[date_time] . identifier[hour] , identifier[date_time] . identifier[minute] , identifier[date_time] . identifier[second] ) keyword[return] identifier[time_string]
def time_remaining_est(self, completed_iter): """Estimate the remaining time left. Parameters: completed_iter (int): Number of iterations completed. Returns: est_time: Estimated time remaining. """ if completed_iter: t_r_est = (time.time() - self.t_start) / completed_iter * (self.iter - completed_iter) # depends on [control=['if'], data=[]] else: t_r_est = 0 date_time = datetime.datetime(1, 1, 1) + datetime.timedelta(seconds=t_r_est) time_string = '%02d:%02d:%02d:%02d' % (date_time.day - 1, date_time.hour, date_time.minute, date_time.second) return time_string
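The ETA arithmetic as a standalone function — elapsed time per completed iteration, scaled by the iterations left (the names here are illustrative, not from the source class):

import datetime
import time

def eta(t_start, completed, total):
    # elapsed / completed gives seconds per iteration; multiply by what remains
    seconds = (time.time() - t_start) / completed * (total - completed) if completed else 0
    d = datetime.datetime(1, 1, 1) + datetime.timedelta(seconds=seconds)
    return "%02d:%02d:%02d:%02d" % (d.day - 1, d.hour, d.minute, d.second)

print(eta(time.time() - 30, 25, 100))  # 30s elapsed, 75 iters left -> ~ 00:00:01:30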
def _maybe_handle(self, prefix, handler, path, params, data=None): """Apply the handler if the prefix matches.""" if path.startswith(prefix): relpath = path[len(prefix):] if data: handler(relpath, params, data) else: handler(relpath, params) return True else: return False
def function[_maybe_handle, parameter[self, prefix, handler, path, params, data]]: constant[Apply the handler if the prefix matches.] if call[name[path].startswith, parameter[name[prefix]]] begin[:] variable[relpath] assign[=] call[name[path]][<ast.Slice object at 0x7da1b1eee980>] if name[data] begin[:] call[name[handler], parameter[name[relpath], name[params], name[data]]] return[constant[True]]
keyword[def] identifier[_maybe_handle] ( identifier[self] , identifier[prefix] , identifier[handler] , identifier[path] , identifier[params] , identifier[data] = keyword[None] ): literal[string] keyword[if] identifier[path] . identifier[startswith] ( identifier[prefix] ): identifier[relpath] = identifier[path] [ identifier[len] ( identifier[prefix] ):] keyword[if] identifier[data] : identifier[handler] ( identifier[relpath] , identifier[params] , identifier[data] ) keyword[else] : identifier[handler] ( identifier[relpath] , identifier[params] ) keyword[return] keyword[True] keyword[else] : keyword[return] keyword[False]
def _maybe_handle(self, prefix, handler, path, params, data=None): """Apply the handler if the prefix matches.""" if path.startswith(prefix): relpath = path[len(prefix):] if data: handler(relpath, params, data) # depends on [control=['if'], data=[]] else: handler(relpath, params) return True # depends on [control=['if'], data=[]] else: return False
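A free-function sketch of the same prefix-dispatch idea, with a hypothetical handler:

def dispatch(prefix, handler, path, params, data=None):
    # standalone version of the method above
    if not path.startswith(prefix):
        return False
    relpath = path[len(prefix):]
    if data:
        handler(relpath, params, data)
    else:
        handler(relpath, params)
    return True

def list_users(relpath, params):
    print("users:", relpath, params)

dispatch("/users/", list_users, "/users/42", {"full": "1"})  # True, handler runs
dispatch("/repos/", list_users, "/users/42", {})             # False, no match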
def export_identities(self, outfile, source=None): """Export identities information to a file. The method exports information related to unique identities, to the given 'outfile' output file. When 'source' parameter is given, only those unique identities which have one or more identities from the given source will be exported. :param outfile: destination file object :param source: source of the identities to export """ exporter = SortingHatIdentitiesExporter(self.db) dump = exporter.export(source) try: outfile.write(dump) outfile.write('\n') except IOError as e: raise RuntimeError(str(e)) return CMD_SUCCESS
def function[export_identities, parameter[self, outfile, source]]: constant[Export identities information to a file. The method exports information related to unique identities, to the given 'outfile' output file. When 'source' parameter is given, only those unique identities which have one or more identities from the given source will be exported. :param outfile: destination file object :param source: source of the identities to export ] variable[exporter] assign[=] call[name[SortingHatIdentitiesExporter], parameter[name[self].db]] variable[dump] assign[=] call[name[exporter].export, parameter[name[source]]] <ast.Try object at 0x7da1b0e26b60> return[name[CMD_SUCCESS]]
keyword[def] identifier[export_identities] ( identifier[self] , identifier[outfile] , identifier[source] = keyword[None] ): literal[string] identifier[exporter] = identifier[SortingHatIdentitiesExporter] ( identifier[self] . identifier[db] ) identifier[dump] = identifier[exporter] . identifier[export] ( identifier[source] ) keyword[try] : identifier[outfile] . identifier[write] ( identifier[dump] ) identifier[outfile] . identifier[write] ( literal[string] ) keyword[except] identifier[IOError] keyword[as] identifier[e] : keyword[raise] identifier[RuntimeError] ( identifier[str] ( identifier[e] )) keyword[return] identifier[CMD_SUCCESS]
def export_identities(self, outfile, source=None): """Export identities information to a file. The method exports information related to unique identities, to the given 'outfile' output file. When 'source' parameter is given, only those unique identities which have one or more identities from the given source will be exported. :param outfile: destination file object :param source: source of the identities to export """ exporter = SortingHatIdentitiesExporter(self.db) dump = exporter.export(source) try: outfile.write(dump) outfile.write('\n') # depends on [control=['try'], data=[]] except IOError as e: raise RuntimeError(str(e)) # depends on [control=['except'], data=['e']] return CMD_SUCCESS
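A hedged usage sketch — `cmd` stands for a hypothetical instance of the command class, and any writable text file object works as `outfile`:

import io

buf = io.StringIO()
code = cmd.export_identities(buf, source="git")   # returns CMD_SUCCESS on success
print(buf.getvalue())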
def is_creation_model(instance, attribute, value): """Must include at least a ``name`` key.""" creation_name = value.get('name') if not isinstance(creation_name, str): instance_name = instance.__class__.__name__ err_str = ("'name' must be given as a string in the '{attr}' " "parameter of a '{cls}'. Given " "'{value}'").format(attr=attribute.name, cls=instance_name, value=creation_name) raise ModelDataError(err_str)
def function[is_creation_model, parameter[instance, attribute, value]]: constant[Must include at least a ``name`` key.] variable[creation_name] assign[=] call[name[value].get, parameter[constant[name]]] if <ast.UnaryOp object at 0x7da20c7cbeb0> begin[:] variable[instance_name] assign[=] name[instance].__class__.__name__ variable[err_str] assign[=] call[constant['name' must be given as a string in the '{attr}' parameter of a '{cls}'. Given '{value}'].format, parameter[]] <ast.Raise object at 0x7da20c7cb8b0>
keyword[def] identifier[is_creation_model] ( identifier[instance] , identifier[attribute] , identifier[value] ): literal[string] identifier[creation_name] = identifier[value] . identifier[get] ( literal[string] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[creation_name] , identifier[str] ): identifier[instance_name] = identifier[instance] . identifier[__class__] . identifier[__name__] identifier[err_str] =( literal[string] literal[string] literal[string] ). identifier[format] ( identifier[attr] = identifier[attribute] . identifier[name] , identifier[cls] = identifier[instance_name] , identifier[value] = identifier[creation_name] ) keyword[raise] identifier[ModelDataError] ( identifier[err_str] )
def is_creation_model(instance, attribute, value): """Must include at least a ``name`` key.""" creation_name = value.get('name') if not isinstance(creation_name, str): instance_name = instance.__class__.__name__ err_str = "'name' must be given as a string in the '{attr}' parameter of a '{cls}'. Given '{value}'".format(attr=attribute.name, cls=instance_name, value=creation_name) raise ModelDataError(err_str) # depends on [control=['if'], data=[]]
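The `(instance, attribute, value)` signature matches the attrs validator protocol, so the function wires up directly; a sketch assuming `is_creation_model` and the module's `ModelDataError` are in scope:

import attr

@attr.s
class CreationSpec:
    # attrs calls the validator with (instance, attribute, value)
    creation = attr.ib(validator=is_creation_model)

CreationSpec(creation={"name": "sensor-1"})   # passes
CreationSpec(creation={"name": 42})           # raises ModelDataError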
def fit_upper_harmonic(phi, intensities, order): """ Fit upper harmonic function to a set of (angle, intensity) pairs. With ``order`` set to 3 or 4, the resulting amplitudes, divided by the semimajor axis length and local gradient, measure the deviations from perfect ellipticity. The harmonic function that is fit is: .. math:: y(phi, order) = y0 + An*\\sin(order*phi) + Bn*\\cos(order*phi) Parameters ---------- phi : float or `~numpy.ndarray` The angle(s) along the elliptical path, going towards the positive y axis, starting coincident with the position angle. That is, the angles are defined from the semimajor axis that lies in the positive x quadrant. intensities : `~numpy.ndarray` The intensities measured along the elliptical path, at the angles defined by the ``phi`` parameter. order : int The order of the harmonic to be fitted. Returns ------- y0, An, Bn : float The fitted harmonic values. """ an = bn = 1. def optimize_func(x): return (x[0] + x[1]*np.sin(order*phi) + x[2]*np.cos(order*phi) - intensities) return _least_squares_fit(optimize_func, [np.mean(intensities), an, bn])
def function[fit_upper_harmonic, parameter[phi, intensities, order]]: constant[ Fit upper harmonic function to a set of (angle, intensity) pairs. With ``order`` set to 3 or 4, the resulting amplitudes, divided by the semimajor axis length and local gradient, measure the deviations from perfect ellipticity. The harmonic function that is fit is: .. math:: y(phi, order) = y0 + An*\sin(order*phi) + Bn*\cos(order*phi) Parameters ---------- phi : float or `~numpy.ndarray` The angle(s) along the elliptical path, going towards the positive y axis, starting coincident with the position angle. That is, the angles are defined from the semimajor axis that lies in the positive x quadrant. intensities : `~numpy.ndarray` The intensities measured along the elliptical path, at the angles defined by the ``phi`` parameter. order : int The order of the harmonic to be fitted. Returns ------- y0, An, Bn : float The fitted harmonic values. ] variable[an] assign[=] constant[1.0] def function[optimize_func, parameter[x]]: return[binary_operation[binary_operation[binary_operation[call[name[x]][constant[0]] + binary_operation[call[name[x]][constant[1]] * call[name[np].sin, parameter[binary_operation[name[order] * name[phi]]]]]] + binary_operation[call[name[x]][constant[2]] * call[name[np].cos, parameter[binary_operation[name[order] * name[phi]]]]]] - name[intensities]]] return[call[name[_least_squares_fit], parameter[name[optimize_func], list[[<ast.Call object at 0x7da18f58d600>, <ast.Name object at 0x7da18f58d4b0>, <ast.Name object at 0x7da18f58db70>]]]]]
keyword[def] identifier[fit_upper_harmonic] ( identifier[phi] , identifier[intensities] , identifier[order] ): literal[string] identifier[an] = identifier[bn] = literal[int] keyword[def] identifier[optimize_func] ( identifier[x] ): keyword[return] ( identifier[x] [ literal[int] ]+ identifier[x] [ literal[int] ]* identifier[np] . identifier[sin] ( identifier[order] * identifier[phi] )+ identifier[x] [ literal[int] ]* identifier[np] . identifier[cos] ( identifier[order] * identifier[phi] )- identifier[intensities] ) keyword[return] identifier[_least_squares_fit] ( identifier[optimize_func] ,[ identifier[np] . identifier[mean] ( identifier[intensities] ), identifier[an] , identifier[bn] ])
def fit_upper_harmonic(phi, intensities, order): """ Fit upper harmonic function to a set of (angle, intensity) pairs. With ``order`` set to 3 or 4, the resulting amplitudes, divided by the semimajor axis length and local gradient, measure the deviations from perfect ellipticity. The harmonic function that is fit is: .. math:: y(phi, order) = y0 + An*\\sin(order*phi) + Bn*\\cos(order*phi) Parameters ---------- phi : float or `~numpy.ndarray` The angle(s) along the elliptical path, going towards the positive y axis, starting coincident with the position angle. That is, the angles are defined from the semimajor axis that lies in the positive x quadrant. intensities : `~numpy.ndarray` The intensities measured along the elliptical path, at the angles defined by the ``phi`` parameter. order : int The order of the harmonic to be fitted. Returns ------- y0, An, Bn : float The fitted harmonic values. """ an = bn = 1.0 def optimize_func(x): return x[0] + x[1] * np.sin(order * phi) + x[2] * np.cos(order * phi) - intensities return _least_squares_fit(optimize_func, [np.mean(intensities), an, bn])
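A quick round-trip check, assuming `fit_upper_harmonic` is in scope and supplying a SciPy stand-in for the module's `_least_squares_fit` helper (its assumed behaviour: return the optimised parameter vector):

import numpy as np
from scipy import optimize

def _least_squares_fit(func, x0):
    # stand-in for the module helper; leastsq returns (solution, ier)
    return optimize.leastsq(func, x0)[0]

phi = np.linspace(0.0, 2.0 * np.pi, 200)
order = 3
y = 10.0 + 0.5 * np.sin(order * phi) - 0.2 * np.cos(order * phi)
y0, a3, b3 = fit_upper_harmonic(phi, y, order)
print(round(y0, 3), round(a3, 3), round(b3, 3))   # ~ 10.0 0.5 -0.2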
def index(self, index, doc_type, body, id=None, params=None): """ Adds or updates a typed JSON document in a specific index, making it searchable. `<http://elasticsearch.org/guide/reference/api/index_/>`_ :arg index: The name of the index :arg doc_type: The type of the document :arg body: The document :arg id: Document ID :arg consistency: Explicit write consistency setting for the operation :arg op_type: Explicit operation type (default: index) :arg parent: ID of the parent document :arg percolate: Percolator queries to execute while indexing the doc :arg refresh: Refresh the index after performing the operation :arg replication: Specific replication type (default: sync) :arg routing: Specific routing value :arg timeout: Explicit operation timeout :arg timestamp: Explicit timestamp for the document :arg ttl: Expiration time for the document :arg version: Explicit version number for concurrency control :arg version_type: Specific version type """ _, data = yield self.transport.perform_request( 'PUT' if id else 'POST', _make_path(index, doc_type, id), params=params, body=body) raise gen.Return(data)
def function[index, parameter[self, index, doc_type, body, id, params]]: constant[ Adds or updates a typed JSON document in a specific index, making it searchable. `<http://elasticsearch.org/guide/reference/api/index_/>`_ :arg index: The name of the index :arg doc_type: The type of the document :arg body: The document :arg id: Document ID :arg consistency: Explicit write consistency setting for the operation :arg op_type: Explicit operation type (default: index) :arg parent: ID of the parent document :arg percolate: Percolator queries to execute while indexing the doc :arg refresh: Refresh the index after performing the operation :arg replication: Specific replication type (default: sync) :arg routing: Specific routing value :arg timeout: Explicit operation timeout :arg timestamp: Explicit timestamp for the document :arg ttl: Expiration time for the document :arg version: Explicit version number for concurrency control :arg version_type: Specific version type ] <ast.Tuple object at 0x7da1b033e770> assign[=] <ast.Yield object at 0x7da1b033d5d0> <ast.Raise object at 0x7da1b033cf10>
keyword[def] identifier[index] ( identifier[self] , identifier[index] , identifier[doc_type] , identifier[body] , identifier[id] = keyword[None] , identifier[params] = keyword[None] ): literal[string] identifier[_] , identifier[data] = keyword[yield] identifier[self] . identifier[transport] . identifier[perform_request] ( literal[string] keyword[if] identifier[id] keyword[else] literal[string] , identifier[_make_path] ( identifier[index] , identifier[doc_type] , identifier[id] ), identifier[params] = identifier[params] , identifier[body] = identifier[body] ) keyword[raise] identifier[gen] . identifier[Return] ( identifier[data] )
def index(self, index, doc_type, body, id=None, params=None): """ Adds or updates a typed JSON document in a specific index, making it searchable. `<http://elasticsearch.org/guide/reference/api/index_/>`_ :arg index: The name of the index :arg doc_type: The type of the document :arg body: The document :arg id: Document ID :arg consistency: Explicit write consistency setting for the operation :arg op_type: Explicit operation type (default: index) :arg parent: ID of the parent document :arg percolate: Percolator queries to execute while indexing the doc :arg refresh: Refresh the index after performing the operation :arg replication: Specific replication type (default: sync) :arg routing: Specific routing value :arg timeout: Explicit operation timeout :arg timestamp: Explicit timestamp for the document :arg ttl: Expiration time for the document :arg version: Explicit version number for concurrency control :arg version_type: Specific version type """ (_, data) = (yield self.transport.perform_request('PUT' if id else 'POST', _make_path(index, doc_type, id), params=params, body=body)) raise gen.Return(data)
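Hedged usage inside a Tornado coroutine — `es` stands for a hypothetical client instance exposing the method above:

from tornado import gen

@gen.coroutine
def store_post(es):
    doc = {"title": "hello", "body": "world"}
    result = yield es.index("blog", "post", doc, id="1")  # PUT because id is given
    raise gen.Return(result)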
def get_comment_section(self, force_reload=False, reverse=False):
    """Get CommentSection instance representing all comments for thread.

    :arg force_reload=False:  Whether to force reloading comments
                              directly or allow using what is
                              cached in self.content if possible.

    :arg reverse=False:       Whether to return the comments in
                              reverse order (passed through to
                              lookup_comments).

    ~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-

    :returns:  CommentSection representing all comments for thread.

    ~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-

    PURPOSE:  High-level function called by user to get comments.

    """
    if self.content is not None and not force_reload:
        return self.content
    if self.thread_id is None:
        self.thread_id = self.lookup_thread_id()
    self.content = self.lookup_comments(reverse=reverse)
    return self.content
def function[get_comment_section, parameter[self, force_reload, reverse]]: constant[Get CommentSection instance representing all comments for thread. :arg force_reload=False: Whether to force reloading comments directly or allow using what is cached in self.content if possible. ~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~- :returns: CommentSection representing all comments for thread. ~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~- PURPOSE: High-level function called by user to get comments. ] if <ast.BoolOp object at 0x7da207f012a0> begin[:] return[name[self].content] if compare[name[self].thread_id is constant[None]] begin[:] name[self].thread_id assign[=] call[name[self].lookup_thread_id, parameter[]] name[self].content assign[=] call[name[self].lookup_comments, parameter[]] return[name[self].content]
keyword[def] identifier[get_comment_section] ( identifier[self] , identifier[force_reload] = keyword[False] , identifier[reverse] = keyword[False] ): literal[string] keyword[if] identifier[self] . identifier[content] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[force_reload] : keyword[return] identifier[self] . identifier[content] keyword[if] identifier[self] . identifier[thread_id] keyword[is] keyword[None] : identifier[self] . identifier[thread_id] = identifier[self] . identifier[lookup_thread_id] () identifier[self] . identifier[content] = identifier[self] . identifier[lookup_comments] ( identifier[reverse] = identifier[reverse] ) keyword[return] identifier[self] . identifier[content]
def get_comment_section(self, force_reload=False, reverse=False): """Get CommentSection instance representing all comments for thread. :arg force_reload=False: Whether to force reloading comments directly or allow using what is cached in self.content if possible. ~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~- :returns: CommentSection representing all comments for thread. ~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~- PURPOSE: High-level function called by user to get comments. """ if self.content is not None and (not force_reload): return self.content # depends on [control=['if'], data=[]] if self.thread_id is None: self.thread_id = self.lookup_thread_id() # depends on [control=['if'], data=[]] self.content = self.lookup_comments(reverse=reverse) return self.content
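The cache behaviour in use, shown with a hypothetical `thread` instance of this class:

section = thread.get_comment_section()                   # fetches and caches
section_again = thread.get_comment_section()             # served from self.content
fresh = thread.get_comment_section(force_reload=True)    # bypasses the cache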
def _get_avro_type(val):
    """Infer avro type for the current input.
    """
    if isinstance(val, dict):
        assert val.get("class") == "File" or "File" in val.get("class")
        return "File"
    elif isinstance(val, (tuple, list)):
        types = []
        for ctype in [_get_avro_type(v) for v in val]:
            if isinstance(ctype, dict):
                nested_types = [x["items"] for x in types if isinstance(x, dict)]
                if ctype["items"] not in nested_types:
                    if isinstance(ctype["items"], (list, tuple)):
                        for t in ctype["items"]:
                            if t not in types:
                                types.append(t)
                    else:
                        if ctype not in types:
                            types.append(ctype)
            elif isinstance(ctype, (list, tuple)):
                for x in ctype:
                    if x not in types:
                        types.append(x)
            elif ctype not in types:
                types.append(ctype)
        # handle empty types, allow null
        if len(types) == 0:
            types = ["null"]
            # empty lists
            if isinstance(val, (list, tuple)) and len(val) == 0:
                types.append({"type": "array", "items": ["null"]})
        types = _avoid_duplicate_arrays(types)
        # Avoid null-only arrays, which confuse some runners
        if len(types) == 1 and types[0] == "null":
            types.append("string")
        return {"type": "array", "items": (types[0] if len(types) == 1 else types)}
    elif val is None:
        return ["null"]
    # encode booleans as string True/False and decode on the other side
    elif isinstance(val, bool) or (isinstance(val, six.string_types)
                                   and val.lower() in ["true", "false", "none"]):
        return ["string", "null", "boolean"]
    elif isinstance(val, int):
        return "long"
    elif isinstance(val, float):
        return "double"
    else:
        return "string"
def function[_get_avro_type, parameter[val]]: constant[Infer avro type for the current input. ] if call[name[isinstance], parameter[name[val], name[dict]]] begin[:] assert[<ast.BoolOp object at 0x7da18f09fc10>] return[constant[File]]
keyword[def] identifier[_get_avro_type] ( identifier[val] ): literal[string] keyword[if] identifier[isinstance] ( identifier[val] , identifier[dict] ): keyword[assert] identifier[val] . identifier[get] ( literal[string] )== literal[string] keyword[or] literal[string] keyword[in] identifier[val] . identifier[get] ( literal[string] ) keyword[return] literal[string] keyword[elif] identifier[isinstance] ( identifier[val] ,( identifier[tuple] , identifier[list] )): identifier[types] =[] keyword[for] identifier[ctype] keyword[in] [ identifier[_get_avro_type] ( identifier[v] ) keyword[for] identifier[v] keyword[in] identifier[val] ]: keyword[if] identifier[isinstance] ( identifier[ctype] , identifier[dict] ): identifier[nested_types] =[ identifier[x] [ literal[string] ] keyword[for] identifier[x] keyword[in] identifier[types] keyword[if] identifier[isinstance] ( identifier[x] , identifier[dict] )] keyword[if] identifier[ctype] [ literal[string] ] keyword[not] keyword[in] identifier[nested_types] : keyword[if] identifier[isinstance] ( identifier[ctype] [ literal[string] ],( identifier[list] , identifier[tuple] )): keyword[for] identifier[t] keyword[in] identifier[ctype] [ literal[string] ]: keyword[if] identifier[t] keyword[not] keyword[in] identifier[types] : identifier[types] . identifier[append] ( identifier[t] ) keyword[else] : keyword[if] identifier[ctype] keyword[not] keyword[in] identifier[types] : identifier[types] . identifier[append] ( identifier[ctype] ) keyword[elif] identifier[isinstance] ( identifier[ctype] ,( identifier[list] , identifier[tuple] )): keyword[for] identifier[x] keyword[in] identifier[ctype] : keyword[if] identifier[x] keyword[not] keyword[in] identifier[types] : identifier[types] . identifier[append] ( identifier[x] ) keyword[elif] identifier[ctype] keyword[not] keyword[in] identifier[types] : identifier[types] . identifier[append] ( identifier[ctype] ) keyword[if] identifier[len] ( identifier[types] )== literal[int] : identifier[types] =[ literal[string] ] keyword[if] identifier[isinstance] ( identifier[val] ,( identifier[list] , identifier[tuple] )) keyword[and] identifier[len] ( identifier[val] )== literal[int] : identifier[types] . identifier[append] ({ literal[string] : literal[string] , literal[string] :[ literal[string] ]}) identifier[types] = identifier[_avoid_duplicate_arrays] ( identifier[types] ) keyword[if] identifier[len] ( identifier[types] )== literal[int] keyword[and] identifier[types] [ literal[int] ]== literal[string] : identifier[types] . identifier[append] ( literal[string] ) keyword[return] { literal[string] : literal[string] , literal[string] :( identifier[types] [ literal[int] ] keyword[if] identifier[len] ( identifier[types] )== literal[int] keyword[else] identifier[types] )} keyword[elif] identifier[val] keyword[is] keyword[None] : keyword[return] [ literal[string] ] keyword[elif] identifier[isinstance] ( identifier[val] , identifier[bool] ) keyword[or] identifier[isinstance] ( identifier[val] , identifier[six] . identifier[string_types] ) keyword[and] identifier[val] . identifier[lower] () keyword[in] [ literal[string] , literal[string] , literal[string] ]: keyword[return] [ literal[string] , literal[string] , literal[string] ] keyword[elif] identifier[isinstance] ( identifier[val] , identifier[int] ): keyword[return] literal[string] keyword[elif] identifier[isinstance] ( identifier[val] , identifier[float] ): keyword[return] literal[string] keyword[else] : keyword[return] literal[string]
def _get_avro_type(val): """Infer avro type for the current input. """ if isinstance(val, dict): assert val.get('class') == 'File' or 'File' in val.get('class') return 'File' # depends on [control=['if'], data=[]] elif isinstance(val, (tuple, list)): types = [] for ctype in [_get_avro_type(v) for v in val]: if isinstance(ctype, dict): nested_types = [x['items'] for x in types if isinstance(x, dict)] if ctype['items'] not in nested_types: if isinstance(ctype['items'], (list, tuple)): for t in ctype['items']: if t not in types: types.append(t) # depends on [control=['if'], data=['t', 'types']] # depends on [control=['for'], data=['t']] # depends on [control=['if'], data=[]] elif ctype not in types: types.append(ctype) # depends on [control=['if'], data=['ctype', 'types']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif isinstance(ctype, (list, tuple)): for x in ctype: if x not in types: types.append(x) # depends on [control=['if'], data=['x', 'types']] # depends on [control=['for'], data=['x']] # depends on [control=['if'], data=[]] elif ctype not in types: types.append(ctype) # depends on [control=['if'], data=['ctype', 'types']] # depends on [control=['for'], data=['ctype']] # handle empty types, allow null if len(types) == 0: types = ['null'] # empty lists if isinstance(val, (list, tuple)) and len(val) == 0: types.append({'type': 'array', 'items': ['null']}) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] types = _avoid_duplicate_arrays(types) # Avoid empty null only arrays which confuse some runners if len(types) == 1 and types[0] == 'null': types.append('string') # depends on [control=['if'], data=[]] return {'type': 'array', 'items': types[0] if len(types) == 1 else types} # depends on [control=['if'], data=[]] elif val is None: return ['null'] # depends on [control=['if'], data=[]] # encode booleans as string True/False and unencode on other side elif isinstance(val, bool) or (isinstance(val, six.string_types) and val.lower() in ['true', 'false', 'none']): return ['string', 'null', 'boolean'] # depends on [control=['if'], data=[]] elif isinstance(val, int): return 'long' # depends on [control=['if'], data=[]] elif isinstance(val, float): return 'double' # depends on [control=['if'], data=[]] else: return 'string'
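Expected inferences for scalar inputs (list inputs additionally go through the module's `_avoid_duplicate_arrays` helper, so they need the full module in scope):

_get_avro_type(5)        # 'long'
_get_avro_type(2.5)      # 'double'
_get_avro_type(True)     # ['string', 'null', 'boolean']  (bool is checked before int)
_get_avro_type(None)     # ['null']
_get_avro_type("name")   # 'string'
_get_avro_type({"class": "File", "path": "/tmp/x"})  # 'File'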
def get_font(self, bold, oblique): """ Get the font based on bold and italic flags. """ if bold and oblique: return self.fonts['BOLDITALIC'] elif bold: return self.fonts['BOLD'] elif oblique: return self.fonts['ITALIC'] else: return self.fonts['NORMAL']
def function[get_font, parameter[self, bold, oblique]]: constant[ Get the font based on bold and italic flags. ] if <ast.BoolOp object at 0x7da18bc73e80> begin[:] return[call[name[self].fonts][constant[BOLDITALIC]]]
keyword[def] identifier[get_font] ( identifier[self] , identifier[bold] , identifier[oblique] ): literal[string] keyword[if] identifier[bold] keyword[and] identifier[oblique] : keyword[return] identifier[self] . identifier[fonts] [ literal[string] ] keyword[elif] identifier[bold] : keyword[return] identifier[self] . identifier[fonts] [ literal[string] ] keyword[elif] identifier[oblique] : keyword[return] identifier[self] . identifier[fonts] [ literal[string] ] keyword[else] : keyword[return] identifier[self] . identifier[fonts] [ literal[string] ]
def get_font(self, bold, oblique): """ Get the font based on bold and italic flags. """ if bold and oblique: return self.fonts['BOLDITALIC'] # depends on [control=['if'], data=[]] elif bold: return self.fonts['BOLD'] # depends on [control=['if'], data=[]] elif oblique: return self.fonts['ITALIC'] # depends on [control=['if'], data=[]] else: return self.fonts['NORMAL']
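An equivalent table-driven lookup, assuming the same `self.fonts` mapping — a matter of taste, but it replaces the branch chain with a single dict access:

_FONT_KEY = {
    (True, True): 'BOLDITALIC',
    (True, False): 'BOLD',
    (False, True): 'ITALIC',
    (False, False): 'NORMAL',
}

def get_font(self, bold, oblique):
    # normalise to bool so truthy ints behave like flags
    return self.fonts[_FONT_KEY[(bool(bold), bool(oblique))]]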
def get_state(self, caller): """ Get per-program state. """ if caller in self.state: return self.state[caller] else: rv = self.state[caller] = DictObject() return rv
def function[get_state, parameter[self, caller]]: constant[ Get per-program state. ] if compare[name[caller] in name[self].state] begin[:] return[call[name[self].state][name[caller]]]
keyword[def] identifier[get_state] ( identifier[self] , identifier[caller] ): literal[string] keyword[if] identifier[caller] keyword[in] identifier[self] . identifier[state] : keyword[return] identifier[self] . identifier[state] [ identifier[caller] ] keyword[else] : identifier[rv] = identifier[self] . identifier[state] [ identifier[caller] ]= identifier[DictObject] () keyword[return] identifier[rv]
def get_state(self, caller): """ Get per-program state. """ if caller in self.state: return self.state[caller] # depends on [control=['if'], data=['caller']] else: rv = self.state[caller] = DictObject() return rv
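`dict.setdefault` collapses the branch, at the cost of constructing a `DictObject` even on cache hits — which may be why the explicit check above was kept:

def get_state(self, caller):
    """Get per-program state."""
    return self.state.setdefault(caller, DictObject())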
def dialect_class(self, adapter):
    """Get the dialect SQL class for the given adapter."""
    if self.dialects.get(adapter):
        return self.dialects[adapter]
    try:
        class_prefix = getattr(
            __import__('db.' + adapter, globals(), locals(), ['__class_prefix__']),
            '__class_prefix__')
        driver = self._import_class('db.' + adapter + '.dialect.' +
                                    class_prefix + 'Dialect')
    except ImportError:
        raise DBError("Adapter `%s` is not installed or is not supported"
                      % adapter)
    self.dialects[adapter] = driver
    return driver
def function[dialect_class, parameter[self, adapter]]: constant[Get dialect sql class by adapter] if call[name[self].dialects.get, parameter[name[adapter]]] begin[:] return[call[name[self].dialects][name[adapter]]] <ast.Try object at 0x7da2054a7d30> call[name[self].dialects][name[adapter]] assign[=] name[driver] return[name[driver]]
keyword[def] identifier[dialect_class] ( identifier[self] , identifier[adapter] ): literal[string] keyword[if] identifier[self] . identifier[dialects] . identifier[get] ( identifier[adapter] ): keyword[return] identifier[self] . identifier[dialects] [ identifier[adapter] ] keyword[try] : identifier[class_prefix] = identifier[getattr] ( identifier[__import__] ( literal[string] + identifier[adapter] , identifier[globals] (), identifier[locals] (), [ literal[string] ]), literal[string] ) identifier[driver] = identifier[self] . identifier[_import_class] ( literal[string] + identifier[adapter] + literal[string] + identifier[class_prefix] + literal[string] ) keyword[except] identifier[ImportError] : keyword[raise] identifier[DBError] ( literal[string] % ( identifier[adapter] )) identifier[self] . identifier[dialects] [ identifier[adapter] ]= identifier[driver] keyword[return] identifier[driver]
def dialect_class(self, adapter): """Get dialect sql class by adapter""" if self.dialects.get(adapter): return self.dialects[adapter] # depends on [control=['if'], data=[]] try: class_prefix = getattr(__import__('db.' + adapter, globals(), locals(), ['__class_prefix__']), '__class_prefix__') driver = self._import_class('db.' + adapter + '.dialect.' + class_prefix + 'Dialect') # depends on [control=['try'], data=[]] except ImportError: raise DBError("Must install adapter `%s` or doesn't support" % adapter) # depends on [control=['except'], data=[]] self.dialects[adapter] = driver return driver
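The same dynamic import expressed with `importlib`, which avoids the `__import__` fromlist dance; a sketch, with the module layout taken from the code above:

import importlib

def load_dialect(adapter):
    adapter_mod = importlib.import_module('db.' + adapter)
    prefix = adapter_mod.__class_prefix__
    dialect_mod = importlib.import_module('db.%s.dialect' % adapter)
    return getattr(dialect_mod, prefix + 'Dialect')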
def get_memory_region(x, query_block_shape, memory_flange, q_indices):
  """Get the memory regions that surround a 2d query.

    The memory regions will be the left and top right.

  Args:
    x: A tensor with shape [batch, heads, height, width, depth]
    query_block_shape: a 2-d tuple of integers
    memory_flange: a 2-d tuple of integers
    q_indices: a tensor of indices for each of the center blocks.
      [num_blocks, block_length]

  Returns:
    x_flange: A tensor of shape [batch, heads, #blocks, block_length, depth]
      holding the left/top memory region for each query block.
    x_center: A tensor of the same shape holding the center (query) blocks
      themselves.
  """
  # Padding x to be multiple of query_shape and then
  # extracting the memory blocks from the same regions as the query blocks
  x_query_padded = pad_to_multiple_2d(x, query_block_shape)
  x_center = gather_blocks_2d(x_query_padded, q_indices)
  # Then padding the flange region
  paddings = [[0, 0], [0, 0], [memory_flange[0], 0],
              [memory_flange[1], memory_flange[1]], [0, 0]]
  x_memory_padded = tf.pad(x_query_padded, paddings)
  left_x = None
  top_x = None
  # Extracting the memory regions around the query block. left_x_region extends
  # to the left and the top_x_region is the combination of top left, top, and
  # top right of the query block.
  # Build the left region only when the flange extends to the left.
  if memory_flange[1] > 0:
    left_x_region = x_memory_padded[:, :, memory_flange[
        0]:, :-(query_block_shape[1] + memory_flange[1]), :]
    left_memory_shape = (query_block_shape[0], memory_flange[1])
    left_indices = gather_indices_2d(left_x_region, left_memory_shape,
                                     query_block_shape)
    left_x = gather_blocks_2d(left_x_region, left_indices)
  # Build the top region only when the flange extends upwards.
  if memory_flange[0] > 0:
    top_x_region = x_memory_padded[:, :, :-query_block_shape[0], :, :]
    top_memory_shape = (memory_flange[0],
                        query_block_shape[1] + 2 * memory_flange[1])
    top_indices = gather_indices_2d(top_x_region, top_memory_shape,
                                    query_block_shape)
    top_x = gather_blocks_2d(top_x_region, top_indices)
  x_flange = None
  if top_x is not None and left_x is not None:
    x_flange = tf.concat([top_x, left_x], axis=3)
  else:
    x_flange = top_x if top_x is not None else left_x
  return x_flange, x_center
def function[get_memory_region, parameter[x, query_block_shape, memory_flange, q_indices]]: constant[Get the memory regions that surround a 2d query. The memory regions will be the left and top right. Args: x: A tensor with shape [batch, heads, height, width, depth] query_block_shape: a 2-d tuple of integers memory_flange: a 2-d tuple of integers q_indices: a tensor of indices for each of the center blocks. [num_blocks, block_length] Returns: x_flange: A tensor of shape [batch, heads, #blocks, block_length, depth] ] variable[x_query_padded] assign[=] call[name[pad_to_multiple_2d], parameter[name[x], name[query_block_shape]]] variable[x_center] assign[=] call[name[gather_blocks_2d], parameter[name[x_query_padded], name[q_indices]]] variable[paddings] assign[=] list[[<ast.List object at 0x7da1b2344fd0>, <ast.List object at 0x7da1b2347b50>, <ast.List object at 0x7da1b2345900>, <ast.List object at 0x7da1b2344940>, <ast.List object at 0x7da1b23462f0>]] variable[x_memory_padded] assign[=] call[name[tf].pad, parameter[name[x_query_padded], name[paddings]]] variable[left_x] assign[=] constant[None] variable[top_x] assign[=] constant[None] if compare[call[name[memory_flange]][constant[1]] greater[>] constant[0]] begin[:] variable[left_x_region] assign[=] call[name[x_memory_padded]][tuple[[<ast.Slice object at 0x7da1b23441c0>, <ast.Slice object at 0x7da1b2345b10>, <ast.Slice object at 0x7da1b2347910>, <ast.Slice object at 0x7da1b2344f10>, <ast.Slice object at 0x7da1b2347010>]]] variable[left_memory_shape] assign[=] tuple[[<ast.Subscript object at 0x7da1b2344f70>, <ast.Subscript object at 0x7da1b2345bd0>]] variable[left_indices] assign[=] call[name[gather_indices_2d], parameter[name[left_x_region], name[left_memory_shape], name[query_block_shape]]] variable[left_x] assign[=] call[name[gather_blocks_2d], parameter[name[left_x_region], name[left_indices]]] if compare[call[name[memory_flange]][constant[0]] greater[>] constant[0]] begin[:] variable[top_x_region] assign[=] call[name[x_memory_padded]][tuple[[<ast.Slice object at 0x7da1b26ae620>, <ast.Slice object at 0x7da1b26ac670>, <ast.Slice object at 0x7da1b26ad0c0>, <ast.Slice object at 0x7da1b26aec80>, <ast.Slice object at 0x7da1b26adea0>]]] variable[top_memory_shape] assign[=] tuple[[<ast.Subscript object at 0x7da1b26af1f0>, <ast.BinOp object at 0x7da1b26ac5e0>]] variable[top_indices] assign[=] call[name[gather_indices_2d], parameter[name[top_x_region], name[top_memory_shape], name[query_block_shape]]] variable[top_x] assign[=] call[name[gather_blocks_2d], parameter[name[top_x_region], name[top_indices]]] variable[x_flange] assign[=] constant[None] if <ast.BoolOp object at 0x7da1b26ac8b0> begin[:] variable[x_flange] assign[=] call[name[tf].concat, parameter[list[[<ast.Name object at 0x7da1b26af040>, <ast.Name object at 0x7da1b26afeb0>]]]] return[tuple[[<ast.Name object at 0x7da1b26af400>, <ast.Name object at 0x7da1b26aee00>]]]
keyword[def] identifier[get_memory_region] ( identifier[x] , identifier[query_block_shape] , identifier[memory_flange] , identifier[q_indices] ): literal[string] identifier[x_query_padded] = identifier[pad_to_multiple_2d] ( identifier[x] , identifier[query_block_shape] ) identifier[x_center] = identifier[gather_blocks_2d] ( identifier[x_query_padded] , identifier[q_indices] ) identifier[paddings] =[[ literal[int] , literal[int] ],[ literal[int] , literal[int] ],[ identifier[memory_flange] [ literal[int] ], literal[int] ], [ identifier[memory_flange] [ literal[int] ], identifier[memory_flange] [ literal[int] ]],[ literal[int] , literal[int] ]] identifier[x_memory_padded] = identifier[tf] . identifier[pad] ( identifier[x_query_padded] , identifier[paddings] ) identifier[left_x] = keyword[None] identifier[top_x] = keyword[None] keyword[if] identifier[memory_flange] [ literal[int] ]> literal[int] : identifier[left_x_region] = identifier[x_memory_padded] [:,:, identifier[memory_flange] [ literal[int] ]:,:-( identifier[query_block_shape] [ literal[int] ]+ identifier[memory_flange] [ literal[int] ]),:] identifier[left_memory_shape] =( identifier[query_block_shape] [ literal[int] ], identifier[memory_flange] [ literal[int] ]) identifier[left_indices] = identifier[gather_indices_2d] ( identifier[left_x_region] , identifier[left_memory_shape] , identifier[query_block_shape] ) identifier[left_x] = identifier[gather_blocks_2d] ( identifier[left_x_region] , identifier[left_indices] ) keyword[if] identifier[memory_flange] [ literal[int] ]> literal[int] : identifier[top_x_region] = identifier[x_memory_padded] [:,:,:- identifier[query_block_shape] [ literal[int] ],:,:] identifier[top_memory_shape] =( identifier[memory_flange] [ literal[int] ], identifier[query_block_shape] [ literal[int] ]+ literal[int] * identifier[memory_flange] [ literal[int] ]) identifier[top_indices] = identifier[gather_indices_2d] ( identifier[top_x_region] , identifier[top_memory_shape] , identifier[query_block_shape] ) identifier[top_x] = identifier[gather_blocks_2d] ( identifier[top_x_region] , identifier[top_indices] ) identifier[x_flange] = keyword[None] keyword[if] identifier[top_x] keyword[is] keyword[not] keyword[None] keyword[and] identifier[left_x] keyword[is] keyword[not] keyword[None] : identifier[x_flange] = identifier[tf] . identifier[concat] ([ identifier[top_x] , identifier[left_x] ], identifier[axis] = literal[int] ) keyword[else] : identifier[x_flange] = identifier[top_x] keyword[if] identifier[top_x] keyword[is] keyword[not] keyword[None] keyword[else] identifier[left_x] keyword[return] identifier[x_flange] , identifier[x_center]
def get_memory_region(x, query_block_shape, memory_flange, q_indices): """Get the memory regions that surround a 2d query. The memory regions will be the left and top right. Args: x: A tensor with shape [batch, heads, height, width, depth] query_block_shape: a 2-d tuple of integers memory_flange: a 2-d tuple of integers q_indices: a tensor of indices for each of the center blocks. [num_blocks, block_length] Returns: x_flange: A tensor of shape [batch, heads, #blocks, block_length, depth] """ # Padding x to be multiple of query_shape and then # extracting the memory blocks from the same regions as the query blocks x_query_padded = pad_to_multiple_2d(x, query_block_shape) x_center = gather_blocks_2d(x_query_padded, q_indices) # Then padding the flange region paddings = [[0, 0], [0, 0], [memory_flange[0], 0], [memory_flange[1], memory_flange[1]], [0, 0]] x_memory_padded = tf.pad(x_query_padded, paddings) left_x = None top_x = None # Extracting the memory regions around the query block. left_x_region extends # to the left and the top_x_region is the combination of top left, top, and # top right of the query block # if no left region if memory_flange[1] > 0: left_x_region = x_memory_padded[:, :, memory_flange[0]:, :-(query_block_shape[1] + memory_flange[1]), :] left_memory_shape = (query_block_shape[0], memory_flange[1]) left_indices = gather_indices_2d(left_x_region, left_memory_shape, query_block_shape) left_x = gather_blocks_2d(left_x_region, left_indices) # depends on [control=['if'], data=[]] # if no top region if memory_flange[0] > 0: top_x_region = x_memory_padded[:, :, :-query_block_shape[0], :, :] top_memory_shape = (memory_flange[0], query_block_shape[1] + 2 * memory_flange[1]) top_indices = gather_indices_2d(top_x_region, top_memory_shape, query_block_shape) top_x = gather_blocks_2d(top_x_region, top_indices) # depends on [control=['if'], data=[]] x_flange = None if top_x is not None and left_x is not None: x_flange = tf.concat([top_x, left_x], axis=3) # depends on [control=['if'], data=[]] else: x_flange = top_x if top_x is not None else left_x return (x_flange, x_center)
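A hedged shape check, assuming TensorFlow and the module's `gather_indices_2d` helper are in scope (its signature is inferred from the calls above):

import tensorflow as tf

x = tf.zeros([1, 2, 8, 8, 4])          # [batch, heads, height, width, depth]
q_shape, flange = (2, 2), (2, 2)
q_indices = gather_indices_2d(x, q_shape, q_shape)   # one index set per query block
x_flange, x_center = get_memory_region(x, q_shape, flange, q_indices)
print(x_flange.shape, x_center.shape)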
def _check_api_limits(gh_session, api_required=250, sleep_time=15):
    """ Simplified check for API limits

    If necessary, spin in place waiting for the API to reset before
    returning.

    See: https://developer.github.com/v3/#rate-limiting
    """
    api_rates = gh_session.rate_limit()

    api_remaining = api_rates['rate']['remaining']
    api_reset = api_rates['rate']['reset']
    logger.debug('Rate Limit - %d requests remaining', api_remaining)

    if api_remaining > api_required:
        return

    now_time = time.time()
    time_to_reset = int(api_reset - now_time)
    logger.warning('Rate Limit Depleted - Sleeping for %d seconds', time_to_reset)

    while now_time < api_reset:
        time.sleep(sleep_time)
        now_time = time.time()

    return
def function[_check_api_limits, parameter[gh_session, api_required, sleep_time]]: constant[ Simplified check for API limits If necessary, spin in place waiting for API to reset before returning. See: https://developer.github.com/v3/#rate-limiting ] variable[api_rates] assign[=] call[name[gh_session].rate_limit, parameter[]] variable[api_remaining] assign[=] call[call[name[api_rates]][constant[rate]]][constant[remaining]] variable[api_reset] assign[=] call[call[name[api_rates]][constant[rate]]][constant[reset]] call[name[logger].debug, parameter[constant[Rate Limit - %d requests remaining], name[api_remaining]]] if compare[name[api_remaining] greater[>] name[api_required]] begin[:] return[None] variable[now_time] assign[=] call[name[time].time, parameter[]] variable[time_to_reset] assign[=] call[name[int], parameter[binary_operation[name[api_reset] - name[now_time]]]] call[name[logger].warn, parameter[constant[Rate Limit Depleted - Sleeping for %d seconds], name[time_to_reset]]] while compare[name[now_time] less[<] name[api_reset]] begin[:] call[name[time].sleep, parameter[constant[10]]] variable[now_time] assign[=] call[name[time].time, parameter[]] return[None]
keyword[def] identifier[_check_api_limits] ( identifier[gh_session] , identifier[api_required] = literal[int] , identifier[sleep_time] = literal[int] ): literal[string] identifier[api_rates] = identifier[gh_session] . identifier[rate_limit] () identifier[api_remaining] = identifier[api_rates] [ literal[string] ][ literal[string] ] identifier[api_reset] = identifier[api_rates] [ literal[string] ][ literal[string] ] identifier[logger] . identifier[debug] ( literal[string] , identifier[api_remaining] ) keyword[if] identifier[api_remaining] > identifier[api_required] : keyword[return] identifier[now_time] = identifier[time] . identifier[time] () identifier[time_to_reset] = identifier[int] ( identifier[api_reset] - identifier[now_time] ) identifier[logger] . identifier[warn] ( literal[string] , identifier[time_to_reset] ) keyword[while] identifier[now_time] < identifier[api_reset] : identifier[time] . identifier[sleep] ( literal[int] ) identifier[now_time] = identifier[time] . identifier[time] () keyword[return]
def _check_api_limits(gh_session, api_required=250, sleep_time=15): """ Simplified check for API limits If necessary, spin in place waiting for API to reset before returning. See: https://developer.github.com/v3/#rate-limiting """ api_rates = gh_session.rate_limit() api_remaining = api_rates['rate']['remaining'] api_reset = api_rates['rate']['reset'] logger.debug('Rate Limit - %d requests remaining', api_remaining) if api_remaining > api_required: return # depends on [control=['if'], data=[]] now_time = time.time() time_to_reset = int(api_reset - now_time) logger.warn('Rate Limit Depleted - Sleeping for %d seconds', time_to_reset) while now_time < api_reset: time.sleep(10) now_time = time.time() # depends on [control=['while'], data=['now_time']] return
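Hedged usage against github3.py, which the `rate_limit()` call shape suggests this helper targets; the token is a placeholder:

import github3

gh = github3.login(token="<token>")      # hypothetical credentials
_check_api_limits(gh, api_required=100)
# safe to issue roughly 100 requests from here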
def gradient_stops(self): """|GradientStops| object providing access to stops of this gradient. Raises |TypeError| when fill is not gradient (call `fill.gradient()` first). Each stop represents a color between which the gradient smoothly transitions. """ if self.type != MSO_FILL.GRADIENT: raise TypeError('Fill is not of type MSO_FILL_TYPE.GRADIENT') return self._fill.gradient_stops
def function[gradient_stops, parameter[self]]: constant[|GradientStops| object providing access to stops of this gradient. Raises |TypeError| when fill is not gradient (call `fill.gradient()` first). Each stop represents a color between which the gradient smoothly transitions. ] if compare[name[self].type not_equal[!=] name[MSO_FILL].GRADIENT] begin[:] <ast.Raise object at 0x7da20c76eec0> return[name[self]._fill.gradient_stops]
keyword[def] identifier[gradient_stops] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[type] != identifier[MSO_FILL] . identifier[GRADIENT] : keyword[raise] identifier[TypeError] ( literal[string] ) keyword[return] identifier[self] . identifier[_fill] . identifier[gradient_stops]
def gradient_stops(self): """|GradientStops| object providing access to stops of this gradient. Raises |TypeError| when fill is not gradient (call `fill.gradient()` first). Each stop represents a color between which the gradient smoothly transitions. """ if self.type != MSO_FILL.GRADIENT: raise TypeError('Fill is not of type MSO_FILL_TYPE.GRADIENT') # depends on [control=['if'], data=[]] return self._fill.gradient_stops
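Hedged python-pptx usage — the fill type must be switched to gradient first, exactly as the guard above demands:

from pptx import Presentation
from pptx.util import Inches
from pptx.enum.shapes import MSO_SHAPE

prs = Presentation()
slide = prs.slides.add_slide(prs.slide_layouts[6])   # blank layout
shape = slide.shapes.add_shape(MSO_SHAPE.RECTANGLE,
                               Inches(1), Inches(1), Inches(3), Inches(1))
fill = shape.fill
fill.gradient()                 # without this, gradient_stops raises TypeError
stops = fill.gradient_stops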
def is_assignee_on(self, login, repository): """Checks if this user can be assigned to issues on login/repository. :returns: :class:`bool` """ url = self._build_url('repos', login, repository, 'assignees', self.login) return self._boolean(self._get(url), 204, 404)
def function[is_assignee_on, parameter[self, login, repository]]: constant[Checks if this user can be assigned to issues on login/repository. :returns: :class:`bool` ] variable[url] assign[=] call[name[self]._build_url, parameter[constant[repos], name[login], name[repository], constant[assignees], name[self].login]] return[call[name[self]._boolean, parameter[call[name[self]._get, parameter[name[url]]], constant[204], constant[404]]]]
keyword[def] identifier[is_assignee_on] ( identifier[self] , identifier[login] , identifier[repository] ): literal[string] identifier[url] = identifier[self] . identifier[_build_url] ( literal[string] , identifier[login] , identifier[repository] , literal[string] , identifier[self] . identifier[login] ) keyword[return] identifier[self] . identifier[_boolean] ( identifier[self] . identifier[_get] ( identifier[url] ), literal[int] , literal[int] )
def is_assignee_on(self, login, repository): """Checks if this user can be assigned to issues on login/repository. :returns: :class:`bool` """ url = self._build_url('repos', login, repository, 'assignees', self.login) return self._boolean(self._get(url), 204, 404)
def argv(cls, name, short_name=None, type=None, help=None): """ Set command line arguments as a source Parses the command line arguments described by the parameters. Args: name: the long name of the argument (foo) short_name: the optional short name of the argument (f) type: the optional type of the argument, defaults to bool help: the optional help text for the argument """ cls.__hierarchy.append(argv.Argv(name, short_name, type, help))
def function[argv, parameter[cls, name, short_name, type, help]]: constant[ Set command line arguments as a source Parses the command line arguments described by the parameters. Args: name: the long name of the argument (foo) short_name: the optional short name of the argument (f) type: the optional type of the argument, defaults to bool help: the optional help text for the argument ] call[name[cls].__hierarchy.append, parameter[call[name[argv].Argv, parameter[name[name], name[short_name], name[type], name[help]]]]]
keyword[def] identifier[argv] ( identifier[cls] , identifier[name] , identifier[short_name] = keyword[None] , identifier[type] = keyword[None] , identifier[help] = keyword[None] ): literal[string] identifier[cls] . identifier[__hierarchy] . identifier[append] ( identifier[argv] . identifier[Argv] ( identifier[name] , identifier[short_name] , identifier[type] , identifier[help] ))
def argv(cls, name, short_name=None, type=None, help=None): """ Set command line arguments as a source Parses the command line arguments described by the parameters. Args: name: the long name of the argument (foo) short_name: the optional short name of the argument (f) type: the optional type of the argument, defaults to bool help: the optional help text for the argument """ cls.__hierarchy.append(argv.Argv(name, short_name, type, help))
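# Hedged standalone sketch of the pattern above: each argv() call records one
# command-line source on a class-level hierarchy list. Argv is stubbed with a
# namedtuple because the real argv module is not shown in this snippet.
from collections import namedtuple

Argv = namedtuple('Argv', 'name short_name type help')  # hypothetical stub

class Config(object):
    _hierarchy = []

    @classmethod
    def argv(cls, name, short_name=None, type=None, help=None):
        cls._hierarchy.append(Argv(name, short_name, type, help))

Config.argv('verbose', short_name='v', type=bool, help='enable verbose output')
Config.argv('workers', type=int, help='number of worker processes')
assert [a.name for a in Config._hierarchy] == ['verbose', 'workers']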
def _fill(self, values):
    """Add extra values to fill the line"""
    zero = self.view.y(min(max(self.zero, self._box.ymin), self._box.ymax))

    # Check to see if the data has been padded with Nones;
    # fill doesn't work correctly otherwise
    end = len(values) - 1
    while end > 0:
        x, y = values[end]
        if self.missing_value_fill_truncation == "either":
            if x is not None and y is not None:
                break
        elif self.missing_value_fill_truncation == "x":
            if x is not None:
                break
        elif self.missing_value_fill_truncation == "y":
            if y is not None:
                break
        else:
            raise ValueError(
                "Invalid value ({}) for config key "
                "'missing_value_fill_truncation';"
                " Use 'x', 'y' or 'either'".format(
                    self.missing_value_fill_truncation
                )
            )
        end -= 1

    return ([(values[0][0], zero)] + values + [(values[end][0], zero)])
def function[_fill, parameter[self, values]]: constant[Add extra values to fill the line] variable[zero] assign[=] call[name[self].view.y, parameter[call[name[min], parameter[call[name[max], parameter[name[self].zero, name[self]._box.ymin]], name[self]._box.ymax]]]] variable[end] assign[=] binary_operation[call[name[len], parameter[name[values]]] - constant[1]] while compare[name[end] greater[>] constant[0]] begin[:] <ast.Tuple object at 0x7da20c6e6620> assign[=] call[name[values]][name[end]] if compare[name[self].missing_value_fill_truncation equal[==] constant[either]] begin[:] if <ast.BoolOp object at 0x7da20c6e55d0> begin[:] break <ast.AugAssign object at 0x7da20c6c6140> return[binary_operation[binary_operation[list[[<ast.Tuple object at 0x7da20c6c49d0>]] + name[values]] + list[[<ast.Tuple object at 0x7da20c6c4b50>]]]]
keyword[def] identifier[_fill] ( identifier[self] , identifier[values] ): literal[string] identifier[zero] = identifier[self] . identifier[view] . identifier[y] ( identifier[min] ( identifier[max] ( identifier[self] . identifier[zero] , identifier[self] . identifier[_box] . identifier[ymin] ), identifier[self] . identifier[_box] . identifier[ymax] )) identifier[end] = identifier[len] ( identifier[values] )- literal[int] keyword[while] identifier[end] > literal[int] : identifier[x] , identifier[y] = identifier[values] [ identifier[end] ] keyword[if] identifier[self] . identifier[missing_value_fill_truncation] == literal[string] : keyword[if] identifier[x] keyword[is] keyword[not] keyword[None] keyword[and] identifier[y] keyword[is] keyword[not] keyword[None] : keyword[break] keyword[elif] identifier[self] . identifier[missing_value_fill_truncation] == literal[string] : keyword[if] identifier[x] keyword[is] keyword[not] keyword[None] : keyword[break] keyword[elif] identifier[self] . identifier[missing_value_fill_truncation] == literal[string] : keyword[if] identifier[y] keyword[is] keyword[not] keyword[None] : keyword[break] keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] literal[string] . identifier[format] ( identifier[self] . identifier[missing_value_fill_truncation] ) ) identifier[end] -= literal[int] keyword[return] ([( identifier[values] [ literal[int] ][ literal[int] ], identifier[zero] )]+ identifier[values] +[( identifier[values] [ identifier[end] ][ literal[int] ], identifier[zero] )])
def _fill(self, values):
    """Add extra values to fill the line"""
    zero = self.view.y(min(max(self.zero, self._box.ymin), self._box.ymax))
    # Check to see if the data has been padded with Nones;
    # fill doesn't work correctly otherwise
    end = len(values) - 1
    while end > 0:
        (x, y) = values[end]
        if self.missing_value_fill_truncation == 'either':
            if x is not None and y is not None:
                break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
        elif self.missing_value_fill_truncation == 'x':
            if x is not None:
                break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
        elif self.missing_value_fill_truncation == 'y':
            if y is not None:
                break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
        else:
            raise ValueError("Invalid value ({}) for config key 'missing_value_fill_truncation'; Use 'x', 'y' or 'either'".format(self.missing_value_fill_truncation))
        end -= 1 # depends on [control=['while'], data=['end']]
    return [(values[0][0], zero)] + values + [(values[end][0], zero)]
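# Hedged standalone sketch of what _fill does: the series is closed by
# repeating the first x and the last non-missing x at the baseline so the SVG
# fill polygon reaches down to zero. The trailing None mimics pygal's padding.
values = [(0, 3), (1, 5), (2, 4), (3, None)]
end = len(values) - 1
while end > 0 and values[end][1] is None:  # the 'y' truncation mode
    end -= 1
zero = 0  # stand-in for the projected baseline self.view.y(...)
filled = [(values[0][0], zero)] + values + [(values[end][0], zero)]
print(filled)  # [(0, 0), (0, 3), (1, 5), (2, 4), (3, None), (2, 0)]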
def uplinkBusy(): """UPLINK BUSY Section 9.1.46""" name = "Uplink Busy" a = TpPd(pd=0x6) b = MessageType(mesType=0x2a) # 00101010 packet = a / b return packet
def function[uplinkBusy, parameter[]]: constant[UPLINK BUSY Section 9.1.46] variable[name] assign[=] constant[Uplink Busy] variable[a] assign[=] call[name[TpPd], parameter[]] variable[b] assign[=] call[name[MessageType], parameter[]] variable[packet] assign[=] binary_operation[name[a] / name[b]] return[name[packet]]
keyword[def] identifier[uplinkBusy] (): literal[string] identifier[name] = literal[string] identifier[a] = identifier[TpPd] ( identifier[pd] = literal[int] ) identifier[b] = identifier[MessageType] ( identifier[mesType] = literal[int] ) identifier[packet] = identifier[a] / identifier[b] keyword[return] identifier[packet]
def uplinkBusy(): """UPLINK BUSY Section 9.1.46""" name = 'Uplink Busy' a = TpPd(pd=6) b = MessageType(mesType=42) # 00101010 packet = a / b return packet
def set_dataset_year_range(self, dataset_year, dataset_end_year=None): # type: (Union[str, int], Optional[Union[str, int]]) -> None """Set dataset date as a range from year or start and end year. Args: dataset_year (Union[str, int]): Dataset year given as string or int dataset_end_year (Optional[Union[str, int]]): Dataset end year given as string or int Returns: None """ if isinstance(dataset_year, int): dataset_date = '01/01/%d' % dataset_year elif isinstance(dataset_year, str): dataset_date = '01/01/%s' % dataset_year else: raise hdx.data.hdxobject.HDXError('dataset_year has type %s which is not supported!' % type(dataset_year).__name__) if dataset_end_year is None: dataset_end_year = dataset_year if isinstance(dataset_end_year, int): dataset_end_date = '31/12/%d' % dataset_end_year elif isinstance(dataset_end_year, str): dataset_end_date = '31/12/%s' % dataset_end_year else: raise hdx.data.hdxobject.HDXError('dataset_end_year has type %s which is not supported!' % type(dataset_end_year).__name__) self.set_dataset_date(dataset_date, dataset_end_date)
def function[set_dataset_year_range, parameter[self, dataset_year, dataset_end_year]]: constant[Set dataset date as a range from year or start and end year. Args: dataset_year (Union[str, int]): Dataset year given as string or int dataset_end_year (Optional[Union[str, int]]): Dataset end year given as string or int Returns: None ] if call[name[isinstance], parameter[name[dataset_year], name[int]]] begin[:] variable[dataset_date] assign[=] binary_operation[constant[01/01/%d] <ast.Mod object at 0x7da2590d6920> name[dataset_year]] if compare[name[dataset_end_year] is constant[None]] begin[:] variable[dataset_end_year] assign[=] name[dataset_year] if call[name[isinstance], parameter[name[dataset_end_year], name[int]]] begin[:] variable[dataset_end_date] assign[=] binary_operation[constant[31/12/%d] <ast.Mod object at 0x7da2590d6920> name[dataset_end_year]] call[name[self].set_dataset_date, parameter[name[dataset_date], name[dataset_end_date]]]
keyword[def] identifier[set_dataset_year_range] ( identifier[self] , identifier[dataset_year] , identifier[dataset_end_year] = keyword[None] ): literal[string] keyword[if] identifier[isinstance] ( identifier[dataset_year] , identifier[int] ): identifier[dataset_date] = literal[string] % identifier[dataset_year] keyword[elif] identifier[isinstance] ( identifier[dataset_year] , identifier[str] ): identifier[dataset_date] = literal[string] % identifier[dataset_year] keyword[else] : keyword[raise] identifier[hdx] . identifier[data] . identifier[hdxobject] . identifier[HDXError] ( literal[string] % identifier[type] ( identifier[dataset_year] ). identifier[__name__] ) keyword[if] identifier[dataset_end_year] keyword[is] keyword[None] : identifier[dataset_end_year] = identifier[dataset_year] keyword[if] identifier[isinstance] ( identifier[dataset_end_year] , identifier[int] ): identifier[dataset_end_date] = literal[string] % identifier[dataset_end_year] keyword[elif] identifier[isinstance] ( identifier[dataset_end_year] , identifier[str] ): identifier[dataset_end_date] = literal[string] % identifier[dataset_end_year] keyword[else] : keyword[raise] identifier[hdx] . identifier[data] . identifier[hdxobject] . identifier[HDXError] ( literal[string] % identifier[type] ( identifier[dataset_end_year] ). identifier[__name__] ) identifier[self] . identifier[set_dataset_date] ( identifier[dataset_date] , identifier[dataset_end_date] )
def set_dataset_year_range(self, dataset_year, dataset_end_year=None): # type: (Union[str, int], Optional[Union[str, int]]) -> None 'Set dataset date as a range from year or start and end year.\n\n Args:\n dataset_year (Union[str, int]): Dataset year given as string or int\n dataset_end_year (Optional[Union[str, int]]): Dataset end year given as string or int\n\n Returns:\n None\n ' if isinstance(dataset_year, int): dataset_date = '01/01/%d' % dataset_year # depends on [control=['if'], data=[]] elif isinstance(dataset_year, str): dataset_date = '01/01/%s' % dataset_year # depends on [control=['if'], data=[]] else: raise hdx.data.hdxobject.HDXError('dataset_year has type %s which is not supported!' % type(dataset_year).__name__) if dataset_end_year is None: dataset_end_year = dataset_year # depends on [control=['if'], data=['dataset_end_year']] if isinstance(dataset_end_year, int): dataset_end_date = '31/12/%d' % dataset_end_year # depends on [control=['if'], data=[]] elif isinstance(dataset_end_year, str): dataset_end_date = '31/12/%s' % dataset_end_year # depends on [control=['if'], data=[]] else: raise hdx.data.hdxobject.HDXError('dataset_end_year has type %s which is not supported!' % type(dataset_end_year).__name__) self.set_dataset_date(dataset_date, dataset_end_date)
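# Hedged standalone sketch of the date-string construction above, without the
# HDX Dataset machinery: a year (or year pair) becomes DD/MM/YYYY endpoints.
def year_range(start, end=None):
    end = start if end is None else end
    return '01/01/%s' % start, '31/12/%s' % end

assert year_range(2015, 2017) == ('01/01/2015', '31/12/2017')
assert year_range('2019') == ('01/01/2019', '31/12/2019')  # single year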
def append_dist_to_shapes(feed: "Feed") -> "Feed":
    """
    Calculate and append the optional ``shape_dist_traveled`` field in
    ``feed.shapes`` in terms of the distance units ``feed.dist_units``.
    Return the resulting Feed.

    Notes
    -----
    - As a benchmark, using this function on `this Portland feed
      <https://transitfeeds.com/p/trimet/43/1400947517>`_ produces a
      ``shape_dist_traveled`` column that differs by at most 0.016 km in
      absolute value from the original values
    - Assume the following feed attributes are not ``None``:
        * ``feed.shapes``

    """
    if feed.shapes is None:
        raise ValueError(
            "This function requires the feed to have a shapes.txt file"
        )

    feed = feed.copy()
    f = feed.shapes
    m_to_dist = hp.get_convert_dist("m", feed.dist_units)

    def compute_dist(group):
        # Compute the distances of the stops along this trip
        group = group.sort_values("shape_pt_sequence")
        shape = group["shape_id"].iat[0]
        if not isinstance(shape, str):
            group["shape_dist_traveled"] = np.nan
            return group
        points = [
            sg.Point(utm.from_latlon(lat, lon)[:2])
            for lon, lat in group[["shape_pt_lon", "shape_pt_lat"]].values
        ]
        p_prev = points[0]
        d = 0
        distances = [0]
        for p in points[1:]:
            d += p.distance(p_prev)
            distances.append(d)
            p_prev = p
        group["shape_dist_traveled"] = distances
        return group

    g = f.groupby("shape_id", group_keys=False).apply(compute_dist)
    # Convert from meters
    g["shape_dist_traveled"] = g["shape_dist_traveled"].map(m_to_dist)
    feed.shapes = g

    return feed
def function[append_dist_to_shapes, parameter[feed]]:
    constant[
    Calculate and append the optional ``shape_dist_traveled`` field in
    ``feed.shapes`` in terms of the distance units ``feed.dist_units``.
    Return the resulting Feed.

    Notes
    -----
    - As a benchmark, using this function on `this Portland feed
      <https://transitfeeds.com/p/trimet/43/1400947517>`_ produces a
      ``shape_dist_traveled`` column that differs by at most 0.016 km in
      absolute value from the original values
    - Assume the following feed attributes are not ``None``:
        * ``feed.shapes``

    ]
    if compare[name[feed].shapes is constant[None]] begin[:]
    <ast.Raise object at 0x7da20c992ce0>
    variable[feed] assign[=] call[name[feed].copy, parameter[]]
    variable[f] assign[=] name[feed].shapes
    variable[m_to_dist] assign[=] call[name[hp].get_convert_dist, parameter[constant[m], name[feed].dist_units]]
    def function[compute_dist, parameter[group]]:
        variable[group] assign[=] call[name[group].sort_values, parameter[constant[shape_pt_sequence]]]
        variable[shape] assign[=] call[call[name[group]][constant[shape_id]].iat][constant[0]]
        if <ast.UnaryOp object at 0x7da20c990070> begin[:]
        call[name[group]][constant[shape_dist_traveled]] assign[=] name[np].nan
        return[name[group]]
        variable[points] assign[=] <ast.ListComp object at 0x7da20c990790>
        variable[p_prev] assign[=] call[name[points]][constant[0]]
        variable[d] assign[=] constant[0]
        variable[distances] assign[=] list[[<ast.Constant object at 0x7da20c990d90>]]
        for taget[name[p]] in starred[call[name[points]][<ast.Slice object at 0x7da20c991090>]] begin[:]
        <ast.AugAssign object at 0x7da20c9930a0>
        call[name[distances].append, parameter[name[d]]]
        variable[p_prev] assign[=] name[p]
        call[name[group]][constant[shape_dist_traveled]] assign[=] name[distances]
        return[name[group]]
    variable[g] assign[=] call[call[name[f].groupby, parameter[constant[shape_id]]].apply, parameter[name[compute_dist]]]
    call[name[g]][constant[shape_dist_traveled]] assign[=] call[call[name[g]][constant[shape_dist_traveled]].map, parameter[name[m_to_dist]]]
    name[feed].shapes assign[=] name[g]
    return[name[feed]]
keyword[def] identifier[append_dist_to_shapes] ( identifier[feed] : literal[string] )-> literal[string] : literal[string] keyword[if] identifier[feed] . identifier[shapes] keyword[is] keyword[None] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[feed] = identifier[feed] . identifier[copy] () identifier[f] = identifier[feed] . identifier[shapes] identifier[m_to_dist] = identifier[hp] . identifier[get_convert_dist] ( literal[string] , identifier[feed] . identifier[dist_units] ) keyword[def] identifier[compute_dist] ( identifier[group] ): identifier[group] = identifier[group] . identifier[sort_values] ( literal[string] ) identifier[shape] = identifier[group] [ literal[string] ]. identifier[iat] [ literal[int] ] keyword[if] keyword[not] identifier[isinstance] ( identifier[shape] , identifier[str] ): identifier[group] [ literal[string] ]= identifier[np] . identifier[nan] keyword[return] identifier[group] identifier[points] =[ identifier[sg] . identifier[Point] ( identifier[utm] . identifier[from_latlon] ( identifier[lat] , identifier[lon] )[: literal[int] ]) keyword[for] identifier[lon] , identifier[lat] keyword[in] identifier[group] [[ literal[string] , literal[string] ]]. identifier[values] ] identifier[p_prev] = identifier[points] [ literal[int] ] identifier[d] = literal[int] identifier[distances] =[ literal[int] ] keyword[for] identifier[p] keyword[in] identifier[points] [ literal[int] :]: identifier[d] += identifier[p] . identifier[distance] ( identifier[p_prev] ) identifier[distances] . identifier[append] ( identifier[d] ) identifier[p_prev] = identifier[p] identifier[group] [ literal[string] ]= identifier[distances] keyword[return] identifier[group] identifier[g] = identifier[f] . identifier[groupby] ( literal[string] , identifier[group_keys] = keyword[False] ). identifier[apply] ( identifier[compute_dist] ) identifier[g] [ literal[string] ]= identifier[g] [ literal[string] ]. identifier[map] ( identifier[m_to_dist] ) identifier[feed] . identifier[shapes] = identifier[g] keyword[return] identifier[feed]
def append_dist_to_shapes(feed: 'Feed') -> 'Feed':
    """
    Calculate and append the optional ``shape_dist_traveled`` field in
    ``feed.shapes`` in terms of the distance units ``feed.dist_units``.
    Return the resulting Feed.

    Notes
    -----
    - As a benchmark, using this function on `this Portland feed
      <https://transitfeeds.com/p/trimet/43/1400947517>`_ produces a
      ``shape_dist_traveled`` column that differs by at most 0.016 km in
      absolute value from the original values
    - Assume the following feed attributes are not ``None``:
        * ``feed.shapes``

    """
    if feed.shapes is None:
        raise ValueError('This function requires the feed to have a shapes.txt file') # depends on [control=['if'], data=[]]
    feed = feed.copy()
    f = feed.shapes
    m_to_dist = hp.get_convert_dist('m', feed.dist_units)

    def compute_dist(group):
        # Compute the distances of the stops along this trip
        group = group.sort_values('shape_pt_sequence')
        shape = group['shape_id'].iat[0]
        if not isinstance(shape, str):
            group['shape_dist_traveled'] = np.nan
            return group # depends on [control=['if'], data=[]]
        points = [sg.Point(utm.from_latlon(lat, lon)[:2]) for (lon, lat) in group[['shape_pt_lon', 'shape_pt_lat']].values]
        p_prev = points[0]
        d = 0
        distances = [0]
        for p in points[1:]:
            d += p.distance(p_prev)
            distances.append(d)
            p_prev = p # depends on [control=['for'], data=['p']]
        group['shape_dist_traveled'] = distances
        return group
    g = f.groupby('shape_id', group_keys=False).apply(compute_dist)
    # Convert from meters
    g['shape_dist_traveled'] = g['shape_dist_traveled'].map(m_to_dist)
    feed.shapes = g
    return feed
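# Hedged standalone sketch of the per-shape computation above: project each
# shape point to UTM, then accumulate straight-line segment lengths in metres.
# The coordinates are arbitrary; requires the utm and shapely packages.
import shapely.geometry as sg
import utm

lonlats = [(-122.68, 45.52), (-122.66, 45.52), (-122.66, 45.53)]
points = [sg.Point(utm.from_latlon(lat, lon)[:2]) for lon, lat in lonlats]
dists, d, prev = [0], 0, points[0]
for p in points[1:]:
    d += p.distance(prev)
    dists.append(d)
    prev = p
print(dists)  # cumulative metres along the shape, roughly [0, 1560, 2670]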
def run_check(self, data): """Check for uncommon words and difficult words in file.""" if not data: sys.exit(1) data, sentences, chars, num_words = self.pre_check(data) w_dict = Counter(data) uniq_len, uncommon, uncom_len = self.gsl(w_dict) non_dchall_set = Counter({word: count for word, count in w_dict.items() if word and word not in self.dale_chall_words}) diff_count = sum(non_dchall_set.values()) dc_score = round(self.dale_chall(diff_count, num_words, sentences), 1) cli_score = round(self.coleman_liau(chars, num_words, sentences), 1) return uncommon, uncom_len, uniq_len, dc_score, cli_score
def function[run_check, parameter[self, data]]: constant[Check for uncommon words and difficult words in file.] if <ast.UnaryOp object at 0x7da20e955390> begin[:] call[name[sys].exit, parameter[constant[1]]] <ast.Tuple object at 0x7da20e956890> assign[=] call[name[self].pre_check, parameter[name[data]]] variable[w_dict] assign[=] call[name[Counter], parameter[name[data]]] <ast.Tuple object at 0x7da20e957820> assign[=] call[name[self].gsl, parameter[name[w_dict]]] variable[non_dchall_set] assign[=] call[name[Counter], parameter[<ast.DictComp object at 0x7da20e956ef0>]] variable[diff_count] assign[=] call[name[sum], parameter[call[name[non_dchall_set].values, parameter[]]]] variable[dc_score] assign[=] call[name[round], parameter[call[name[self].dale_chall, parameter[name[diff_count], name[num_words], name[sentences]]], constant[1]]] variable[cli_score] assign[=] call[name[round], parameter[call[name[self].coleman_liau, parameter[name[chars], name[num_words], name[sentences]]], constant[1]]] return[tuple[[<ast.Name object at 0x7da18fe90610>, <ast.Name object at 0x7da18fe90cd0>, <ast.Name object at 0x7da18fe90040>, <ast.Name object at 0x7da18fe93c10>, <ast.Name object at 0x7da18fe90370>]]]
keyword[def] identifier[run_check] ( identifier[self] , identifier[data] ): literal[string] keyword[if] keyword[not] identifier[data] : identifier[sys] . identifier[exit] ( literal[int] ) identifier[data] , identifier[sentences] , identifier[chars] , identifier[num_words] = identifier[self] . identifier[pre_check] ( identifier[data] ) identifier[w_dict] = identifier[Counter] ( identifier[data] ) identifier[uniq_len] , identifier[uncommon] , identifier[uncom_len] = identifier[self] . identifier[gsl] ( identifier[w_dict] ) identifier[non_dchall_set] = identifier[Counter] ({ identifier[word] : identifier[count] keyword[for] identifier[word] , identifier[count] keyword[in] identifier[w_dict] . identifier[items] () keyword[if] identifier[word] keyword[and] identifier[word] keyword[not] keyword[in] identifier[self] . identifier[dale_chall_words] }) identifier[diff_count] = identifier[sum] ( identifier[non_dchall_set] . identifier[values] ()) identifier[dc_score] = identifier[round] ( identifier[self] . identifier[dale_chall] ( identifier[diff_count] , identifier[num_words] , identifier[sentences] ), literal[int] ) identifier[cli_score] = identifier[round] ( identifier[self] . identifier[coleman_liau] ( identifier[chars] , identifier[num_words] , identifier[sentences] ), literal[int] ) keyword[return] identifier[uncommon] , identifier[uncom_len] , identifier[uniq_len] , identifier[dc_score] , identifier[cli_score]
def run_check(self, data): """Check for uncommon words and difficult words in file.""" if not data: sys.exit(1) # depends on [control=['if'], data=[]] (data, sentences, chars, num_words) = self.pre_check(data) w_dict = Counter(data) (uniq_len, uncommon, uncom_len) = self.gsl(w_dict) non_dchall_set = Counter({word: count for (word, count) in w_dict.items() if word and word not in self.dale_chall_words}) diff_count = sum(non_dchall_set.values()) dc_score = round(self.dale_chall(diff_count, num_words, sentences), 1) cli_score = round(self.coleman_liau(chars, num_words, sentences), 1) return (uncommon, uncom_len, uniq_len, dc_score, cli_score)
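# Hedged sketch of the two scores combined in run_check, written from the
# standard published formulas; the original dale_chall/coleman_liau methods
# are not shown in this snippet, so their exact shape is an assumption.
def dale_chall(diff_count, num_words, sentences):
    pct_difficult = 100.0 * diff_count / num_words
    score = 0.1579 * pct_difficult + 0.0496 * num_words / float(sentences)
    return score + 3.6365 if pct_difficult > 5 else score

def coleman_liau(chars, num_words, sentences):
    letters_per_100 = 100.0 * chars / num_words
    sentences_per_100 = 100.0 * sentences / num_words
    return 0.0588 * letters_per_100 - 0.296 * sentences_per_100 - 15.8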
def get_part(self, vertex_in, vertices_border): """List all vertices that are connected to vertex_in, but are not included in or 'behind' vertices_border. """ vertices_new = set(self.neighbors[vertex_in]) vertices_part = set([vertex_in]) while len(vertices_new) > 0: pivot = vertices_new.pop() if pivot in vertices_border: continue vertices_part.add(pivot) pivot_neighbors = set(self.neighbors[pivot]) pivot_neighbors -= vertices_part vertices_new |= pivot_neighbors return vertices_part
def function[get_part, parameter[self, vertex_in, vertices_border]]: constant[List all vertices that are connected to vertex_in, but are not included in or 'behind' vertices_border. ] variable[vertices_new] assign[=] call[name[set], parameter[call[name[self].neighbors][name[vertex_in]]]] variable[vertices_part] assign[=] call[name[set], parameter[list[[<ast.Name object at 0x7da18ede5000>]]]] while compare[call[name[len], parameter[name[vertices_new]]] greater[>] constant[0]] begin[:] variable[pivot] assign[=] call[name[vertices_new].pop, parameter[]] if compare[name[pivot] in name[vertices_border]] begin[:] continue call[name[vertices_part].add, parameter[name[pivot]]] variable[pivot_neighbors] assign[=] call[name[set], parameter[call[name[self].neighbors][name[pivot]]]] <ast.AugAssign object at 0x7da18ede7460> <ast.AugAssign object at 0x7da18ede7cd0> return[name[vertices_part]]
keyword[def] identifier[get_part] ( identifier[self] , identifier[vertex_in] , identifier[vertices_border] ): literal[string] identifier[vertices_new] = identifier[set] ( identifier[self] . identifier[neighbors] [ identifier[vertex_in] ]) identifier[vertices_part] = identifier[set] ([ identifier[vertex_in] ]) keyword[while] identifier[len] ( identifier[vertices_new] )> literal[int] : identifier[pivot] = identifier[vertices_new] . identifier[pop] () keyword[if] identifier[pivot] keyword[in] identifier[vertices_border] : keyword[continue] identifier[vertices_part] . identifier[add] ( identifier[pivot] ) identifier[pivot_neighbors] = identifier[set] ( identifier[self] . identifier[neighbors] [ identifier[pivot] ]) identifier[pivot_neighbors] -= identifier[vertices_part] identifier[vertices_new] |= identifier[pivot_neighbors] keyword[return] identifier[vertices_part]
def get_part(self, vertex_in, vertices_border): """List all vertices that are connected to vertex_in, but are not included in or 'behind' vertices_border. """ vertices_new = set(self.neighbors[vertex_in]) vertices_part = set([vertex_in]) while len(vertices_new) > 0: pivot = vertices_new.pop() if pivot in vertices_border: continue # depends on [control=['if'], data=[]] vertices_part.add(pivot) pivot_neighbors = set(self.neighbors[pivot]) pivot_neighbors -= vertices_part vertices_new |= pivot_neighbors # depends on [control=['while'], data=[]] return vertices_part
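# Hedged standalone demo of get_part's bounded flood fill: starting from
# vertex 0, collect everything reachable without crossing the border set {2}.
neighbors = {0: [1, 2], 1: [0, 3], 2: [0, 4], 3: [1], 4: [2]}

def get_part(vertex_in, vertices_border):
    vertices_new = set(neighbors[vertex_in])
    vertices_part = {vertex_in}
    while vertices_new:
        pivot = vertices_new.pop()
        if pivot in vertices_border:
            continue
        vertices_part.add(pivot)
        vertices_new |= set(neighbors[pivot]) - vertices_part
    return vertices_part

assert get_part(0, {2}) == {0, 1, 3}  # vertex 4 is hidden behind border vertex 2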
def link_to(self, model, idx, self_idx): """ Register (self.name, self.idx) in `model._from` Returns ------- """ if model in self.system.loaded_groups: # access group instance grp = self.system.__dict__[model] # doing it one by one for i, self_i in zip(idx, self_idx): # query model name and access model instance mdl_name = grp._idx_model[i] mdl = self.system.__dict__[mdl_name] # query the corresponding uid u = mdl.get_uid(i) # update `mdl_from` name_idx_pair = (self._name, self_i) if name_idx_pair not in mdl.mdl_from[u]: mdl.mdl_from[u].append(name_idx_pair) else: # access model instance mdl = self.system.__dict__[model] uid = mdl.get_uid(idx) for u, self_i in zip(uid, self_idx): name_idx_pair = (self._name, self_i) if name_idx_pair not in mdl.mdl_from[u]: mdl.mdl_from[u].append(name_idx_pair)
def function[link_to, parameter[self, model, idx, self_idx]]: constant[ Register (self.name, self.idx) in `model._from` Returns ------- ] if compare[name[model] in name[self].system.loaded_groups] begin[:] variable[grp] assign[=] call[name[self].system.__dict__][name[model]] for taget[tuple[[<ast.Name object at 0x7da2044c01c0>, <ast.Name object at 0x7da2044c39d0>]]] in starred[call[name[zip], parameter[name[idx], name[self_idx]]]] begin[:] variable[mdl_name] assign[=] call[name[grp]._idx_model][name[i]] variable[mdl] assign[=] call[name[self].system.__dict__][name[mdl_name]] variable[u] assign[=] call[name[mdl].get_uid, parameter[name[i]]] variable[name_idx_pair] assign[=] tuple[[<ast.Attribute object at 0x7da2044c1210>, <ast.Name object at 0x7da2044c35b0>]] if compare[name[name_idx_pair] <ast.NotIn object at 0x7da2590d7190> call[name[mdl].mdl_from][name[u]]] begin[:] call[call[name[mdl].mdl_from][name[u]].append, parameter[name[name_idx_pair]]]
keyword[def] identifier[link_to] ( identifier[self] , identifier[model] , identifier[idx] , identifier[self_idx] ): literal[string] keyword[if] identifier[model] keyword[in] identifier[self] . identifier[system] . identifier[loaded_groups] : identifier[grp] = identifier[self] . identifier[system] . identifier[__dict__] [ identifier[model] ] keyword[for] identifier[i] , identifier[self_i] keyword[in] identifier[zip] ( identifier[idx] , identifier[self_idx] ): identifier[mdl_name] = identifier[grp] . identifier[_idx_model] [ identifier[i] ] identifier[mdl] = identifier[self] . identifier[system] . identifier[__dict__] [ identifier[mdl_name] ] identifier[u] = identifier[mdl] . identifier[get_uid] ( identifier[i] ) identifier[name_idx_pair] =( identifier[self] . identifier[_name] , identifier[self_i] ) keyword[if] identifier[name_idx_pair] keyword[not] keyword[in] identifier[mdl] . identifier[mdl_from] [ identifier[u] ]: identifier[mdl] . identifier[mdl_from] [ identifier[u] ]. identifier[append] ( identifier[name_idx_pair] ) keyword[else] : identifier[mdl] = identifier[self] . identifier[system] . identifier[__dict__] [ identifier[model] ] identifier[uid] = identifier[mdl] . identifier[get_uid] ( identifier[idx] ) keyword[for] identifier[u] , identifier[self_i] keyword[in] identifier[zip] ( identifier[uid] , identifier[self_idx] ): identifier[name_idx_pair] =( identifier[self] . identifier[_name] , identifier[self_i] ) keyword[if] identifier[name_idx_pair] keyword[not] keyword[in] identifier[mdl] . identifier[mdl_from] [ identifier[u] ]: identifier[mdl] . identifier[mdl_from] [ identifier[u] ]. identifier[append] ( identifier[name_idx_pair] )
def link_to(self, model, idx, self_idx): """ Register (self.name, self.idx) in `model._from` Returns ------- """ if model in self.system.loaded_groups: # access group instance grp = self.system.__dict__[model] # doing it one by one for (i, self_i) in zip(idx, self_idx): # query model name and access model instance mdl_name = grp._idx_model[i] mdl = self.system.__dict__[mdl_name] # query the corresponding uid u = mdl.get_uid(i) # update `mdl_from` name_idx_pair = (self._name, self_i) if name_idx_pair not in mdl.mdl_from[u]: mdl.mdl_from[u].append(name_idx_pair) # depends on [control=['if'], data=['name_idx_pair']] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['model']] else: # access model instance mdl = self.system.__dict__[model] uid = mdl.get_uid(idx) for (u, self_i) in zip(uid, self_idx): name_idx_pair = (self._name, self_i) if name_idx_pair not in mdl.mdl_from[u]: mdl.mdl_from[u].append(name_idx_pair) # depends on [control=['if'], data=['name_idx_pair']] # depends on [control=['for'], data=[]]
def EI(inc): """ Given a mean inclination value of a distribution of directions, this function calculates the expected elongation of this distribution using a best-fit polynomial of the TK03 GAD secular variation model (Tauxe and Kent, 2004). Parameters ---------- inc : inclination in degrees (int or float) Returns --------- elongation : float Examples --------- >>> pmag.EI(20) 2.4863973732 >>> pmag.EI(90) 1.0241570135500004 """ poly_tk03 = [3.15976125e-06, -3.52459817e-04, - 1.46641090e-02, 2.89538539e+00] return poly_tk03[0] * inc**3 + poly_tk03[1] * inc**2 + poly_tk03[2] * inc + poly_tk03[3]
def function[EI, parameter[inc]]: constant[ Given a mean inclination value of a distribution of directions, this function calculates the expected elongation of this distribution using a best-fit polynomial of the TK03 GAD secular variation model (Tauxe and Kent, 2004). Parameters ---------- inc : inclination in degrees (int or float) Returns --------- elongation : float Examples --------- >>> pmag.EI(20) 2.4863973732 >>> pmag.EI(90) 1.0241570135500004 ] variable[poly_tk03] assign[=] list[[<ast.Constant object at 0x7da1b042f7c0>, <ast.UnaryOp object at 0x7da1b042d7b0>, <ast.UnaryOp object at 0x7da1b042d960>, <ast.Constant object at 0x7da1b042d690>]] return[binary_operation[binary_operation[binary_operation[binary_operation[call[name[poly_tk03]][constant[0]] * binary_operation[name[inc] ** constant[3]]] + binary_operation[call[name[poly_tk03]][constant[1]] * binary_operation[name[inc] ** constant[2]]]] + binary_operation[call[name[poly_tk03]][constant[2]] * name[inc]]] + call[name[poly_tk03]][constant[3]]]]
keyword[def] identifier[EI] ( identifier[inc] ): literal[string] identifier[poly_tk03] =[ literal[int] ,- literal[int] ,- literal[int] , literal[int] ] keyword[return] identifier[poly_tk03] [ literal[int] ]* identifier[inc] ** literal[int] + identifier[poly_tk03] [ literal[int] ]* identifier[inc] ** literal[int] + identifier[poly_tk03] [ literal[int] ]* identifier[inc] + identifier[poly_tk03] [ literal[int] ]
def EI(inc): """ Given a mean inclination value of a distribution of directions, this function calculates the expected elongation of this distribution using a best-fit polynomial of the TK03 GAD secular variation model (Tauxe and Kent, 2004). Parameters ---------- inc : inclination in degrees (int or float) Returns --------- elongation : float Examples --------- >>> pmag.EI(20) 2.4863973732 >>> pmag.EI(90) 1.0241570135500004 """ poly_tk03 = [3.15976125e-06, -0.000352459817, -0.014664109, 2.89538539] return poly_tk03[0] * inc ** 3 + poly_tk03[1] * inc ** 2 + poly_tk03[2] * inc + poly_tk03[3]
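# Cross-check of the cubic above with numpy: np.polyval takes coefficients
# from highest to lowest degree, which matches poly_tk03's ordering.
import numpy as np

poly_tk03 = [3.15976125e-06, -3.52459817e-04, -1.46641090e-02, 2.89538539e+00]
print(np.polyval(poly_tk03, 20))  # 2.4863973732, as in the docstring
print(np.polyval(poly_tk03, 90))  # 1.0241570135500004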
def get_as_string_with_default(self, key, default_value):
    """
    Converts map element into a string or returns default value if conversion is not possible.

    :param key: an index of element to get.

    :param default_value: the default value

    :return: string value of the element or default value if conversion is not supported.
    """
    value = self.get(key)
    return StringConverter.to_string_with_default(value, default_value)
def function[get_as_string_with_default, parameter[self, key, default_value]]:
    constant[
    Converts map element into a string or returns default value if conversion is not possible.

    :param key: an index of element to get.

    :param default_value: the default value

    :return: string value of the element or default value if conversion is not supported.
    ]
    variable[value] assign[=] call[name[self].get, parameter[name[key]]]
    return[call[name[StringConverter].to_string_with_default, parameter[name[value], name[default_value]]]]
keyword[def] identifier[get_as_string_with_default] ( identifier[self] , identifier[key] , identifier[default_value] ): literal[string] identifier[value] = identifier[self] . identifier[get] ( identifier[key] ) keyword[return] identifier[StringConverter] . identifier[to_string_with_default] ( identifier[value] , identifier[default_value] )
def get_as_string_with_default(self, key, default_value):
    """
    Converts map element into a string or returns default value if conversion is not possible.

    :param key: an index of element to get.

    :param default_value: the default value

    :return: string value of the element or default value if conversion is not supported.
    """
    value = self.get(key)
    return StringConverter.to_string_with_default(value, default_value)
def reply(self, connection, reply, orig_req): """Send an asynchronous reply to an earlier request. Parameters ---------- connection : ClientConnection object The client to send the reply to. reply : Message object The reply message to send. orig_req : Message object The request message being replied to. The reply message's id is overridden with the id from orig_req before the reply is sent. """ if isinstance(connection, ClientRequestConnection): self._logger.warn( 'Deprecation warning: do not use self.reply() ' 'within a reply handler context -- use req.reply(*msg_args)\n' 'or req.reply_with_message(msg) Traceback:\n %s', "".join(traceback.format_stack())) # Get the underlying ClientConnection instance connection = connection.client_connection connection.reply(reply, orig_req)
def function[reply, parameter[self, connection, reply, orig_req]]: constant[Send an asynchronous reply to an earlier request. Parameters ---------- connection : ClientConnection object The client to send the reply to. reply : Message object The reply message to send. orig_req : Message object The request message being replied to. The reply message's id is overridden with the id from orig_req before the reply is sent. ] if call[name[isinstance], parameter[name[connection], name[ClientRequestConnection]]] begin[:] call[name[self]._logger.warn, parameter[constant[Deprecation warning: do not use self.reply() within a reply handler context -- use req.reply(*msg_args) or req.reply_with_message(msg) Traceback: %s], call[constant[].join, parameter[call[name[traceback].format_stack, parameter[]]]]]] variable[connection] assign[=] name[connection].client_connection call[name[connection].reply, parameter[name[reply], name[orig_req]]]
keyword[def] identifier[reply] ( identifier[self] , identifier[connection] , identifier[reply] , identifier[orig_req] ): literal[string] keyword[if] identifier[isinstance] ( identifier[connection] , identifier[ClientRequestConnection] ): identifier[self] . identifier[_logger] . identifier[warn] ( literal[string] literal[string] literal[string] , literal[string] . identifier[join] ( identifier[traceback] . identifier[format_stack] ())) identifier[connection] = identifier[connection] . identifier[client_connection] identifier[connection] . identifier[reply] ( identifier[reply] , identifier[orig_req] )
def reply(self, connection, reply, orig_req): """Send an asynchronous reply to an earlier request. Parameters ---------- connection : ClientConnection object The client to send the reply to. reply : Message object The reply message to send. orig_req : Message object The request message being replied to. The reply message's id is overridden with the id from orig_req before the reply is sent. """ if isinstance(connection, ClientRequestConnection): self._logger.warn('Deprecation warning: do not use self.reply() within a reply handler context -- use req.reply(*msg_args)\nor req.reply_with_message(msg) Traceback:\n %s', ''.join(traceback.format_stack())) # Get the underlying ClientConnection instance connection = connection.client_connection # depends on [control=['if'], data=[]] connection.reply(reply, orig_req)
def build_db_query(fields_names, field_values): """ method builds query dictionary by zipping together DB field names with the field values """ if isinstance(field_values, string_types): field_values = [field_values] if len(fields_names) != len(field_values): raise ValueError('Error: unable to build a primary key query due ' 'to mismatch in number of fields {0} vs {1}' .format(len(fields_names), len(field_values))) query = dict() for k, v in zip(fields_names, field_values): query[k] = v return query
def function[build_db_query, parameter[fields_names, field_values]]: constant[ method builds query dictionary by zipping together DB field names with the field values ] if call[name[isinstance], parameter[name[field_values], name[string_types]]] begin[:] variable[field_values] assign[=] list[[<ast.Name object at 0x7da204621cc0>]] if compare[call[name[len], parameter[name[fields_names]]] not_equal[!=] call[name[len], parameter[name[field_values]]]] begin[:] <ast.Raise object at 0x7da2046233d0> variable[query] assign[=] call[name[dict], parameter[]] for taget[tuple[[<ast.Name object at 0x7da2046221a0>, <ast.Name object at 0x7da204623910>]]] in starred[call[name[zip], parameter[name[fields_names], name[field_values]]]] begin[:] call[name[query]][name[k]] assign[=] name[v] return[name[query]]
keyword[def] identifier[build_db_query] ( identifier[fields_names] , identifier[field_values] ): literal[string] keyword[if] identifier[isinstance] ( identifier[field_values] , identifier[string_types] ): identifier[field_values] =[ identifier[field_values] ] keyword[if] identifier[len] ( identifier[fields_names] )!= identifier[len] ( identifier[field_values] ): keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[len] ( identifier[fields_names] ), identifier[len] ( identifier[field_values] ))) identifier[query] = identifier[dict] () keyword[for] identifier[k] , identifier[v] keyword[in] identifier[zip] ( identifier[fields_names] , identifier[field_values] ): identifier[query] [ identifier[k] ]= identifier[v] keyword[return] identifier[query]
def build_db_query(fields_names, field_values): """ method builds query dictionary by zipping together DB field names with the field values """ if isinstance(field_values, string_types): field_values = [field_values] # depends on [control=['if'], data=[]] if len(fields_names) != len(field_values): raise ValueError('Error: unable to build a primary key query due to mismatch in number of fields {0} vs {1}'.format(len(fields_names), len(field_values))) # depends on [control=['if'], data=[]] query = dict() for (k, v) in zip(fields_names, field_values): query[k] = v # depends on [control=['for'], data=[]] return query
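# Hedged usage sketch: the zip loop above is equivalent to dict(zip(...)) once
# a lone string value has been normalised to a one-element list. Field names
# here are hypothetical.
query = build_db_query(['domain_name', 'timeperiod'], ['scope', '2017-04'])
assert query == {'domain_name': 'scope', 'timeperiod': '2017-04'}
assert build_db_query(['key_field'], 'abc') == {'key_field': 'abc'}  # bare string wrapped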
def spread(self, X): """ Calculate the average spread for each node. The average spread is a measure of how far each neuron is from the data points which cluster to it. Parameters ---------- X : numpy array The input data. Returns ------- spread : numpy array The average distance from each neuron to each data point. """ distance, _ = self.distance_function(X, self.weights) dists_per_neuron = defaultdict(list) for x, y in zip(np.argmin(distance, 1), distance): dists_per_neuron[x].append(y[x]) out = np.zeros(self.num_neurons) average_spread = {k: np.mean(v) for k, v in dists_per_neuron.items()} for x, y in average_spread.items(): out[x] = y return out
def function[spread, parameter[self, X]]: constant[ Calculate the average spread for each node. The average spread is a measure of how far each neuron is from the data points which cluster to it. Parameters ---------- X : numpy array The input data. Returns ------- spread : numpy array The average distance from each neuron to each data point. ] <ast.Tuple object at 0x7da2047e9c90> assign[=] call[name[self].distance_function, parameter[name[X], name[self].weights]] variable[dists_per_neuron] assign[=] call[name[defaultdict], parameter[name[list]]] for taget[tuple[[<ast.Name object at 0x7da1b2726da0>, <ast.Name object at 0x7da1b2725270>]]] in starred[call[name[zip], parameter[call[name[np].argmin, parameter[name[distance], constant[1]]], name[distance]]]] begin[:] call[call[name[dists_per_neuron]][name[x]].append, parameter[call[name[y]][name[x]]]] variable[out] assign[=] call[name[np].zeros, parameter[name[self].num_neurons]] variable[average_spread] assign[=] <ast.DictComp object at 0x7da1b2725990> for taget[tuple[[<ast.Name object at 0x7da1b2727b80>, <ast.Name object at 0x7da1b27258d0>]]] in starred[call[name[average_spread].items, parameter[]]] begin[:] call[name[out]][name[x]] assign[=] name[y] return[name[out]]
keyword[def] identifier[spread] ( identifier[self] , identifier[X] ): literal[string] identifier[distance] , identifier[_] = identifier[self] . identifier[distance_function] ( identifier[X] , identifier[self] . identifier[weights] ) identifier[dists_per_neuron] = identifier[defaultdict] ( identifier[list] ) keyword[for] identifier[x] , identifier[y] keyword[in] identifier[zip] ( identifier[np] . identifier[argmin] ( identifier[distance] , literal[int] ), identifier[distance] ): identifier[dists_per_neuron] [ identifier[x] ]. identifier[append] ( identifier[y] [ identifier[x] ]) identifier[out] = identifier[np] . identifier[zeros] ( identifier[self] . identifier[num_neurons] ) identifier[average_spread] ={ identifier[k] : identifier[np] . identifier[mean] ( identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[dists_per_neuron] . identifier[items] ()} keyword[for] identifier[x] , identifier[y] keyword[in] identifier[average_spread] . identifier[items] (): identifier[out] [ identifier[x] ]= identifier[y] keyword[return] identifier[out]
def spread(self, X): """ Calculate the average spread for each node. The average spread is a measure of how far each neuron is from the data points which cluster to it. Parameters ---------- X : numpy array The input data. Returns ------- spread : numpy array The average distance from each neuron to each data point. """ (distance, _) = self.distance_function(X, self.weights) dists_per_neuron = defaultdict(list) for (x, y) in zip(np.argmin(distance, 1), distance): dists_per_neuron[x].append(y[x]) # depends on [control=['for'], data=[]] out = np.zeros(self.num_neurons) average_spread = {k: np.mean(v) for (k, v) in dists_per_neuron.items()} for (x, y) in average_spread.items(): out[x] = y # depends on [control=['for'], data=[]] return out
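# Hedged standalone illustration of the bookkeeping in spread(): given a
# precomputed (n_samples, n_neurons) distance matrix, each output entry is the
# mean distance of the samples whose best-matching unit is that neuron.
import numpy as np

distance = np.array([[0.1, 0.9],
                     [0.2, 0.8],
                     [0.7, 0.3]])  # 3 samples, 2 neurons
bmus = np.argmin(distance, 1)     # [0, 0, 1]
out = np.zeros(distance.shape[1])
for neuron in np.unique(bmus):
    out[neuron] = distance[bmus == neuron, neuron].mean()
print(out)  # [0.15 0.3]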
def shapes(self): """Get all time-slice shapes, in order, from this reservation on.""" shapes = [] curRes = self while curRes is not None: shapes.append(curRes.shape) curRes = curRes.nReservation return shapes
def function[shapes, parameter[self]]: constant[Get all time-slice shapes, in order, from this reservation on.] variable[shapes] assign[=] list[[]] variable[curRes] assign[=] name[self] while compare[name[curRes] is_not constant[None]] begin[:] call[name[shapes].append, parameter[name[curRes].shape]] variable[curRes] assign[=] name[curRes].nReservation return[name[shapes]]
keyword[def] identifier[shapes] ( identifier[self] ): literal[string] identifier[shapes] =[] identifier[curRes] = identifier[self] keyword[while] identifier[curRes] keyword[is] keyword[not] keyword[None] : identifier[shapes] . identifier[append] ( identifier[curRes] . identifier[shape] ) identifier[curRes] = identifier[curRes] . identifier[nReservation] keyword[return] identifier[shapes]
def shapes(self): """Get all time-slice shapes, in order, from this reservation on.""" shapes = [] curRes = self while curRes is not None: shapes.append(curRes.shape) curRes = curRes.nReservation # depends on [control=['while'], data=['curRes']] return shapes
def _interfaces_ip(out): """ Uses ip to return a dictionary of interfaces with various information about each (up/down state, ip address, netmask, and hwaddr) """ ret = dict() def parse_network(value, cols): """ Return a tuple of ip, netmask, broadcast based on the current set of cols """ brd = None if '/' in value: # we have a CIDR in this address ip, cidr = value.split('/') # pylint: disable=C0103 else: ip = value # pylint: disable=C0103 cidr = 32 if type_ == 'inet': mask = cidr_to_ipv4_netmask(int(cidr)) if 'brd' in cols: brd = cols[cols.index('brd') + 1] elif type_ == 'inet6': mask = cidr return (ip, mask, brd) groups = re.compile('\r?\n\\d').split(out) for group in groups: iface = None data = dict() for line in group.splitlines(): if ' ' not in line: continue match = re.match(r'^\d*:\s+([\w.\-]+)(?:@)?([\w.\-]+)?:\s+<(.+)>', line) if match: iface, parent, attrs = match.groups() if 'UP' in attrs.split(','): data['up'] = True else: data['up'] = False if parent: data['parent'] = parent continue cols = line.split() if len(cols) >= 2: type_, value = tuple(cols[0:2]) iflabel = cols[-1:][0] if type_ in ('inet', 'inet6'): if 'secondary' not in cols: ipaddr, netmask, broadcast = parse_network(value, cols) if type_ == 'inet': if 'inet' not in data: data['inet'] = list() addr_obj = dict() addr_obj['address'] = ipaddr addr_obj['netmask'] = netmask addr_obj['broadcast'] = broadcast addr_obj['label'] = iflabel data['inet'].append(addr_obj) elif type_ == 'inet6': if 'inet6' not in data: data['inet6'] = list() addr_obj = dict() addr_obj['address'] = ipaddr addr_obj['prefixlen'] = netmask data['inet6'].append(addr_obj) else: if 'secondary' not in data: data['secondary'] = list() ip_, mask, brd = parse_network(value, cols) data['secondary'].append({ 'type': type_, 'address': ip_, 'netmask': mask, 'broadcast': brd, 'label': iflabel, }) del ip_, mask, brd elif type_.startswith('link'): data['hwaddr'] = value if iface: ret[iface] = data del iface, data return ret
def function[_interfaces_ip, parameter[out]]: constant[ Uses ip to return a dictionary of interfaces with various information about each (up/down state, ip address, netmask, and hwaddr) ] variable[ret] assign[=] call[name[dict], parameter[]] def function[parse_network, parameter[value, cols]]: constant[ Return a tuple of ip, netmask, broadcast based on the current set of cols ] variable[brd] assign[=] constant[None] if compare[constant[/] in name[value]] begin[:] <ast.Tuple object at 0x7da1b162ba90> assign[=] call[name[value].split, parameter[constant[/]]] if compare[name[type_] equal[==] constant[inet]] begin[:] variable[mask] assign[=] call[name[cidr_to_ipv4_netmask], parameter[call[name[int], parameter[name[cidr]]]]] if compare[constant[brd] in name[cols]] begin[:] variable[brd] assign[=] call[name[cols]][binary_operation[call[name[cols].index, parameter[constant[brd]]] + constant[1]]] return[tuple[[<ast.Name object at 0x7da1b162b1c0>, <ast.Name object at 0x7da1b162b190>, <ast.Name object at 0x7da1b162b160>]]] variable[groups] assign[=] call[call[name[re].compile, parameter[constant[ ? \d]]].split, parameter[name[out]]] for taget[name[group]] in starred[name[groups]] begin[:] variable[iface] assign[=] constant[None] variable[data] assign[=] call[name[dict], parameter[]] for taget[name[line]] in starred[call[name[group].splitlines, parameter[]]] begin[:] if compare[constant[ ] <ast.NotIn object at 0x7da2590d7190> name[line]] begin[:] continue variable[match] assign[=] call[name[re].match, parameter[constant[^\d*:\s+([\w.\-]+)(?:@)?([\w.\-]+)?:\s+<(.+)>], name[line]]] if name[match] begin[:] <ast.Tuple object at 0x7da1b162a950> assign[=] call[name[match].groups, parameter[]] if compare[constant[UP] in call[name[attrs].split, parameter[constant[,]]]] begin[:] call[name[data]][constant[up]] assign[=] constant[True] if name[parent] begin[:] call[name[data]][constant[parent]] assign[=] name[parent] continue variable[cols] assign[=] call[name[line].split, parameter[]] if compare[call[name[len], parameter[name[cols]]] greater_or_equal[>=] constant[2]] begin[:] <ast.Tuple object at 0x7da1b162a0e0> assign[=] call[name[tuple], parameter[call[name[cols]][<ast.Slice object at 0x7da1b1629f90>]]] variable[iflabel] assign[=] call[call[name[cols]][<ast.Slice object at 0x7da1b1629e10>]][constant[0]] if compare[name[type_] in tuple[[<ast.Constant object at 0x7da1b1629c90>, <ast.Constant object at 0x7da1b1629c60>]]] begin[:] if compare[constant[secondary] <ast.NotIn object at 0x7da2590d7190> name[cols]] begin[:] <ast.Tuple object at 0x7da1b1629b40> assign[=] call[name[parse_network], parameter[name[value], name[cols]]] if compare[name[type_] equal[==] constant[inet]] begin[:] if compare[constant[inet] <ast.NotIn object at 0x7da2590d7190> name[data]] begin[:] call[name[data]][constant[inet]] assign[=] call[name[list], parameter[]] variable[addr_obj] assign[=] call[name[dict], parameter[]] call[name[addr_obj]][constant[address]] assign[=] name[ipaddr] call[name[addr_obj]][constant[netmask]] assign[=] name[netmask] call[name[addr_obj]][constant[broadcast]] assign[=] name[broadcast] call[name[addr_obj]][constant[label]] assign[=] name[iflabel] call[call[name[data]][constant[inet]].append, parameter[name[addr_obj]]] if name[iface] begin[:] call[name[ret]][name[iface]] assign[=] name[data] <ast.Delete object at 0x7da1b162eaa0> return[name[ret]]
keyword[def] identifier[_interfaces_ip] ( identifier[out] ): literal[string] identifier[ret] = identifier[dict] () keyword[def] identifier[parse_network] ( identifier[value] , identifier[cols] ): literal[string] identifier[brd] = keyword[None] keyword[if] literal[string] keyword[in] identifier[value] : identifier[ip] , identifier[cidr] = identifier[value] . identifier[split] ( literal[string] ) keyword[else] : identifier[ip] = identifier[value] identifier[cidr] = literal[int] keyword[if] identifier[type_] == literal[string] : identifier[mask] = identifier[cidr_to_ipv4_netmask] ( identifier[int] ( identifier[cidr] )) keyword[if] literal[string] keyword[in] identifier[cols] : identifier[brd] = identifier[cols] [ identifier[cols] . identifier[index] ( literal[string] )+ literal[int] ] keyword[elif] identifier[type_] == literal[string] : identifier[mask] = identifier[cidr] keyword[return] ( identifier[ip] , identifier[mask] , identifier[brd] ) identifier[groups] = identifier[re] . identifier[compile] ( literal[string] ). identifier[split] ( identifier[out] ) keyword[for] identifier[group] keyword[in] identifier[groups] : identifier[iface] = keyword[None] identifier[data] = identifier[dict] () keyword[for] identifier[line] keyword[in] identifier[group] . identifier[splitlines] (): keyword[if] literal[string] keyword[not] keyword[in] identifier[line] : keyword[continue] identifier[match] = identifier[re] . identifier[match] ( literal[string] , identifier[line] ) keyword[if] identifier[match] : identifier[iface] , identifier[parent] , identifier[attrs] = identifier[match] . identifier[groups] () keyword[if] literal[string] keyword[in] identifier[attrs] . identifier[split] ( literal[string] ): identifier[data] [ literal[string] ]= keyword[True] keyword[else] : identifier[data] [ literal[string] ]= keyword[False] keyword[if] identifier[parent] : identifier[data] [ literal[string] ]= identifier[parent] keyword[continue] identifier[cols] = identifier[line] . identifier[split] () keyword[if] identifier[len] ( identifier[cols] )>= literal[int] : identifier[type_] , identifier[value] = identifier[tuple] ( identifier[cols] [ literal[int] : literal[int] ]) identifier[iflabel] = identifier[cols] [- literal[int] :][ literal[int] ] keyword[if] identifier[type_] keyword[in] ( literal[string] , literal[string] ): keyword[if] literal[string] keyword[not] keyword[in] identifier[cols] : identifier[ipaddr] , identifier[netmask] , identifier[broadcast] = identifier[parse_network] ( identifier[value] , identifier[cols] ) keyword[if] identifier[type_] == literal[string] : keyword[if] literal[string] keyword[not] keyword[in] identifier[data] : identifier[data] [ literal[string] ]= identifier[list] () identifier[addr_obj] = identifier[dict] () identifier[addr_obj] [ literal[string] ]= identifier[ipaddr] identifier[addr_obj] [ literal[string] ]= identifier[netmask] identifier[addr_obj] [ literal[string] ]= identifier[broadcast] identifier[addr_obj] [ literal[string] ]= identifier[iflabel] identifier[data] [ literal[string] ]. identifier[append] ( identifier[addr_obj] ) keyword[elif] identifier[type_] == literal[string] : keyword[if] literal[string] keyword[not] keyword[in] identifier[data] : identifier[data] [ literal[string] ]= identifier[list] () identifier[addr_obj] = identifier[dict] () identifier[addr_obj] [ literal[string] ]= identifier[ipaddr] identifier[addr_obj] [ literal[string] ]= identifier[netmask] identifier[data] [ literal[string] ]. identifier[append] ( identifier[addr_obj] ) keyword[else] : keyword[if] literal[string] keyword[not] keyword[in] identifier[data] : identifier[data] [ literal[string] ]= identifier[list] () identifier[ip_] , identifier[mask] , identifier[brd] = identifier[parse_network] ( identifier[value] , identifier[cols] ) identifier[data] [ literal[string] ]. identifier[append] ({ literal[string] : identifier[type_] , literal[string] : identifier[ip_] , literal[string] : identifier[mask] , literal[string] : identifier[brd] , literal[string] : identifier[iflabel] , }) keyword[del] identifier[ip_] , identifier[mask] , identifier[brd] keyword[elif] identifier[type_] . identifier[startswith] ( literal[string] ): identifier[data] [ literal[string] ]= identifier[value] keyword[if] identifier[iface] : identifier[ret] [ identifier[iface] ]= identifier[data] keyword[del] identifier[iface] , identifier[data] keyword[return] identifier[ret]
def _interfaces_ip(out): """ Uses ip to return a dictionary of interfaces with various information about each (up/down state, ip address, netmask, and hwaddr) """ ret = dict() def parse_network(value, cols): """ Return a tuple of ip, netmask, broadcast based on the current set of cols """ brd = None if '/' in value: # we have a CIDR in this address (ip, cidr) = value.split('/') # pylint: disable=C0103 # depends on [control=['if'], data=['value']] else: ip = value # pylint: disable=C0103 cidr = 32 if type_ == 'inet': mask = cidr_to_ipv4_netmask(int(cidr)) if 'brd' in cols: brd = cols[cols.index('brd') + 1] # depends on [control=['if'], data=['cols']] # depends on [control=['if'], data=[]] elif type_ == 'inet6': mask = cidr # depends on [control=['if'], data=[]] return (ip, mask, brd) groups = re.compile('\r?\n\\d').split(out) for group in groups: iface = None data = dict() for line in group.splitlines(): if ' ' not in line: continue # depends on [control=['if'], data=[]] match = re.match('^\\d*:\\s+([\\w.\\-]+)(?:@)?([\\w.\\-]+)?:\\s+<(.+)>', line) if match: (iface, parent, attrs) = match.groups() if 'UP' in attrs.split(','): data['up'] = True # depends on [control=['if'], data=[]] else: data['up'] = False if parent: data['parent'] = parent # depends on [control=['if'], data=[]] continue # depends on [control=['if'], data=[]] cols = line.split() if len(cols) >= 2: (type_, value) = tuple(cols[0:2]) iflabel = cols[-1:][0] if type_ in ('inet', 'inet6'): if 'secondary' not in cols: (ipaddr, netmask, broadcast) = parse_network(value, cols) if type_ == 'inet': if 'inet' not in data: data['inet'] = list() # depends on [control=['if'], data=['data']] addr_obj = dict() addr_obj['address'] = ipaddr addr_obj['netmask'] = netmask addr_obj['broadcast'] = broadcast addr_obj['label'] = iflabel data['inet'].append(addr_obj) # depends on [control=['if'], data=[]] elif type_ == 'inet6': if 'inet6' not in data: data['inet6'] = list() # depends on [control=['if'], data=['data']] addr_obj = dict() addr_obj['address'] = ipaddr addr_obj['prefixlen'] = netmask data['inet6'].append(addr_obj) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['cols']] else: if 'secondary' not in data: data['secondary'] = list() # depends on [control=['if'], data=['data']] (ip_, mask, brd) = parse_network(value, cols) data['secondary'].append({'type': type_, 'address': ip_, 'netmask': mask, 'broadcast': brd, 'label': iflabel}) del ip_, mask, brd # depends on [control=['if'], data=['type_']] elif type_.startswith('link'): data['hwaddr'] = value # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']] if iface: ret[iface] = data del iface, data # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['group']] return ret
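# Hedged sketch of the cidr_to_ipv4_netmask helper the parser above relies on;
# its definition is not part of this snippet, but the stdlib ipaddress module
# yields an equivalent result.
import ipaddress

def cidr_to_ipv4_netmask(cidr_bits):
    return str(ipaddress.ip_network('0.0.0.0/%d' % cidr_bits).netmask)

assert cidr_to_ipv4_netmask(24) == '255.255.255.0'
assert cidr_to_ipv4_netmask(16) == '255.255.0.0'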
def complex_state_generator_bravo(last_state=''):
    """Pick a state."""
    import logging
    from random import choice
    states = ['ALPHA', 'BRAVO', 'BRAVO', 'DONE']
    if last_state:
        # Drop one occurrence: BRAVO's odds fall, ALPHA/DONE cannot repeat.
        states.remove(last_state)
    state = choice(states)
    logging.info('Generating a state... %s', state)
    return state
def function[complex_state_generator_bravo, parameter[last_state]]: constant[Pick a state.] from relative_module[random] import module[choice] variable[states] assign[=] list[[<ast.Constant object at 0x7da20c6a8130>, <ast.Constant object at 0x7da20c6a86a0>, <ast.Constant object at 0x7da20c6a8280>, <ast.Constant object at 0x7da20c6a9db0>]] if name[last_state] begin[:] call[name[states].remove, parameter[name[last_state]]] variable[state] assign[=] call[name[choice], parameter[name[states]]] call[name[logging].info, parameter[constant[Generating a state... %s], name[state]]] return[name[state]]
keyword[def] identifier[complex_state_generator_bravo] ( identifier[last_state] = literal[string] ): literal[string] keyword[from] identifier[random] keyword[import] identifier[choice] identifier[states] =[ literal[string] , literal[string] , literal[string] , literal[string] ] keyword[if] identifier[last_state] : identifier[states] . identifier[remove] ( identifier[last_state] ) identifier[state] = identifier[choice] ( identifier[states] ) identifier[logging] . identifier[info] ( literal[string] , identifier[state] ) keyword[return] identifier[state]
def complex_state_generator_bravo(last_state=''): """Pick a state.""" from random import choice states = ['ALPHA', 'BRAVO', 'BRAVO', 'DONE'] if last_state: states.remove(last_state) # Slightly lower chances of previous state. # depends on [control=['if'], data=[]] state = choice(states) logging.info('Generating a state... %s', state) return state
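A small driver loop, assuming the generator above is importable, walks it like a state machine until the terminal state is drawn; each call logs its pick via the standard logging module.

import logging

logging.basicConfig(level=logging.INFO)

state = ''
steps = 0
while state != 'DONE':
    state = complex_state_generator_bravo(state)
    steps += 1
print('reached DONE after %d draws' % steps)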
def _conv(self, name, x, filter_size, in_filters, out_filters, strides):
    """Convolution."""
    with tf.variable_scope(name):
        # He-style initialization: stddev = sqrt(2 / n), n = k * k * out_filters.
        n = filter_size * filter_size * out_filters
        kernel = tf.get_variable(
            "DW", [filter_size, filter_size, in_filters, out_filters],
            tf.float32, initializer=tf.random_normal_initializer(
                stddev=np.sqrt(2.0 / n)))
        return tf.nn.conv2d(x, kernel, strides, padding="SAME")
def function[_conv, parameter[self, name, x, filter_size, in_filters, out_filters, strides]]: constant[Convolution.] with call[name[tf].variable_scope, parameter[name[name]]] begin[:] variable[n] assign[=] binary_operation[binary_operation[name[filter_size] * name[filter_size]] * name[out_filters]] variable[kernel] assign[=] call[name[tf].get_variable, parameter[constant[DW], list[[<ast.Name object at 0x7da207f9ad70>, <ast.Name object at 0x7da207f98370>, <ast.Name object at 0x7da207f99ed0>, <ast.Name object at 0x7da207f99a20>]], name[tf].float32]] return[call[name[tf].nn.conv2d, parameter[name[x], name[kernel], name[strides]]]]
keyword[def] identifier[_conv] ( identifier[self] , identifier[name] , identifier[x] , identifier[filter_size] , identifier[in_filters] , identifier[out_filters] , identifier[strides] ): literal[string] keyword[with] identifier[tf] . identifier[variable_scope] ( identifier[name] ): identifier[n] = identifier[filter_size] * identifier[filter_size] * identifier[out_filters] identifier[kernel] = identifier[tf] . identifier[get_variable] ( literal[string] ,[ identifier[filter_size] , identifier[filter_size] , identifier[in_filters] , identifier[out_filters] ], identifier[tf] . identifier[float32] , identifier[initializer] = identifier[tf] . identifier[random_normal_initializer] ( identifier[stddev] = identifier[np] . identifier[sqrt] ( literal[int] / identifier[n] ))) keyword[return] identifier[tf] . identifier[nn] . identifier[conv2d] ( identifier[x] , identifier[kernel] , identifier[strides] , identifier[padding] = literal[string] )
def _conv(self, name, x, filter_size, in_filters, out_filters, strides): """Convolution.""" with tf.variable_scope(name): n = filter_size * filter_size * out_filters kernel = tf.get_variable('DW', [filter_size, filter_size, in_filters, out_filters], tf.float32, initializer=tf.random_normal_initializer(stddev=np.sqrt(2.0 / n))) return tf.nn.conv2d(x, kernel, strides, padding='SAME') # depends on [control=['with'], data=[]]
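A minimal smoke test for the convolution helper, assuming TensorFlow 1.x graph mode (tf.variable_scope and tf.placeholder are 1.x APIs). Since `self` is never used in the body, None stands in for it here.

import numpy as np
import tensorflow as tf

x = tf.placeholder(tf.float32, [None, 32, 32, 3])
y = _conv(None, 'conv1', x, filter_size=3, in_filters=3,
          out_filters=16, strides=[1, 1, 1, 1])
print(y.shape)  # (?, 32, 32, 16): SAME padding with stride 1 preserves H and W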
def mkdir(self, pathobj, _):
    """
    Creates a remote directory.
    Note that this operation is not recursive.
    """
    if not pathobj.drive or not pathobj.root:
        raise RuntimeError("Full path required: '%s'" % str(pathobj))

    if pathobj.exists():
        raise OSError(17, "File exists: '%s'" % str(pathobj))

    url = str(pathobj) + '/'
    text, code = self.rest_put(url,
                               session=pathobj.session,
                               verify=pathobj.verify,
                               cert=pathobj.cert)

    if code != 201:
        raise RuntimeError("%s %d" % (text, code))
def function[mkdir, parameter[self, pathobj, _]]: constant[ Creates remote directory Note that this operation is not recursive ] if <ast.BoolOp object at 0x7da204962f20> begin[:] <ast.Raise object at 0x7da1b086eb60> if call[name[pathobj].exists, parameter[]] begin[:] <ast.Raise object at 0x7da1b086e8c0> variable[url] assign[=] binary_operation[call[name[str], parameter[name[pathobj]]] + constant[/]] <ast.Tuple object at 0x7da1b086d330> assign[=] call[name[self].rest_put, parameter[name[url]]] if <ast.UnaryOp object at 0x7da1b0865360> begin[:] <ast.Raise object at 0x7da1b0865060>
keyword[def] identifier[mkdir] ( identifier[self] , identifier[pathobj] , identifier[_] ): literal[string] keyword[if] keyword[not] identifier[pathobj] . identifier[drive] keyword[or] keyword[not] identifier[pathobj] . identifier[root] : keyword[raise] identifier[RuntimeError] ( literal[string] % identifier[str] ( identifier[pathobj] )) keyword[if] identifier[pathobj] . identifier[exists] (): keyword[raise] identifier[OSError] ( literal[int] , literal[string] % identifier[str] ( identifier[pathobj] )) identifier[url] = identifier[str] ( identifier[pathobj] )+ literal[string] identifier[text] , identifier[code] = identifier[self] . identifier[rest_put] ( identifier[url] , identifier[session] = identifier[pathobj] . identifier[session] , identifier[verify] = identifier[pathobj] . identifier[verify] , identifier[cert] = identifier[pathobj] . identifier[cert] ) keyword[if] keyword[not] identifier[code] == literal[int] : keyword[raise] identifier[RuntimeError] ( literal[string] %( identifier[text] , identifier[code] ))
def mkdir(self, pathobj, _): """ Creates remote directory Note that this operation is not recursive """ if not pathobj.drive or not pathobj.root: raise RuntimeError("Full path required: '%s'" % str(pathobj)) # depends on [control=['if'], data=[]] if pathobj.exists(): raise OSError(17, "File exists: '%s'" % str(pathobj)) # depends on [control=['if'], data=[]] url = str(pathobj) + '/' (text, code) = self.rest_put(url, session=pathobj.session, verify=pathobj.verify, cert=pathobj.cert) if not code == 201: raise RuntimeError('%s %d' % (text, code)) # depends on [control=['if'], data=[]]
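The method boils down to one PUT against the path URL with a trailing slash, expecting HTTP 201 back, which matches how Artifactory creates folders. An equivalent raw call with requests, against a hypothetical instance and credentials:

import requests

url = 'http://example.com/artifactory/my-repo/new-dir/'  # hypothetical URL
resp = requests.put(url, auth=('admin', 'password'))      # hypothetical creds
assert resp.status_code == 201, '%s %d' % (resp.text, resp.status_code)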
def get_or_create_environment(self, repo: str, branch: str, git_repo: Repo, repo_path: Path) -> str:
    """
    Handles the requirements in the target repository, returns a path to
    the Python executable of the virtualenv.
    """
    return str(self.get_or_create_venv(repo_path).resolve() / "bin" / "python")
def function[get_or_create_environment, parameter[self, repo, branch, git_repo, repo_path]]: constant[ Handles the requirements in the target repository, returns a path to a executable of the virtualenv. ] return[call[name[str], parameter[binary_operation[binary_operation[call[call[name[self].get_or_create_venv, parameter[name[repo_path]]].resolve, parameter[]] / constant[bin]] / constant[python]]]]]
keyword[def] identifier[get_or_create_environment] ( identifier[self] , identifier[repo] : identifier[str] , identifier[branch] : identifier[str] , identifier[git_repo] : identifier[Repo] , identifier[repo_path] : identifier[Path] )-> identifier[str] : literal[string] keyword[return] identifier[str] ( identifier[self] . identifier[get_or_create_venv] ( identifier[repo_path] ). identifier[resolve] ()/ literal[string] / literal[string] )
def get_or_create_environment(self, repo: str, branch: str, git_repo: Repo, repo_path: Path) -> str: """ Handles the requirements in the target repository, returns a path to a executable of the virtualenv. """ return str(self.get_or_create_venv(repo_path).resolve() / 'bin' / 'python')
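The return value is just the interpreter inside the repository's virtualenv, so the natural next step is to drive pip or a test runner with it. A hedged sketch; the path shown is an illustrative value of what get_or_create_venv() might resolve to.

import subprocess

python = '/tmp/checkout/.venv/bin/python'  # illustrative return value
subprocess.check_call([python, '-m', 'pip', 'install', '-r', 'requirements.txt'])
subprocess.check_call([python, '-m', 'pytest'])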
def user_stats(request):
    """
    JSON of user stats of the user

    GET parameters:
      html (bool): turn on the HTML version of the API, defaults to false
      user (int): identifier of the user, defaults to the logged-in user
      concepts (list): list of identifiers of concepts, defaults to all concepts
      lang (str): language of requested concepts, defaults to language from django
    """
    user = get_user_id(request)
    language = get_language(request)
    concepts = None  # None means all concepts
    if "concepts" in request.GET:
        concepts = Concept.objects.filter(lang=language, active=True,
                                          identifier__in=load_query_json(request.GET, "concepts"))
    data = UserStat.objects.get_user_stats(user, language, concepts)
    return render_json(request, data, template='concepts_json.html', help_text=user_stats.__doc__)
def function[user_stats, parameter[request]]: constant[ JSON of user stats of the user GET parameters: html (bool): turn on the HTML version of the API, defaults to false user (int): identifier of the user, defaults to logged user concepts (list): list of identifiers of concepts, defaults to all concepts lang (str): language of requested concepts, defaults to language from django ] variable[user] assign[=] call[name[get_user_id], parameter[name[request]]] variable[language] assign[=] call[name[get_language], parameter[name[request]]] variable[concepts] assign[=] constant[None] if compare[constant[concepts] in name[request].GET] begin[:] variable[concepts] assign[=] call[name[Concept].objects.filter, parameter[]] variable[data] assign[=] call[name[UserStat].objects.get_user_stats, parameter[name[user], name[language], name[concepts]]] return[call[name[render_json], parameter[name[request], name[data]]]]
keyword[def] identifier[user_stats] ( identifier[request] ): literal[string] identifier[user] = identifier[get_user_id] ( identifier[request] ) identifier[language] = identifier[get_language] ( identifier[request] ) identifier[concepts] = keyword[None] keyword[if] literal[string] keyword[in] identifier[request] . identifier[GET] : identifier[concepts] = identifier[Concept] . identifier[objects] . identifier[filter] ( identifier[lang] = identifier[language] , identifier[active] = keyword[True] , identifier[identifier__in] = identifier[load_query_json] ( identifier[request] . identifier[GET] , literal[string] )) identifier[data] = identifier[UserStat] . identifier[objects] . identifier[get_user_stats] ( identifier[user] , identifier[language] , identifier[concepts] ) keyword[return] identifier[render_json] ( identifier[request] , identifier[data] , identifier[template] = literal[string] , identifier[help_text] = identifier[user_stats] . identifier[__doc__] )
def user_stats(request): """ JSON of user stats of the user GET parameters: html (bool): turn on the HTML version of the API, defaults to false user (int): identifier of the user, defaults to logged user concepts (list): list of identifiers of concepts, defaults to all concepts lang (str): language of requested concepts, defaults to language from django """ user = get_user_id(request) language = get_language(request) concepts = None # meaning all concept if 'concepts' in request.GET: concepts = Concept.objects.filter(lang=language, active=True, identifier__in=load_query_json(request.GET, 'concepts')) # depends on [control=['if'], data=[]] data = UserStat.objects.get_user_stats(user, language, concepts) return render_json(request, data, template='concepts_json.html', help_text=user_stats.__doc__)
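Client-side, the view takes plain GET parameters; `concepts` travels JSON-encoded, since the view reads it through load_query_json. A sketch with Django's test client, inside a configured project; the URL and concept identifiers are assumptions.

from django.test import Client

client = Client()
response = client.get('/user_stats/', {              # hypothetical route
    'concepts': '["addition", "subtraction"]',       # JSON-encoded identifiers
    'html': 'false',
})
print(response.status_code, response['Content-Type'])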
def get_last_doc(self):
    """Returns the last document stored in the Solr engine."""
    # Search everything, sort by descending timestamp, return one row.
    try:
        result = self.solr.search('*:*', sort='_ts desc', rows=1)
    except ValueError:
        return None

    # rows=1 yields at most one hit; rename its unique key to '_id'.
    for r in result:
        r['_id'] = r.pop(self.unique_key)
        return r
def function[get_last_doc, parameter[self]]: constant[Returns the last document stored in the Solr engine. ] <ast.Try object at 0x7da1b11c5660> for taget[name[r]] in starred[name[result]] begin[:] call[name[r]][constant[_id]] assign[=] call[name[r].pop, parameter[name[self].unique_key]] return[name[r]]
keyword[def] identifier[get_last_doc] ( identifier[self] ): literal[string] keyword[try] : identifier[result] = identifier[self] . identifier[solr] . identifier[search] ( literal[string] , identifier[sort] = literal[string] , identifier[rows] = literal[int] ) keyword[except] identifier[ValueError] : keyword[return] keyword[None] keyword[for] identifier[r] keyword[in] identifier[result] : identifier[r] [ literal[string] ]= identifier[r] . identifier[pop] ( identifier[self] . identifier[unique_key] ) keyword[return] identifier[r]
def get_last_doc(self): """Returns the last document stored in the Solr engine. """ #search everything, sort by descending timestamp, return 1 row try: result = self.solr.search('*:*', sort='_ts desc', rows=1) # depends on [control=['try'], data=[]] except ValueError: return None # depends on [control=['except'], data=[]] for r in result: r['_id'] = r.pop(self.unique_key) return r # depends on [control=['for'], data=['r']]
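A hedged harness for the method above: a stand-in class supplies the two attributes it touches (`solr` and `unique_key`) and reuses the function as a method; the core URL is an assumption.

import pysolr

class _Harness:
    unique_key = 'id'
    solr = pysolr.Solr('http://localhost:8983/solr/mycore')  # hypothetical core
    get_last_doc = get_last_doc  # reuse the function defined above

doc = _Harness().get_last_doc()
print(doc)  # None on an empty core; otherwise the newest doc, keyed by '_id'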
def remove_armor(armored_data):
    """Decode armored data into its binary form."""
    stream = io.BytesIO(armored_data)
    # Drop the three header lines and the footer; the rest is the base64 body.
    lines = stream.readlines()[3:-1]
    data = base64.b64decode(b''.join(lines))
    # The last three decoded bytes carry the CRC-24 of the payload.
    payload, checksum = data[:-3], data[-3:]
    assert util.crc24(payload) == checksum
    return payload
def function[remove_armor, parameter[armored_data]]: constant[Decode armored data into its binary form.] variable[stream] assign[=] call[name[io].BytesIO, parameter[name[armored_data]]] variable[lines] assign[=] call[call[name[stream].readlines, parameter[]]][<ast.Slice object at 0x7da1b12f1510>] variable[data] assign[=] call[name[base64].b64decode, parameter[call[constant[b''].join, parameter[name[lines]]]]] <ast.Tuple object at 0x7da1b12f0700> assign[=] tuple[[<ast.Subscript object at 0x7da1b12f3fd0>, <ast.Subscript object at 0x7da1b12f0fd0>]] assert[compare[call[name[util].crc24, parameter[name[payload]]] equal[==] name[checksum]]] return[name[payload]]
keyword[def] identifier[remove_armor] ( identifier[armored_data] ): literal[string] identifier[stream] = identifier[io] . identifier[BytesIO] ( identifier[armored_data] ) identifier[lines] = identifier[stream] . identifier[readlines] ()[ literal[int] :- literal[int] ] identifier[data] = identifier[base64] . identifier[b64decode] ( literal[string] . identifier[join] ( identifier[lines] )) identifier[payload] , identifier[checksum] = identifier[data] [:- literal[int] ], identifier[data] [- literal[int] :] keyword[assert] identifier[util] . identifier[crc24] ( identifier[payload] )== identifier[checksum] keyword[return] identifier[payload]
def remove_armor(armored_data): """Decode armored data into its binary form.""" stream = io.BytesIO(armored_data) lines = stream.readlines()[3:-1] data = base64.b64decode(b''.join(lines)) (payload, checksum) = (data[:-3], data[-3:]) assert util.crc24(payload) == checksum return payload
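A round-trip sketch for the reader above, run in the same namespace so the `util` name it references resolves to the stand-in below. The armor layout mirrors what the reader expects: three header lines, a base64 body carrying payload plus 3-byte CRC, one footer line. The stand-in implements OpenPGP's CRC-24 (init 0xB704CE, polynomial 0x864CFB) and assumes the real util.crc24 also returns the 3-byte big-endian digest.

import base64
import types

def _crc24(blob):
    # OpenPGP CRC-24: init 0xB704CE, generator polynomial 0x864CFB.
    crc = 0xB704CE
    for byte in bytearray(blob):
        crc ^= byte << 16
        for _ in range(8):
            crc <<= 1
            if crc & 0x1000000:
                crc ^= 0x1864CFB
    return (crc & 0xFFFFFF).to_bytes(3, 'big')

util = types.SimpleNamespace(crc24=_crc24)  # stand-in for the module helper

payload = b'hello world'
armored = (b'-----BEGIN PGP MESSAGE-----\n\n\n'
           + base64.b64encode(payload + _crc24(payload))
           + b'\n-----END PGP MESSAGE-----\n')
assert remove_armor(armored) == payload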
def angular_separation(lonp1, latp1, lonp2, latp2):
    """
    Compute the angles between lon / lat points p1 and p2 given in radians.
    On the unit sphere, this also corresponds to the great circle distance.

    p1 and p2 can be numpy arrays of the same length.
    """

    xp1, yp1, zp1 = lonlat2xyz(lonp1, latp1)
    xp2, yp2, zp2 = lonlat2xyz(lonp2, latp2)

    # Dot product gives cos(angle); clip guards against rounding drift past +/-1.
    angles = np.arccos(np.clip(xp1 * xp2 + yp1 * yp2 + zp1 * zp2, -1.0, 1.0))

    # As this is a unit sphere, angle = arc length.
    return angles
def function[angular_separation, parameter[lonp1, latp1, lonp2, latp2]]: constant[ Compute the angles between lon / lat points p1 and p2 given in radians. On the unit sphere, this also corresponds to the great circle distance. p1 and p2 can be numpy arrays of the same length. ] <ast.Tuple object at 0x7da20c76f040> assign[=] call[name[lonlat2xyz], parameter[name[lonp1], name[latp1]]] <ast.Tuple object at 0x7da20c76ebf0> assign[=] call[name[lonlat2xyz], parameter[name[lonp2], name[latp2]]] variable[angles] assign[=] call[name[np].arccos, parameter[binary_operation[binary_operation[binary_operation[name[xp1] * name[xp2]] + binary_operation[name[yp1] * name[yp2]]] + binary_operation[name[zp1] * name[zp2]]]]] return[name[angles]]
keyword[def] identifier[angular_separation] ( identifier[lonp1] , identifier[latp1] , identifier[lonp2] , identifier[latp2] ): literal[string] identifier[xp1] , identifier[yp1] , identifier[zp1] = identifier[lonlat2xyz] ( identifier[lonp1] , identifier[latp1] ) identifier[xp2] , identifier[yp2] , identifier[zp2] = identifier[lonlat2xyz] ( identifier[lonp2] , identifier[latp2] ) identifier[angles] = identifier[np] . identifier[arccos] (( identifier[xp1] * identifier[xp2] + identifier[yp1] * identifier[yp2] + identifier[zp1] * identifier[zp2] )) keyword[return] identifier[angles]
def angular_separation(lonp1, latp1, lonp2, latp2): """ Compute the angles between lon / lat points p1 and p2 given in radians. On the unit sphere, this also corresponds to the great circle distance. p1 and p2 can be numpy arrays of the same length. """ (xp1, yp1, zp1) = lonlat2xyz(lonp1, latp1) (xp2, yp2, zp2) = lonlat2xyz(lonp2, latp2) ## dot products to obtain angles angles = np.arccos(xp1 * xp2 + yp1 * yp2 + zp1 * zp2) ## As this is a unit sphere, angle = length return angles
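A quick check of the great-circle property: London to Paris should come out near 344 km once the unit-sphere angle is scaled by Earth's mean radius. The lonlat2xyz stand-in below is an assumption meant to match the helper the function calls.

import numpy as np

def lonlat2xyz(lon, lat):
    # Stand-in for the module helper: unit-sphere Cartesian coordinates.
    return np.cos(lat) * np.cos(lon), np.cos(lat) * np.sin(lon), np.sin(lat)

lon1, lat1 = np.radians([-0.1276, 51.5072])  # London
lon2, lat2 = np.radians([2.3522, 48.8566])   # Paris
print(angular_separation(lon1, lat1, lon2, lat2) * 6371.0)  # ~344 km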
def _fix_path():
    """Finds the google_appengine directory and fixes Python imports to use it."""
    import os
    import sys
    all_paths = os.environ.get('PYTHONPATH', '').split(os.pathsep)
    for path_dir in all_paths:
        dev_appserver_path = os.path.join(path_dir, 'dev_appserver.py')
        if os.path.exists(dev_appserver_path):
            logging.debug('Found appengine SDK on path!')
            google_appengine = os.path.dirname(os.path.realpath(dev_appserver_path))
            sys.path.append(google_appengine)
            # The next import fixes up sys.path even further, bringing in any
            # dependent lib directories that the SDK needs.
            dev_appserver = __import__('dev_appserver')
            sys.path.extend(dev_appserver.EXTRA_PATHS)
            return
def function[_fix_path, parameter[]]: constant[Finds the google_appengine directory and fixes Python imports to use it.] import module[os] import module[sys] variable[all_paths] assign[=] call[call[name[os].environ.get, parameter[constant[PYTHONPATH]]].split, parameter[name[os].pathsep]] for taget[name[path_dir]] in starred[name[all_paths]] begin[:] variable[dev_appserver_path] assign[=] call[name[os].path.join, parameter[name[path_dir], constant[dev_appserver.py]]] if call[name[os].path.exists, parameter[name[dev_appserver_path]]] begin[:] call[name[logging].debug, parameter[constant[Found appengine SDK on path!]]] variable[google_appengine] assign[=] call[name[os].path.dirname, parameter[call[name[os].path.realpath, parameter[name[dev_appserver_path]]]]] call[name[sys].path.append, parameter[name[google_appengine]]] variable[dev_appserver] assign[=] call[name[__import__], parameter[constant[dev_appserver]]] call[name[sys].path.extend, parameter[name[dev_appserver].EXTRA_PATHS]] return[None]
keyword[def] identifier[_fix_path] (): literal[string] keyword[import] identifier[os] keyword[import] identifier[sys] identifier[all_paths] = identifier[os] . identifier[environ] . identifier[get] ( literal[string] ). identifier[split] ( identifier[os] . identifier[pathsep] ) keyword[for] identifier[path_dir] keyword[in] identifier[all_paths] : identifier[dev_appserver_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[path_dir] , literal[string] ) keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[dev_appserver_path] ): identifier[logging] . identifier[debug] ( literal[string] ) identifier[google_appengine] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[os] . identifier[path] . identifier[realpath] ( identifier[dev_appserver_path] )) identifier[sys] . identifier[path] . identifier[append] ( identifier[google_appengine] ) identifier[dev_appserver] = identifier[__import__] ( literal[string] ) identifier[sys] . identifier[path] . identifier[extend] ( identifier[dev_appserver] . identifier[EXTRA_PATHS] ) keyword[return]
def _fix_path(): """Finds the google_appengine directory and fixes Python imports to use it.""" import os import sys all_paths = os.environ.get('PYTHONPATH').split(os.pathsep) for path_dir in all_paths: dev_appserver_path = os.path.join(path_dir, 'dev_appserver.py') if os.path.exists(dev_appserver_path): logging.debug('Found appengine SDK on path!') google_appengine = os.path.dirname(os.path.realpath(dev_appserver_path)) sys.path.append(google_appengine) # Use the next import will fix up sys.path even further to bring in # any dependent lib directories that the SDK needs. dev_appserver = __import__('dev_appserver') sys.path.extend(dev_appserver.EXTRA_PATHS) return # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['path_dir']]
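Typical use is a one-off call at the top of a test or tool script: PYTHONPATH must contain a directory holding dev_appserver.py (usually the SDK root), after which SDK modules import normally. A minimal sketch:

import logging

logging.basicConfig(level=logging.DEBUG)
_fix_path()
from google.appengine.ext import testbed  # noqa: E402 -- importable after the fix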