code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def list_stack(profile=None): ''' Return a list of available stack (heat stack-list) profile Profile to use CLI Example: .. code-block:: bash salt '*' heat.list_stack profile=openstack1 ''' ret = {} h_client = _auth(profile) for stack in h_client.stacks.list(): links = {} for link in stack.links: links[link['rel']] = link['href'] ret[stack.stack_name] = { 'status': stack.stack_status, 'id': stack.id, 'name': stack.stack_name, 'creation': stack.creation_time, 'owner': stack.stack_owner, 'reason': stack.stack_status_reason, 'links': links, } return ret
def function[list_stack, parameter[profile]]: constant[ Return a list of available stack (heat stack-list) profile Profile to use CLI Example: .. code-block:: bash salt '*' heat.list_stack profile=openstack1 ] variable[ret] assign[=] dictionary[[], []] variable[h_client] assign[=] call[name[_auth], parameter[name[profile]]] for taget[name[stack]] in starred[call[name[h_client].stacks.list, parameter[]]] begin[:] variable[links] assign[=] dictionary[[], []] for taget[name[link]] in starred[name[stack].links] begin[:] call[name[links]][call[name[link]][constant[rel]]] assign[=] call[name[link]][constant[href]] call[name[ret]][name[stack].stack_name] assign[=] dictionary[[<ast.Constant object at 0x7da1b2045150>, <ast.Constant object at 0x7da1b2044c10>, <ast.Constant object at 0x7da1b2045060>, <ast.Constant object at 0x7da1b2045360>, <ast.Constant object at 0x7da1b2045900>, <ast.Constant object at 0x7da1b20456c0>, <ast.Constant object at 0x7da1b2045480>], [<ast.Attribute object at 0x7da1b20451b0>, <ast.Attribute object at 0x7da1b20454b0>, <ast.Attribute object at 0x7da1b2047ca0>, <ast.Attribute object at 0x7da1b2047550>, <ast.Attribute object at 0x7da1b2044b80>, <ast.Attribute object at 0x7da1b20444c0>, <ast.Name object at 0x7da1b2047af0>]] return[name[ret]]
keyword[def] identifier[list_stack] ( identifier[profile] = keyword[None] ): literal[string] identifier[ret] ={} identifier[h_client] = identifier[_auth] ( identifier[profile] ) keyword[for] identifier[stack] keyword[in] identifier[h_client] . identifier[stacks] . identifier[list] (): identifier[links] ={} keyword[for] identifier[link] keyword[in] identifier[stack] . identifier[links] : identifier[links] [ identifier[link] [ literal[string] ]]= identifier[link] [ literal[string] ] identifier[ret] [ identifier[stack] . identifier[stack_name] ]={ literal[string] : identifier[stack] . identifier[stack_status] , literal[string] : identifier[stack] . identifier[id] , literal[string] : identifier[stack] . identifier[stack_name] , literal[string] : identifier[stack] . identifier[creation_time] , literal[string] : identifier[stack] . identifier[stack_owner] , literal[string] : identifier[stack] . identifier[stack_status_reason] , literal[string] : identifier[links] , } keyword[return] identifier[ret]
def list_stack(profile=None): """ Return a list of available stack (heat stack-list) profile Profile to use CLI Example: .. code-block:: bash salt '*' heat.list_stack profile=openstack1 """ ret = {} h_client = _auth(profile) for stack in h_client.stacks.list(): links = {} for link in stack.links: links[link['rel']] = link['href'] # depends on [control=['for'], data=['link']] ret[stack.stack_name] = {'status': stack.stack_status, 'id': stack.id, 'name': stack.stack_name, 'creation': stack.creation_time, 'owner': stack.stack_owner, 'reason': stack.stack_status_reason, 'links': links} # depends on [control=['for'], data=['stack']] return ret
def override_to_local_variable(enable=True): """ Returns: a context where all variables will be created as local. """ if enable: def custom_getter(getter, name, *args, **kwargs): _replace_global_by_local(kwargs) return getter(name, *args, **kwargs) with custom_getter_scope(custom_getter): yield else: yield
def function[override_to_local_variable, parameter[enable]]: constant[ Returns: a context where all variables will be created as local. ] if name[enable] begin[:] def function[custom_getter, parameter[getter, name]]: call[name[_replace_global_by_local], parameter[name[kwargs]]] return[call[name[getter], parameter[name[name], <ast.Starred object at 0x7da2041d9570>]]] with call[name[custom_getter_scope], parameter[name[custom_getter]]] begin[:] <ast.Yield object at 0x7da2041d8f40>
keyword[def] identifier[override_to_local_variable] ( identifier[enable] = keyword[True] ): literal[string] keyword[if] identifier[enable] : keyword[def] identifier[custom_getter] ( identifier[getter] , identifier[name] ,* identifier[args] ,** identifier[kwargs] ): identifier[_replace_global_by_local] ( identifier[kwargs] ) keyword[return] identifier[getter] ( identifier[name] ,* identifier[args] ,** identifier[kwargs] ) keyword[with] identifier[custom_getter_scope] ( identifier[custom_getter] ): keyword[yield] keyword[else] : keyword[yield]
def override_to_local_variable(enable=True): """ Returns: a context where all variables will be created as local. """ if enable: def custom_getter(getter, name, *args, **kwargs): _replace_global_by_local(kwargs) return getter(name, *args, **kwargs) with custom_getter_scope(custom_getter): yield # depends on [control=['with'], data=[]] # depends on [control=['if'], data=[]] else: yield
def authenticate_direct_credentials(self, username, password): """ Performs a direct bind, however using direct credentials. Can be used if interfacing with an Active Directory domain controller which authenticates using [email protected] directly. Performing this kind of lookup limits the information we can get from ldap. Instead we can only deduce whether or not their bind was successful. Do not use this method if you require more user info. Args: username (str): Username for the user to bind with. LDAP_BIND_DIRECT_PREFIX will be prepended and LDAP_BIND_DIRECT_SUFFIX will be appended. password (str): User's password to bind with. Returns: AuthenticationResponse """ bind_user = '{}{}{}'.format( self.config.get('LDAP_BIND_DIRECT_PREFIX'), username, self.config.get('LDAP_BIND_DIRECT_SUFFIX') ) connection = self._make_connection( bind_user=bind_user, bind_password=password, ) response = AuthenticationResponse() try: connection.bind() response.status = AuthenticationResponseStatus.success response.user_id = username log.debug( "Authentication was successful for user '{0}'".format(username)) if self.config.get('LDAP_BIND_DIRECT_GET_USER_INFO'): # User wants extra info about the bind user_filter = '({search_attr}={username})'.format( search_attr=self.config.get('LDAP_USER_LOGIN_ATTR'), username=username ) search_filter = '(&{0}{1})'.format( self.config.get('LDAP_USER_OBJECT_FILTER'), user_filter, ) connection.search( search_base=self.full_user_search_dn, search_filter=search_filter, search_scope=getattr( ldap3, self.config.get('LDAP_USER_SEARCH_SCOPE')), attributes=self.config.get('LDAP_GET_USER_ATTRIBUTES'), ) if len(connection.response) == 0 or \ (self.config.get('LDAP_FAIL_AUTH_ON_MULTIPLE_FOUND') and len(connection.response) > 1): # Don't allow them to log in. log.error( "Could not gather extra info for user '{0}'".format(username)) else: user = connection.response[0] user['attributes']['dn'] = user['dn'] response.user_info = user['attributes'] response.user_dn = user['dn'] except ldap3.core.exceptions.LDAPInvalidCredentialsResult: log.debug( "Authentication was not successful for user '{0}'".format(username)) response.status = AuthenticationResponseStatus.fail except Exception as e: log.error(e) response.status = AuthenticationResponseStatus.fail self.destroy_connection(connection) return response
def function[authenticate_direct_credentials, parameter[self, username, password]]: constant[ Performs a direct bind, however using direct credentials. Can be used if interfacing with an Active Directory domain controller which authenticates using [email protected] directly. Performing this kind of lookup limits the information we can get from ldap. Instead we can only deduce whether or not their bind was successful. Do not use this method if you require more user info. Args: username (str): Username for the user to bind with. LDAP_BIND_DIRECT_PREFIX will be prepended and LDAP_BIND_DIRECT_SUFFIX will be appended. password (str): User's password to bind with. Returns: AuthenticationResponse ] variable[bind_user] assign[=] call[constant[{}{}{}].format, parameter[call[name[self].config.get, parameter[constant[LDAP_BIND_DIRECT_PREFIX]]], name[username], call[name[self].config.get, parameter[constant[LDAP_BIND_DIRECT_SUFFIX]]]]] variable[connection] assign[=] call[name[self]._make_connection, parameter[]] variable[response] assign[=] call[name[AuthenticationResponse], parameter[]] <ast.Try object at 0x7da1b02408b0> call[name[self].destroy_connection, parameter[name[connection]]] return[name[response]]
keyword[def] identifier[authenticate_direct_credentials] ( identifier[self] , identifier[username] , identifier[password] ): literal[string] identifier[bind_user] = literal[string] . identifier[format] ( identifier[self] . identifier[config] . identifier[get] ( literal[string] ), identifier[username] , identifier[self] . identifier[config] . identifier[get] ( literal[string] ) ) identifier[connection] = identifier[self] . identifier[_make_connection] ( identifier[bind_user] = identifier[bind_user] , identifier[bind_password] = identifier[password] , ) identifier[response] = identifier[AuthenticationResponse] () keyword[try] : identifier[connection] . identifier[bind] () identifier[response] . identifier[status] = identifier[AuthenticationResponseStatus] . identifier[success] identifier[response] . identifier[user_id] = identifier[username] identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[username] )) keyword[if] identifier[self] . identifier[config] . identifier[get] ( literal[string] ): identifier[user_filter] = literal[string] . identifier[format] ( identifier[search_attr] = identifier[self] . identifier[config] . identifier[get] ( literal[string] ), identifier[username] = identifier[username] ) identifier[search_filter] = literal[string] . identifier[format] ( identifier[self] . identifier[config] . identifier[get] ( literal[string] ), identifier[user_filter] , ) identifier[connection] . identifier[search] ( identifier[search_base] = identifier[self] . identifier[full_user_search_dn] , identifier[search_filter] = identifier[search_filter] , identifier[search_scope] = identifier[getattr] ( identifier[ldap3] , identifier[self] . identifier[config] . identifier[get] ( literal[string] )), identifier[attributes] = identifier[self] . identifier[config] . identifier[get] ( literal[string] ), ) keyword[if] identifier[len] ( identifier[connection] . identifier[response] )== literal[int] keyword[or] ( identifier[self] . identifier[config] . identifier[get] ( literal[string] ) keyword[and] identifier[len] ( identifier[connection] . identifier[response] )> literal[int] ): identifier[log] . identifier[error] ( literal[string] . identifier[format] ( identifier[username] )) keyword[else] : identifier[user] = identifier[connection] . identifier[response] [ literal[int] ] identifier[user] [ literal[string] ][ literal[string] ]= identifier[user] [ literal[string] ] identifier[response] . identifier[user_info] = identifier[user] [ literal[string] ] identifier[response] . identifier[user_dn] = identifier[user] [ literal[string] ] keyword[except] identifier[ldap3] . identifier[core] . identifier[exceptions] . identifier[LDAPInvalidCredentialsResult] : identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[username] )) identifier[response] . identifier[status] = identifier[AuthenticationResponseStatus] . identifier[fail] keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[log] . identifier[error] ( identifier[e] ) identifier[response] . identifier[status] = identifier[AuthenticationResponseStatus] . identifier[fail] identifier[self] . identifier[destroy_connection] ( identifier[connection] ) keyword[return] identifier[response]
def authenticate_direct_credentials(self, username, password): """ Performs a direct bind, however using direct credentials. Can be used if interfacing with an Active Directory domain controller which authenticates using [email protected] directly. Performing this kind of lookup limits the information we can get from ldap. Instead we can only deduce whether or not their bind was successful. Do not use this method if you require more user info. Args: username (str): Username for the user to bind with. LDAP_BIND_DIRECT_PREFIX will be prepended and LDAP_BIND_DIRECT_SUFFIX will be appended. password (str): User's password to bind with. Returns: AuthenticationResponse """ bind_user = '{}{}{}'.format(self.config.get('LDAP_BIND_DIRECT_PREFIX'), username, self.config.get('LDAP_BIND_DIRECT_SUFFIX')) connection = self._make_connection(bind_user=bind_user, bind_password=password) response = AuthenticationResponse() try: connection.bind() response.status = AuthenticationResponseStatus.success response.user_id = username log.debug("Authentication was successful for user '{0}'".format(username)) if self.config.get('LDAP_BIND_DIRECT_GET_USER_INFO'): # User wants extra info about the bind user_filter = '({search_attr}={username})'.format(search_attr=self.config.get('LDAP_USER_LOGIN_ATTR'), username=username) search_filter = '(&{0}{1})'.format(self.config.get('LDAP_USER_OBJECT_FILTER'), user_filter) connection.search(search_base=self.full_user_search_dn, search_filter=search_filter, search_scope=getattr(ldap3, self.config.get('LDAP_USER_SEARCH_SCOPE')), attributes=self.config.get('LDAP_GET_USER_ATTRIBUTES')) if len(connection.response) == 0 or (self.config.get('LDAP_FAIL_AUTH_ON_MULTIPLE_FOUND') and len(connection.response) > 1): # Don't allow them to log in. log.error("Could not gather extra info for user '{0}'".format(username)) # depends on [control=['if'], data=[]] else: user = connection.response[0] user['attributes']['dn'] = user['dn'] response.user_info = user['attributes'] response.user_dn = user['dn'] # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except ldap3.core.exceptions.LDAPInvalidCredentialsResult: log.debug("Authentication was not successful for user '{0}'".format(username)) response.status = AuthenticationResponseStatus.fail # depends on [control=['except'], data=[]] except Exception as e: log.error(e) response.status = AuthenticationResponseStatus.fail # depends on [control=['except'], data=['e']] self.destroy_connection(connection) return response
def insert_one(self, validate=True): """Insert this document. The `validate` argument translates to the inverse of the `bypass_document_validation` PyMongo option. https://api.mongodb.com/python/current/api/pymongo/collection.html#pymongo.collection.Collection.insert_one """ kw = {} kw['bypass_document_validation'] = not validate collection = self.get_collection(kw.pop('source', None)) return collection.insert_one(self, **kw)
def function[insert_one, parameter[self, validate]]: constant[Insert this document. The `validate` argument translates to the inverse of the `bypass_document_validation` PyMongo option. https://api.mongodb.com/python/current/api/pymongo/collection.html#pymongo.collection.Collection.insert_one ] variable[kw] assign[=] dictionary[[], []] call[name[kw]][constant[bypass_document_validation]] assign[=] <ast.UnaryOp object at 0x7da20c6a8be0> variable[collection] assign[=] call[name[self].get_collection, parameter[call[name[kw].pop, parameter[constant[source], constant[None]]]]] return[call[name[collection].insert_one, parameter[name[self]]]]
keyword[def] identifier[insert_one] ( identifier[self] , identifier[validate] = keyword[True] ): literal[string] identifier[kw] ={} identifier[kw] [ literal[string] ]= keyword[not] identifier[validate] identifier[collection] = identifier[self] . identifier[get_collection] ( identifier[kw] . identifier[pop] ( literal[string] , keyword[None] )) keyword[return] identifier[collection] . identifier[insert_one] ( identifier[self] ,** identifier[kw] )
def insert_one(self, validate=True): """Insert this document. The `validate` argument translates to the inverse of the `bypass_document_validation` PyMongo option. https://api.mongodb.com/python/current/api/pymongo/collection.html#pymongo.collection.Collection.insert_one """ kw = {} kw['bypass_document_validation'] = not validate collection = self.get_collection(kw.pop('source', None)) return collection.insert_one(self, **kw)
def update_db(self, giver, receiverkarma): """ Record a the giver of karma, the receiver of karma, and the karma amount. Typically the count will be 1, but it can be any positive or negative integer. """ for receiver in receiverkarma: if receiver != giver: urow = KarmaStatsTable( ude(giver), ude(receiver), receiverkarma[receiver]) self.db.session.add(urow) self.db.session.commit()
def function[update_db, parameter[self, giver, receiverkarma]]: constant[ Record a the giver of karma, the receiver of karma, and the karma amount. Typically the count will be 1, but it can be any positive or negative integer. ] for taget[name[receiver]] in starred[name[receiverkarma]] begin[:] if compare[name[receiver] not_equal[!=] name[giver]] begin[:] variable[urow] assign[=] call[name[KarmaStatsTable], parameter[call[name[ude], parameter[name[giver]]], call[name[ude], parameter[name[receiver]]], call[name[receiverkarma]][name[receiver]]]] call[name[self].db.session.add, parameter[name[urow]]] call[name[self].db.session.commit, parameter[]]
keyword[def] identifier[update_db] ( identifier[self] , identifier[giver] , identifier[receiverkarma] ): literal[string] keyword[for] identifier[receiver] keyword[in] identifier[receiverkarma] : keyword[if] identifier[receiver] != identifier[giver] : identifier[urow] = identifier[KarmaStatsTable] ( identifier[ude] ( identifier[giver] ), identifier[ude] ( identifier[receiver] ), identifier[receiverkarma] [ identifier[receiver] ]) identifier[self] . identifier[db] . identifier[session] . identifier[add] ( identifier[urow] ) identifier[self] . identifier[db] . identifier[session] . identifier[commit] ()
def update_db(self, giver, receiverkarma): """ Record a the giver of karma, the receiver of karma, and the karma amount. Typically the count will be 1, but it can be any positive or negative integer. """ for receiver in receiverkarma: if receiver != giver: urow = KarmaStatsTable(ude(giver), ude(receiver), receiverkarma[receiver]) self.db.session.add(urow) # depends on [control=['if'], data=['receiver', 'giver']] # depends on [control=['for'], data=['receiver']] self.db.session.commit()
def stack1d(*points): """Fill out the columns of matrix with a series of points. This is because ``np.hstack()`` will just make another 1D vector out of them and ``np.vstack()`` will put them in the rows. Args: points (Tuple[numpy.ndarray, ...]): Tuple of 1D points (i.e. arrays with shape ``(2,)``. Returns: numpy.ndarray: The array with each point in ``points`` as its columns. """ result = np.empty((2, len(points)), order="F") for index, point in enumerate(points): result[:, index] = point return result
def function[stack1d, parameter[]]: constant[Fill out the columns of matrix with a series of points. This is because ``np.hstack()`` will just make another 1D vector out of them and ``np.vstack()`` will put them in the rows. Args: points (Tuple[numpy.ndarray, ...]): Tuple of 1D points (i.e. arrays with shape ``(2,)``. Returns: numpy.ndarray: The array with each point in ``points`` as its columns. ] variable[result] assign[=] call[name[np].empty, parameter[tuple[[<ast.Constant object at 0x7da20e960f40>, <ast.Call object at 0x7da20e963550>]]]] for taget[tuple[[<ast.Name object at 0x7da18eb570a0>, <ast.Name object at 0x7da18eb55b10>]]] in starred[call[name[enumerate], parameter[name[points]]]] begin[:] call[name[result]][tuple[[<ast.Slice object at 0x7da18eb573d0>, <ast.Name object at 0x7da18eb55480>]]] assign[=] name[point] return[name[result]]
keyword[def] identifier[stack1d] (* identifier[points] ): literal[string] identifier[result] = identifier[np] . identifier[empty] (( literal[int] , identifier[len] ( identifier[points] )), identifier[order] = literal[string] ) keyword[for] identifier[index] , identifier[point] keyword[in] identifier[enumerate] ( identifier[points] ): identifier[result] [:, identifier[index] ]= identifier[point] keyword[return] identifier[result]
def stack1d(*points): """Fill out the columns of matrix with a series of points. This is because ``np.hstack()`` will just make another 1D vector out of them and ``np.vstack()`` will put them in the rows. Args: points (Tuple[numpy.ndarray, ...]): Tuple of 1D points (i.e. arrays with shape ``(2,)``. Returns: numpy.ndarray: The array with each point in ``points`` as its columns. """ result = np.empty((2, len(points)), order='F') for (index, point) in enumerate(points): result[:, index] = point # depends on [control=['for'], data=[]] return result
def custom_code(self, mask: str = '@###', char: str = '@', digit: str = '#') -> str: """Generate custom code using ascii uppercase and random integers. :param mask: Mask of code. :param char: Placeholder for characters. :param digit: Placeholder for digits. :return: Custom code. """ char_code = ord(char) digit_code = ord(digit) code = bytearray(len(mask)) def random_int(a: int, b: int) -> int: b = b - a return int(self.random() * b) + a _mask = mask.encode() for i, p in enumerate(_mask): if p == char_code: a = random_int(65, 91) # A-Z elif p == digit_code: a = random_int(48, 58) # 0-9 else: a = p code[i] = a return code.decode()
def function[custom_code, parameter[self, mask, char, digit]]: constant[Generate custom code using ascii uppercase and random integers. :param mask: Mask of code. :param char: Placeholder for characters. :param digit: Placeholder for digits. :return: Custom code. ] variable[char_code] assign[=] call[name[ord], parameter[name[char]]] variable[digit_code] assign[=] call[name[ord], parameter[name[digit]]] variable[code] assign[=] call[name[bytearray], parameter[call[name[len], parameter[name[mask]]]]] def function[random_int, parameter[a, b]]: variable[b] assign[=] binary_operation[name[b] - name[a]] return[binary_operation[call[name[int], parameter[binary_operation[call[name[self].random, parameter[]] * name[b]]]] + name[a]]] variable[_mask] assign[=] call[name[mask].encode, parameter[]] for taget[tuple[[<ast.Name object at 0x7da18f58cd30>, <ast.Name object at 0x7da18f58e680>]]] in starred[call[name[enumerate], parameter[name[_mask]]]] begin[:] if compare[name[p] equal[==] name[char_code]] begin[:] variable[a] assign[=] call[name[random_int], parameter[constant[65], constant[91]]] call[name[code]][name[i]] assign[=] name[a] return[call[name[code].decode, parameter[]]]
keyword[def] identifier[custom_code] ( identifier[self] , identifier[mask] : identifier[str] = literal[string] , identifier[char] : identifier[str] = literal[string] , identifier[digit] : identifier[str] = literal[string] )-> identifier[str] : literal[string] identifier[char_code] = identifier[ord] ( identifier[char] ) identifier[digit_code] = identifier[ord] ( identifier[digit] ) identifier[code] = identifier[bytearray] ( identifier[len] ( identifier[mask] )) keyword[def] identifier[random_int] ( identifier[a] : identifier[int] , identifier[b] : identifier[int] )-> identifier[int] : identifier[b] = identifier[b] - identifier[a] keyword[return] identifier[int] ( identifier[self] . identifier[random] ()* identifier[b] )+ identifier[a] identifier[_mask] = identifier[mask] . identifier[encode] () keyword[for] identifier[i] , identifier[p] keyword[in] identifier[enumerate] ( identifier[_mask] ): keyword[if] identifier[p] == identifier[char_code] : identifier[a] = identifier[random_int] ( literal[int] , literal[int] ) keyword[elif] identifier[p] == identifier[digit_code] : identifier[a] = identifier[random_int] ( literal[int] , literal[int] ) keyword[else] : identifier[a] = identifier[p] identifier[code] [ identifier[i] ]= identifier[a] keyword[return] identifier[code] . identifier[decode] ()
def custom_code(self, mask: str='@###', char: str='@', digit: str='#') -> str: """Generate custom code using ascii uppercase and random integers. :param mask: Mask of code. :param char: Placeholder for characters. :param digit: Placeholder for digits. :return: Custom code. """ char_code = ord(char) digit_code = ord(digit) code = bytearray(len(mask)) def random_int(a: int, b: int) -> int: b = b - a return int(self.random() * b) + a _mask = mask.encode() for (i, p) in enumerate(_mask): if p == char_code: a = random_int(65, 91) # A-Z # depends on [control=['if'], data=[]] elif p == digit_code: a = random_int(48, 58) # 0-9 # depends on [control=['if'], data=[]] else: a = p code[i] = a # depends on [control=['for'], data=[]] return code.decode()
def check_confirmations_or_resend(self, use_open_peers=False, **kw): """ check if a tx is confirmed, else resend it. :param use_open_peers: select random peers fro api/peers endpoint """ if self.confirmations() == 0: self.send(use_open_peers, **kw)
def function[check_confirmations_or_resend, parameter[self, use_open_peers]]: constant[ check if a tx is confirmed, else resend it. :param use_open_peers: select random peers fro api/peers endpoint ] if compare[call[name[self].confirmations, parameter[]] equal[==] constant[0]] begin[:] call[name[self].send, parameter[name[use_open_peers]]]
keyword[def] identifier[check_confirmations_or_resend] ( identifier[self] , identifier[use_open_peers] = keyword[False] ,** identifier[kw] ): literal[string] keyword[if] identifier[self] . identifier[confirmations] ()== literal[int] : identifier[self] . identifier[send] ( identifier[use_open_peers] ,** identifier[kw] )
def check_confirmations_or_resend(self, use_open_peers=False, **kw): """ check if a tx is confirmed, else resend it. :param use_open_peers: select random peers fro api/peers endpoint """ if self.confirmations() == 0: self.send(use_open_peers, **kw) # depends on [control=['if'], data=[]]
def horizontal_padding(self, value): """ Setter for **self.__horizontal_padding** attribute. :param value: Attribute value. :type value: int """ if value is not None: assert type(value) is int, "'{0}' attribute: '{1}' type is not 'int'!".format("horizontal_padding", value) assert value >= 0, "'{0}' attribute: '{1}' need to be positive!".format("horizontal_padding", value) self.__horizontal_padding = value
def function[horizontal_padding, parameter[self, value]]: constant[ Setter for **self.__horizontal_padding** attribute. :param value: Attribute value. :type value: int ] if compare[name[value] is_not constant[None]] begin[:] assert[compare[call[name[type], parameter[name[value]]] is name[int]]] assert[compare[name[value] greater_or_equal[>=] constant[0]]] name[self].__horizontal_padding assign[=] name[value]
keyword[def] identifier[horizontal_padding] ( identifier[self] , identifier[value] ): literal[string] keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] : keyword[assert] identifier[type] ( identifier[value] ) keyword[is] identifier[int] , literal[string] . identifier[format] ( literal[string] , identifier[value] ) keyword[assert] identifier[value] >= literal[int] , literal[string] . identifier[format] ( literal[string] , identifier[value] ) identifier[self] . identifier[__horizontal_padding] = identifier[value]
def horizontal_padding(self, value): """ Setter for **self.__horizontal_padding** attribute. :param value: Attribute value. :type value: int """ if value is not None: assert type(value) is int, "'{0}' attribute: '{1}' type is not 'int'!".format('horizontal_padding', value) assert value >= 0, "'{0}' attribute: '{1}' need to be positive!".format('horizontal_padding', value) # depends on [control=['if'], data=['value']] self.__horizontal_padding = value
def gaussian_filter(data, sigma=4., truncate = 4., normalize=True, res_g=None): """ blurs data with a gaussian kernel of given sigmas Parameters ---------- data: ndarray 2 or 3 dimensional array sigma: scalar or tuple the sigma of the gaussian truncate: float truncate the kernel after truncate*sigma normalize: bool uses a normalized kernel is true res_g: OCLArray used to store result if given Returns ------- blurred array """ if not len(data.shape) in [1, 2, 3]: raise ValueError("dim = %s not supported" % (len(data.shape))) if np.isscalar(sigma): sigma = [sigma] * data.ndim if any(tuple(s <= 0 for s in sigma)): raise ValueError("sigma = %s : all sigmas have to be positive!" % str(sigma)) if isinstance(data, OCLArray): return _gaussian_buf(data, sigma, res_g, normalize=normalize,truncate = truncate) elif isinstance(data, np.ndarray): return _gaussian_np(data, sigma, normalize=normalize,truncate = truncate) else: raise TypeError("unknown type (%s)" % (type(data)))
def function[gaussian_filter, parameter[data, sigma, truncate, normalize, res_g]]: constant[ blurs data with a gaussian kernel of given sigmas Parameters ---------- data: ndarray 2 or 3 dimensional array sigma: scalar or tuple the sigma of the gaussian truncate: float truncate the kernel after truncate*sigma normalize: bool uses a normalized kernel is true res_g: OCLArray used to store result if given Returns ------- blurred array ] if <ast.UnaryOp object at 0x7da2054a4e50> begin[:] <ast.Raise object at 0x7da2054a7340> if call[name[np].isscalar, parameter[name[sigma]]] begin[:] variable[sigma] assign[=] binary_operation[list[[<ast.Name object at 0x7da2054a7100>]] * name[data].ndim] if call[name[any], parameter[call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da2054a6bf0>]]]] begin[:] <ast.Raise object at 0x7da2054a6d40> if call[name[isinstance], parameter[name[data], name[OCLArray]]] begin[:] return[call[name[_gaussian_buf], parameter[name[data], name[sigma], name[res_g]]]]
keyword[def] identifier[gaussian_filter] ( identifier[data] , identifier[sigma] = literal[int] , identifier[truncate] = literal[int] , identifier[normalize] = keyword[True] , identifier[res_g] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[len] ( identifier[data] . identifier[shape] ) keyword[in] [ literal[int] , literal[int] , literal[int] ]: keyword[raise] identifier[ValueError] ( literal[string] %( identifier[len] ( identifier[data] . identifier[shape] ))) keyword[if] identifier[np] . identifier[isscalar] ( identifier[sigma] ): identifier[sigma] =[ identifier[sigma] ]* identifier[data] . identifier[ndim] keyword[if] identifier[any] ( identifier[tuple] ( identifier[s] <= literal[int] keyword[for] identifier[s] keyword[in] identifier[sigma] )): keyword[raise] identifier[ValueError] ( literal[string] % identifier[str] ( identifier[sigma] )) keyword[if] identifier[isinstance] ( identifier[data] , identifier[OCLArray] ): keyword[return] identifier[_gaussian_buf] ( identifier[data] , identifier[sigma] , identifier[res_g] , identifier[normalize] = identifier[normalize] , identifier[truncate] = identifier[truncate] ) keyword[elif] identifier[isinstance] ( identifier[data] , identifier[np] . identifier[ndarray] ): keyword[return] identifier[_gaussian_np] ( identifier[data] , identifier[sigma] , identifier[normalize] = identifier[normalize] , identifier[truncate] = identifier[truncate] ) keyword[else] : keyword[raise] identifier[TypeError] ( literal[string] %( identifier[type] ( identifier[data] )))
def gaussian_filter(data, sigma=4.0, truncate=4.0, normalize=True, res_g=None): """ blurs data with a gaussian kernel of given sigmas Parameters ---------- data: ndarray 2 or 3 dimensional array sigma: scalar or tuple the sigma of the gaussian truncate: float truncate the kernel after truncate*sigma normalize: bool uses a normalized kernel is true res_g: OCLArray used to store result if given Returns ------- blurred array """ if not len(data.shape) in [1, 2, 3]: raise ValueError('dim = %s not supported' % len(data.shape)) # depends on [control=['if'], data=[]] if np.isscalar(sigma): sigma = [sigma] * data.ndim # depends on [control=['if'], data=[]] if any(tuple((s <= 0 for s in sigma))): raise ValueError('sigma = %s : all sigmas have to be positive!' % str(sigma)) # depends on [control=['if'], data=[]] if isinstance(data, OCLArray): return _gaussian_buf(data, sigma, res_g, normalize=normalize, truncate=truncate) # depends on [control=['if'], data=[]] elif isinstance(data, np.ndarray): return _gaussian_np(data, sigma, normalize=normalize, truncate=truncate) # depends on [control=['if'], data=[]] else: raise TypeError('unknown type (%s)' % type(data))
def to_er7(self, encoding_chars=None, trailing_children=False): """ Return the ER7-encoded string :type encoding_chars: ``dict`` :param encoding_chars: a dictionary containing the encoding chars or None to use the default (see :func:`get_default_encoding_chars <hl7apy.get_default_encoding_chars>`) :type trailing_children: ``bool`` :param trailing_children: if ``True``, trailing children will be added even if their value is None :return: the ER7-encoded string >>> pid = Segment("PID") >>> pid.pid_1 = '1' >>> pid.pid_5 = "EVERYMAN^ADAM" >>> print(pid.to_er7()) PID|1||||EVERYMAN^ADAM """ if encoding_chars is None: encoding_chars = self.encoding_chars separator = encoding_chars.get('FIELD') repetition = encoding_chars.get('REPETITION') s = [self.name] for child in self._get_children(trailing_children): if child is not None: s.append(repetition.join(item.to_er7(encoding_chars, trailing_children) for item in child)) else: try: s.append(self._handle_empty_children(encoding_chars)) except NotImplementedError: pass if self.name == 'MSH' and len(s) > 1: s.pop(1) return separator.join(s)
def function[to_er7, parameter[self, encoding_chars, trailing_children]]: constant[ Return the ER7-encoded string :type encoding_chars: ``dict`` :param encoding_chars: a dictionary containing the encoding chars or None to use the default (see :func:`get_default_encoding_chars <hl7apy.get_default_encoding_chars>`) :type trailing_children: ``bool`` :param trailing_children: if ``True``, trailing children will be added even if their value is None :return: the ER7-encoded string >>> pid = Segment("PID") >>> pid.pid_1 = '1' >>> pid.pid_5 = "EVERYMAN^ADAM" >>> print(pid.to_er7()) PID|1||||EVERYMAN^ADAM ] if compare[name[encoding_chars] is constant[None]] begin[:] variable[encoding_chars] assign[=] name[self].encoding_chars variable[separator] assign[=] call[name[encoding_chars].get, parameter[constant[FIELD]]] variable[repetition] assign[=] call[name[encoding_chars].get, parameter[constant[REPETITION]]] variable[s] assign[=] list[[<ast.Attribute object at 0x7da1b0d0d5d0>]] for taget[name[child]] in starred[call[name[self]._get_children, parameter[name[trailing_children]]]] begin[:] if compare[name[child] is_not constant[None]] begin[:] call[name[s].append, parameter[call[name[repetition].join, parameter[<ast.GeneratorExp object at 0x7da1b0d0f550>]]]] if <ast.BoolOp object at 0x7da1b0d0d5a0> begin[:] call[name[s].pop, parameter[constant[1]]] return[call[name[separator].join, parameter[name[s]]]]
keyword[def] identifier[to_er7] ( identifier[self] , identifier[encoding_chars] = keyword[None] , identifier[trailing_children] = keyword[False] ): literal[string] keyword[if] identifier[encoding_chars] keyword[is] keyword[None] : identifier[encoding_chars] = identifier[self] . identifier[encoding_chars] identifier[separator] = identifier[encoding_chars] . identifier[get] ( literal[string] ) identifier[repetition] = identifier[encoding_chars] . identifier[get] ( literal[string] ) identifier[s] =[ identifier[self] . identifier[name] ] keyword[for] identifier[child] keyword[in] identifier[self] . identifier[_get_children] ( identifier[trailing_children] ): keyword[if] identifier[child] keyword[is] keyword[not] keyword[None] : identifier[s] . identifier[append] ( identifier[repetition] . identifier[join] ( identifier[item] . identifier[to_er7] ( identifier[encoding_chars] , identifier[trailing_children] ) keyword[for] identifier[item] keyword[in] identifier[child] )) keyword[else] : keyword[try] : identifier[s] . identifier[append] ( identifier[self] . identifier[_handle_empty_children] ( identifier[encoding_chars] )) keyword[except] identifier[NotImplementedError] : keyword[pass] keyword[if] identifier[self] . identifier[name] == literal[string] keyword[and] identifier[len] ( identifier[s] )> literal[int] : identifier[s] . identifier[pop] ( literal[int] ) keyword[return] identifier[separator] . identifier[join] ( identifier[s] )
def to_er7(self, encoding_chars=None, trailing_children=False): """ Return the ER7-encoded string :type encoding_chars: ``dict`` :param encoding_chars: a dictionary containing the encoding chars or None to use the default (see :func:`get_default_encoding_chars <hl7apy.get_default_encoding_chars>`) :type trailing_children: ``bool`` :param trailing_children: if ``True``, trailing children will be added even if their value is None :return: the ER7-encoded string >>> pid = Segment("PID") >>> pid.pid_1 = '1' >>> pid.pid_5 = "EVERYMAN^ADAM" >>> print(pid.to_er7()) PID|1||||EVERYMAN^ADAM """ if encoding_chars is None: encoding_chars = self.encoding_chars # depends on [control=['if'], data=['encoding_chars']] separator = encoding_chars.get('FIELD') repetition = encoding_chars.get('REPETITION') s = [self.name] for child in self._get_children(trailing_children): if child is not None: s.append(repetition.join((item.to_er7(encoding_chars, trailing_children) for item in child))) # depends on [control=['if'], data=['child']] else: try: s.append(self._handle_empty_children(encoding_chars)) # depends on [control=['try'], data=[]] except NotImplementedError: pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['child']] if self.name == 'MSH' and len(s) > 1: s.pop(1) # depends on [control=['if'], data=[]] return separator.join(s)
def _format_argument(arg): """Format the output of a `click.Argument`.""" yield '.. option:: {}'.format(arg.human_readable_name) yield '' yield _indent('{} argument{}'.format( 'Required' if arg.required else 'Optional', '(s)' if arg.nargs != 1 else ''))
def function[_format_argument, parameter[arg]]: constant[Format the output of a `click.Argument`.] <ast.Yield object at 0x7da1b0e14820> <ast.Yield object at 0x7da1b0e179a0> <ast.Yield object at 0x7da1b0e15e40>
keyword[def] identifier[_format_argument] ( identifier[arg] ): literal[string] keyword[yield] literal[string] . identifier[format] ( identifier[arg] . identifier[human_readable_name] ) keyword[yield] literal[string] keyword[yield] identifier[_indent] ( literal[string] . identifier[format] ( literal[string] keyword[if] identifier[arg] . identifier[required] keyword[else] literal[string] , literal[string] keyword[if] identifier[arg] . identifier[nargs] != literal[int] keyword[else] literal[string] ))
def _format_argument(arg): """Format the output of a `click.Argument`.""" yield '.. option:: {}'.format(arg.human_readable_name) yield '' yield _indent('{} argument{}'.format('Required' if arg.required else 'Optional', '(s)' if arg.nargs != 1 else ''))
def cleanup(self): """ Cleanup resources used during execution """ if self.local_port is not None: logger.debug(("Stopping ssh tunnel {0}:{1}:{2} for " "{3}@{4}".format(self.local_port, self.remote_address, self.remote_port, self.username, self.address))) if self.forward is not None: self.forward.stop() self.forward.join() if self.transport is not None: self.transport.close()
def function[cleanup, parameter[self]]: constant[ Cleanup resources used during execution ] if compare[name[self].local_port is_not constant[None]] begin[:] call[name[logger].debug, parameter[call[constant[Stopping ssh tunnel {0}:{1}:{2} for {3}@{4}].format, parameter[name[self].local_port, name[self].remote_address, name[self].remote_port, name[self].username, name[self].address]]]] if compare[name[self].forward is_not constant[None]] begin[:] call[name[self].forward.stop, parameter[]] call[name[self].forward.join, parameter[]] if compare[name[self].transport is_not constant[None]] begin[:] call[name[self].transport.close, parameter[]]
keyword[def] identifier[cleanup] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[local_port] keyword[is] keyword[not] keyword[None] : identifier[logger] . identifier[debug] (( literal[string] literal[string] . identifier[format] ( identifier[self] . identifier[local_port] , identifier[self] . identifier[remote_address] , identifier[self] . identifier[remote_port] , identifier[self] . identifier[username] , identifier[self] . identifier[address] ))) keyword[if] identifier[self] . identifier[forward] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[forward] . identifier[stop] () identifier[self] . identifier[forward] . identifier[join] () keyword[if] identifier[self] . identifier[transport] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[transport] . identifier[close] ()
def cleanup(self): """ Cleanup resources used during execution """ if self.local_port is not None: logger.debug('Stopping ssh tunnel {0}:{1}:{2} for {3}@{4}'.format(self.local_port, self.remote_address, self.remote_port, self.username, self.address)) if self.forward is not None: self.forward.stop() self.forward.join() # depends on [control=['if'], data=[]] if self.transport is not None: self.transport.close() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
def _set_monitor(self, v, load=False): """ Setter method for monitor, mapped from YANG variable /overlay_gateway/monitor (list) If this variable is read-only (config: false) in the source YANG file, then _set_monitor is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_monitor() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=YANGListType("session",monitor.monitor, yang_name="monitor", rest_name="monitor", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='session', extensions={u'tailf-common': {u'info': u'Configure SPAN for the tunnels of this gateway', u'cli-suppress-mode': None, u'cli-incomplete-no': None, u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None, u'callpoint': u'TunnelSpanCallpoint'}}), is_container='list', yang_name="monitor", rest_name="monitor", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure SPAN for the tunnels of this gateway', u'cli-suppress-mode': None, u'cli-incomplete-no': None, u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None, u'callpoint': u'TunnelSpanCallpoint'}}, namespace='urn:brocade.com:mgmt:brocade-tunnels', defining_module='brocade-tunnels', yang_type='list', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """monitor must be of a type compatible with list""", 'defined-type': "list", 'generated-type': """YANGDynClass(base=YANGListType("session",monitor.monitor, yang_name="monitor", rest_name="monitor", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='session', extensions={u'tailf-common': {u'info': u'Configure SPAN for the tunnels of this gateway', u'cli-suppress-mode': None, u'cli-incomplete-no': None, u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None, u'callpoint': u'TunnelSpanCallpoint'}}), is_container='list', yang_name="monitor", rest_name="monitor", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure SPAN for the tunnels of this gateway', u'cli-suppress-mode': None, u'cli-incomplete-no': None, u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None, u'callpoint': u'TunnelSpanCallpoint'}}, namespace='urn:brocade.com:mgmt:brocade-tunnels', defining_module='brocade-tunnels', yang_type='list', is_config=True)""", }) self.__monitor = t if hasattr(self, '_set'): self._set()
def function[_set_monitor, parameter[self, v, load]]: constant[ Setter method for monitor, mapped from YANG variable /overlay_gateway/monitor (list) If this variable is read-only (config: false) in the source YANG file, then _set_monitor is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_monitor() directly. ] if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:] variable[v] assign[=] call[name[v]._utype, parameter[name[v]]] <ast.Try object at 0x7da20c6c4280> name[self].__monitor assign[=] name[t] if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:] call[name[self]._set, parameter[]]
keyword[def] identifier[_set_monitor] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ): literal[string] keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ): identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] ) keyword[try] : identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[YANGListType] ( literal[string] , identifier[monitor] . identifier[monitor] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[is_container] = literal[string] , identifier[user_ordered] = keyword[False] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[yang_keys] = literal[string] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] }}), identifier[is_container] = literal[string] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] ) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): keyword[raise] identifier[ValueError] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , }) identifier[self] . identifier[__monitor] = identifier[t] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[_set] ()
def _set_monitor(self, v, load=False): """ Setter method for monitor, mapped from YANG variable /overlay_gateway/monitor (list) If this variable is read-only (config: false) in the source YANG file, then _set_monitor is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_monitor() directly. """ if hasattr(v, '_utype'): v = v._utype(v) # depends on [control=['if'], data=[]] try: t = YANGDynClass(v, base=YANGListType('session', monitor.monitor, yang_name='monitor', rest_name='monitor', parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='session', extensions={u'tailf-common': {u'info': u'Configure SPAN for the tunnels of this gateway', u'cli-suppress-mode': None, u'cli-incomplete-no': None, u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None, u'callpoint': u'TunnelSpanCallpoint'}}), is_container='list', yang_name='monitor', rest_name='monitor', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure SPAN for the tunnels of this gateway', u'cli-suppress-mode': None, u'cli-incomplete-no': None, u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None, u'callpoint': u'TunnelSpanCallpoint'}}, namespace='urn:brocade.com:mgmt:brocade-tunnels', defining_module='brocade-tunnels', yang_type='list', is_config=True) # depends on [control=['try'], data=[]] except (TypeError, ValueError): raise ValueError({'error-string': 'monitor must be of a type compatible with list', 'defined-type': 'list', 'generated-type': 'YANGDynClass(base=YANGListType("session",monitor.monitor, yang_name="monitor", rest_name="monitor", parent=self, is_container=\'list\', user_ordered=False, path_helper=self._path_helper, yang_keys=\'session\', extensions={u\'tailf-common\': {u\'info\': u\'Configure SPAN for the tunnels of this gateway\', u\'cli-suppress-mode\': None, u\'cli-incomplete-no\': None, u\'cli-suppress-list-no\': None, u\'cli-compact-syntax\': None, u\'cli-sequence-commands\': None, u\'cli-incomplete-command\': None, u\'callpoint\': u\'TunnelSpanCallpoint\'}}), is_container=\'list\', yang_name="monitor", rest_name="monitor", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'Configure SPAN for the tunnels of this gateway\', u\'cli-suppress-mode\': None, u\'cli-incomplete-no\': None, u\'cli-suppress-list-no\': None, u\'cli-compact-syntax\': None, u\'cli-sequence-commands\': None, u\'cli-incomplete-command\': None, u\'callpoint\': u\'TunnelSpanCallpoint\'}}, namespace=\'urn:brocade.com:mgmt:brocade-tunnels\', defining_module=\'brocade-tunnels\', yang_type=\'list\', is_config=True)'}) # depends on [control=['except'], data=[]] self.__monitor = t if hasattr(self, '_set'): self._set() # depends on [control=['if'], data=[]]
def find_saas_perform_iops_price(package, size, iops): """Find the SaaS IOPS price for the specified size and iops :param package: The Storage As A Service product package :param size: The volume size for which a price is desired :param iops: The number of IOPS for which a price is desired :return: Returns the price for the size and IOPS, or an error if not found """ for item in package['items']: if 'itemCategory' not in item\ or 'categoryCode' not in item['itemCategory']\ or item['itemCategory']['categoryCode']\ != 'performance_storage_iops': continue if 'capacityMinimum' not in item or 'capacityMaximum' not in item: continue capacity_minimum = int(item['capacityMinimum']) capacity_maximum = int(item['capacityMaximum']) if iops < capacity_minimum or iops > capacity_maximum: continue price_id = _find_price_id(item['prices'], 'performance_storage_iops', 'STORAGE_SPACE', size) if price_id: return price_id raise ValueError("Could not find price for iops for the given volume")
def function[find_saas_perform_iops_price, parameter[package, size, iops]]: constant[Find the SaaS IOPS price for the specified size and iops :param package: The Storage As A Service product package :param size: The volume size for which a price is desired :param iops: The number of IOPS for which a price is desired :return: Returns the price for the size and IOPS, or an error if not found ] for taget[name[item]] in starred[call[name[package]][constant[items]]] begin[:] if <ast.BoolOp object at 0x7da18c4cfeb0> begin[:] continue if <ast.BoolOp object at 0x7da18c4cc910> begin[:] continue variable[capacity_minimum] assign[=] call[name[int], parameter[call[name[item]][constant[capacityMinimum]]]] variable[capacity_maximum] assign[=] call[name[int], parameter[call[name[item]][constant[capacityMaximum]]]] if <ast.BoolOp object at 0x7da18c4cd150> begin[:] continue variable[price_id] assign[=] call[name[_find_price_id], parameter[call[name[item]][constant[prices]], constant[performance_storage_iops], constant[STORAGE_SPACE], name[size]]] if name[price_id] begin[:] return[name[price_id]] <ast.Raise object at 0x7da18c4cf9d0>
keyword[def] identifier[find_saas_perform_iops_price] ( identifier[package] , identifier[size] , identifier[iops] ): literal[string] keyword[for] identifier[item] keyword[in] identifier[package] [ literal[string] ]: keyword[if] literal[string] keyword[not] keyword[in] identifier[item] keyword[or] literal[string] keyword[not] keyword[in] identifier[item] [ literal[string] ] keyword[or] identifier[item] [ literal[string] ][ literal[string] ]!= literal[string] : keyword[continue] keyword[if] literal[string] keyword[not] keyword[in] identifier[item] keyword[or] literal[string] keyword[not] keyword[in] identifier[item] : keyword[continue] identifier[capacity_minimum] = identifier[int] ( identifier[item] [ literal[string] ]) identifier[capacity_maximum] = identifier[int] ( identifier[item] [ literal[string] ]) keyword[if] identifier[iops] < identifier[capacity_minimum] keyword[or] identifier[iops] > identifier[capacity_maximum] : keyword[continue] identifier[price_id] = identifier[_find_price_id] ( identifier[item] [ literal[string] ], literal[string] , literal[string] , identifier[size] ) keyword[if] identifier[price_id] : keyword[return] identifier[price_id] keyword[raise] identifier[ValueError] ( literal[string] )
def find_saas_perform_iops_price(package, size, iops): """Find the SaaS IOPS price for the specified size and iops :param package: The Storage As A Service product package :param size: The volume size for which a price is desired :param iops: The number of IOPS for which a price is desired :return: Returns the price for the size and IOPS, or an error if not found """ for item in package['items']: if 'itemCategory' not in item or 'categoryCode' not in item['itemCategory'] or item['itemCategory']['categoryCode'] != 'performance_storage_iops': continue # depends on [control=['if'], data=[]] if 'capacityMinimum' not in item or 'capacityMaximum' not in item: continue # depends on [control=['if'], data=[]] capacity_minimum = int(item['capacityMinimum']) capacity_maximum = int(item['capacityMaximum']) if iops < capacity_minimum or iops > capacity_maximum: continue # depends on [control=['if'], data=[]] price_id = _find_price_id(item['prices'], 'performance_storage_iops', 'STORAGE_SPACE', size) if price_id: return price_id # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']] raise ValueError('Could not find price for iops for the given volume')
def principal_inertia_transform(self): """ A transform which moves the current mesh so the principal inertia vectors are on the X,Y, and Z axis, and the centroid is at the origin. Returns ---------- transform : (4, 4) float Homogenous transformation matrix """ order = np.argsort(self.principal_inertia_components)[1:][::-1] vectors = self.principal_inertia_vectors[order] vectors = np.vstack((vectors, np.cross(*vectors))) transform = np.eye(4) transform[:3, :3] = vectors transform = transformations.transform_around( matrix=transform, point=self.centroid) transform[:3, 3] -= self.centroid return transform
def function[principal_inertia_transform, parameter[self]]: constant[ A transform which moves the current mesh so the principal inertia vectors are on the X,Y, and Z axis, and the centroid is at the origin. Returns ---------- transform : (4, 4) float Homogenous transformation matrix ] variable[order] assign[=] call[call[call[name[np].argsort, parameter[name[self].principal_inertia_components]]][<ast.Slice object at 0x7da20c76d180>]][<ast.Slice object at 0x7da20c76e800>] variable[vectors] assign[=] call[name[self].principal_inertia_vectors][name[order]] variable[vectors] assign[=] call[name[np].vstack, parameter[tuple[[<ast.Name object at 0x7da20c76f370>, <ast.Call object at 0x7da20c76d900>]]]] variable[transform] assign[=] call[name[np].eye, parameter[constant[4]]] call[name[transform]][tuple[[<ast.Slice object at 0x7da20c76ece0>, <ast.Slice object at 0x7da20c76f6d0>]]] assign[=] name[vectors] variable[transform] assign[=] call[name[transformations].transform_around, parameter[]] <ast.AugAssign object at 0x7da20c7c9810> return[name[transform]]
keyword[def] identifier[principal_inertia_transform] ( identifier[self] ): literal[string] identifier[order] = identifier[np] . identifier[argsort] ( identifier[self] . identifier[principal_inertia_components] )[ literal[int] :][::- literal[int] ] identifier[vectors] = identifier[self] . identifier[principal_inertia_vectors] [ identifier[order] ] identifier[vectors] = identifier[np] . identifier[vstack] (( identifier[vectors] , identifier[np] . identifier[cross] (* identifier[vectors] ))) identifier[transform] = identifier[np] . identifier[eye] ( literal[int] ) identifier[transform] [: literal[int] ,: literal[int] ]= identifier[vectors] identifier[transform] = identifier[transformations] . identifier[transform_around] ( identifier[matrix] = identifier[transform] , identifier[point] = identifier[self] . identifier[centroid] ) identifier[transform] [: literal[int] , literal[int] ]-= identifier[self] . identifier[centroid] keyword[return] identifier[transform]
def principal_inertia_transform(self): """ A transform which moves the current mesh so the principal inertia vectors are on the X,Y, and Z axis, and the centroid is at the origin. Returns ---------- transform : (4, 4) float Homogenous transformation matrix """ order = np.argsort(self.principal_inertia_components)[1:][::-1] vectors = self.principal_inertia_vectors[order] vectors = np.vstack((vectors, np.cross(*vectors))) transform = np.eye(4) transform[:3, :3] = vectors transform = transformations.transform_around(matrix=transform, point=self.centroid) transform[:3, 3] -= self.centroid return transform
def create(self): """Creates a new database""" self.db_attrs = self.consul.create_db( self.instance_name, self.instance_type, self.admin_username, self.admin_password, db_name=self.db_name, storage_size_gb=self.storage_size, timeout_s=self.launch_timeout_s, )
def function[create, parameter[self]]: constant[Creates a new database] name[self].db_attrs assign[=] call[name[self].consul.create_db, parameter[name[self].instance_name, name[self].instance_type, name[self].admin_username, name[self].admin_password]]
keyword[def] identifier[create] ( identifier[self] ): literal[string] identifier[self] . identifier[db_attrs] = identifier[self] . identifier[consul] . identifier[create_db] ( identifier[self] . identifier[instance_name] , identifier[self] . identifier[instance_type] , identifier[self] . identifier[admin_username] , identifier[self] . identifier[admin_password] , identifier[db_name] = identifier[self] . identifier[db_name] , identifier[storage_size_gb] = identifier[self] . identifier[storage_size] , identifier[timeout_s] = identifier[self] . identifier[launch_timeout_s] , )
def create(self): """Creates a new database""" self.db_attrs = self.consul.create_db(self.instance_name, self.instance_type, self.admin_username, self.admin_password, db_name=self.db_name, storage_size_gb=self.storage_size, timeout_s=self.launch_timeout_s)
def main(args_list=None): """ Script which loads variants and annotates them with overlapping genes and predicted coding effects. Example usage: varcode --vcf mutect.vcf \ --vcf strelka.vcf \ --maf tcga_brca.maf \ --variant chr1 498584 C G \ --json-variants more_variants.json """ print_version_info() if args_list is None: args_list = sys.argv[1:] args = arg_parser.parse_args(args_list) variants = variant_collection_from_args(args) effects = variants.effects() if args.only_coding: effects = effects.drop_silent_and_noncoding() if args.one_per_variant: variant_to_effect_dict = effects.top_priority_effect_per_variant() effects = effects.clone_with_new_elements(list(variant_to_effect_dict.values())) effects_dataframe = effects.to_dataframe() logger.info('\n%s', effects) if args.output_csv: effects_dataframe.to_csv(args.output_csv, index=False)
def function[main, parameter[args_list]]: constant[ Script which loads variants and annotates them with overlapping genes and predicted coding effects. Example usage: varcode --vcf mutect.vcf --vcf strelka.vcf --maf tcga_brca.maf --variant chr1 498584 C G --json-variants more_variants.json ] call[name[print_version_info], parameter[]] if compare[name[args_list] is constant[None]] begin[:] variable[args_list] assign[=] call[name[sys].argv][<ast.Slice object at 0x7da1b0534730>] variable[args] assign[=] call[name[arg_parser].parse_args, parameter[name[args_list]]] variable[variants] assign[=] call[name[variant_collection_from_args], parameter[name[args]]] variable[effects] assign[=] call[name[variants].effects, parameter[]] if name[args].only_coding begin[:] variable[effects] assign[=] call[name[effects].drop_silent_and_noncoding, parameter[]] if name[args].one_per_variant begin[:] variable[variant_to_effect_dict] assign[=] call[name[effects].top_priority_effect_per_variant, parameter[]] variable[effects] assign[=] call[name[effects].clone_with_new_elements, parameter[call[name[list], parameter[call[name[variant_to_effect_dict].values, parameter[]]]]]] variable[effects_dataframe] assign[=] call[name[effects].to_dataframe, parameter[]] call[name[logger].info, parameter[constant[ %s], name[effects]]] if name[args].output_csv begin[:] call[name[effects_dataframe].to_csv, parameter[name[args].output_csv]]
keyword[def] identifier[main] ( identifier[args_list] = keyword[None] ): literal[string] identifier[print_version_info] () keyword[if] identifier[args_list] keyword[is] keyword[None] : identifier[args_list] = identifier[sys] . identifier[argv] [ literal[int] :] identifier[args] = identifier[arg_parser] . identifier[parse_args] ( identifier[args_list] ) identifier[variants] = identifier[variant_collection_from_args] ( identifier[args] ) identifier[effects] = identifier[variants] . identifier[effects] () keyword[if] identifier[args] . identifier[only_coding] : identifier[effects] = identifier[effects] . identifier[drop_silent_and_noncoding] () keyword[if] identifier[args] . identifier[one_per_variant] : identifier[variant_to_effect_dict] = identifier[effects] . identifier[top_priority_effect_per_variant] () identifier[effects] = identifier[effects] . identifier[clone_with_new_elements] ( identifier[list] ( identifier[variant_to_effect_dict] . identifier[values] ())) identifier[effects_dataframe] = identifier[effects] . identifier[to_dataframe] () identifier[logger] . identifier[info] ( literal[string] , identifier[effects] ) keyword[if] identifier[args] . identifier[output_csv] : identifier[effects_dataframe] . identifier[to_csv] ( identifier[args] . identifier[output_csv] , identifier[index] = keyword[False] )
def main(args_list=None): """ Script which loads variants and annotates them with overlapping genes and predicted coding effects. Example usage: varcode --vcf mutect.vcf --vcf strelka.vcf --maf tcga_brca.maf --variant chr1 498584 C G --json-variants more_variants.json """ print_version_info() if args_list is None: args_list = sys.argv[1:] # depends on [control=['if'], data=['args_list']] args = arg_parser.parse_args(args_list) variants = variant_collection_from_args(args) effects = variants.effects() if args.only_coding: effects = effects.drop_silent_and_noncoding() # depends on [control=['if'], data=[]] if args.one_per_variant: variant_to_effect_dict = effects.top_priority_effect_per_variant() effects = effects.clone_with_new_elements(list(variant_to_effect_dict.values())) # depends on [control=['if'], data=[]] effects_dataframe = effects.to_dataframe() logger.info('\n%s', effects) if args.output_csv: effects_dataframe.to_csv(args.output_csv, index=False) # depends on [control=['if'], data=[]]
def _get_http_proxy_url(): ''' Returns the http_proxy_url if proxy_username, proxy_password, proxy_host, and proxy_port config values are set. Returns a string. ''' http_proxy_url = '' host = __salt__['config.option']('proxy_host') port = __salt__['config.option']('proxy_port') username = __salt__['config.option']('proxy_username') password = __salt__['config.option']('proxy_password') # Set http_proxy_url for use in various internet facing actions...eg apt-key adv if host and port: if username and password: http_proxy_url = 'http://{0}:{1}@{2}:{3}'.format( username, password, host, port ) else: http_proxy_url = 'http://{0}:{1}'.format( host, port ) return http_proxy_url
def function[_get_http_proxy_url, parameter[]]: constant[ Returns the http_proxy_url if proxy_username, proxy_password, proxy_host, and proxy_port config values are set. Returns a string. ] variable[http_proxy_url] assign[=] constant[] variable[host] assign[=] call[call[name[__salt__]][constant[config.option]], parameter[constant[proxy_host]]] variable[port] assign[=] call[call[name[__salt__]][constant[config.option]], parameter[constant[proxy_port]]] variable[username] assign[=] call[call[name[__salt__]][constant[config.option]], parameter[constant[proxy_username]]] variable[password] assign[=] call[call[name[__salt__]][constant[config.option]], parameter[constant[proxy_password]]] if <ast.BoolOp object at 0x7da20c6a9ab0> begin[:] if <ast.BoolOp object at 0x7da20c6ab280> begin[:] variable[http_proxy_url] assign[=] call[constant[http://{0}:{1}@{2}:{3}].format, parameter[name[username], name[password], name[host], name[port]]] return[name[http_proxy_url]]
keyword[def] identifier[_get_http_proxy_url] (): literal[string] identifier[http_proxy_url] = literal[string] identifier[host] = identifier[__salt__] [ literal[string] ]( literal[string] ) identifier[port] = identifier[__salt__] [ literal[string] ]( literal[string] ) identifier[username] = identifier[__salt__] [ literal[string] ]( literal[string] ) identifier[password] = identifier[__salt__] [ literal[string] ]( literal[string] ) keyword[if] identifier[host] keyword[and] identifier[port] : keyword[if] identifier[username] keyword[and] identifier[password] : identifier[http_proxy_url] = literal[string] . identifier[format] ( identifier[username] , identifier[password] , identifier[host] , identifier[port] ) keyword[else] : identifier[http_proxy_url] = literal[string] . identifier[format] ( identifier[host] , identifier[port] ) keyword[return] identifier[http_proxy_url]
def _get_http_proxy_url(): """ Returns the http_proxy_url if proxy_username, proxy_password, proxy_host, and proxy_port config values are set. Returns a string. """ http_proxy_url = '' host = __salt__['config.option']('proxy_host') port = __salt__['config.option']('proxy_port') username = __salt__['config.option']('proxy_username') password = __salt__['config.option']('proxy_password') # Set http_proxy_url for use in various internet facing actions...eg apt-key adv if host and port: if username and password: http_proxy_url = 'http://{0}:{1}@{2}:{3}'.format(username, password, host, port) # depends on [control=['if'], data=[]] else: http_proxy_url = 'http://{0}:{1}'.format(host, port) # depends on [control=['if'], data=[]] return http_proxy_url
def set_value(self, value: str): """ Sets the displayed digits based on the value string. :param value: a string containing an integer or float value :return: None """ [digit.clear() for digit in self._digits] grouped = self._group(value) # return the parts, reversed digits = self._digits[::-1] # reverse the digits # fill from right to left has_period = False for i, digit_value in enumerate(grouped): try: if has_period: digits[i].set_value(digit_value + '.') has_period = False elif grouped[i] == '.': has_period = True else: digits[i].set_value(digit_value) except IndexError: raise ValueError('the value "{}" contains too ' 'many digits'.format(value))
def function[set_value, parameter[self, value]]: constant[ Sets the displayed digits based on the value string. :param value: a string containing an integer or float value :return: None ] <ast.ListComp object at 0x7da1b0f45060> variable[grouped] assign[=] call[name[self]._group, parameter[name[value]]] variable[digits] assign[=] call[name[self]._digits][<ast.Slice object at 0x7da1b0f450c0>] variable[has_period] assign[=] constant[False] for taget[tuple[[<ast.Name object at 0x7da1b0f45090>, <ast.Name object at 0x7da1b0f47640>]]] in starred[call[name[enumerate], parameter[name[grouped]]]] begin[:] <ast.Try object at 0x7da1b0f46740>
keyword[def] identifier[set_value] ( identifier[self] , identifier[value] : identifier[str] ): literal[string] [ identifier[digit] . identifier[clear] () keyword[for] identifier[digit] keyword[in] identifier[self] . identifier[_digits] ] identifier[grouped] = identifier[self] . identifier[_group] ( identifier[value] ) identifier[digits] = identifier[self] . identifier[_digits] [::- literal[int] ] identifier[has_period] = keyword[False] keyword[for] identifier[i] , identifier[digit_value] keyword[in] identifier[enumerate] ( identifier[grouped] ): keyword[try] : keyword[if] identifier[has_period] : identifier[digits] [ identifier[i] ]. identifier[set_value] ( identifier[digit_value] + literal[string] ) identifier[has_period] = keyword[False] keyword[elif] identifier[grouped] [ identifier[i] ]== literal[string] : identifier[has_period] = keyword[True] keyword[else] : identifier[digits] [ identifier[i] ]. identifier[set_value] ( identifier[digit_value] ) keyword[except] identifier[IndexError] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[value] ))
def set_value(self, value: str): """ Sets the displayed digits based on the value string. :param value: a string containing an integer or float value :return: None """ [digit.clear() for digit in self._digits] grouped = self._group(value) # return the parts, reversed digits = self._digits[::-1] # reverse the digits # fill from right to left has_period = False for (i, digit_value) in enumerate(grouped): try: if has_period: digits[i].set_value(digit_value + '.') has_period = False # depends on [control=['if'], data=[]] elif grouped[i] == '.': has_period = True # depends on [control=['if'], data=[]] else: digits[i].set_value(digit_value) # depends on [control=['try'], data=[]] except IndexError: raise ValueError('the value "{}" contains too many digits'.format(value)) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=[]]
def lon_lat_bins(bb, coord_bin_width): """ Define bin edges for disaggregation histograms. Given bins data as provided by :func:`collect_bin_data`, this function finds edges of histograms, taking into account maximum and minimum values of magnitude, distance and coordinates as well as requested sizes/numbers of bins. """ west, south, east, north = bb west = numpy.floor(west / coord_bin_width) * coord_bin_width east = numpy.ceil(east / coord_bin_width) * coord_bin_width lon_extent = get_longitudinal_extent(west, east) lon_bins, _, _ = npoints_between( west, 0, 0, east, 0, 0, numpy.round(lon_extent / coord_bin_width + 1)) lat_bins = coord_bin_width * numpy.arange( int(numpy.floor(south / coord_bin_width)), int(numpy.ceil(north / coord_bin_width) + 1)) return lon_bins, lat_bins
def function[lon_lat_bins, parameter[bb, coord_bin_width]]: constant[ Define bin edges for disaggregation histograms. Given bins data as provided by :func:`collect_bin_data`, this function finds edges of histograms, taking into account maximum and minimum values of magnitude, distance and coordinates as well as requested sizes/numbers of bins. ] <ast.Tuple object at 0x7da1b133e170> assign[=] name[bb] variable[west] assign[=] binary_operation[call[name[numpy].floor, parameter[binary_operation[name[west] / name[coord_bin_width]]]] * name[coord_bin_width]] variable[east] assign[=] binary_operation[call[name[numpy].ceil, parameter[binary_operation[name[east] / name[coord_bin_width]]]] * name[coord_bin_width]] variable[lon_extent] assign[=] call[name[get_longitudinal_extent], parameter[name[west], name[east]]] <ast.Tuple object at 0x7da1b133d900> assign[=] call[name[npoints_between], parameter[name[west], constant[0], constant[0], name[east], constant[0], constant[0], call[name[numpy].round, parameter[binary_operation[binary_operation[name[lon_extent] / name[coord_bin_width]] + constant[1]]]]]] variable[lat_bins] assign[=] binary_operation[name[coord_bin_width] * call[name[numpy].arange, parameter[call[name[int], parameter[call[name[numpy].floor, parameter[binary_operation[name[south] / name[coord_bin_width]]]]]], call[name[int], parameter[binary_operation[call[name[numpy].ceil, parameter[binary_operation[name[north] / name[coord_bin_width]]]] + constant[1]]]]]]] return[tuple[[<ast.Name object at 0x7da1b133e110>, <ast.Name object at 0x7da1b133d4e0>]]]
keyword[def] identifier[lon_lat_bins] ( identifier[bb] , identifier[coord_bin_width] ): literal[string] identifier[west] , identifier[south] , identifier[east] , identifier[north] = identifier[bb] identifier[west] = identifier[numpy] . identifier[floor] ( identifier[west] / identifier[coord_bin_width] )* identifier[coord_bin_width] identifier[east] = identifier[numpy] . identifier[ceil] ( identifier[east] / identifier[coord_bin_width] )* identifier[coord_bin_width] identifier[lon_extent] = identifier[get_longitudinal_extent] ( identifier[west] , identifier[east] ) identifier[lon_bins] , identifier[_] , identifier[_] = identifier[npoints_between] ( identifier[west] , literal[int] , literal[int] , identifier[east] , literal[int] , literal[int] , identifier[numpy] . identifier[round] ( identifier[lon_extent] / identifier[coord_bin_width] + literal[int] )) identifier[lat_bins] = identifier[coord_bin_width] * identifier[numpy] . identifier[arange] ( identifier[int] ( identifier[numpy] . identifier[floor] ( identifier[south] / identifier[coord_bin_width] )), identifier[int] ( identifier[numpy] . identifier[ceil] ( identifier[north] / identifier[coord_bin_width] )+ literal[int] )) keyword[return] identifier[lon_bins] , identifier[lat_bins]
def lon_lat_bins(bb, coord_bin_width): """ Define bin edges for disaggregation histograms. Given bins data as provided by :func:`collect_bin_data`, this function finds edges of histograms, taking into account maximum and minimum values of magnitude, distance and coordinates as well as requested sizes/numbers of bins. """ (west, south, east, north) = bb west = numpy.floor(west / coord_bin_width) * coord_bin_width east = numpy.ceil(east / coord_bin_width) * coord_bin_width lon_extent = get_longitudinal_extent(west, east) (lon_bins, _, _) = npoints_between(west, 0, 0, east, 0, 0, numpy.round(lon_extent / coord_bin_width + 1)) lat_bins = coord_bin_width * numpy.arange(int(numpy.floor(south / coord_bin_width)), int(numpy.ceil(north / coord_bin_width) + 1)) return (lon_bins, lat_bins)
def getMessage(self): """ Return the message for this LogRecord. Return the message for this LogRecord after merging any user-supplied \ arguments with the message. """ if isinstance(self.msg, numpy.ndarray): msg = self.array2string(self.msg) else: msg = str(self.msg) if self.args: a2s = self.array2string if isinstance(self.args, Dict): args = {k: (a2s(v) if isinstance(v, numpy.ndarray) else v) for (k, v) in self.args.items()} elif isinstance(self.args, Sequence): args = tuple((a2s(a) if isinstance(a, numpy.ndarray) else a) for a in self.args) else: raise TypeError("Unexpected input '%s' with type '%s'" % (self.args, type(self.args))) msg = msg % args return msg
def function[getMessage, parameter[self]]: constant[ Return the message for this LogRecord. Return the message for this LogRecord after merging any user-supplied arguments with the message. ] if call[name[isinstance], parameter[name[self].msg, name[numpy].ndarray]] begin[:] variable[msg] assign[=] call[name[self].array2string, parameter[name[self].msg]] if name[self].args begin[:] variable[a2s] assign[=] name[self].array2string if call[name[isinstance], parameter[name[self].args, name[Dict]]] begin[:] variable[args] assign[=] <ast.DictComp object at 0x7da1b0c93250> variable[msg] assign[=] binary_operation[name[msg] <ast.Mod object at 0x7da2590d6920> name[args]] return[name[msg]]
keyword[def] identifier[getMessage] ( identifier[self] ): literal[string] keyword[if] identifier[isinstance] ( identifier[self] . identifier[msg] , identifier[numpy] . identifier[ndarray] ): identifier[msg] = identifier[self] . identifier[array2string] ( identifier[self] . identifier[msg] ) keyword[else] : identifier[msg] = identifier[str] ( identifier[self] . identifier[msg] ) keyword[if] identifier[self] . identifier[args] : identifier[a2s] = identifier[self] . identifier[array2string] keyword[if] identifier[isinstance] ( identifier[self] . identifier[args] , identifier[Dict] ): identifier[args] ={ identifier[k] :( identifier[a2s] ( identifier[v] ) keyword[if] identifier[isinstance] ( identifier[v] , identifier[numpy] . identifier[ndarray] ) keyword[else] identifier[v] ) keyword[for] ( identifier[k] , identifier[v] ) keyword[in] identifier[self] . identifier[args] . identifier[items] ()} keyword[elif] identifier[isinstance] ( identifier[self] . identifier[args] , identifier[Sequence] ): identifier[args] = identifier[tuple] (( identifier[a2s] ( identifier[a] ) keyword[if] identifier[isinstance] ( identifier[a] , identifier[numpy] . identifier[ndarray] ) keyword[else] identifier[a] ) keyword[for] identifier[a] keyword[in] identifier[self] . identifier[args] ) keyword[else] : keyword[raise] identifier[TypeError] ( literal[string] %( identifier[self] . identifier[args] , identifier[type] ( identifier[self] . identifier[args] ))) identifier[msg] = identifier[msg] % identifier[args] keyword[return] identifier[msg]
def getMessage(self): """ Return the message for this LogRecord. Return the message for this LogRecord after merging any user-supplied arguments with the message. """ if isinstance(self.msg, numpy.ndarray): msg = self.array2string(self.msg) # depends on [control=['if'], data=[]] else: msg = str(self.msg) if self.args: a2s = self.array2string if isinstance(self.args, Dict): args = {k: a2s(v) if isinstance(v, numpy.ndarray) else v for (k, v) in self.args.items()} # depends on [control=['if'], data=[]] elif isinstance(self.args, Sequence): args = tuple((a2s(a) if isinstance(a, numpy.ndarray) else a for a in self.args)) # depends on [control=['if'], data=[]] else: raise TypeError("Unexpected input '%s' with type '%s'" % (self.args, type(self.args))) msg = msg % args # depends on [control=['if'], data=[]] return msg
def delete_user(self, username): """Deletes a JIRA User. :param username: Username to delete :type username: str :return: Success of user deletion :rtype: bool """ url = self._options['server'] + '/rest/api/latest/user/?username=%s' % username r = self._session.delete(url) if 200 <= r.status_code <= 299: return True else: logging.error(r.status_code) return False
def function[delete_user, parameter[self, username]]: constant[Deletes a JIRA User. :param username: Username to delete :type username: str :return: Success of user deletion :rtype: bool ] variable[url] assign[=] binary_operation[call[name[self]._options][constant[server]] + binary_operation[constant[/rest/api/latest/user/?username=%s] <ast.Mod object at 0x7da2590d6920> name[username]]] variable[r] assign[=] call[name[self]._session.delete, parameter[name[url]]] if compare[constant[200] less_or_equal[<=] name[r].status_code] begin[:] return[constant[True]]
keyword[def] identifier[delete_user] ( identifier[self] , identifier[username] ): literal[string] identifier[url] = identifier[self] . identifier[_options] [ literal[string] ]+ literal[string] % identifier[username] identifier[r] = identifier[self] . identifier[_session] . identifier[delete] ( identifier[url] ) keyword[if] literal[int] <= identifier[r] . identifier[status_code] <= literal[int] : keyword[return] keyword[True] keyword[else] : identifier[logging] . identifier[error] ( identifier[r] . identifier[status_code] ) keyword[return] keyword[False]
def delete_user(self, username): """Deletes a JIRA User. :param username: Username to delete :type username: str :return: Success of user deletion :rtype: bool """ url = self._options['server'] + '/rest/api/latest/user/?username=%s' % username r = self._session.delete(url) if 200 <= r.status_code <= 299: return True # depends on [control=['if'], data=[]] else: logging.error(r.status_code) return False
def attach_run_command(cmd): """ Run a command when attaching Please do not call directly, this will execvp the command. This is to be used in conjunction with the attach method of a container. """ if isinstance(cmd, tuple): return _lxc.attach_run_command(cmd) elif isinstance(cmd, list): return _lxc.attach_run_command((cmd[0], cmd)) else: return _lxc.attach_run_command((cmd, [cmd]))
def function[attach_run_command, parameter[cmd]]: constant[ Run a command when attaching Please do not call directly, this will execvp the command. This is to be used in conjunction with the attach method of a container. ] if call[name[isinstance], parameter[name[cmd], name[tuple]]] begin[:] return[call[name[_lxc].attach_run_command, parameter[name[cmd]]]]
keyword[def] identifier[attach_run_command] ( identifier[cmd] ): literal[string] keyword[if] identifier[isinstance] ( identifier[cmd] , identifier[tuple] ): keyword[return] identifier[_lxc] . identifier[attach_run_command] ( identifier[cmd] ) keyword[elif] identifier[isinstance] ( identifier[cmd] , identifier[list] ): keyword[return] identifier[_lxc] . identifier[attach_run_command] (( identifier[cmd] [ literal[int] ], identifier[cmd] )) keyword[else] : keyword[return] identifier[_lxc] . identifier[attach_run_command] (( identifier[cmd] ,[ identifier[cmd] ]))
def attach_run_command(cmd): """ Run a command when attaching Please do not call directly, this will execvp the command. This is to be used in conjunction with the attach method of a container. """ if isinstance(cmd, tuple): return _lxc.attach_run_command(cmd) # depends on [control=['if'], data=[]] elif isinstance(cmd, list): return _lxc.attach_run_command((cmd[0], cmd)) # depends on [control=['if'], data=[]] else: return _lxc.attach_run_command((cmd, [cmd]))
def update(gandi, resource, ssl, private_key, poll_cert): """ Update a vhost. Right now you can only activate ssl on the vhost. """ gandi.hostedcert.activate_ssl(resource, ssl, private_key, poll_cert)
def function[update, parameter[gandi, resource, ssl, private_key, poll_cert]]: constant[ Update a vhost. Right now you can only activate ssl on the vhost. ] call[name[gandi].hostedcert.activate_ssl, parameter[name[resource], name[ssl], name[private_key], name[poll_cert]]]
keyword[def] identifier[update] ( identifier[gandi] , identifier[resource] , identifier[ssl] , identifier[private_key] , identifier[poll_cert] ): literal[string] identifier[gandi] . identifier[hostedcert] . identifier[activate_ssl] ( identifier[resource] , identifier[ssl] , identifier[private_key] , identifier[poll_cert] )
def update(gandi, resource, ssl, private_key, poll_cert): """ Update a vhost. Right now you can only activate ssl on the vhost. """ gandi.hostedcert.activate_ssl(resource, ssl, private_key, poll_cert)
def aggregate(self, rankings, epsilon, max_iters): """ Description: Minorization-Maximization algorithm which returns an estimate of the ground-truth parameters, gamma for the given data. Parameters: rankings: set of rankings to aggregate epsilon: convergence condition value, set to None for iteration only max_iters: maximum number of iterations of MM algorithm """ # compute the matrix w, the numbers of pairwise wins: w = np.zeros((self.m, self.m)) for ranking in rankings: localw = np.zeros((self.m, self.m)) for ind1, alt1 in enumerate(self.alts): for ind2, alt2 in enumerate(self.alts): if ind1 == ind2: continue alt1_rank = util.get_index_nested(ranking, alt1) alt2_rank = util.get_index_nested(ranking, alt2) if alt1_rank < alt2_rank: # alt 1 is ranked higher localw[ind1][ind2] = 1 w += localw W = w.sum(axis=1) # gamma_t is the value of gamma at time = t # gamma_t1 is the value of gamma at time t = t+1 (the next iteration) # initial arbitrary value for gamma: gamma_t = np.ones(self.m) / self.m gamma_t1 = np.empty(self.m) for f in range(max_iters): for i in range(self.m): s = 0 # sum of updating function for j in range(self.m): if j != i: s += (w[j][i] + w[i][j]) / (gamma_t[i]+gamma_t[j]) gamma_t1[i] = W[i] / s gamma_t1 /= np.sum(gamma_t1) if epsilon != None and np.all(np.absolute(gamma_t1 - gamma_t) < epsilon): alt_scores = {cand: gamma_t1[ind] for ind, cand in enumerate(self.alts)} self.create_rank_dicts(alt_scores) return gamma_t1 # convergence reached before max_iters gamma_t = gamma_t1 # update gamma_t for the next iteration alt_scores = {cand: gamma_t1[ind] for ind, cand in enumerate(self.alts)} self.create_rank_dicts(alt_scores) return gamma_t1
def function[aggregate, parameter[self, rankings, epsilon, max_iters]]: constant[ Description: Minorization-Maximization algorithm which returns an estimate of the ground-truth parameters, gamma for the given data. Parameters: rankings: set of rankings to aggregate epsilon: convergence condition value, set to None for iteration only max_iters: maximum number of iterations of MM algorithm ] variable[w] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Attribute object at 0x7da204566e30>, <ast.Attribute object at 0x7da18ede6050>]]]] for taget[name[ranking]] in starred[name[rankings]] begin[:] variable[localw] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Attribute object at 0x7da18ede5cc0>, <ast.Attribute object at 0x7da18ede7d90>]]]] for taget[tuple[[<ast.Name object at 0x7da18ede5f60>, <ast.Name object at 0x7da18ede6290>]]] in starred[call[name[enumerate], parameter[name[self].alts]]] begin[:] for taget[tuple[[<ast.Name object at 0x7da18ede5a50>, <ast.Name object at 0x7da18ede5c90>]]] in starred[call[name[enumerate], parameter[name[self].alts]]] begin[:] if compare[name[ind1] equal[==] name[ind2]] begin[:] continue variable[alt1_rank] assign[=] call[name[util].get_index_nested, parameter[name[ranking], name[alt1]]] variable[alt2_rank] assign[=] call[name[util].get_index_nested, parameter[name[ranking], name[alt2]]] if compare[name[alt1_rank] less[<] name[alt2_rank]] begin[:] call[call[name[localw]][name[ind1]]][name[ind2]] assign[=] constant[1] <ast.AugAssign object at 0x7da18ede7f70> variable[W] assign[=] call[name[w].sum, parameter[]] variable[gamma_t] assign[=] binary_operation[call[name[np].ones, parameter[name[self].m]] / name[self].m] variable[gamma_t1] assign[=] call[name[np].empty, parameter[name[self].m]] for taget[name[f]] in starred[call[name[range], parameter[name[max_iters]]]] begin[:] for taget[name[i]] in starred[call[name[range], parameter[name[self].m]]] begin[:] variable[s] assign[=] constant[0] for taget[name[j]] in starred[call[name[range], parameter[name[self].m]]] begin[:] if compare[name[j] not_equal[!=] name[i]] begin[:] <ast.AugAssign object at 0x7da18ede5c00> call[name[gamma_t1]][name[i]] assign[=] binary_operation[call[name[W]][name[i]] / name[s]] <ast.AugAssign object at 0x7da18ede5930> if <ast.BoolOp object at 0x7da20e9602e0> begin[:] variable[alt_scores] assign[=] <ast.DictComp object at 0x7da20e961a20> call[name[self].create_rank_dicts, parameter[name[alt_scores]]] return[name[gamma_t1]] variable[gamma_t] assign[=] name[gamma_t1] variable[alt_scores] assign[=] <ast.DictComp object at 0x7da20e960730> call[name[self].create_rank_dicts, parameter[name[alt_scores]]] return[name[gamma_t1]]
keyword[def] identifier[aggregate] ( identifier[self] , identifier[rankings] , identifier[epsilon] , identifier[max_iters] ): literal[string] identifier[w] = identifier[np] . identifier[zeros] (( identifier[self] . identifier[m] , identifier[self] . identifier[m] )) keyword[for] identifier[ranking] keyword[in] identifier[rankings] : identifier[localw] = identifier[np] . identifier[zeros] (( identifier[self] . identifier[m] , identifier[self] . identifier[m] )) keyword[for] identifier[ind1] , identifier[alt1] keyword[in] identifier[enumerate] ( identifier[self] . identifier[alts] ): keyword[for] identifier[ind2] , identifier[alt2] keyword[in] identifier[enumerate] ( identifier[self] . identifier[alts] ): keyword[if] identifier[ind1] == identifier[ind2] : keyword[continue] identifier[alt1_rank] = identifier[util] . identifier[get_index_nested] ( identifier[ranking] , identifier[alt1] ) identifier[alt2_rank] = identifier[util] . identifier[get_index_nested] ( identifier[ranking] , identifier[alt2] ) keyword[if] identifier[alt1_rank] < identifier[alt2_rank] : identifier[localw] [ identifier[ind1] ][ identifier[ind2] ]= literal[int] identifier[w] += identifier[localw] identifier[W] = identifier[w] . identifier[sum] ( identifier[axis] = literal[int] ) identifier[gamma_t] = identifier[np] . identifier[ones] ( identifier[self] . identifier[m] )/ identifier[self] . identifier[m] identifier[gamma_t1] = identifier[np] . identifier[empty] ( identifier[self] . identifier[m] ) keyword[for] identifier[f] keyword[in] identifier[range] ( identifier[max_iters] ): keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[m] ): identifier[s] = literal[int] keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[self] . identifier[m] ): keyword[if] identifier[j] != identifier[i] : identifier[s] +=( identifier[w] [ identifier[j] ][ identifier[i] ]+ identifier[w] [ identifier[i] ][ identifier[j] ])/( identifier[gamma_t] [ identifier[i] ]+ identifier[gamma_t] [ identifier[j] ]) identifier[gamma_t1] [ identifier[i] ]= identifier[W] [ identifier[i] ]/ identifier[s] identifier[gamma_t1] /= identifier[np] . identifier[sum] ( identifier[gamma_t1] ) keyword[if] identifier[epsilon] != keyword[None] keyword[and] identifier[np] . identifier[all] ( identifier[np] . identifier[absolute] ( identifier[gamma_t1] - identifier[gamma_t] )< identifier[epsilon] ): identifier[alt_scores] ={ identifier[cand] : identifier[gamma_t1] [ identifier[ind] ] keyword[for] identifier[ind] , identifier[cand] keyword[in] identifier[enumerate] ( identifier[self] . identifier[alts] )} identifier[self] . identifier[create_rank_dicts] ( identifier[alt_scores] ) keyword[return] identifier[gamma_t1] identifier[gamma_t] = identifier[gamma_t1] identifier[alt_scores] ={ identifier[cand] : identifier[gamma_t1] [ identifier[ind] ] keyword[for] identifier[ind] , identifier[cand] keyword[in] identifier[enumerate] ( identifier[self] . identifier[alts] )} identifier[self] . identifier[create_rank_dicts] ( identifier[alt_scores] ) keyword[return] identifier[gamma_t1]
def aggregate(self, rankings, epsilon, max_iters): """ Description: Minorization-Maximization algorithm which returns an estimate of the ground-truth parameters, gamma for the given data. Parameters: rankings: set of rankings to aggregate epsilon: convergence condition value, set to None for iteration only max_iters: maximum number of iterations of MM algorithm """ # compute the matrix w, the numbers of pairwise wins: w = np.zeros((self.m, self.m)) for ranking in rankings: localw = np.zeros((self.m, self.m)) for (ind1, alt1) in enumerate(self.alts): for (ind2, alt2) in enumerate(self.alts): if ind1 == ind2: continue # depends on [control=['if'], data=[]] alt1_rank = util.get_index_nested(ranking, alt1) alt2_rank = util.get_index_nested(ranking, alt2) if alt1_rank < alt2_rank: # alt 1 is ranked higher localw[ind1][ind2] = 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] w += localw # depends on [control=['for'], data=['ranking']] W = w.sum(axis=1) # gamma_t is the value of gamma at time = t # gamma_t1 is the value of gamma at time t = t+1 (the next iteration) # initial arbitrary value for gamma: gamma_t = np.ones(self.m) / self.m gamma_t1 = np.empty(self.m) for f in range(max_iters): for i in range(self.m): s = 0 # sum of updating function for j in range(self.m): if j != i: s += (w[j][i] + w[i][j]) / (gamma_t[i] + gamma_t[j]) # depends on [control=['if'], data=['j', 'i']] # depends on [control=['for'], data=['j']] gamma_t1[i] = W[i] / s # depends on [control=['for'], data=['i']] gamma_t1 /= np.sum(gamma_t1) if epsilon != None and np.all(np.absolute(gamma_t1 - gamma_t) < epsilon): alt_scores = {cand: gamma_t1[ind] for (ind, cand) in enumerate(self.alts)} self.create_rank_dicts(alt_scores) return gamma_t1 # convergence reached before max_iters # depends on [control=['if'], data=[]] gamma_t = gamma_t1 # update gamma_t for the next iteration # depends on [control=['for'], data=[]] alt_scores = {cand: gamma_t1[ind] for (ind, cand) in enumerate(self.alts)} self.create_rank_dicts(alt_scores) return gamma_t1
def getArcs(domains, constraints): """ Return a dictionary mapping pairs (arcs) of constrained variables @attention: Currently unused. """ arcs = {} for x in constraints: constraint, variables = x if len(variables) == 2: variable1, variable2 = variables arcs.setdefault(variable1, {}).setdefault(variable2, []).append(x) arcs.setdefault(variable2, {}).setdefault(variable1, []).append(x) return arcs
def function[getArcs, parameter[domains, constraints]]: constant[ Return a dictionary mapping pairs (arcs) of constrained variables @attention: Currently unused. ] variable[arcs] assign[=] dictionary[[], []] for taget[name[x]] in starred[name[constraints]] begin[:] <ast.Tuple object at 0x7da18f58c310> assign[=] name[x] if compare[call[name[len], parameter[name[variables]]] equal[==] constant[2]] begin[:] <ast.Tuple object at 0x7da18f58e440> assign[=] name[variables] call[call[call[name[arcs].setdefault, parameter[name[variable1], dictionary[[], []]]].setdefault, parameter[name[variable2], list[[]]]].append, parameter[name[x]]] call[call[call[name[arcs].setdefault, parameter[name[variable2], dictionary[[], []]]].setdefault, parameter[name[variable1], list[[]]]].append, parameter[name[x]]] return[name[arcs]]
keyword[def] identifier[getArcs] ( identifier[domains] , identifier[constraints] ): literal[string] identifier[arcs] ={} keyword[for] identifier[x] keyword[in] identifier[constraints] : identifier[constraint] , identifier[variables] = identifier[x] keyword[if] identifier[len] ( identifier[variables] )== literal[int] : identifier[variable1] , identifier[variable2] = identifier[variables] identifier[arcs] . identifier[setdefault] ( identifier[variable1] ,{}). identifier[setdefault] ( identifier[variable2] ,[]). identifier[append] ( identifier[x] ) identifier[arcs] . identifier[setdefault] ( identifier[variable2] ,{}). identifier[setdefault] ( identifier[variable1] ,[]). identifier[append] ( identifier[x] ) keyword[return] identifier[arcs]
def getArcs(domains, constraints): """ Return a dictionary mapping pairs (arcs) of constrained variables @attention: Currently unused. """ arcs = {} for x in constraints: (constraint, variables) = x if len(variables) == 2: (variable1, variable2) = variables arcs.setdefault(variable1, {}).setdefault(variable2, []).append(x) arcs.setdefault(variable2, {}).setdefault(variable1, []).append(x) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['x']] return arcs
def inspect(self, *args, **kwargs): """ Recursively inspect all given SCSS files to find imported dependencies. This does not return anything. Just fill internal buffers about inspected files. Note: This will ignore orphan files (files that are not imported from any of given SCSS files). Args: *args: One or multiple arguments, each one for a source file path to inspect. Keyword Arguments: library_paths (list): List of directory paths for libraries to resolve paths if resolving fails on the base source path. Default to None. """ library_paths = kwargs.get('library_paths', None) for sourcepath in args: self.look_source(sourcepath, library_paths=library_paths)
def function[inspect, parameter[self]]: constant[ Recursively inspect all given SCSS files to find imported dependencies. This does not return anything. Just fill internal buffers about inspected files. Note: This will ignore orphan files (files that are not imported from any of given SCSS files). Args: *args: One or multiple arguments, each one for a source file path to inspect. Keyword Arguments: library_paths (list): List of directory paths for libraries to resolve paths if resolving fails on the base source path. Default to None. ] variable[library_paths] assign[=] call[name[kwargs].get, parameter[constant[library_paths], constant[None]]] for taget[name[sourcepath]] in starred[name[args]] begin[:] call[name[self].look_source, parameter[name[sourcepath]]]
keyword[def] identifier[inspect] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[library_paths] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] ) keyword[for] identifier[sourcepath] keyword[in] identifier[args] : identifier[self] . identifier[look_source] ( identifier[sourcepath] , identifier[library_paths] = identifier[library_paths] )
def inspect(self, *args, **kwargs): """ Recursively inspect all given SCSS files to find imported dependencies. This does not return anything. Just fill internal buffers about inspected files. Note: This will ignore orphan files (files that are not imported from any of given SCSS files). Args: *args: One or multiple arguments, each one for a source file path to inspect. Keyword Arguments: library_paths (list): List of directory paths for libraries to resolve paths if resolving fails on the base source path. Default to None. """ library_paths = kwargs.get('library_paths', None) for sourcepath in args: self.look_source(sourcepath, library_paths=library_paths) # depends on [control=['for'], data=['sourcepath']]
def database_exist(self): """Create databases for each engine and return a new :class:`.Mapper`. """ binds = {} for key, engine in self.keys_engines(): key = key if key else 'default' binds[key] = self._database_exist(engine) return binds
def function[database_exist, parameter[self]]: constant[Create databases for each engine and return a new :class:`.Mapper`. ] variable[binds] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da2054a4b20>, <ast.Name object at 0x7da2054a70d0>]]] in starred[call[name[self].keys_engines, parameter[]]] begin[:] variable[key] assign[=] <ast.IfExp object at 0x7da2054a6440> call[name[binds]][name[key]] assign[=] call[name[self]._database_exist, parameter[name[engine]]] return[name[binds]]
keyword[def] identifier[database_exist] ( identifier[self] ): literal[string] identifier[binds] ={} keyword[for] identifier[key] , identifier[engine] keyword[in] identifier[self] . identifier[keys_engines] (): identifier[key] = identifier[key] keyword[if] identifier[key] keyword[else] literal[string] identifier[binds] [ identifier[key] ]= identifier[self] . identifier[_database_exist] ( identifier[engine] ) keyword[return] identifier[binds]
def database_exist(self): """Create databases for each engine and return a new :class:`.Mapper`. """ binds = {} for (key, engine) in self.keys_engines(): key = key if key else 'default' binds[key] = self._database_exist(engine) # depends on [control=['for'], data=[]] return binds
def addOutHeaderInfo(self, name, type, namespace, element_type=0, mustUnderstand=0): """Add an output SOAP header description to the call info.""" headerinfo = HeaderInfo(name, type, namespace, element_type) if mustUnderstand: headerinfo.mustUnderstand = 1 self.outheaders.append(headerinfo) return headerinfo
def function[addOutHeaderInfo, parameter[self, name, type, namespace, element_type, mustUnderstand]]: constant[Add an output SOAP header description to the call info.] variable[headerinfo] assign[=] call[name[HeaderInfo], parameter[name[name], name[type], name[namespace], name[element_type]]] if name[mustUnderstand] begin[:] name[headerinfo].mustUnderstand assign[=] constant[1] call[name[self].outheaders.append, parameter[name[headerinfo]]] return[name[headerinfo]]
keyword[def] identifier[addOutHeaderInfo] ( identifier[self] , identifier[name] , identifier[type] , identifier[namespace] , identifier[element_type] = literal[int] , identifier[mustUnderstand] = literal[int] ): literal[string] identifier[headerinfo] = identifier[HeaderInfo] ( identifier[name] , identifier[type] , identifier[namespace] , identifier[element_type] ) keyword[if] identifier[mustUnderstand] : identifier[headerinfo] . identifier[mustUnderstand] = literal[int] identifier[self] . identifier[outheaders] . identifier[append] ( identifier[headerinfo] ) keyword[return] identifier[headerinfo]
def addOutHeaderInfo(self, name, type, namespace, element_type=0, mustUnderstand=0): """Add an output SOAP header description to the call info.""" headerinfo = HeaderInfo(name, type, namespace, element_type) if mustUnderstand: headerinfo.mustUnderstand = 1 # depends on [control=['if'], data=[]] self.outheaders.append(headerinfo) return headerinfo
def info(ctx, objects): """ Obtain all kinds of information """ if not objects: t = PrettyTable(["Key", "Value"]) t.align = "l" info = ctx.peerplays.rpc.get_dynamic_global_properties() for key in info: t.add_row([key, info[key]]) click.echo(t.get_string(sortby="Key")) for obj in objects: # Block if re.match("^[0-9]*$", obj): block = Block(obj, peerplays_instance=ctx.peerplays) if block: t = PrettyTable(["Key", "Value"]) t.align = "l" for key in sorted(block): value = block[key] if key == "transactions": value = json.dumps(value, indent=4) t.add_row([key, value]) click.echo(t) else: click.echo("Block number %s unknown" % obj) # Object Id elif len(obj.split(".")) == 3: data = ctx.peerplays.rpc.get_object(obj) if data: t = PrettyTable(["Key", "Value"]) t.align = "l" for key in sorted(data): value = data[key] if isinstance(value, dict) or isinstance(value, list): value = json.dumps(value, indent=4) t.add_row([key, value]) click.echo(t) else: click.echo("Object %s unknown" % obj) # Asset elif obj.upper() == obj: data = Asset(obj) t = PrettyTable(["Key", "Value"]) t.align = "l" for key in sorted(data): value = data[key] if isinstance(value, dict): value = json.dumps(value, indent=4) t.add_row([key, value]) click.echo(t) # Public Key elif re.match("^PPY.{48,55}$", obj): account = ctx.peerplays.wallet.getAccountFromPublicKey(obj) if account: t = PrettyTable(["Account"]) t.align = "l" t.add_row([account]) click.echo(t) else: click.echo("Public Key not known" % obj) # Account name elif re.match("^[a-zA-Z0-9\-\._]{2,64}$", obj): account = Account(obj, full=True) if account: t = PrettyTable(["Key", "Value"]) t.align = "l" for key in sorted(account): value = account[key] if isinstance(value, dict) or isinstance(value, list): value = json.dumps(value, indent=4) t.add_row([key, value]) click.echo(t) else: click.echo("Account %s unknown" % obj) else: click.echo("Couldn't identify object to read")
def function[info, parameter[ctx, objects]]: constant[ Obtain all kinds of information ] if <ast.UnaryOp object at 0x7da1b106d6f0> begin[:] variable[t] assign[=] call[name[PrettyTable], parameter[list[[<ast.Constant object at 0x7da1b106dc30>, <ast.Constant object at 0x7da1b106e9e0>]]]] name[t].align assign[=] constant[l] variable[info] assign[=] call[name[ctx].peerplays.rpc.get_dynamic_global_properties, parameter[]] for taget[name[key]] in starred[name[info]] begin[:] call[name[t].add_row, parameter[list[[<ast.Name object at 0x7da1b106e7a0>, <ast.Subscript object at 0x7da1b106e500>]]]] call[name[click].echo, parameter[call[name[t].get_string, parameter[]]]] for taget[name[obj]] in starred[name[objects]] begin[:] if call[name[re].match, parameter[constant[^[0-9]*$], name[obj]]] begin[:] variable[block] assign[=] call[name[Block], parameter[name[obj]]] if name[block] begin[:] variable[t] assign[=] call[name[PrettyTable], parameter[list[[<ast.Constant object at 0x7da1b106eb90>, <ast.Constant object at 0x7da1b106f550>]]]] name[t].align assign[=] constant[l] for taget[name[key]] in starred[call[name[sorted], parameter[name[block]]]] begin[:] variable[value] assign[=] call[name[block]][name[key]] if compare[name[key] equal[==] constant[transactions]] begin[:] variable[value] assign[=] call[name[json].dumps, parameter[name[value]]] call[name[t].add_row, parameter[list[[<ast.Name object at 0x7da1b106df00>, <ast.Name object at 0x7da1b106e1d0>]]]] call[name[click].echo, parameter[name[t]]]
keyword[def] identifier[info] ( identifier[ctx] , identifier[objects] ): literal[string] keyword[if] keyword[not] identifier[objects] : identifier[t] = identifier[PrettyTable] ([ literal[string] , literal[string] ]) identifier[t] . identifier[align] = literal[string] identifier[info] = identifier[ctx] . identifier[peerplays] . identifier[rpc] . identifier[get_dynamic_global_properties] () keyword[for] identifier[key] keyword[in] identifier[info] : identifier[t] . identifier[add_row] ([ identifier[key] , identifier[info] [ identifier[key] ]]) identifier[click] . identifier[echo] ( identifier[t] . identifier[get_string] ( identifier[sortby] = literal[string] )) keyword[for] identifier[obj] keyword[in] identifier[objects] : keyword[if] identifier[re] . identifier[match] ( literal[string] , identifier[obj] ): identifier[block] = identifier[Block] ( identifier[obj] , identifier[peerplays_instance] = identifier[ctx] . identifier[peerplays] ) keyword[if] identifier[block] : identifier[t] = identifier[PrettyTable] ([ literal[string] , literal[string] ]) identifier[t] . identifier[align] = literal[string] keyword[for] identifier[key] keyword[in] identifier[sorted] ( identifier[block] ): identifier[value] = identifier[block] [ identifier[key] ] keyword[if] identifier[key] == literal[string] : identifier[value] = identifier[json] . identifier[dumps] ( identifier[value] , identifier[indent] = literal[int] ) identifier[t] . identifier[add_row] ([ identifier[key] , identifier[value] ]) identifier[click] . identifier[echo] ( identifier[t] ) keyword[else] : identifier[click] . identifier[echo] ( literal[string] % identifier[obj] ) keyword[elif] identifier[len] ( identifier[obj] . identifier[split] ( literal[string] ))== literal[int] : identifier[data] = identifier[ctx] . identifier[peerplays] . identifier[rpc] . identifier[get_object] ( identifier[obj] ) keyword[if] identifier[data] : identifier[t] = identifier[PrettyTable] ([ literal[string] , literal[string] ]) identifier[t] . identifier[align] = literal[string] keyword[for] identifier[key] keyword[in] identifier[sorted] ( identifier[data] ): identifier[value] = identifier[data] [ identifier[key] ] keyword[if] identifier[isinstance] ( identifier[value] , identifier[dict] ) keyword[or] identifier[isinstance] ( identifier[value] , identifier[list] ): identifier[value] = identifier[json] . identifier[dumps] ( identifier[value] , identifier[indent] = literal[int] ) identifier[t] . identifier[add_row] ([ identifier[key] , identifier[value] ]) identifier[click] . identifier[echo] ( identifier[t] ) keyword[else] : identifier[click] . identifier[echo] ( literal[string] % identifier[obj] ) keyword[elif] identifier[obj] . identifier[upper] ()== identifier[obj] : identifier[data] = identifier[Asset] ( identifier[obj] ) identifier[t] = identifier[PrettyTable] ([ literal[string] , literal[string] ]) identifier[t] . identifier[align] = literal[string] keyword[for] identifier[key] keyword[in] identifier[sorted] ( identifier[data] ): identifier[value] = identifier[data] [ identifier[key] ] keyword[if] identifier[isinstance] ( identifier[value] , identifier[dict] ): identifier[value] = identifier[json] . identifier[dumps] ( identifier[value] , identifier[indent] = literal[int] ) identifier[t] . identifier[add_row] ([ identifier[key] , identifier[value] ]) identifier[click] . identifier[echo] ( identifier[t] ) keyword[elif] identifier[re] . identifier[match] ( literal[string] , identifier[obj] ): identifier[account] = identifier[ctx] . identifier[peerplays] . identifier[wallet] . identifier[getAccountFromPublicKey] ( identifier[obj] ) keyword[if] identifier[account] : identifier[t] = identifier[PrettyTable] ([ literal[string] ]) identifier[t] . identifier[align] = literal[string] identifier[t] . identifier[add_row] ([ identifier[account] ]) identifier[click] . identifier[echo] ( identifier[t] ) keyword[else] : identifier[click] . identifier[echo] ( literal[string] % identifier[obj] ) keyword[elif] identifier[re] . identifier[match] ( literal[string] , identifier[obj] ): identifier[account] = identifier[Account] ( identifier[obj] , identifier[full] = keyword[True] ) keyword[if] identifier[account] : identifier[t] = identifier[PrettyTable] ([ literal[string] , literal[string] ]) identifier[t] . identifier[align] = literal[string] keyword[for] identifier[key] keyword[in] identifier[sorted] ( identifier[account] ): identifier[value] = identifier[account] [ identifier[key] ] keyword[if] identifier[isinstance] ( identifier[value] , identifier[dict] ) keyword[or] identifier[isinstance] ( identifier[value] , identifier[list] ): identifier[value] = identifier[json] . identifier[dumps] ( identifier[value] , identifier[indent] = literal[int] ) identifier[t] . identifier[add_row] ([ identifier[key] , identifier[value] ]) identifier[click] . identifier[echo] ( identifier[t] ) keyword[else] : identifier[click] . identifier[echo] ( literal[string] % identifier[obj] ) keyword[else] : identifier[click] . identifier[echo] ( literal[string] )
def info(ctx, objects): """ Obtain all kinds of information """ if not objects: t = PrettyTable(['Key', 'Value']) t.align = 'l' info = ctx.peerplays.rpc.get_dynamic_global_properties() for key in info: t.add_row([key, info[key]]) # depends on [control=['for'], data=['key']] click.echo(t.get_string(sortby='Key')) # depends on [control=['if'], data=[]] for obj in objects: # Block if re.match('^[0-9]*$', obj): block = Block(obj, peerplays_instance=ctx.peerplays) if block: t = PrettyTable(['Key', 'Value']) t.align = 'l' for key in sorted(block): value = block[key] if key == 'transactions': value = json.dumps(value, indent=4) # depends on [control=['if'], data=[]] t.add_row([key, value]) # depends on [control=['for'], data=['key']] click.echo(t) # depends on [control=['if'], data=[]] else: click.echo('Block number %s unknown' % obj) # depends on [control=['if'], data=[]] # Object Id elif len(obj.split('.')) == 3: data = ctx.peerplays.rpc.get_object(obj) if data: t = PrettyTable(['Key', 'Value']) t.align = 'l' for key in sorted(data): value = data[key] if isinstance(value, dict) or isinstance(value, list): value = json.dumps(value, indent=4) # depends on [control=['if'], data=[]] t.add_row([key, value]) # depends on [control=['for'], data=['key']] click.echo(t) # depends on [control=['if'], data=[]] else: click.echo('Object %s unknown' % obj) # depends on [control=['if'], data=[]] # Asset elif obj.upper() == obj: data = Asset(obj) t = PrettyTable(['Key', 'Value']) t.align = 'l' for key in sorted(data): value = data[key] if isinstance(value, dict): value = json.dumps(value, indent=4) # depends on [control=['if'], data=[]] t.add_row([key, value]) # depends on [control=['for'], data=['key']] click.echo(t) # depends on [control=['if'], data=['obj']] # Public Key elif re.match('^PPY.{48,55}$', obj): account = ctx.peerplays.wallet.getAccountFromPublicKey(obj) if account: t = PrettyTable(['Account']) t.align = 'l' t.add_row([account]) click.echo(t) # depends on [control=['if'], data=[]] else: click.echo('Public Key not known' % obj) # depends on [control=['if'], data=[]] # Account name elif re.match('^[a-zA-Z0-9\\-\\._]{2,64}$', obj): account = Account(obj, full=True) if account: t = PrettyTable(['Key', 'Value']) t.align = 'l' for key in sorted(account): value = account[key] if isinstance(value, dict) or isinstance(value, list): value = json.dumps(value, indent=4) # depends on [control=['if'], data=[]] t.add_row([key, value]) # depends on [control=['for'], data=['key']] click.echo(t) # depends on [control=['if'], data=[]] else: click.echo('Account %s unknown' % obj) # depends on [control=['if'], data=[]] else: click.echo("Couldn't identify object to read") # depends on [control=['for'], data=['obj']]
def obfuscatable_class(tokens, index, **kwargs): """ Given a list of *tokens* and an *index* (representing the current position), returns the token string if it is a class name that can be safely obfuscated. """ tok = tokens[index] token_type = tok[0] token_string = tok[1] if index > 0: prev_tok = tokens[index-1] else: # Pretend it's a newline (for simplicity) prev_tok = (54, '\n', (1, 1), (1, 2), '#\n') prev_tok_string = prev_tok[1] if token_type != tokenize.NAME: return None # Skip this token if token_string.startswith('__'): # Don't mess with specials return None if prev_tok_string == "class": return token_string
def function[obfuscatable_class, parameter[tokens, index]]: constant[ Given a list of *tokens* and an *index* (representing the current position), returns the token string if it is a class name that can be safely obfuscated. ] variable[tok] assign[=] call[name[tokens]][name[index]] variable[token_type] assign[=] call[name[tok]][constant[0]] variable[token_string] assign[=] call[name[tok]][constant[1]] if compare[name[index] greater[>] constant[0]] begin[:] variable[prev_tok] assign[=] call[name[tokens]][binary_operation[name[index] - constant[1]]] variable[prev_tok_string] assign[=] call[name[prev_tok]][constant[1]] if compare[name[token_type] not_equal[!=] name[tokenize].NAME] begin[:] return[constant[None]] if call[name[token_string].startswith, parameter[constant[__]]] begin[:] return[constant[None]] if compare[name[prev_tok_string] equal[==] constant[class]] begin[:] return[name[token_string]]
keyword[def] identifier[obfuscatable_class] ( identifier[tokens] , identifier[index] ,** identifier[kwargs] ): literal[string] identifier[tok] = identifier[tokens] [ identifier[index] ] identifier[token_type] = identifier[tok] [ literal[int] ] identifier[token_string] = identifier[tok] [ literal[int] ] keyword[if] identifier[index] > literal[int] : identifier[prev_tok] = identifier[tokens] [ identifier[index] - literal[int] ] keyword[else] : identifier[prev_tok] =( literal[int] , literal[string] ,( literal[int] , literal[int] ),( literal[int] , literal[int] ), literal[string] ) identifier[prev_tok_string] = identifier[prev_tok] [ literal[int] ] keyword[if] identifier[token_type] != identifier[tokenize] . identifier[NAME] : keyword[return] keyword[None] keyword[if] identifier[token_string] . identifier[startswith] ( literal[string] ): keyword[return] keyword[None] keyword[if] identifier[prev_tok_string] == literal[string] : keyword[return] identifier[token_string]
def obfuscatable_class(tokens, index, **kwargs): """ Given a list of *tokens* and an *index* (representing the current position), returns the token string if it is a class name that can be safely obfuscated. """ tok = tokens[index] token_type = tok[0] token_string = tok[1] if index > 0: prev_tok = tokens[index - 1] # depends on [control=['if'], data=['index']] else: # Pretend it's a newline (for simplicity) prev_tok = (54, '\n', (1, 1), (1, 2), '#\n') prev_tok_string = prev_tok[1] if token_type != tokenize.NAME: return None # Skip this token # depends on [control=['if'], data=[]] if token_string.startswith('__'): # Don't mess with specials return None # depends on [control=['if'], data=[]] if prev_tok_string == 'class': return token_string # depends on [control=['if'], data=[]]
def state(self, state): """Set state.""" self._state = state self._manager[ATTR_STATE] = state _LOGGER.info('state changed to %s', state)
def function[state, parameter[self, state]]: constant[Set state.] name[self]._state assign[=] name[state] call[name[self]._manager][name[ATTR_STATE]] assign[=] name[state] call[name[_LOGGER].info, parameter[constant[state changed to %s], name[state]]]
keyword[def] identifier[state] ( identifier[self] , identifier[state] ): literal[string] identifier[self] . identifier[_state] = identifier[state] identifier[self] . identifier[_manager] [ identifier[ATTR_STATE] ]= identifier[state] identifier[_LOGGER] . identifier[info] ( literal[string] , identifier[state] )
def state(self, state): """Set state.""" self._state = state self._manager[ATTR_STATE] = state _LOGGER.info('state changed to %s', state)
def get_labels(filename, logger=None): """Returns a dictionary of alternative sequence labels, or None - filename - path to file containing tab-separated table of labels Input files should be formatted as <key>\t<label>, one pair per line. """ labeldict = {} if filename is not None: if logger: logger.info("Reading labels from %s", filename) with open(filename, "r") as ifh: count = 0 for line in ifh.readlines(): count += 1 try: key, label = line.strip().split("\t") except ValueError: if logger: logger.warning("Problem with class file: %s", filename) logger.warning("%d: %s", (count, line.strip())) logger.warning("(skipping line)") continue else: labeldict[key] = label return labeldict
def function[get_labels, parameter[filename, logger]]: constant[Returns a dictionary of alternative sequence labels, or None - filename - path to file containing tab-separated table of labels Input files should be formatted as <key> <label>, one pair per line. ] variable[labeldict] assign[=] dictionary[[], []] if compare[name[filename] is_not constant[None]] begin[:] if name[logger] begin[:] call[name[logger].info, parameter[constant[Reading labels from %s], name[filename]]] with call[name[open], parameter[name[filename], constant[r]]] begin[:] variable[count] assign[=] constant[0] for taget[name[line]] in starred[call[name[ifh].readlines, parameter[]]] begin[:] <ast.AugAssign object at 0x7da1b0d41150> <ast.Try object at 0x7da1b0ef1e40> return[name[labeldict]]
keyword[def] identifier[get_labels] ( identifier[filename] , identifier[logger] = keyword[None] ): literal[string] identifier[labeldict] ={} keyword[if] identifier[filename] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[logger] : identifier[logger] . identifier[info] ( literal[string] , identifier[filename] ) keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[ifh] : identifier[count] = literal[int] keyword[for] identifier[line] keyword[in] identifier[ifh] . identifier[readlines] (): identifier[count] += literal[int] keyword[try] : identifier[key] , identifier[label] = identifier[line] . identifier[strip] (). identifier[split] ( literal[string] ) keyword[except] identifier[ValueError] : keyword[if] identifier[logger] : identifier[logger] . identifier[warning] ( literal[string] , identifier[filename] ) identifier[logger] . identifier[warning] ( literal[string] ,( identifier[count] , identifier[line] . identifier[strip] ())) identifier[logger] . identifier[warning] ( literal[string] ) keyword[continue] keyword[else] : identifier[labeldict] [ identifier[key] ]= identifier[label] keyword[return] identifier[labeldict]
def get_labels(filename, logger=None): """Returns a dictionary of alternative sequence labels, or None - filename - path to file containing tab-separated table of labels Input files should be formatted as <key> <label>, one pair per line. """ labeldict = {} if filename is not None: if logger: logger.info('Reading labels from %s', filename) # depends on [control=['if'], data=[]] with open(filename, 'r') as ifh: count = 0 for line in ifh.readlines(): count += 1 try: (key, label) = line.strip().split('\t') # depends on [control=['try'], data=[]] except ValueError: if logger: logger.warning('Problem with class file: %s', filename) logger.warning('%d: %s', (count, line.strip())) logger.warning('(skipping line)') # depends on [control=['if'], data=[]] continue # depends on [control=['except'], data=[]] else: labeldict[key] = label # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['ifh']] # depends on [control=['if'], data=['filename']] return labeldict
def wrapAtom(xml, id, title, author=None, updated=None, author_uri=None, alt=None, alt_type="text/html"): """ Create an Atom entry tag and embed the passed XML within it """ entryTag = etree.Element(ATOM + "entry", nsmap=ATOM_NSMAP) titleTag = etree.SubElement(entryTag, ATOM + "title") titleTag.text = title idTag = etree.SubElement(entryTag, ATOM + "id") idTag.text = id updatedTag = etree.SubElement(entryTag, ATOM + "updated") if alt: etree.SubElement( entryTag, ATOM + "link", rel='alternate', href=alt, type=alt_type) if updated is not None: # If updated is a naive datetime, set its timezone to the local one # So the xs:datetime value will include an explicit offset if updated.tzinfo is None: updated = localize_datetime(updated) updatedTag.text = xsDateTime_format(updated) else: updatedTag.text = xsDateTime_format(localize_datetime(datetime.now())) if author or author_uri: authorTag = etree.SubElement(entryTag, ATOM + "author") if author: nameTag = etree.SubElement(authorTag, ATOM + "name") nameTag.text = author if author_uri: nameUriTag = etree.SubElement(authorTag, ATOM + "uri") nameUriTag.text = author_uri contentTag = etree.SubElement(entryTag, ATOM + "content") contentTag.set("type", "application/xml") contentTag.append(xml) return entryTag
def function[wrapAtom, parameter[xml, id, title, author, updated, author_uri, alt, alt_type]]: constant[ Create an Atom entry tag and embed the passed XML within it ] variable[entryTag] assign[=] call[name[etree].Element, parameter[binary_operation[name[ATOM] + constant[entry]]]] variable[titleTag] assign[=] call[name[etree].SubElement, parameter[name[entryTag], binary_operation[name[ATOM] + constant[title]]]] name[titleTag].text assign[=] name[title] variable[idTag] assign[=] call[name[etree].SubElement, parameter[name[entryTag], binary_operation[name[ATOM] + constant[id]]]] name[idTag].text assign[=] name[id] variable[updatedTag] assign[=] call[name[etree].SubElement, parameter[name[entryTag], binary_operation[name[ATOM] + constant[updated]]]] if name[alt] begin[:] call[name[etree].SubElement, parameter[name[entryTag], binary_operation[name[ATOM] + constant[link]]]] if compare[name[updated] is_not constant[None]] begin[:] if compare[name[updated].tzinfo is constant[None]] begin[:] variable[updated] assign[=] call[name[localize_datetime], parameter[name[updated]]] name[updatedTag].text assign[=] call[name[xsDateTime_format], parameter[name[updated]]] if <ast.BoolOp object at 0x7da1b09d1c30> begin[:] variable[authorTag] assign[=] call[name[etree].SubElement, parameter[name[entryTag], binary_operation[name[ATOM] + constant[author]]]] if name[author] begin[:] variable[nameTag] assign[=] call[name[etree].SubElement, parameter[name[authorTag], binary_operation[name[ATOM] + constant[name]]]] name[nameTag].text assign[=] name[author] if name[author_uri] begin[:] variable[nameUriTag] assign[=] call[name[etree].SubElement, parameter[name[authorTag], binary_operation[name[ATOM] + constant[uri]]]] name[nameUriTag].text assign[=] name[author_uri] variable[contentTag] assign[=] call[name[etree].SubElement, parameter[name[entryTag], binary_operation[name[ATOM] + constant[content]]]] call[name[contentTag].set, parameter[constant[type], constant[application/xml]]] call[name[contentTag].append, parameter[name[xml]]] return[name[entryTag]]
keyword[def] identifier[wrapAtom] ( identifier[xml] , identifier[id] , identifier[title] , identifier[author] = keyword[None] , identifier[updated] = keyword[None] , identifier[author_uri] = keyword[None] , identifier[alt] = keyword[None] , identifier[alt_type] = literal[string] ): literal[string] identifier[entryTag] = identifier[etree] . identifier[Element] ( identifier[ATOM] + literal[string] , identifier[nsmap] = identifier[ATOM_NSMAP] ) identifier[titleTag] = identifier[etree] . identifier[SubElement] ( identifier[entryTag] , identifier[ATOM] + literal[string] ) identifier[titleTag] . identifier[text] = identifier[title] identifier[idTag] = identifier[etree] . identifier[SubElement] ( identifier[entryTag] , identifier[ATOM] + literal[string] ) identifier[idTag] . identifier[text] = identifier[id] identifier[updatedTag] = identifier[etree] . identifier[SubElement] ( identifier[entryTag] , identifier[ATOM] + literal[string] ) keyword[if] identifier[alt] : identifier[etree] . identifier[SubElement] ( identifier[entryTag] , identifier[ATOM] + literal[string] , identifier[rel] = literal[string] , identifier[href] = identifier[alt] , identifier[type] = identifier[alt_type] ) keyword[if] identifier[updated] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[updated] . identifier[tzinfo] keyword[is] keyword[None] : identifier[updated] = identifier[localize_datetime] ( identifier[updated] ) identifier[updatedTag] . identifier[text] = identifier[xsDateTime_format] ( identifier[updated] ) keyword[else] : identifier[updatedTag] . identifier[text] = identifier[xsDateTime_format] ( identifier[localize_datetime] ( identifier[datetime] . identifier[now] ())) keyword[if] identifier[author] keyword[or] identifier[author_uri] : identifier[authorTag] = identifier[etree] . identifier[SubElement] ( identifier[entryTag] , identifier[ATOM] + literal[string] ) keyword[if] identifier[author] : identifier[nameTag] = identifier[etree] . identifier[SubElement] ( identifier[authorTag] , identifier[ATOM] + literal[string] ) identifier[nameTag] . identifier[text] = identifier[author] keyword[if] identifier[author_uri] : identifier[nameUriTag] = identifier[etree] . identifier[SubElement] ( identifier[authorTag] , identifier[ATOM] + literal[string] ) identifier[nameUriTag] . identifier[text] = identifier[author_uri] identifier[contentTag] = identifier[etree] . identifier[SubElement] ( identifier[entryTag] , identifier[ATOM] + literal[string] ) identifier[contentTag] . identifier[set] ( literal[string] , literal[string] ) identifier[contentTag] . identifier[append] ( identifier[xml] ) keyword[return] identifier[entryTag]
def wrapAtom(xml, id, title, author=None, updated=None, author_uri=None, alt=None, alt_type='text/html'): """ Create an Atom entry tag and embed the passed XML within it """ entryTag = etree.Element(ATOM + 'entry', nsmap=ATOM_NSMAP) titleTag = etree.SubElement(entryTag, ATOM + 'title') titleTag.text = title idTag = etree.SubElement(entryTag, ATOM + 'id') idTag.text = id updatedTag = etree.SubElement(entryTag, ATOM + 'updated') if alt: etree.SubElement(entryTag, ATOM + 'link', rel='alternate', href=alt, type=alt_type) # depends on [control=['if'], data=[]] if updated is not None: # If updated is a naive datetime, set its timezone to the local one # So the xs:datetime value will include an explicit offset if updated.tzinfo is None: updated = localize_datetime(updated) # depends on [control=['if'], data=[]] updatedTag.text = xsDateTime_format(updated) # depends on [control=['if'], data=['updated']] else: updatedTag.text = xsDateTime_format(localize_datetime(datetime.now())) if author or author_uri: authorTag = etree.SubElement(entryTag, ATOM + 'author') if author: nameTag = etree.SubElement(authorTag, ATOM + 'name') nameTag.text = author # depends on [control=['if'], data=[]] if author_uri: nameUriTag = etree.SubElement(authorTag, ATOM + 'uri') nameUriTag.text = author_uri # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] contentTag = etree.SubElement(entryTag, ATOM + 'content') contentTag.set('type', 'application/xml') contentTag.append(xml) return entryTag
def get_local_output_dir(): """Create a local output directory per execution. We've seen occassional (1/100000) perm issues with lambda on temp directory and changing unix execution users (2015-2018), so use a per execution temp space. With firecracker lambdas this may be outdated. """ output_dir = os.environ.get('C7N_OUTPUT_DIR', '/tmp/' + str(uuid.uuid4())) if not os.path.exists(output_dir): try: os.mkdir(output_dir) except OSError as error: log.warning("Unable to make output directory: {}".format(error)) return output_dir
def function[get_local_output_dir, parameter[]]: constant[Create a local output directory per execution. We've seen occassional (1/100000) perm issues with lambda on temp directory and changing unix execution users (2015-2018), so use a per execution temp space. With firecracker lambdas this may be outdated. ] variable[output_dir] assign[=] call[name[os].environ.get, parameter[constant[C7N_OUTPUT_DIR], binary_operation[constant[/tmp/] + call[name[str], parameter[call[name[uuid].uuid4, parameter[]]]]]]] if <ast.UnaryOp object at 0x7da1b1f37160> begin[:] <ast.Try object at 0x7da1b1f37250> return[name[output_dir]]
keyword[def] identifier[get_local_output_dir] (): literal[string] identifier[output_dir] = identifier[os] . identifier[environ] . identifier[get] ( literal[string] , literal[string] + identifier[str] ( identifier[uuid] . identifier[uuid4] ())) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[output_dir] ): keyword[try] : identifier[os] . identifier[mkdir] ( identifier[output_dir] ) keyword[except] identifier[OSError] keyword[as] identifier[error] : identifier[log] . identifier[warning] ( literal[string] . identifier[format] ( identifier[error] )) keyword[return] identifier[output_dir]
def get_local_output_dir(): """Create a local output directory per execution. We've seen occassional (1/100000) perm issues with lambda on temp directory and changing unix execution users (2015-2018), so use a per execution temp space. With firecracker lambdas this may be outdated. """ output_dir = os.environ.get('C7N_OUTPUT_DIR', '/tmp/' + str(uuid.uuid4())) if not os.path.exists(output_dir): try: os.mkdir(output_dir) # depends on [control=['try'], data=[]] except OSError as error: log.warning('Unable to make output directory: {}'.format(error)) # depends on [control=['except'], data=['error']] # depends on [control=['if'], data=[]] return output_dir
def _to_dict(self): """Return a json dictionary representing this model.""" _dict = {} if hasattr(self, 'used_bytes') and self.used_bytes is not None: _dict['used_bytes'] = self.used_bytes return _dict
def function[_to_dict, parameter[self]]: constant[Return a json dictionary representing this model.] variable[_dict] assign[=] dictionary[[], []] if <ast.BoolOp object at 0x7da204621990> begin[:] call[name[_dict]][constant[used_bytes]] assign[=] name[self].used_bytes return[name[_dict]]
keyword[def] identifier[_to_dict] ( identifier[self] ): literal[string] identifier[_dict] ={} keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[used_bytes] keyword[is] keyword[not] keyword[None] : identifier[_dict] [ literal[string] ]= identifier[self] . identifier[used_bytes] keyword[return] identifier[_dict]
def _to_dict(self): """Return a json dictionary representing this model.""" _dict = {} if hasattr(self, 'used_bytes') and self.used_bytes is not None: _dict['used_bytes'] = self.used_bytes # depends on [control=['if'], data=[]] return _dict
def write_unitth(suites, out_dir): """ Write UnitTH-style test reports Args: suites (:obj:`dict`): dictionary of test suites out_dir (:obj:`str`): path to save UnitTH-style test reports """ if not os.path.isdir(out_dir): os.mkdir(out_dir) for classname, cases in suites.items(): doc_xml = minidom.Document() suite_xml = doc_xml.createElement('testsuite') suite_xml.setAttribute('name', classname) suite_xml.setAttribute('tests', str(len(cases))) suite_xml.setAttribute('errors', str(sum('error' in case for case in cases))) suite_xml.setAttribute('failures', str(sum('failure' in case for case in cases))) suite_xml.setAttribute('skipped', str(sum('skipped' in case for case in cases))) suite_xml.setAttribute('time', '{:.3f}'.format(sum(case['time'] for case in cases))) doc_xml.appendChild(suite_xml) for case in cases: case_xml = doc_xml.createElement('testcase') case_xml.setAttribute('classname', classname) case_xml.setAttribute('name', case['name']) case_xml.setAttribute('time', '{:.3f}'.format(case['time'])) suite_xml.appendChild(case_xml) if 'skipped' in case: skipped_xml = doc_xml.createElement('skipped') skipped_xml.setAttribute('type', case['skipped']['type']) skipped_xml.setAttribute('message', case['skipped']['message']) case_xml.appendChild(skipped_xml) skipped_text_xml = doc_xml.createCDATASection(case['skipped']['text']) skipped_xml.appendChild(skipped_text_xml) if 'failure' in case: failure_xml = doc_xml.createElement('failure') failure_xml.setAttribute('type', case['failure']['type']) failure_xml.setAttribute('message', case['failure']['message']) case_xml.appendChild(failure_xml) failure_text_xml = doc_xml.createCDATASection(case['failure']['text']) failure_xml.appendChild(failure_text_xml) if 'error' in case: error_xml = doc_xml.createElement('error') error_xml.setAttribute('type', case['error']['type']) error_xml.setAttribute('message', case['error']['message']) case_xml.appendChild(error_xml) error_text_xml = doc_xml.createCDATASection(case['error']['text']) error_xml.appendChild(error_text_xml) with open(os.path.join(out_dir, '{}.xml'.format(classname)), 'w') as output: doc_xml.writexml(output, encoding='utf-8', addindent='', newl="") doc_xml.unlink()
def function[write_unitth, parameter[suites, out_dir]]: constant[ Write UnitTH-style test reports Args: suites (:obj:`dict`): dictionary of test suites out_dir (:obj:`str`): path to save UnitTH-style test reports ] if <ast.UnaryOp object at 0x7da1b185dcf0> begin[:] call[name[os].mkdir, parameter[name[out_dir]]] for taget[tuple[[<ast.Name object at 0x7da1b185e1a0>, <ast.Name object at 0x7da1b185e6b0>]]] in starred[call[name[suites].items, parameter[]]] begin[:] variable[doc_xml] assign[=] call[name[minidom].Document, parameter[]] variable[suite_xml] assign[=] call[name[doc_xml].createElement, parameter[constant[testsuite]]] call[name[suite_xml].setAttribute, parameter[constant[name], name[classname]]] call[name[suite_xml].setAttribute, parameter[constant[tests], call[name[str], parameter[call[name[len], parameter[name[cases]]]]]]] call[name[suite_xml].setAttribute, parameter[constant[errors], call[name[str], parameter[call[name[sum], parameter[<ast.GeneratorExp object at 0x7da1b19705b0>]]]]]] call[name[suite_xml].setAttribute, parameter[constant[failures], call[name[str], parameter[call[name[sum], parameter[<ast.GeneratorExp object at 0x7da1b1970c40>]]]]]] call[name[suite_xml].setAttribute, parameter[constant[skipped], call[name[str], parameter[call[name[sum], parameter[<ast.GeneratorExp object at 0x7da1b19706d0>]]]]]] call[name[suite_xml].setAttribute, parameter[constant[time], call[constant[{:.3f}].format, parameter[call[name[sum], parameter[<ast.GeneratorExp object at 0x7da1b185c820>]]]]]] call[name[doc_xml].appendChild, parameter[name[suite_xml]]] for taget[name[case]] in starred[name[cases]] begin[:] variable[case_xml] assign[=] call[name[doc_xml].createElement, parameter[constant[testcase]]] call[name[case_xml].setAttribute, parameter[constant[classname], name[classname]]] call[name[case_xml].setAttribute, parameter[constant[name], call[name[case]][constant[name]]]] call[name[case_xml].setAttribute, parameter[constant[time], call[constant[{:.3f}].format, parameter[call[name[case]][constant[time]]]]]] call[name[suite_xml].appendChild, parameter[name[case_xml]]] if compare[constant[skipped] in name[case]] begin[:] variable[skipped_xml] assign[=] call[name[doc_xml].createElement, parameter[constant[skipped]]] call[name[skipped_xml].setAttribute, parameter[constant[type], call[call[name[case]][constant[skipped]]][constant[type]]]] call[name[skipped_xml].setAttribute, parameter[constant[message], call[call[name[case]][constant[skipped]]][constant[message]]]] call[name[case_xml].appendChild, parameter[name[skipped_xml]]] variable[skipped_text_xml] assign[=] call[name[doc_xml].createCDATASection, parameter[call[call[name[case]][constant[skipped]]][constant[text]]]] call[name[skipped_xml].appendChild, parameter[name[skipped_text_xml]]] if compare[constant[failure] in name[case]] begin[:] variable[failure_xml] assign[=] call[name[doc_xml].createElement, parameter[constant[failure]]] call[name[failure_xml].setAttribute, parameter[constant[type], call[call[name[case]][constant[failure]]][constant[type]]]] call[name[failure_xml].setAttribute, parameter[constant[message], call[call[name[case]][constant[failure]]][constant[message]]]] call[name[case_xml].appendChild, parameter[name[failure_xml]]] variable[failure_text_xml] assign[=] call[name[doc_xml].createCDATASection, parameter[call[call[name[case]][constant[failure]]][constant[text]]]] call[name[failure_xml].appendChild, parameter[name[failure_text_xml]]] if compare[constant[error] in name[case]] begin[:] variable[error_xml] assign[=] call[name[doc_xml].createElement, parameter[constant[error]]] call[name[error_xml].setAttribute, parameter[constant[type], call[call[name[case]][constant[error]]][constant[type]]]] call[name[error_xml].setAttribute, parameter[constant[message], call[call[name[case]][constant[error]]][constant[message]]]] call[name[case_xml].appendChild, parameter[name[error_xml]]] variable[error_text_xml] assign[=] call[name[doc_xml].createCDATASection, parameter[call[call[name[case]][constant[error]]][constant[text]]]] call[name[error_xml].appendChild, parameter[name[error_text_xml]]] with call[name[open], parameter[call[name[os].path.join, parameter[name[out_dir], call[constant[{}.xml].format, parameter[name[classname]]]]], constant[w]]] begin[:] call[name[doc_xml].writexml, parameter[name[output]]] call[name[doc_xml].unlink, parameter[]]
keyword[def] identifier[write_unitth] ( identifier[suites] , identifier[out_dir] ): literal[string] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[out_dir] ): identifier[os] . identifier[mkdir] ( identifier[out_dir] ) keyword[for] identifier[classname] , identifier[cases] keyword[in] identifier[suites] . identifier[items] (): identifier[doc_xml] = identifier[minidom] . identifier[Document] () identifier[suite_xml] = identifier[doc_xml] . identifier[createElement] ( literal[string] ) identifier[suite_xml] . identifier[setAttribute] ( literal[string] , identifier[classname] ) identifier[suite_xml] . identifier[setAttribute] ( literal[string] , identifier[str] ( identifier[len] ( identifier[cases] ))) identifier[suite_xml] . identifier[setAttribute] ( literal[string] , identifier[str] ( identifier[sum] ( literal[string] keyword[in] identifier[case] keyword[for] identifier[case] keyword[in] identifier[cases] ))) identifier[suite_xml] . identifier[setAttribute] ( literal[string] , identifier[str] ( identifier[sum] ( literal[string] keyword[in] identifier[case] keyword[for] identifier[case] keyword[in] identifier[cases] ))) identifier[suite_xml] . identifier[setAttribute] ( literal[string] , identifier[str] ( identifier[sum] ( literal[string] keyword[in] identifier[case] keyword[for] identifier[case] keyword[in] identifier[cases] ))) identifier[suite_xml] . identifier[setAttribute] ( literal[string] , literal[string] . identifier[format] ( identifier[sum] ( identifier[case] [ literal[string] ] keyword[for] identifier[case] keyword[in] identifier[cases] ))) identifier[doc_xml] . identifier[appendChild] ( identifier[suite_xml] ) keyword[for] identifier[case] keyword[in] identifier[cases] : identifier[case_xml] = identifier[doc_xml] . identifier[createElement] ( literal[string] ) identifier[case_xml] . identifier[setAttribute] ( literal[string] , identifier[classname] ) identifier[case_xml] . identifier[setAttribute] ( literal[string] , identifier[case] [ literal[string] ]) identifier[case_xml] . identifier[setAttribute] ( literal[string] , literal[string] . identifier[format] ( identifier[case] [ literal[string] ])) identifier[suite_xml] . identifier[appendChild] ( identifier[case_xml] ) keyword[if] literal[string] keyword[in] identifier[case] : identifier[skipped_xml] = identifier[doc_xml] . identifier[createElement] ( literal[string] ) identifier[skipped_xml] . identifier[setAttribute] ( literal[string] , identifier[case] [ literal[string] ][ literal[string] ]) identifier[skipped_xml] . identifier[setAttribute] ( literal[string] , identifier[case] [ literal[string] ][ literal[string] ]) identifier[case_xml] . identifier[appendChild] ( identifier[skipped_xml] ) identifier[skipped_text_xml] = identifier[doc_xml] . identifier[createCDATASection] ( identifier[case] [ literal[string] ][ literal[string] ]) identifier[skipped_xml] . identifier[appendChild] ( identifier[skipped_text_xml] ) keyword[if] literal[string] keyword[in] identifier[case] : identifier[failure_xml] = identifier[doc_xml] . identifier[createElement] ( literal[string] ) identifier[failure_xml] . identifier[setAttribute] ( literal[string] , identifier[case] [ literal[string] ][ literal[string] ]) identifier[failure_xml] . identifier[setAttribute] ( literal[string] , identifier[case] [ literal[string] ][ literal[string] ]) identifier[case_xml] . identifier[appendChild] ( identifier[failure_xml] ) identifier[failure_text_xml] = identifier[doc_xml] . identifier[createCDATASection] ( identifier[case] [ literal[string] ][ literal[string] ]) identifier[failure_xml] . identifier[appendChild] ( identifier[failure_text_xml] ) keyword[if] literal[string] keyword[in] identifier[case] : identifier[error_xml] = identifier[doc_xml] . identifier[createElement] ( literal[string] ) identifier[error_xml] . identifier[setAttribute] ( literal[string] , identifier[case] [ literal[string] ][ literal[string] ]) identifier[error_xml] . identifier[setAttribute] ( literal[string] , identifier[case] [ literal[string] ][ literal[string] ]) identifier[case_xml] . identifier[appendChild] ( identifier[error_xml] ) identifier[error_text_xml] = identifier[doc_xml] . identifier[createCDATASection] ( identifier[case] [ literal[string] ][ literal[string] ]) identifier[error_xml] . identifier[appendChild] ( identifier[error_text_xml] ) keyword[with] identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[out_dir] , literal[string] . identifier[format] ( identifier[classname] )), literal[string] ) keyword[as] identifier[output] : identifier[doc_xml] . identifier[writexml] ( identifier[output] , identifier[encoding] = literal[string] , identifier[addindent] = literal[string] , identifier[newl] = literal[string] ) identifier[doc_xml] . identifier[unlink] ()
def write_unitth(suites, out_dir): """ Write UnitTH-style test reports Args: suites (:obj:`dict`): dictionary of test suites out_dir (:obj:`str`): path to save UnitTH-style test reports """ if not os.path.isdir(out_dir): os.mkdir(out_dir) # depends on [control=['if'], data=[]] for (classname, cases) in suites.items(): doc_xml = minidom.Document() suite_xml = doc_xml.createElement('testsuite') suite_xml.setAttribute('name', classname) suite_xml.setAttribute('tests', str(len(cases))) suite_xml.setAttribute('errors', str(sum(('error' in case for case in cases)))) suite_xml.setAttribute('failures', str(sum(('failure' in case for case in cases)))) suite_xml.setAttribute('skipped', str(sum(('skipped' in case for case in cases)))) suite_xml.setAttribute('time', '{:.3f}'.format(sum((case['time'] for case in cases)))) doc_xml.appendChild(suite_xml) for case in cases: case_xml = doc_xml.createElement('testcase') case_xml.setAttribute('classname', classname) case_xml.setAttribute('name', case['name']) case_xml.setAttribute('time', '{:.3f}'.format(case['time'])) suite_xml.appendChild(case_xml) if 'skipped' in case: skipped_xml = doc_xml.createElement('skipped') skipped_xml.setAttribute('type', case['skipped']['type']) skipped_xml.setAttribute('message', case['skipped']['message']) case_xml.appendChild(skipped_xml) skipped_text_xml = doc_xml.createCDATASection(case['skipped']['text']) skipped_xml.appendChild(skipped_text_xml) # depends on [control=['if'], data=['case']] if 'failure' in case: failure_xml = doc_xml.createElement('failure') failure_xml.setAttribute('type', case['failure']['type']) failure_xml.setAttribute('message', case['failure']['message']) case_xml.appendChild(failure_xml) failure_text_xml = doc_xml.createCDATASection(case['failure']['text']) failure_xml.appendChild(failure_text_xml) # depends on [control=['if'], data=['case']] if 'error' in case: error_xml = doc_xml.createElement('error') error_xml.setAttribute('type', case['error']['type']) error_xml.setAttribute('message', case['error']['message']) case_xml.appendChild(error_xml) error_text_xml = doc_xml.createCDATASection(case['error']['text']) error_xml.appendChild(error_text_xml) # depends on [control=['if'], data=['case']] # depends on [control=['for'], data=['case']] with open(os.path.join(out_dir, '{}.xml'.format(classname)), 'w') as output: doc_xml.writexml(output, encoding='utf-8', addindent='', newl='') # depends on [control=['with'], data=['output']] doc_xml.unlink() # depends on [control=['for'], data=[]]
def _experience(self, previousState, action, previousAction, reward): """ This is an altered version of the experience function for used in the standard Roth-Erev algorithm. Like in RELearner, propensities for all actions are updated and similarity does not come into play. If the actionIndex points to the action the reward is associated with (usually the last action taken) then simply adjust the weight by the experimentation. Otherwise increase the weight of the action by a small portion of its current propensity. If j is the index of the last action chosen, r_j is the reward received for performing j, i is the current action being updated, q_i is the propensity for i, n is the size of the action domain and e is the experimentation parameter, then this experience function can be expressed as:: | r_j * (1-e) if i = j E(i, r_j) = | |_ q_i * (e /(n-1)) if i != j """ e = self.experimentation if action == previousAction: experience = reward * (1 - e) else: propensity = self.module.getValue(previousState, action) experience = propensity * (e / (self.module.numActions - 1)) return experience
def function[_experience, parameter[self, previousState, action, previousAction, reward]]: constant[ This is an altered version of the experience function for used in the standard Roth-Erev algorithm. Like in RELearner, propensities for all actions are updated and similarity does not come into play. If the actionIndex points to the action the reward is associated with (usually the last action taken) then simply adjust the weight by the experimentation. Otherwise increase the weight of the action by a small portion of its current propensity. If j is the index of the last action chosen, r_j is the reward received for performing j, i is the current action being updated, q_i is the propensity for i, n is the size of the action domain and e is the experimentation parameter, then this experience function can be expressed as:: | r_j * (1-e) if i = j E(i, r_j) = | |_ q_i * (e /(n-1)) if i != j ] variable[e] assign[=] name[self].experimentation if compare[name[action] equal[==] name[previousAction]] begin[:] variable[experience] assign[=] binary_operation[name[reward] * binary_operation[constant[1] - name[e]]] return[name[experience]]
keyword[def] identifier[_experience] ( identifier[self] , identifier[previousState] , identifier[action] , identifier[previousAction] , identifier[reward] ): literal[string] identifier[e] = identifier[self] . identifier[experimentation] keyword[if] identifier[action] == identifier[previousAction] : identifier[experience] = identifier[reward] *( literal[int] - identifier[e] ) keyword[else] : identifier[propensity] = identifier[self] . identifier[module] . identifier[getValue] ( identifier[previousState] , identifier[action] ) identifier[experience] = identifier[propensity] *( identifier[e] /( identifier[self] . identifier[module] . identifier[numActions] - literal[int] )) keyword[return] identifier[experience]
def _experience(self, previousState, action, previousAction, reward): """ This is an altered version of the experience function for used in the standard Roth-Erev algorithm. Like in RELearner, propensities for all actions are updated and similarity does not come into play. If the actionIndex points to the action the reward is associated with (usually the last action taken) then simply adjust the weight by the experimentation. Otherwise increase the weight of the action by a small portion of its current propensity. If j is the index of the last action chosen, r_j is the reward received for performing j, i is the current action being updated, q_i is the propensity for i, n is the size of the action domain and e is the experimentation parameter, then this experience function can be expressed as:: | r_j * (1-e) if i = j E(i, r_j) = | |_ q_i * (e /(n-1)) if i != j """ e = self.experimentation if action == previousAction: experience = reward * (1 - e) # depends on [control=['if'], data=[]] else: propensity = self.module.getValue(previousState, action) experience = propensity * (e / (self.module.numActions - 1)) return experience
def award_group_principal_award_recipient(tag): """ Find the award group principal award recipient, one for each item found in the get_funding_group section """ award_group_principal_award_recipient = [] principal_award_recipients = extract_nodes(tag, "principal-award-recipient") for t in principal_award_recipients: principal_award_recipient_text = "" institution = node_text(first(extract_nodes(t, "institution"))) surname = node_text(first(extract_nodes(t, "surname"))) given_names = node_text(first(extract_nodes(t, "given-names"))) string_name = node_text(first(raw_parser.string_name(t))) # Concatenate name and institution values if found # while filtering out excess whitespace if(given_names): principal_award_recipient_text += given_names if(principal_award_recipient_text != ""): principal_award_recipient_text += " " if(surname): principal_award_recipient_text += surname if(institution): principal_award_recipient_text += institution if(string_name): principal_award_recipient_text += string_name award_group_principal_award_recipient.append(principal_award_recipient_text) return award_group_principal_award_recipient
def function[award_group_principal_award_recipient, parameter[tag]]: constant[ Find the award group principal award recipient, one for each item found in the get_funding_group section ] variable[award_group_principal_award_recipient] assign[=] list[[]] variable[principal_award_recipients] assign[=] call[name[extract_nodes], parameter[name[tag], constant[principal-award-recipient]]] for taget[name[t]] in starred[name[principal_award_recipients]] begin[:] variable[principal_award_recipient_text] assign[=] constant[] variable[institution] assign[=] call[name[node_text], parameter[call[name[first], parameter[call[name[extract_nodes], parameter[name[t], constant[institution]]]]]]] variable[surname] assign[=] call[name[node_text], parameter[call[name[first], parameter[call[name[extract_nodes], parameter[name[t], constant[surname]]]]]]] variable[given_names] assign[=] call[name[node_text], parameter[call[name[first], parameter[call[name[extract_nodes], parameter[name[t], constant[given-names]]]]]]] variable[string_name] assign[=] call[name[node_text], parameter[call[name[first], parameter[call[name[raw_parser].string_name, parameter[name[t]]]]]]] if name[given_names] begin[:] <ast.AugAssign object at 0x7da1b11e9fc0> if compare[name[principal_award_recipient_text] not_equal[!=] constant[]] begin[:] <ast.AugAssign object at 0x7da1b11ea590> if name[surname] begin[:] <ast.AugAssign object at 0x7da1b11e9d80> if name[institution] begin[:] <ast.AugAssign object at 0x7da1b11e95a0> if name[string_name] begin[:] <ast.AugAssign object at 0x7da1b11ea1a0> call[name[award_group_principal_award_recipient].append, parameter[name[principal_award_recipient_text]]] return[name[award_group_principal_award_recipient]]
keyword[def] identifier[award_group_principal_award_recipient] ( identifier[tag] ): literal[string] identifier[award_group_principal_award_recipient] =[] identifier[principal_award_recipients] = identifier[extract_nodes] ( identifier[tag] , literal[string] ) keyword[for] identifier[t] keyword[in] identifier[principal_award_recipients] : identifier[principal_award_recipient_text] = literal[string] identifier[institution] = identifier[node_text] ( identifier[first] ( identifier[extract_nodes] ( identifier[t] , literal[string] ))) identifier[surname] = identifier[node_text] ( identifier[first] ( identifier[extract_nodes] ( identifier[t] , literal[string] ))) identifier[given_names] = identifier[node_text] ( identifier[first] ( identifier[extract_nodes] ( identifier[t] , literal[string] ))) identifier[string_name] = identifier[node_text] ( identifier[first] ( identifier[raw_parser] . identifier[string_name] ( identifier[t] ))) keyword[if] ( identifier[given_names] ): identifier[principal_award_recipient_text] += identifier[given_names] keyword[if] ( identifier[principal_award_recipient_text] != literal[string] ): identifier[principal_award_recipient_text] += literal[string] keyword[if] ( identifier[surname] ): identifier[principal_award_recipient_text] += identifier[surname] keyword[if] ( identifier[institution] ): identifier[principal_award_recipient_text] += identifier[institution] keyword[if] ( identifier[string_name] ): identifier[principal_award_recipient_text] += identifier[string_name] identifier[award_group_principal_award_recipient] . identifier[append] ( identifier[principal_award_recipient_text] ) keyword[return] identifier[award_group_principal_award_recipient]
def award_group_principal_award_recipient(tag): """ Find the award group principal award recipient, one for each item found in the get_funding_group section """ award_group_principal_award_recipient = [] principal_award_recipients = extract_nodes(tag, 'principal-award-recipient') for t in principal_award_recipients: principal_award_recipient_text = '' institution = node_text(first(extract_nodes(t, 'institution'))) surname = node_text(first(extract_nodes(t, 'surname'))) given_names = node_text(first(extract_nodes(t, 'given-names'))) string_name = node_text(first(raw_parser.string_name(t))) # Concatenate name and institution values if found # while filtering out excess whitespace if given_names: principal_award_recipient_text += given_names # depends on [control=['if'], data=[]] if principal_award_recipient_text != '': principal_award_recipient_text += ' ' # depends on [control=['if'], data=['principal_award_recipient_text']] if surname: principal_award_recipient_text += surname # depends on [control=['if'], data=[]] if institution: principal_award_recipient_text += institution # depends on [control=['if'], data=[]] if string_name: principal_award_recipient_text += string_name # depends on [control=['if'], data=[]] award_group_principal_award_recipient.append(principal_award_recipient_text) # depends on [control=['for'], data=['t']] return award_group_principal_award_recipient
def _load_cache(self, filename): """Load the cached page references from `<filename>.ptc`.""" try: with open(filename + self.CACHE_EXTENSION, 'rb') as file: prev_number_of_pages, prev_page_references = pickle.load(file) except (IOError, TypeError): prev_number_of_pages, prev_page_references = {}, {} return prev_number_of_pages, prev_page_references
def function[_load_cache, parameter[self, filename]]: constant[Load the cached page references from `<filename>.ptc`.] <ast.Try object at 0x7da18f58f2b0> return[tuple[[<ast.Name object at 0x7da18f58fa90>, <ast.Name object at 0x7da18f58c4f0>]]]
keyword[def] identifier[_load_cache] ( identifier[self] , identifier[filename] ): literal[string] keyword[try] : keyword[with] identifier[open] ( identifier[filename] + identifier[self] . identifier[CACHE_EXTENSION] , literal[string] ) keyword[as] identifier[file] : identifier[prev_number_of_pages] , identifier[prev_page_references] = identifier[pickle] . identifier[load] ( identifier[file] ) keyword[except] ( identifier[IOError] , identifier[TypeError] ): identifier[prev_number_of_pages] , identifier[prev_page_references] ={},{} keyword[return] identifier[prev_number_of_pages] , identifier[prev_page_references]
def _load_cache(self, filename): """Load the cached page references from `<filename>.ptc`.""" try: with open(filename + self.CACHE_EXTENSION, 'rb') as file: (prev_number_of_pages, prev_page_references) = pickle.load(file) # depends on [control=['with'], data=['file']] # depends on [control=['try'], data=[]] except (IOError, TypeError): (prev_number_of_pages, prev_page_references) = ({}, {}) # depends on [control=['except'], data=[]] return (prev_number_of_pages, prev_page_references)
def t_asm(t): r'\b[aA][sS][mM]\b' global ASM, ASMLINENO, IN_STATE t.lexer.begin('asm') ASM = '' ASMLINENO = t.lexer.lineno IN_STATE = True
def function[t_asm, parameter[t]]: constant[\b[aA][sS][mM]\b] <ast.Global object at 0x7da20c76e8f0> call[name[t].lexer.begin, parameter[constant[asm]]] variable[ASM] assign[=] constant[] variable[ASMLINENO] assign[=] name[t].lexer.lineno variable[IN_STATE] assign[=] constant[True]
keyword[def] identifier[t_asm] ( identifier[t] ): literal[string] keyword[global] identifier[ASM] , identifier[ASMLINENO] , identifier[IN_STATE] identifier[t] . identifier[lexer] . identifier[begin] ( literal[string] ) identifier[ASM] = literal[string] identifier[ASMLINENO] = identifier[t] . identifier[lexer] . identifier[lineno] identifier[IN_STATE] = keyword[True]
def t_asm(t): """\\b[aA][sS][mM]\\b""" global ASM, ASMLINENO, IN_STATE t.lexer.begin('asm') ASM = '' ASMLINENO = t.lexer.lineno IN_STATE = True
def _shrink(v, gamma): """Soft-shrinkage of an array with parameter gamma. Parameters ---------- v : array Array containing the values to be applied to the shrinkage operator gamma : float Shrinkage parameter. Returns ------- v : array The same input array after the shrinkage operator was applied. """ pos = v > gamma neg = v < -gamma v[pos] -= gamma v[neg] += gamma v[np.logical_and(~pos, ~neg)] = .0 return v
def function[_shrink, parameter[v, gamma]]: constant[Soft-shrinkage of an array with parameter gamma. Parameters ---------- v : array Array containing the values to be applied to the shrinkage operator gamma : float Shrinkage parameter. Returns ------- v : array The same input array after the shrinkage operator was applied. ] variable[pos] assign[=] compare[name[v] greater[>] name[gamma]] variable[neg] assign[=] compare[name[v] less[<] <ast.UnaryOp object at 0x7da1b0744490>] <ast.AugAssign object at 0x7da1b07444f0> <ast.AugAssign object at 0x7da1b0746170> call[name[v]][call[name[np].logical_and, parameter[<ast.UnaryOp object at 0x7da1b0745fc0>, <ast.UnaryOp object at 0x7da1b0746020>]]] assign[=] constant[0.0] return[name[v]]
keyword[def] identifier[_shrink] ( identifier[v] , identifier[gamma] ): literal[string] identifier[pos] = identifier[v] > identifier[gamma] identifier[neg] = identifier[v] <- identifier[gamma] identifier[v] [ identifier[pos] ]-= identifier[gamma] identifier[v] [ identifier[neg] ]+= identifier[gamma] identifier[v] [ identifier[np] . identifier[logical_and] (~ identifier[pos] ,~ identifier[neg] )]= literal[int] keyword[return] identifier[v]
def _shrink(v, gamma): """Soft-shrinkage of an array with parameter gamma. Parameters ---------- v : array Array containing the values to be applied to the shrinkage operator gamma : float Shrinkage parameter. Returns ------- v : array The same input array after the shrinkage operator was applied. """ pos = v > gamma neg = v < -gamma v[pos] -= gamma v[neg] += gamma v[np.logical_and(~pos, ~neg)] = 0.0 return v
def get_ladders_metadata(session, parsed): """Get metadata for all ladders.""" ladders = {} for ladder in parsed.find_all('a', href=re.compile(LADDER_URL_REGEX)): ladders[ladder.text] = get_ladder_metadata(session, ladder['href']) return ladders
def function[get_ladders_metadata, parameter[session, parsed]]: constant[Get metadata for all ladders.] variable[ladders] assign[=] dictionary[[], []] for taget[name[ladder]] in starred[call[name[parsed].find_all, parameter[constant[a]]]] begin[:] call[name[ladders]][name[ladder].text] assign[=] call[name[get_ladder_metadata], parameter[name[session], call[name[ladder]][constant[href]]]] return[name[ladders]]
keyword[def] identifier[get_ladders_metadata] ( identifier[session] , identifier[parsed] ): literal[string] identifier[ladders] ={} keyword[for] identifier[ladder] keyword[in] identifier[parsed] . identifier[find_all] ( literal[string] , identifier[href] = identifier[re] . identifier[compile] ( identifier[LADDER_URL_REGEX] )): identifier[ladders] [ identifier[ladder] . identifier[text] ]= identifier[get_ladder_metadata] ( identifier[session] , identifier[ladder] [ literal[string] ]) keyword[return] identifier[ladders]
def get_ladders_metadata(session, parsed): """Get metadata for all ladders.""" ladders = {} for ladder in parsed.find_all('a', href=re.compile(LADDER_URL_REGEX)): ladders[ladder.text] = get_ladder_metadata(session, ladder['href']) # depends on [control=['for'], data=['ladder']] return ladders
def build_authorization_request(username, method, uri, nonce_count, digest_challenge=None, realm=None, nonce=None, opaque=None, password=None, request_digest=None, client_nonce=None): ''' Builds an authorization request that may be sent as the value of the 'Authorization' header in an HTTP request. Either a digest_challenge object (as returned from parse_digest_challenge) or its required component parameters (nonce, realm, opaque) must be provided. The nonce_count should be the last used nonce_count plus one. Either the password or the request_digest should be provided - if provided, the password will be used to generate a request digest. The client_nonce is optional - if not provided, a random value will be generated. ''' if not client_nonce: client_nonce = ''.join([random.choice('0123456789ABCDEF') for x in range(32)]) if digest_challenge and (realm or nonce or opaque): raise Exception("Both digest_challenge and one or more of realm, nonce, and opaque" "were sent.") if digest_challenge: if isinstance(digest_challenge, types.StringType): digest_challenge_header = digest_challenge digest_challenge = parse_digest_challenge(digest_challenge_header) if not digest_challenge: raise Exception("The provided digest challenge header could not be parsed: %s" % digest_challenge_header) realm = digest_challenge.realm nonce = digest_challenge.nonce opaque = digest_challenge.opaque elif not (realm and nonce and opaque): raise Exception("Either digest_challenge or realm, nonce, and opaque must be sent.") if password and request_digest: raise Exception("Both password and calculated request_digest were sent.") elif not request_digest: if not password: raise Exception("Either password or calculated request_digest must be provided.") partial_digest = calculate_partial_digest(username, realm, password) request_digest = calculate_request_digest(method, partial_digest, uri=uri, nonce=nonce, nonce_count=nonce_count, client_nonce=client_nonce) return 'Digest %s' % format_parts(username=username, realm=realm, nonce=nonce, uri=uri, response=request_digest, algorithm='MD5', opaque=opaque, qop='auth', nc='%08x' % nonce_count, cnonce=client_nonce)
def function[build_authorization_request, parameter[username, method, uri, nonce_count, digest_challenge, realm, nonce, opaque, password, request_digest, client_nonce]]: constant[ Builds an authorization request that may be sent as the value of the 'Authorization' header in an HTTP request. Either a digest_challenge object (as returned from parse_digest_challenge) or its required component parameters (nonce, realm, opaque) must be provided. The nonce_count should be the last used nonce_count plus one. Either the password or the request_digest should be provided - if provided, the password will be used to generate a request digest. The client_nonce is optional - if not provided, a random value will be generated. ] if <ast.UnaryOp object at 0x7da20c7c8100> begin[:] variable[client_nonce] assign[=] call[constant[].join, parameter[<ast.ListComp object at 0x7da20c7cb2e0>]] if <ast.BoolOp object at 0x7da20c7c8b20> begin[:] <ast.Raise object at 0x7da1b0a36fb0> if name[digest_challenge] begin[:] if call[name[isinstance], parameter[name[digest_challenge], name[types].StringType]] begin[:] variable[digest_challenge_header] assign[=] name[digest_challenge] variable[digest_challenge] assign[=] call[name[parse_digest_challenge], parameter[name[digest_challenge_header]]] if <ast.UnaryOp object at 0x7da1b0a36dd0> begin[:] <ast.Raise object at 0x7da1b0a34fd0> variable[realm] assign[=] name[digest_challenge].realm variable[nonce] assign[=] name[digest_challenge].nonce variable[opaque] assign[=] name[digest_challenge].opaque if <ast.BoolOp object at 0x7da18bcca8f0> begin[:] <ast.Raise object at 0x7da18bcca650> return[binary_operation[constant[Digest %s] <ast.Mod object at 0x7da2590d6920> call[name[format_parts], parameter[]]]]
keyword[def] identifier[build_authorization_request] ( identifier[username] , identifier[method] , identifier[uri] , identifier[nonce_count] , identifier[digest_challenge] = keyword[None] , identifier[realm] = keyword[None] , identifier[nonce] = keyword[None] , identifier[opaque] = keyword[None] , identifier[password] = keyword[None] , identifier[request_digest] = keyword[None] , identifier[client_nonce] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[client_nonce] : identifier[client_nonce] = literal[string] . identifier[join] ([ identifier[random] . identifier[choice] ( literal[string] ) keyword[for] identifier[x] keyword[in] identifier[range] ( literal[int] )]) keyword[if] identifier[digest_challenge] keyword[and] ( identifier[realm] keyword[or] identifier[nonce] keyword[or] identifier[opaque] ): keyword[raise] identifier[Exception] ( literal[string] literal[string] ) keyword[if] identifier[digest_challenge] : keyword[if] identifier[isinstance] ( identifier[digest_challenge] , identifier[types] . identifier[StringType] ): identifier[digest_challenge_header] = identifier[digest_challenge] identifier[digest_challenge] = identifier[parse_digest_challenge] ( identifier[digest_challenge_header] ) keyword[if] keyword[not] identifier[digest_challenge] : keyword[raise] identifier[Exception] ( literal[string] % identifier[digest_challenge_header] ) identifier[realm] = identifier[digest_challenge] . identifier[realm] identifier[nonce] = identifier[digest_challenge] . identifier[nonce] identifier[opaque] = identifier[digest_challenge] . identifier[opaque] keyword[elif] keyword[not] ( identifier[realm] keyword[and] identifier[nonce] keyword[and] identifier[opaque] ): keyword[raise] identifier[Exception] ( literal[string] ) keyword[if] identifier[password] keyword[and] identifier[request_digest] : keyword[raise] identifier[Exception] ( literal[string] ) keyword[elif] keyword[not] identifier[request_digest] : keyword[if] keyword[not] identifier[password] : keyword[raise] identifier[Exception] ( literal[string] ) identifier[partial_digest] = identifier[calculate_partial_digest] ( identifier[username] , identifier[realm] , identifier[password] ) identifier[request_digest] = identifier[calculate_request_digest] ( identifier[method] , identifier[partial_digest] , identifier[uri] = identifier[uri] , identifier[nonce] = identifier[nonce] , identifier[nonce_count] = identifier[nonce_count] , identifier[client_nonce] = identifier[client_nonce] ) keyword[return] literal[string] % identifier[format_parts] ( identifier[username] = identifier[username] , identifier[realm] = identifier[realm] , identifier[nonce] = identifier[nonce] , identifier[uri] = identifier[uri] , identifier[response] = identifier[request_digest] , identifier[algorithm] = literal[string] , identifier[opaque] = identifier[opaque] , identifier[qop] = literal[string] , identifier[nc] = literal[string] % identifier[nonce_count] , identifier[cnonce] = identifier[client_nonce] )
def build_authorization_request(username, method, uri, nonce_count, digest_challenge=None, realm=None, nonce=None, opaque=None, password=None, request_digest=None, client_nonce=None): """ Builds an authorization request that may be sent as the value of the 'Authorization' header in an HTTP request. Either a digest_challenge object (as returned from parse_digest_challenge) or its required component parameters (nonce, realm, opaque) must be provided. The nonce_count should be the last used nonce_count plus one. Either the password or the request_digest should be provided - if provided, the password will be used to generate a request digest. The client_nonce is optional - if not provided, a random value will be generated. """ if not client_nonce: client_nonce = ''.join([random.choice('0123456789ABCDEF') for x in range(32)]) # depends on [control=['if'], data=[]] if digest_challenge and (realm or nonce or opaque): raise Exception('Both digest_challenge and one or more of realm, nonce, and opaquewere sent.') # depends on [control=['if'], data=[]] if digest_challenge: if isinstance(digest_challenge, types.StringType): digest_challenge_header = digest_challenge digest_challenge = parse_digest_challenge(digest_challenge_header) if not digest_challenge: raise Exception('The provided digest challenge header could not be parsed: %s' % digest_challenge_header) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] realm = digest_challenge.realm nonce = digest_challenge.nonce opaque = digest_challenge.opaque # depends on [control=['if'], data=[]] elif not (realm and nonce and opaque): raise Exception('Either digest_challenge or realm, nonce, and opaque must be sent.') # depends on [control=['if'], data=[]] if password and request_digest: raise Exception('Both password and calculated request_digest were sent.') # depends on [control=['if'], data=[]] elif not request_digest: if not password: raise Exception('Either password or calculated request_digest must be provided.') # depends on [control=['if'], data=[]] partial_digest = calculate_partial_digest(username, realm, password) request_digest = calculate_request_digest(method, partial_digest, uri=uri, nonce=nonce, nonce_count=nonce_count, client_nonce=client_nonce) # depends on [control=['if'], data=[]] return 'Digest %s' % format_parts(username=username, realm=realm, nonce=nonce, uri=uri, response=request_digest, algorithm='MD5', opaque=opaque, qop='auth', nc='%08x' % nonce_count, cnonce=client_nonce)
def export(self, export_auto_config=False): """ Export the cluster template for the given cluster. ccluster must have host templates defined. It cluster does not have host templates defined it will export host templates based on roles assignment. @param export_auto_config: Also export auto configured configs @return: Return cluster template @since: API v12 """ return self._get("export", ApiClusterTemplate, False, params=dict(exportAutoConfig=export_auto_config), api_version=12)
def function[export, parameter[self, export_auto_config]]: constant[ Export the cluster template for the given cluster. ccluster must have host templates defined. It cluster does not have host templates defined it will export host templates based on roles assignment. @param export_auto_config: Also export auto configured configs @return: Return cluster template @since: API v12 ] return[call[name[self]._get, parameter[constant[export], name[ApiClusterTemplate], constant[False]]]]
keyword[def] identifier[export] ( identifier[self] , identifier[export_auto_config] = keyword[False] ): literal[string] keyword[return] identifier[self] . identifier[_get] ( literal[string] , identifier[ApiClusterTemplate] , keyword[False] , identifier[params] = identifier[dict] ( identifier[exportAutoConfig] = identifier[export_auto_config] ), identifier[api_version] = literal[int] )
def export(self, export_auto_config=False): """ Export the cluster template for the given cluster. ccluster must have host templates defined. It cluster does not have host templates defined it will export host templates based on roles assignment. @param export_auto_config: Also export auto configured configs @return: Return cluster template @since: API v12 """ return self._get('export', ApiClusterTemplate, False, params=dict(exportAutoConfig=export_auto_config), api_version=12)
def _aggr_weighted_mean(inList, params): """ Weighted mean uses params (must be the same size as inList) and makes weighed mean of inList""" assert(len(inList) == len(params)) # If all weights are 0, then the value is not defined, return None (missing) weightsSum = sum(params) if weightsSum == 0: return None weightedMean = 0 for i, elem in enumerate(inList): weightedMean += elem * params[i] return weightedMean / weightsSum
def function[_aggr_weighted_mean, parameter[inList, params]]: constant[ Weighted mean uses params (must be the same size as inList) and makes weighed mean of inList] assert[compare[call[name[len], parameter[name[inList]]] equal[==] call[name[len], parameter[name[params]]]]] variable[weightsSum] assign[=] call[name[sum], parameter[name[params]]] if compare[name[weightsSum] equal[==] constant[0]] begin[:] return[constant[None]] variable[weightedMean] assign[=] constant[0] for taget[tuple[[<ast.Name object at 0x7da20c990520>, <ast.Name object at 0x7da20c991300>]]] in starred[call[name[enumerate], parameter[name[inList]]]] begin[:] <ast.AugAssign object at 0x7da20c991b40> return[binary_operation[name[weightedMean] / name[weightsSum]]]
keyword[def] identifier[_aggr_weighted_mean] ( identifier[inList] , identifier[params] ): literal[string] keyword[assert] ( identifier[len] ( identifier[inList] )== identifier[len] ( identifier[params] )) identifier[weightsSum] = identifier[sum] ( identifier[params] ) keyword[if] identifier[weightsSum] == literal[int] : keyword[return] keyword[None] identifier[weightedMean] = literal[int] keyword[for] identifier[i] , identifier[elem] keyword[in] identifier[enumerate] ( identifier[inList] ): identifier[weightedMean] += identifier[elem] * identifier[params] [ identifier[i] ] keyword[return] identifier[weightedMean] / identifier[weightsSum]
def _aggr_weighted_mean(inList, params): """ Weighted mean uses params (must be the same size as inList) and makes weighed mean of inList""" assert len(inList) == len(params) # If all weights are 0, then the value is not defined, return None (missing) weightsSum = sum(params) if weightsSum == 0: return None # depends on [control=['if'], data=[]] weightedMean = 0 for (i, elem) in enumerate(inList): weightedMean += elem * params[i] # depends on [control=['for'], data=[]] return weightedMean / weightsSum
def _results(r): r"""Select from a tuple of(root, funccalls, iterations, flag)""" x, funcalls, iterations, flag = r return results(x, funcalls, iterations, flag == 0)
def function[_results, parameter[r]]: constant[Select from a tuple of(root, funccalls, iterations, flag)] <ast.Tuple object at 0x7da1b21b9390> assign[=] name[r] return[call[name[results], parameter[name[x], name[funcalls], name[iterations], compare[name[flag] equal[==] constant[0]]]]]
keyword[def] identifier[_results] ( identifier[r] ): literal[string] identifier[x] , identifier[funcalls] , identifier[iterations] , identifier[flag] = identifier[r] keyword[return] identifier[results] ( identifier[x] , identifier[funcalls] , identifier[iterations] , identifier[flag] == literal[int] )
def _results(r): """Select from a tuple of(root, funccalls, iterations, flag)""" (x, funcalls, iterations, flag) = r return results(x, funcalls, iterations, flag == 0)
def _signed_bounds(self): """ Get lower bound and upper bound for `self` in signed arithmetic. :return: a list of (lower_bound, upper_bound) tuples """ nsplit = self._nsplit() if len(nsplit) == 1: lb = nsplit[0].lower_bound ub = nsplit[0].upper_bound lb = self._unsigned_to_signed(lb, self.bits) ub = self._unsigned_to_signed(ub, self.bits) return [(lb, ub)] elif len(nsplit) == 2: # nsplit[0] is on the left hemisphere, and nsplit[1] is on the right hemisphere # The left one lb_1 = nsplit[0].lower_bound ub_1 = nsplit[0].upper_bound # The right one lb_2 = nsplit[1].lower_bound ub_2 = nsplit[1].upper_bound # Then convert them to negative numbers lb_2 = self._unsigned_to_signed(lb_2, self.bits) ub_2 = self._unsigned_to_signed(ub_2, self.bits) return [ (lb_1, ub_1), (lb_2, ub_2) ] else: raise Exception('WTF')
def function[_signed_bounds, parameter[self]]: constant[ Get lower bound and upper bound for `self` in signed arithmetic. :return: a list of (lower_bound, upper_bound) tuples ] variable[nsplit] assign[=] call[name[self]._nsplit, parameter[]] if compare[call[name[len], parameter[name[nsplit]]] equal[==] constant[1]] begin[:] variable[lb] assign[=] call[name[nsplit]][constant[0]].lower_bound variable[ub] assign[=] call[name[nsplit]][constant[0]].upper_bound variable[lb] assign[=] call[name[self]._unsigned_to_signed, parameter[name[lb], name[self].bits]] variable[ub] assign[=] call[name[self]._unsigned_to_signed, parameter[name[ub], name[self].bits]] return[list[[<ast.Tuple object at 0x7da18eb57970>]]]
keyword[def] identifier[_signed_bounds] ( identifier[self] ): literal[string] identifier[nsplit] = identifier[self] . identifier[_nsplit] () keyword[if] identifier[len] ( identifier[nsplit] )== literal[int] : identifier[lb] = identifier[nsplit] [ literal[int] ]. identifier[lower_bound] identifier[ub] = identifier[nsplit] [ literal[int] ]. identifier[upper_bound] identifier[lb] = identifier[self] . identifier[_unsigned_to_signed] ( identifier[lb] , identifier[self] . identifier[bits] ) identifier[ub] = identifier[self] . identifier[_unsigned_to_signed] ( identifier[ub] , identifier[self] . identifier[bits] ) keyword[return] [( identifier[lb] , identifier[ub] )] keyword[elif] identifier[len] ( identifier[nsplit] )== literal[int] : identifier[lb_1] = identifier[nsplit] [ literal[int] ]. identifier[lower_bound] identifier[ub_1] = identifier[nsplit] [ literal[int] ]. identifier[upper_bound] identifier[lb_2] = identifier[nsplit] [ literal[int] ]. identifier[lower_bound] identifier[ub_2] = identifier[nsplit] [ literal[int] ]. identifier[upper_bound] identifier[lb_2] = identifier[self] . identifier[_unsigned_to_signed] ( identifier[lb_2] , identifier[self] . identifier[bits] ) identifier[ub_2] = identifier[self] . identifier[_unsigned_to_signed] ( identifier[ub_2] , identifier[self] . identifier[bits] ) keyword[return] [( identifier[lb_1] , identifier[ub_1] ),( identifier[lb_2] , identifier[ub_2] )] keyword[else] : keyword[raise] identifier[Exception] ( literal[string] )
def _signed_bounds(self): """ Get lower bound and upper bound for `self` in signed arithmetic. :return: a list of (lower_bound, upper_bound) tuples """ nsplit = self._nsplit() if len(nsplit) == 1: lb = nsplit[0].lower_bound ub = nsplit[0].upper_bound lb = self._unsigned_to_signed(lb, self.bits) ub = self._unsigned_to_signed(ub, self.bits) return [(lb, ub)] # depends on [control=['if'], data=[]] elif len(nsplit) == 2: # nsplit[0] is on the left hemisphere, and nsplit[1] is on the right hemisphere # The left one lb_1 = nsplit[0].lower_bound ub_1 = nsplit[0].upper_bound # The right one lb_2 = nsplit[1].lower_bound ub_2 = nsplit[1].upper_bound # Then convert them to negative numbers lb_2 = self._unsigned_to_signed(lb_2, self.bits) ub_2 = self._unsigned_to_signed(ub_2, self.bits) return [(lb_1, ub_1), (lb_2, ub_2)] # depends on [control=['if'], data=[]] else: raise Exception('WTF')
def marshal_with_model(model, excludes=None, only=None, extends=None): """With this decorator, you can return ORM model instance, or ORM query in view function directly. We'll transform these objects to standard python data structures, like Flask-RESTFul's `marshal_with` decorator. And, you don't need define fields at all. You can specific columns to be returned, by `excludes` or `only` parameter. (Don't use these tow parameters at the same time, otherwise only `excludes` parameter will be used.) If you want return fields that outside of model, or overwrite the type of some fields, use `extends` parameter to specify them. Notice: this function only support `Flask-SQLAlchemy` Example: class Student(db.Model): id = Column(Integer, primary_key=True) name = Column(String(100)) age = Column(Integer) class SomeApi(Resource): @marshal_with_model(Student, excludes=['id']) def get(self): return Student.query # response: [{"name": "student_a", "age": "16"}, {"name": "student_b", "age": 18}] class AnotherApi(Resource): @marshal_with_model(Student, extends={"nice_guy": fields.Boolean, "age": fields.String}) def get(self): student = Student.query.get(1) student.nice_guy = True student.age = "young" if student.age < 18 else "old" # transform int field to string return student """ if isinstance(excludes, six.string_types): excludes = [excludes] if excludes and only: only = None elif isinstance(only, six.string_types): only = [only] field_definition = {} for col in model.__table__.columns: if only: if col.name not in only: continue elif excludes and col.name in excludes: continue field_definition[col.name] = _type_map[col.type.python_type.__name__] if extends is not None: for k, v in extends.items(): field_definition[k] = v def decorated(f): @wraps(f) @_marshal_with(field_definition) def wrapper(*args, **kwargs): result = f(*args, **kwargs) return result if not _fields.is_indexable_but_not_string(result) else [v for v in result] return wrapper return decorated
def function[marshal_with_model, parameter[model, excludes, only, extends]]: constant[With this decorator, you can return ORM model instance, or ORM query in view function directly. We'll transform these objects to standard python data structures, like Flask-RESTFul's `marshal_with` decorator. And, you don't need define fields at all. You can specific columns to be returned, by `excludes` or `only` parameter. (Don't use these tow parameters at the same time, otherwise only `excludes` parameter will be used.) If you want return fields that outside of model, or overwrite the type of some fields, use `extends` parameter to specify them. Notice: this function only support `Flask-SQLAlchemy` Example: class Student(db.Model): id = Column(Integer, primary_key=True) name = Column(String(100)) age = Column(Integer) class SomeApi(Resource): @marshal_with_model(Student, excludes=['id']) def get(self): return Student.query # response: [{"name": "student_a", "age": "16"}, {"name": "student_b", "age": 18}] class AnotherApi(Resource): @marshal_with_model(Student, extends={"nice_guy": fields.Boolean, "age": fields.String}) def get(self): student = Student.query.get(1) student.nice_guy = True student.age = "young" if student.age < 18 else "old" # transform int field to string return student ] if call[name[isinstance], parameter[name[excludes], name[six].string_types]] begin[:] variable[excludes] assign[=] list[[<ast.Name object at 0x7da18f00d0c0>]] if <ast.BoolOp object at 0x7da18f00e560> begin[:] variable[only] assign[=] constant[None] variable[field_definition] assign[=] dictionary[[], []] for taget[name[col]] in starred[name[model].__table__.columns] begin[:] if name[only] begin[:] if compare[name[col].name <ast.NotIn object at 0x7da2590d7190> name[only]] begin[:] continue call[name[field_definition]][name[col].name] assign[=] call[name[_type_map]][name[col].type.python_type.__name__] if compare[name[extends] is_not constant[None]] begin[:] for taget[tuple[[<ast.Name object at 0x7da1b28209d0>, <ast.Name object at 0x7da1b28207c0>]]] in starred[call[name[extends].items, parameter[]]] begin[:] call[name[field_definition]][name[k]] assign[=] name[v] def function[decorated, parameter[f]]: def function[wrapper, parameter[]]: variable[result] assign[=] call[name[f], parameter[<ast.Starred object at 0x7da20c76e0b0>]] return[<ast.IfExp object at 0x7da20c76c9d0>] return[name[wrapper]] return[name[decorated]]
keyword[def] identifier[marshal_with_model] ( identifier[model] , identifier[excludes] = keyword[None] , identifier[only] = keyword[None] , identifier[extends] = keyword[None] ): literal[string] keyword[if] identifier[isinstance] ( identifier[excludes] , identifier[six] . identifier[string_types] ): identifier[excludes] =[ identifier[excludes] ] keyword[if] identifier[excludes] keyword[and] identifier[only] : identifier[only] = keyword[None] keyword[elif] identifier[isinstance] ( identifier[only] , identifier[six] . identifier[string_types] ): identifier[only] =[ identifier[only] ] identifier[field_definition] ={} keyword[for] identifier[col] keyword[in] identifier[model] . identifier[__table__] . identifier[columns] : keyword[if] identifier[only] : keyword[if] identifier[col] . identifier[name] keyword[not] keyword[in] identifier[only] : keyword[continue] keyword[elif] identifier[excludes] keyword[and] identifier[col] . identifier[name] keyword[in] identifier[excludes] : keyword[continue] identifier[field_definition] [ identifier[col] . identifier[name] ]= identifier[_type_map] [ identifier[col] . identifier[type] . identifier[python_type] . identifier[__name__] ] keyword[if] identifier[extends] keyword[is] keyword[not] keyword[None] : keyword[for] identifier[k] , identifier[v] keyword[in] identifier[extends] . identifier[items] (): identifier[field_definition] [ identifier[k] ]= identifier[v] keyword[def] identifier[decorated] ( identifier[f] ): @ identifier[wraps] ( identifier[f] ) @ identifier[_marshal_with] ( identifier[field_definition] ) keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ): identifier[result] = identifier[f] (* identifier[args] ,** identifier[kwargs] ) keyword[return] identifier[result] keyword[if] keyword[not] identifier[_fields] . identifier[is_indexable_but_not_string] ( identifier[result] ) keyword[else] [ identifier[v] keyword[for] identifier[v] keyword[in] identifier[result] ] keyword[return] identifier[wrapper] keyword[return] identifier[decorated]
def marshal_with_model(model, excludes=None, only=None, extends=None): """With this decorator, you can return ORM model instance, or ORM query in view function directly. We'll transform these objects to standard python data structures, like Flask-RESTFul's `marshal_with` decorator. And, you don't need define fields at all. You can specific columns to be returned, by `excludes` or `only` parameter. (Don't use these tow parameters at the same time, otherwise only `excludes` parameter will be used.) If you want return fields that outside of model, or overwrite the type of some fields, use `extends` parameter to specify them. Notice: this function only support `Flask-SQLAlchemy` Example: class Student(db.Model): id = Column(Integer, primary_key=True) name = Column(String(100)) age = Column(Integer) class SomeApi(Resource): @marshal_with_model(Student, excludes=['id']) def get(self): return Student.query # response: [{"name": "student_a", "age": "16"}, {"name": "student_b", "age": 18}] class AnotherApi(Resource): @marshal_with_model(Student, extends={"nice_guy": fields.Boolean, "age": fields.String}) def get(self): student = Student.query.get(1) student.nice_guy = True student.age = "young" if student.age < 18 else "old" # transform int field to string return student """ if isinstance(excludes, six.string_types): excludes = [excludes] # depends on [control=['if'], data=[]] if excludes and only: only = None # depends on [control=['if'], data=[]] elif isinstance(only, six.string_types): only = [only] # depends on [control=['if'], data=[]] field_definition = {} for col in model.__table__.columns: if only: if col.name not in only: continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif excludes and col.name in excludes: continue # depends on [control=['if'], data=[]] field_definition[col.name] = _type_map[col.type.python_type.__name__] # depends on [control=['for'], data=['col']] if extends is not None: for (k, v) in extends.items(): field_definition[k] = v # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['extends']] def decorated(f): @wraps(f) @_marshal_with(field_definition) def wrapper(*args, **kwargs): result = f(*args, **kwargs) return result if not _fields.is_indexable_but_not_string(result) else [v for v in result] return wrapper return decorated
def session_to_hour(timestamp): """:param timestamp: as string in YYYYMMDDHHmmSS format :return string in YYYYMMDDHH format""" t = datetime.strptime(timestamp, SYNERGY_SESSION_PATTERN) return t.strftime(SYNERGY_HOURLY_PATTERN)
def function[session_to_hour, parameter[timestamp]]: constant[:param timestamp: as string in YYYYMMDDHHmmSS format :return string in YYYYMMDDHH format] variable[t] assign[=] call[name[datetime].strptime, parameter[name[timestamp], name[SYNERGY_SESSION_PATTERN]]] return[call[name[t].strftime, parameter[name[SYNERGY_HOURLY_PATTERN]]]]
keyword[def] identifier[session_to_hour] ( identifier[timestamp] ): literal[string] identifier[t] = identifier[datetime] . identifier[strptime] ( identifier[timestamp] , identifier[SYNERGY_SESSION_PATTERN] ) keyword[return] identifier[t] . identifier[strftime] ( identifier[SYNERGY_HOURLY_PATTERN] )
def session_to_hour(timestamp): """:param timestamp: as string in YYYYMMDDHHmmSS format :return string in YYYYMMDDHH format""" t = datetime.strptime(timestamp, SYNERGY_SESSION_PATTERN) return t.strftime(SYNERGY_HOURLY_PATTERN)
def search_webhook_for_facets(self, **kwargs): # noqa: E501 """Lists the values of one or more facets over the customer's webhooks # noqa: E501 # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.search_webhook_for_facets(async_req=True) >>> result = thread.get() :param async_req bool :param FacetsSearchRequestContainer body: :return: ResponseContainerFacetsResponseContainer If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.search_webhook_for_facets_with_http_info(**kwargs) # noqa: E501 else: (data) = self.search_webhook_for_facets_with_http_info(**kwargs) # noqa: E501 return data
def function[search_webhook_for_facets, parameter[self]]: constant[Lists the values of one or more facets over the customer's webhooks # noqa: E501 # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.search_webhook_for_facets(async_req=True) >>> result = thread.get() :param async_req bool :param FacetsSearchRequestContainer body: :return: ResponseContainerFacetsResponseContainer If the method is called asynchronously, returns the request thread. ] call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True] if call[name[kwargs].get, parameter[constant[async_req]]] begin[:] return[call[name[self].search_webhook_for_facets_with_http_info, parameter[]]]
keyword[def] identifier[search_webhook_for_facets] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[kwargs] [ literal[string] ]= keyword[True] keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ): keyword[return] identifier[self] . identifier[search_webhook_for_facets_with_http_info] (** identifier[kwargs] ) keyword[else] : ( identifier[data] )= identifier[self] . identifier[search_webhook_for_facets_with_http_info] (** identifier[kwargs] ) keyword[return] identifier[data]
def search_webhook_for_facets(self, **kwargs): # noqa: E501 "Lists the values of one or more facets over the customer's webhooks # noqa: E501\n\n # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.search_webhook_for_facets(async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param FacetsSearchRequestContainer body:\n :return: ResponseContainerFacetsResponseContainer\n If the method is called asynchronously,\n returns the request thread.\n " kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.search_webhook_for_facets_with_http_info(**kwargs) # noqa: E501 # depends on [control=['if'], data=[]] else: data = self.search_webhook_for_facets_with_http_info(**kwargs) # noqa: E501 return data
def create_local_copy(cookie_file): """Make a local copy of the sqlite cookie database and return the new filename. This is necessary in case this database is still being written to while the user browses to avoid sqlite locking errors. """ # if type of cookie_file is a list, use the first element in the list if isinstance(cookie_file, list): cookie_file = cookie_file[0] # check if cookie file exists if os.path.exists(cookie_file): # copy to random name in tmp folder tmp_cookie_file = tempfile.NamedTemporaryFile(suffix='.sqlite').name open(tmp_cookie_file, 'wb').write(open(cookie_file, 'rb').read()) return tmp_cookie_file else: raise BrowserCookieError('Can not find cookie file at: ' + cookie_file)
def function[create_local_copy, parameter[cookie_file]]: constant[Make a local copy of the sqlite cookie database and return the new filename. This is necessary in case this database is still being written to while the user browses to avoid sqlite locking errors. ] if call[name[isinstance], parameter[name[cookie_file], name[list]]] begin[:] variable[cookie_file] assign[=] call[name[cookie_file]][constant[0]] if call[name[os].path.exists, parameter[name[cookie_file]]] begin[:] variable[tmp_cookie_file] assign[=] call[name[tempfile].NamedTemporaryFile, parameter[]].name call[call[name[open], parameter[name[tmp_cookie_file], constant[wb]]].write, parameter[call[call[name[open], parameter[name[cookie_file], constant[rb]]].read, parameter[]]]] return[name[tmp_cookie_file]]
keyword[def] identifier[create_local_copy] ( identifier[cookie_file] ): literal[string] keyword[if] identifier[isinstance] ( identifier[cookie_file] , identifier[list] ): identifier[cookie_file] = identifier[cookie_file] [ literal[int] ] keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[cookie_file] ): identifier[tmp_cookie_file] = identifier[tempfile] . identifier[NamedTemporaryFile] ( identifier[suffix] = literal[string] ). identifier[name] identifier[open] ( identifier[tmp_cookie_file] , literal[string] ). identifier[write] ( identifier[open] ( identifier[cookie_file] , literal[string] ). identifier[read] ()) keyword[return] identifier[tmp_cookie_file] keyword[else] : keyword[raise] identifier[BrowserCookieError] ( literal[string] + identifier[cookie_file] )
def create_local_copy(cookie_file): """Make a local copy of the sqlite cookie database and return the new filename. This is necessary in case this database is still being written to while the user browses to avoid sqlite locking errors. """ # if type of cookie_file is a list, use the first element in the list if isinstance(cookie_file, list): cookie_file = cookie_file[0] # depends on [control=['if'], data=[]] # check if cookie file exists if os.path.exists(cookie_file): # copy to random name in tmp folder tmp_cookie_file = tempfile.NamedTemporaryFile(suffix='.sqlite').name open(tmp_cookie_file, 'wb').write(open(cookie_file, 'rb').read()) return tmp_cookie_file # depends on [control=['if'], data=[]] else: raise BrowserCookieError('Can not find cookie file at: ' + cookie_file)
def _statuscode2string(status_code): """Return a short message for a CIM status code.""" try: s = _STATUSCODE2STRING[status_code] except KeyError: s = _format("Invalid status code {0}", status_code) return s
def function[_statuscode2string, parameter[status_code]]: constant[Return a short message for a CIM status code.] <ast.Try object at 0x7da1b0ef48b0> return[name[s]]
keyword[def] identifier[_statuscode2string] ( identifier[status_code] ): literal[string] keyword[try] : identifier[s] = identifier[_STATUSCODE2STRING] [ identifier[status_code] ] keyword[except] identifier[KeyError] : identifier[s] = identifier[_format] ( literal[string] , identifier[status_code] ) keyword[return] identifier[s]
def _statuscode2string(status_code): """Return a short message for a CIM status code.""" try: s = _STATUSCODE2STRING[status_code] # depends on [control=['try'], data=[]] except KeyError: s = _format('Invalid status code {0}', status_code) # depends on [control=['except'], data=[]] return s
def tempallow(ip=None, ttl=None, port=None, direction=None, comment=''): ''' Add an rule to the temporary ip allow list. See :func:`_access_rule`. 1- Add an IP: CLI Example: .. code-block:: bash salt '*' csf.tempallow 127.0.0.1 3600 port=22 direction='in' comment='# Temp dev ssh access' ''' return _tmp_access_rule('tempallow', ip, ttl, port, direction, comment)
def function[tempallow, parameter[ip, ttl, port, direction, comment]]: constant[ Add an rule to the temporary ip allow list. See :func:`_access_rule`. 1- Add an IP: CLI Example: .. code-block:: bash salt '*' csf.tempallow 127.0.0.1 3600 port=22 direction='in' comment='# Temp dev ssh access' ] return[call[name[_tmp_access_rule], parameter[constant[tempallow], name[ip], name[ttl], name[port], name[direction], name[comment]]]]
keyword[def] identifier[tempallow] ( identifier[ip] = keyword[None] , identifier[ttl] = keyword[None] , identifier[port] = keyword[None] , identifier[direction] = keyword[None] , identifier[comment] = literal[string] ): literal[string] keyword[return] identifier[_tmp_access_rule] ( literal[string] , identifier[ip] , identifier[ttl] , identifier[port] , identifier[direction] , identifier[comment] )
def tempallow(ip=None, ttl=None, port=None, direction=None, comment=''): """ Add an rule to the temporary ip allow list. See :func:`_access_rule`. 1- Add an IP: CLI Example: .. code-block:: bash salt '*' csf.tempallow 127.0.0.1 3600 port=22 direction='in' comment='# Temp dev ssh access' """ return _tmp_access_rule('tempallow', ip, ttl, port, direction, comment)
def set_data(self, xdata=[1,2,3,4,5], ydata=[1.7,2,3,4,3], eydata=None, **kwargs): """ This will handle the different types of supplied data and put everything in a standard format for processing. Parameters ---------- xdata, ydata These can be a single array of data or a list of data arrays. eydata=None Error bars for ydata. These can be None (for guessed error) or data / numbers matching the dimensionality of xdata and ydata Notes ----- xdata, ydata, and eydata can all be scripts or lists of scripts that produce arrays. Any python code will work, and the scripts automatically know about all numpy functions, the guessed parameters, and the data itself (as x, y, ey). However, the scripts are executed in order -- xdata, ydata, and eydata -- so the xdata script cannot know about ydata or eydata, the ydata script cannot know about eydata, and the eydata script knows about xdata and ydata. Example: xdata = [1,2,3,4,5] ydata = [[1,2,1,2,1], 'cos(x[0])'] eydata = ['arctan(y[1])*a+b', 5] In this example, there will be two data sets to fit (so there better be two functions!), they will share the same xdata, the second ydata set will be the array cos([1,2,3,4,5]) (note since there are multiple data sets assumed (always), you have to select the data set with an index on x and y), the error on the first data set will be this weird functional dependence on the second ydata set and fit parameters a and b (note, if a and b are not fit parameters, then you must send them as keyword arguments so that they are defined) and the second data set error bar will be a constant, 5. Note this function is "somewhat" smart about reshaping the input data to ease life a bit, but it can't handle ambiguities. If you want to play it safe, supply lists for all three arguments that match in dimensionality. results can be obtained by calling get_data() Additional optional keyword arguments are added to the globals for script evaluation. """ # SET UP DATA SETS TO MATCH EACH OTHER AND NUMBER OF FUNCTIONS # At this stage: # xdata, ydata 'script', [1,2,3], [[1,2,3],'script'], ['script', [1,2,3]] # eydata, exdata 'script', [1,1,1], [[1,1,1],'script'], ['script', [1,1,1]], 3, [3,[1,2,3]], None # if xdata, ydata, or eydata are bare scripts, make them into lists if type(xdata) is str: xdata = [xdata] if type(ydata) is str: ydata = [ydata] if type(eydata) is str or _s.fun.is_a_number(eydata) or eydata is None: eydata = [eydata] #if type(exdata) is str or _s.fun.is_a_number(exdata) or exdata is None: exdata = [exdata] # xdata and ydata ['script'], [1,2,3], [[1,2,3],'script'], ['script', [1,2,3]] # eydata ['script'], [1,1,1], [[1,1,1],'script'], ['script', [1,1,1]], [3], [3,[1,2,3]], [None] # if the first element of data is a number, then this is a normal array if _s.fun.is_a_number(xdata[0]): xdata = [xdata] if _s.fun.is_a_number(ydata[0]): ydata = [ydata] # xdata and ydata ['script'], [[1,2,3]], [[1,2,3],'script'], ['script', [1,2,3]] # eydata ['script'], [1,1,1], [[1,1,1],'script'], ['script', [1,1,1]], [3], [3,[1,2,3]], [None] # if the first element of eydata is a number, this could also just be an error bar value # Note: there is some ambiguity here, if the number of data sets equals the number of data points! if _s.fun.is_a_number(eydata[0]) and len(eydata) == len(ydata[0]): eydata = [eydata] #if _s.fun.is_a_number(exdata[0]) and len(exdata) == len(xdata[0]): exdata = [exdata] # xdata and ydata ['script'], [[1,2,3]], [[1,2,3],'script'], ['script', [1,2,3]] # eydata ['script'], [[1,1,1]], [[1,1,1],'script'], ['script', [1,1,1]], [3], [3,[1,2,3]], [None] # Inflate the x, ex, and ey data sets to match the ydata sets while len(xdata) < len(ydata): xdata .append( xdata[0]) while len(ydata) < len(xdata): ydata .append( ydata[0]) #while len(exdata) < len(xdata): exdata.append(exdata[0]) while len(eydata) < len(ydata): eydata.append(eydata[0]) # make sure these lists are the same length as the number of functions while len(ydata) < len(self.f): ydata.append(ydata[0]) while len(xdata) < len(self.f): xdata.append(xdata[0]) while len(eydata) < len(self.f): eydata.append(eydata[0]) #while len(exdata) < len(self.f): exdata.append(exdata[0]) # xdata and ydata ['script','script'], [[1,2,3],[1,2,3]], [[1,2,3],'script'], ['script', [1,2,3]] # eydata ['script','script'], [[1,1,1],[1,1,1]], [[1,1,1],'script'], ['script', [1,1,1]], [3,3], [3,[1,2,3]], [None,None] # Clean up exdata. If any element isn't None, the other None elements need # to be set to 0 so that ODR works. # if not exdata.count(None) == len(exdata): # # Search for and replace all None's with 0 # for n in range(len(exdata)): # if exdata[n] == None: exdata[n] = 0 # # store the data, script, or whatever it is! self._set_xdata = xdata self._set_ydata = ydata self._set_eydata = eydata #self._set_exdata = exdata self._set_data_globals.update(kwargs) # set the eyscale to 1 for each data set self['scale_eydata'] = [1.0]*len(self._set_xdata) #self['scale_exdata'] = [1.0]*len(self._set_xdata) # Update the settings so they match the number of data sets. for k in self._settings.keys(): self[k] = self[k] # Plot if necessary if self['autoplot']: self.plot() return self
def function[set_data, parameter[self, xdata, ydata, eydata]]: constant[ This will handle the different types of supplied data and put everything in a standard format for processing. Parameters ---------- xdata, ydata These can be a single array of data or a list of data arrays. eydata=None Error bars for ydata. These can be None (for guessed error) or data / numbers matching the dimensionality of xdata and ydata Notes ----- xdata, ydata, and eydata can all be scripts or lists of scripts that produce arrays. Any python code will work, and the scripts automatically know about all numpy functions, the guessed parameters, and the data itself (as x, y, ey). However, the scripts are executed in order -- xdata, ydata, and eydata -- so the xdata script cannot know about ydata or eydata, the ydata script cannot know about eydata, and the eydata script knows about xdata and ydata. Example: xdata = [1,2,3,4,5] ydata = [[1,2,1,2,1], 'cos(x[0])'] eydata = ['arctan(y[1])*a+b', 5] In this example, there will be two data sets to fit (so there better be two functions!), they will share the same xdata, the second ydata set will be the array cos([1,2,3,4,5]) (note since there are multiple data sets assumed (always), you have to select the data set with an index on x and y), the error on the first data set will be this weird functional dependence on the second ydata set and fit parameters a and b (note, if a and b are not fit parameters, then you must send them as keyword arguments so that they are defined) and the second data set error bar will be a constant, 5. Note this function is "somewhat" smart about reshaping the input data to ease life a bit, but it can't handle ambiguities. If you want to play it safe, supply lists for all three arguments that match in dimensionality. results can be obtained by calling get_data() Additional optional keyword arguments are added to the globals for script evaluation. ] if compare[call[name[type], parameter[name[xdata]]] is name[str]] begin[:] variable[xdata] assign[=] list[[<ast.Name object at 0x7da18eb54ca0>]] if compare[call[name[type], parameter[name[ydata]]] is name[str]] begin[:] variable[ydata] assign[=] list[[<ast.Name object at 0x7da18eb57820>]] if <ast.BoolOp object at 0x7da18eb54310> begin[:] variable[eydata] assign[=] list[[<ast.Name object at 0x7da18eb55ab0>]] if call[name[_s].fun.is_a_number, parameter[call[name[xdata]][constant[0]]]] begin[:] variable[xdata] assign[=] list[[<ast.Name object at 0x7da18eb57700>]] if call[name[_s].fun.is_a_number, parameter[call[name[ydata]][constant[0]]]] begin[:] variable[ydata] assign[=] list[[<ast.Name object at 0x7da18eb54ac0>]] if <ast.BoolOp object at 0x7da18eb56740> begin[:] variable[eydata] assign[=] list[[<ast.Name object at 0x7da18eb56ef0>]] while compare[call[name[len], parameter[name[xdata]]] less[<] call[name[len], parameter[name[ydata]]]] begin[:] call[name[xdata].append, parameter[call[name[xdata]][constant[0]]]] while compare[call[name[len], parameter[name[ydata]]] less[<] call[name[len], parameter[name[xdata]]]] begin[:] call[name[ydata].append, parameter[call[name[ydata]][constant[0]]]] while compare[call[name[len], parameter[name[eydata]]] less[<] call[name[len], parameter[name[ydata]]]] begin[:] call[name[eydata].append, parameter[call[name[eydata]][constant[0]]]] while compare[call[name[len], parameter[name[ydata]]] less[<] call[name[len], parameter[name[self].f]]] begin[:] call[name[ydata].append, parameter[call[name[ydata]][constant[0]]]] while compare[call[name[len], parameter[name[xdata]]] less[<] call[name[len], parameter[name[self].f]]] begin[:] call[name[xdata].append, parameter[call[name[xdata]][constant[0]]]] while compare[call[name[len], parameter[name[eydata]]] less[<] call[name[len], parameter[name[self].f]]] begin[:] call[name[eydata].append, parameter[call[name[eydata]][constant[0]]]] name[self]._set_xdata assign[=] name[xdata] name[self]._set_ydata assign[=] name[ydata] name[self]._set_eydata assign[=] name[eydata] call[name[self]._set_data_globals.update, parameter[name[kwargs]]] call[name[self]][constant[scale_eydata]] assign[=] binary_operation[list[[<ast.Constant object at 0x7da18f09c1f0>]] * call[name[len], parameter[name[self]._set_xdata]]] for taget[name[k]] in starred[call[name[self]._settings.keys, parameter[]]] begin[:] call[name[self]][name[k]] assign[=] call[name[self]][name[k]] if call[name[self]][constant[autoplot]] begin[:] call[name[self].plot, parameter[]] return[name[self]]
keyword[def] identifier[set_data] ( identifier[self] , identifier[xdata] =[ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ], identifier[ydata] =[ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ], identifier[eydata] = keyword[None] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[type] ( identifier[xdata] ) keyword[is] identifier[str] : identifier[xdata] =[ identifier[xdata] ] keyword[if] identifier[type] ( identifier[ydata] ) keyword[is] identifier[str] : identifier[ydata] =[ identifier[ydata] ] keyword[if] identifier[type] ( identifier[eydata] ) keyword[is] identifier[str] keyword[or] identifier[_s] . identifier[fun] . identifier[is_a_number] ( identifier[eydata] ) keyword[or] identifier[eydata] keyword[is] keyword[None] : identifier[eydata] =[ identifier[eydata] ] keyword[if] identifier[_s] . identifier[fun] . identifier[is_a_number] ( identifier[xdata] [ literal[int] ]): identifier[xdata] =[ identifier[xdata] ] keyword[if] identifier[_s] . identifier[fun] . identifier[is_a_number] ( identifier[ydata] [ literal[int] ]): identifier[ydata] =[ identifier[ydata] ] keyword[if] identifier[_s] . identifier[fun] . identifier[is_a_number] ( identifier[eydata] [ literal[int] ]) keyword[and] identifier[len] ( identifier[eydata] )== identifier[len] ( identifier[ydata] [ literal[int] ]): identifier[eydata] =[ identifier[eydata] ] keyword[while] identifier[len] ( identifier[xdata] )< identifier[len] ( identifier[ydata] ): identifier[xdata] . identifier[append] ( identifier[xdata] [ literal[int] ]) keyword[while] identifier[len] ( identifier[ydata] )< identifier[len] ( identifier[xdata] ): identifier[ydata] . identifier[append] ( identifier[ydata] [ literal[int] ]) keyword[while] identifier[len] ( identifier[eydata] )< identifier[len] ( identifier[ydata] ): identifier[eydata] . identifier[append] ( identifier[eydata] [ literal[int] ]) keyword[while] identifier[len] ( identifier[ydata] )< identifier[len] ( identifier[self] . identifier[f] ): identifier[ydata] . identifier[append] ( identifier[ydata] [ literal[int] ]) keyword[while] identifier[len] ( identifier[xdata] )< identifier[len] ( identifier[self] . identifier[f] ): identifier[xdata] . identifier[append] ( identifier[xdata] [ literal[int] ]) keyword[while] identifier[len] ( identifier[eydata] )< identifier[len] ( identifier[self] . identifier[f] ): identifier[eydata] . identifier[append] ( identifier[eydata] [ literal[int] ]) identifier[self] . identifier[_set_xdata] = identifier[xdata] identifier[self] . identifier[_set_ydata] = identifier[ydata] identifier[self] . identifier[_set_eydata] = identifier[eydata] identifier[self] . identifier[_set_data_globals] . identifier[update] ( identifier[kwargs] ) identifier[self] [ literal[string] ]=[ literal[int] ]* identifier[len] ( identifier[self] . identifier[_set_xdata] ) keyword[for] identifier[k] keyword[in] identifier[self] . identifier[_settings] . identifier[keys] (): identifier[self] [ identifier[k] ]= identifier[self] [ identifier[k] ] keyword[if] identifier[self] [ literal[string] ]: identifier[self] . identifier[plot] () keyword[return] identifier[self]
def set_data(self, xdata=[1, 2, 3, 4, 5], ydata=[1.7, 2, 3, 4, 3], eydata=None, **kwargs): """ This will handle the different types of supplied data and put everything in a standard format for processing. Parameters ---------- xdata, ydata These can be a single array of data or a list of data arrays. eydata=None Error bars for ydata. These can be None (for guessed error) or data / numbers matching the dimensionality of xdata and ydata Notes ----- xdata, ydata, and eydata can all be scripts or lists of scripts that produce arrays. Any python code will work, and the scripts automatically know about all numpy functions, the guessed parameters, and the data itself (as x, y, ey). However, the scripts are executed in order -- xdata, ydata, and eydata -- so the xdata script cannot know about ydata or eydata, the ydata script cannot know about eydata, and the eydata script knows about xdata and ydata. Example: xdata = [1,2,3,4,5] ydata = [[1,2,1,2,1], 'cos(x[0])'] eydata = ['arctan(y[1])*a+b', 5] In this example, there will be two data sets to fit (so there better be two functions!), they will share the same xdata, the second ydata set will be the array cos([1,2,3,4,5]) (note since there are multiple data sets assumed (always), you have to select the data set with an index on x and y), the error on the first data set will be this weird functional dependence on the second ydata set and fit parameters a and b (note, if a and b are not fit parameters, then you must send them as keyword arguments so that they are defined) and the second data set error bar will be a constant, 5. Note this function is "somewhat" smart about reshaping the input data to ease life a bit, but it can't handle ambiguities. If you want to play it safe, supply lists for all three arguments that match in dimensionality. results can be obtained by calling get_data() Additional optional keyword arguments are added to the globals for script evaluation. """ # SET UP DATA SETS TO MATCH EACH OTHER AND NUMBER OF FUNCTIONS # At this stage: # xdata, ydata 'script', [1,2,3], [[1,2,3],'script'], ['script', [1,2,3]] # eydata, exdata 'script', [1,1,1], [[1,1,1],'script'], ['script', [1,1,1]], 3, [3,[1,2,3]], None # if xdata, ydata, or eydata are bare scripts, make them into lists if type(xdata) is str: xdata = [xdata] # depends on [control=['if'], data=[]] if type(ydata) is str: ydata = [ydata] # depends on [control=['if'], data=[]] if type(eydata) is str or _s.fun.is_a_number(eydata) or eydata is None: eydata = [eydata] # depends on [control=['if'], data=[]] #if type(exdata) is str or _s.fun.is_a_number(exdata) or exdata is None: exdata = [exdata] # xdata and ydata ['script'], [1,2,3], [[1,2,3],'script'], ['script', [1,2,3]] # eydata ['script'], [1,1,1], [[1,1,1],'script'], ['script', [1,1,1]], [3], [3,[1,2,3]], [None] # if the first element of data is a number, then this is a normal array if _s.fun.is_a_number(xdata[0]): xdata = [xdata] # depends on [control=['if'], data=[]] if _s.fun.is_a_number(ydata[0]): ydata = [ydata] # depends on [control=['if'], data=[]] # xdata and ydata ['script'], [[1,2,3]], [[1,2,3],'script'], ['script', [1,2,3]] # eydata ['script'], [1,1,1], [[1,1,1],'script'], ['script', [1,1,1]], [3], [3,[1,2,3]], [None] # if the first element of eydata is a number, this could also just be an error bar value # Note: there is some ambiguity here, if the number of data sets equals the number of data points! if _s.fun.is_a_number(eydata[0]) and len(eydata) == len(ydata[0]): eydata = [eydata] # depends on [control=['if'], data=[]] #if _s.fun.is_a_number(exdata[0]) and len(exdata) == len(xdata[0]): exdata = [exdata] # xdata and ydata ['script'], [[1,2,3]], [[1,2,3],'script'], ['script', [1,2,3]] # eydata ['script'], [[1,1,1]], [[1,1,1],'script'], ['script', [1,1,1]], [3], [3,[1,2,3]], [None] # Inflate the x, ex, and ey data sets to match the ydata sets while len(xdata) < len(ydata): xdata.append(xdata[0]) # depends on [control=['while'], data=[]] while len(ydata) < len(xdata): ydata.append(ydata[0]) # depends on [control=['while'], data=[]] #while len(exdata) < len(xdata): exdata.append(exdata[0]) while len(eydata) < len(ydata): eydata.append(eydata[0]) # depends on [control=['while'], data=[]] # make sure these lists are the same length as the number of functions while len(ydata) < len(self.f): ydata.append(ydata[0]) # depends on [control=['while'], data=[]] while len(xdata) < len(self.f): xdata.append(xdata[0]) # depends on [control=['while'], data=[]] while len(eydata) < len(self.f): eydata.append(eydata[0]) # depends on [control=['while'], data=[]] #while len(exdata) < len(self.f): exdata.append(exdata[0]) # xdata and ydata ['script','script'], [[1,2,3],[1,2,3]], [[1,2,3],'script'], ['script', [1,2,3]] # eydata ['script','script'], [[1,1,1],[1,1,1]], [[1,1,1],'script'], ['script', [1,1,1]], [3,3], [3,[1,2,3]], [None,None] # Clean up exdata. If any element isn't None, the other None elements need # to be set to 0 so that ODR works. # if not exdata.count(None) == len(exdata): # # Search for and replace all None's with 0 # for n in range(len(exdata)): # if exdata[n] == None: exdata[n] = 0 # # store the data, script, or whatever it is! self._set_xdata = xdata self._set_ydata = ydata self._set_eydata = eydata #self._set_exdata = exdata self._set_data_globals.update(kwargs) # set the eyscale to 1 for each data set self['scale_eydata'] = [1.0] * len(self._set_xdata) #self['scale_exdata'] = [1.0]*len(self._set_xdata) # Update the settings so they match the number of data sets. for k in self._settings.keys(): self[k] = self[k] # depends on [control=['for'], data=['k']] # Plot if necessary if self['autoplot']: self.plot() # depends on [control=['if'], data=[]] return self
def fromlist(items, accessor=None, index=None, labels=None, dtype=None, npartitions=None, engine=None): """ Load series data from a list with an optional accessor function. Will call accessor function on each item from the list, providing a generic interface for data loading. Parameters ---------- items : list A list of items to load. accessor : function, optional, default = None A function to apply to each item in the list during loading. index : array, optional, default = None Index for records, if not provided will use (0,1,...,N) where N is the length of each record. labels : array, optional, default = None Labels for records. If provided, should have same length as items. dtype : string, default = None Data numerical type (if provided will avoid check) npartitions : int, default = None Number of partitions for parallelization (Spark only) engine : object, default = None Computational engine (e.g. a SparkContext for Spark) """ if spark and isinstance(engine, spark): if dtype is None: dtype = accessor(items[0]).dtype if accessor else items[0].dtype nrecords = len(items) keys = map(lambda k: (k, ), range(len(items))) if not npartitions: npartitions = engine.defaultParallelism items = zip(keys, items) rdd = engine.parallelize(items, npartitions) if accessor: rdd = rdd.mapValues(accessor) return fromrdd(rdd, nrecords=nrecords, index=index, labels=labels, dtype=dtype, ordered=True) else: if accessor: items = [accessor(i) for i in items] return fromarray(items, index=index, labels=labels)
def function[fromlist, parameter[items, accessor, index, labels, dtype, npartitions, engine]]: constant[ Load series data from a list with an optional accessor function. Will call accessor function on each item from the list, providing a generic interface for data loading. Parameters ---------- items : list A list of items to load. accessor : function, optional, default = None A function to apply to each item in the list during loading. index : array, optional, default = None Index for records, if not provided will use (0,1,...,N) where N is the length of each record. labels : array, optional, default = None Labels for records. If provided, should have same length as items. dtype : string, default = None Data numerical type (if provided will avoid check) npartitions : int, default = None Number of partitions for parallelization (Spark only) engine : object, default = None Computational engine (e.g. a SparkContext for Spark) ] if <ast.BoolOp object at 0x7da18f09e290> begin[:] if compare[name[dtype] is constant[None]] begin[:] variable[dtype] assign[=] <ast.IfExp object at 0x7da18f09d6f0> variable[nrecords] assign[=] call[name[len], parameter[name[items]]] variable[keys] assign[=] call[name[map], parameter[<ast.Lambda object at 0x7da18f09ca60>, call[name[range], parameter[call[name[len], parameter[name[items]]]]]]] if <ast.UnaryOp object at 0x7da18f09df30> begin[:] variable[npartitions] assign[=] name[engine].defaultParallelism variable[items] assign[=] call[name[zip], parameter[name[keys], name[items]]] variable[rdd] assign[=] call[name[engine].parallelize, parameter[name[items], name[npartitions]]] if name[accessor] begin[:] variable[rdd] assign[=] call[name[rdd].mapValues, parameter[name[accessor]]] return[call[name[fromrdd], parameter[name[rdd]]]]
keyword[def] identifier[fromlist] ( identifier[items] , identifier[accessor] = keyword[None] , identifier[index] = keyword[None] , identifier[labels] = keyword[None] , identifier[dtype] = keyword[None] , identifier[npartitions] = keyword[None] , identifier[engine] = keyword[None] ): literal[string] keyword[if] identifier[spark] keyword[and] identifier[isinstance] ( identifier[engine] , identifier[spark] ): keyword[if] identifier[dtype] keyword[is] keyword[None] : identifier[dtype] = identifier[accessor] ( identifier[items] [ literal[int] ]). identifier[dtype] keyword[if] identifier[accessor] keyword[else] identifier[items] [ literal[int] ]. identifier[dtype] identifier[nrecords] = identifier[len] ( identifier[items] ) identifier[keys] = identifier[map] ( keyword[lambda] identifier[k] :( identifier[k] ,), identifier[range] ( identifier[len] ( identifier[items] ))) keyword[if] keyword[not] identifier[npartitions] : identifier[npartitions] = identifier[engine] . identifier[defaultParallelism] identifier[items] = identifier[zip] ( identifier[keys] , identifier[items] ) identifier[rdd] = identifier[engine] . identifier[parallelize] ( identifier[items] , identifier[npartitions] ) keyword[if] identifier[accessor] : identifier[rdd] = identifier[rdd] . identifier[mapValues] ( identifier[accessor] ) keyword[return] identifier[fromrdd] ( identifier[rdd] , identifier[nrecords] = identifier[nrecords] , identifier[index] = identifier[index] , identifier[labels] = identifier[labels] , identifier[dtype] = identifier[dtype] , identifier[ordered] = keyword[True] ) keyword[else] : keyword[if] identifier[accessor] : identifier[items] =[ identifier[accessor] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[items] ] keyword[return] identifier[fromarray] ( identifier[items] , identifier[index] = identifier[index] , identifier[labels] = identifier[labels] )
def fromlist(items, accessor=None, index=None, labels=None, dtype=None, npartitions=None, engine=None): """ Load series data from a list with an optional accessor function. Will call accessor function on each item from the list, providing a generic interface for data loading. Parameters ---------- items : list A list of items to load. accessor : function, optional, default = None A function to apply to each item in the list during loading. index : array, optional, default = None Index for records, if not provided will use (0,1,...,N) where N is the length of each record. labels : array, optional, default = None Labels for records. If provided, should have same length as items. dtype : string, default = None Data numerical type (if provided will avoid check) npartitions : int, default = None Number of partitions for parallelization (Spark only) engine : object, default = None Computational engine (e.g. a SparkContext for Spark) """ if spark and isinstance(engine, spark): if dtype is None: dtype = accessor(items[0]).dtype if accessor else items[0].dtype # depends on [control=['if'], data=['dtype']] nrecords = len(items) keys = map(lambda k: (k,), range(len(items))) if not npartitions: npartitions = engine.defaultParallelism # depends on [control=['if'], data=[]] items = zip(keys, items) rdd = engine.parallelize(items, npartitions) if accessor: rdd = rdd.mapValues(accessor) # depends on [control=['if'], data=[]] return fromrdd(rdd, nrecords=nrecords, index=index, labels=labels, dtype=dtype, ordered=True) # depends on [control=['if'], data=[]] else: if accessor: items = [accessor(i) for i in items] # depends on [control=['if'], data=[]] return fromarray(items, index=index, labels=labels)
def open(self, path: str, mode: str='r') -> IO: """Open an IO-like object for `path`. .. note:: Mode *must* be either 'r' or 'w', as the underlying objects do not understand the full range of modes. :param path: The path to open. :param mode: The mode of the file being opened, either 'r' or 'w'. """ entry = self.path_map.get(path) if entry is None: raise FileNotFoundError() if isinstance(entry, str): with open(entry, 'rb' if mode == 'r' else mode) as source: yield source elif isinstance(entry, ZipFile): yield io.BytesIO(entry.read(path)) else: raise NotImplementedError()
def function[open, parameter[self, path, mode]]: constant[Open an IO-like object for `path`. .. note:: Mode *must* be either 'r' or 'w', as the underlying objects do not understand the full range of modes. :param path: The path to open. :param mode: The mode of the file being opened, either 'r' or 'w'. ] variable[entry] assign[=] call[name[self].path_map.get, parameter[name[path]]] if compare[name[entry] is constant[None]] begin[:] <ast.Raise object at 0x7da1b256f700> if call[name[isinstance], parameter[name[entry], name[str]]] begin[:] with call[name[open], parameter[name[entry], <ast.IfExp object at 0x7da1b256e320>]] begin[:] <ast.Yield object at 0x7da1b256d270>
keyword[def] identifier[open] ( identifier[self] , identifier[path] : identifier[str] , identifier[mode] : identifier[str] = literal[string] )-> identifier[IO] : literal[string] identifier[entry] = identifier[self] . identifier[path_map] . identifier[get] ( identifier[path] ) keyword[if] identifier[entry] keyword[is] keyword[None] : keyword[raise] identifier[FileNotFoundError] () keyword[if] identifier[isinstance] ( identifier[entry] , identifier[str] ): keyword[with] identifier[open] ( identifier[entry] , literal[string] keyword[if] identifier[mode] == literal[string] keyword[else] identifier[mode] ) keyword[as] identifier[source] : keyword[yield] identifier[source] keyword[elif] identifier[isinstance] ( identifier[entry] , identifier[ZipFile] ): keyword[yield] identifier[io] . identifier[BytesIO] ( identifier[entry] . identifier[read] ( identifier[path] )) keyword[else] : keyword[raise] identifier[NotImplementedError] ()
def open(self, path: str, mode: str='r') -> IO: """Open an IO-like object for `path`. .. note:: Mode *must* be either 'r' or 'w', as the underlying objects do not understand the full range of modes. :param path: The path to open. :param mode: The mode of the file being opened, either 'r' or 'w'. """ entry = self.path_map.get(path) if entry is None: raise FileNotFoundError() # depends on [control=['if'], data=[]] if isinstance(entry, str): with open(entry, 'rb' if mode == 'r' else mode) as source: yield source # depends on [control=['with'], data=['source']] # depends on [control=['if'], data=[]] elif isinstance(entry, ZipFile): yield io.BytesIO(entry.read(path)) # depends on [control=['if'], data=[]] else: raise NotImplementedError()
def console_fill_foreground( con: tcod.console.Console, r: Sequence[int], g: Sequence[int], b: Sequence[int], ) -> None: """Fill the foregound of a console with r,g,b. Args: con (Console): Any Console instance. r (Sequence[int]): An array of integers with a length of width*height. g (Sequence[int]): An array of integers with a length of width*height. b (Sequence[int]): An array of integers with a length of width*height. .. deprecated:: 8.4 You should assign to :any:`tcod.console.Console.fg` instead. """ if len(r) != len(g) or len(r) != len(b): raise TypeError("R, G and B must all have the same size.") if ( isinstance(r, np.ndarray) and isinstance(g, np.ndarray) and isinstance(b, np.ndarray) ): # numpy arrays, use numpy's ctypes functions r_ = np.ascontiguousarray(r, dtype=np.intc) g_ = np.ascontiguousarray(g, dtype=np.intc) b_ = np.ascontiguousarray(b, dtype=np.intc) cr = ffi.cast("int *", r_.ctypes.data) cg = ffi.cast("int *", g_.ctypes.data) cb = ffi.cast("int *", b_.ctypes.data) else: # otherwise convert using ffi arrays cr = ffi.new("int[]", r) cg = ffi.new("int[]", g) cb = ffi.new("int[]", b) lib.TCOD_console_fill_foreground(_console(con), cr, cg, cb)
def function[console_fill_foreground, parameter[con, r, g, b]]: constant[Fill the foregound of a console with r,g,b. Args: con (Console): Any Console instance. r (Sequence[int]): An array of integers with a length of width*height. g (Sequence[int]): An array of integers with a length of width*height. b (Sequence[int]): An array of integers with a length of width*height. .. deprecated:: 8.4 You should assign to :any:`tcod.console.Console.fg` instead. ] if <ast.BoolOp object at 0x7da1b2347580> begin[:] <ast.Raise object at 0x7da1b23476a0> if <ast.BoolOp object at 0x7da1b2344fd0> begin[:] variable[r_] assign[=] call[name[np].ascontiguousarray, parameter[name[r]]] variable[g_] assign[=] call[name[np].ascontiguousarray, parameter[name[g]]] variable[b_] assign[=] call[name[np].ascontiguousarray, parameter[name[b]]] variable[cr] assign[=] call[name[ffi].cast, parameter[constant[int *], name[r_].ctypes.data]] variable[cg] assign[=] call[name[ffi].cast, parameter[constant[int *], name[g_].ctypes.data]] variable[cb] assign[=] call[name[ffi].cast, parameter[constant[int *], name[b_].ctypes.data]] call[name[lib].TCOD_console_fill_foreground, parameter[call[name[_console], parameter[name[con]]], name[cr], name[cg], name[cb]]]
keyword[def] identifier[console_fill_foreground] ( identifier[con] : identifier[tcod] . identifier[console] . identifier[Console] , identifier[r] : identifier[Sequence] [ identifier[int] ], identifier[g] : identifier[Sequence] [ identifier[int] ], identifier[b] : identifier[Sequence] [ identifier[int] ], )-> keyword[None] : literal[string] keyword[if] identifier[len] ( identifier[r] )!= identifier[len] ( identifier[g] ) keyword[or] identifier[len] ( identifier[r] )!= identifier[len] ( identifier[b] ): keyword[raise] identifier[TypeError] ( literal[string] ) keyword[if] ( identifier[isinstance] ( identifier[r] , identifier[np] . identifier[ndarray] ) keyword[and] identifier[isinstance] ( identifier[g] , identifier[np] . identifier[ndarray] ) keyword[and] identifier[isinstance] ( identifier[b] , identifier[np] . identifier[ndarray] ) ): identifier[r_] = identifier[np] . identifier[ascontiguousarray] ( identifier[r] , identifier[dtype] = identifier[np] . identifier[intc] ) identifier[g_] = identifier[np] . identifier[ascontiguousarray] ( identifier[g] , identifier[dtype] = identifier[np] . identifier[intc] ) identifier[b_] = identifier[np] . identifier[ascontiguousarray] ( identifier[b] , identifier[dtype] = identifier[np] . identifier[intc] ) identifier[cr] = identifier[ffi] . identifier[cast] ( literal[string] , identifier[r_] . identifier[ctypes] . identifier[data] ) identifier[cg] = identifier[ffi] . identifier[cast] ( literal[string] , identifier[g_] . identifier[ctypes] . identifier[data] ) identifier[cb] = identifier[ffi] . identifier[cast] ( literal[string] , identifier[b_] . identifier[ctypes] . identifier[data] ) keyword[else] : identifier[cr] = identifier[ffi] . identifier[new] ( literal[string] , identifier[r] ) identifier[cg] = identifier[ffi] . identifier[new] ( literal[string] , identifier[g] ) identifier[cb] = identifier[ffi] . identifier[new] ( literal[string] , identifier[b] ) identifier[lib] . identifier[TCOD_console_fill_foreground] ( identifier[_console] ( identifier[con] ), identifier[cr] , identifier[cg] , identifier[cb] )
def console_fill_foreground(con: tcod.console.Console, r: Sequence[int], g: Sequence[int], b: Sequence[int]) -> None: """Fill the foregound of a console with r,g,b. Args: con (Console): Any Console instance. r (Sequence[int]): An array of integers with a length of width*height. g (Sequence[int]): An array of integers with a length of width*height. b (Sequence[int]): An array of integers with a length of width*height. .. deprecated:: 8.4 You should assign to :any:`tcod.console.Console.fg` instead. """ if len(r) != len(g) or len(r) != len(b): raise TypeError('R, G and B must all have the same size.') # depends on [control=['if'], data=[]] if isinstance(r, np.ndarray) and isinstance(g, np.ndarray) and isinstance(b, np.ndarray): # numpy arrays, use numpy's ctypes functions r_ = np.ascontiguousarray(r, dtype=np.intc) g_ = np.ascontiguousarray(g, dtype=np.intc) b_ = np.ascontiguousarray(b, dtype=np.intc) cr = ffi.cast('int *', r_.ctypes.data) cg = ffi.cast('int *', g_.ctypes.data) cb = ffi.cast('int *', b_.ctypes.data) # depends on [control=['if'], data=[]] else: # otherwise convert using ffi arrays cr = ffi.new('int[]', r) cg = ffi.new('int[]', g) cb = ffi.new('int[]', b) lib.TCOD_console_fill_foreground(_console(con), cr, cg, cb)
def notification(self): """Provide access to the currently displayed notification. Returns: :py:class:`BaseNotification`: FoxPuppet BaseNotification object. """ with self.selenium.context(self.selenium.CONTEXT_CHROME): try: root = self.selenium.find_element(*self._notification_locator) return BaseNotification.create(self, root) except NoSuchElementException: pass try: notifications = self.selenium.find_elements( *self._app_menu_notification_locator ) root = next(n for n in notifications if n.is_displayed()) return BaseNotification.create(self, root) except StopIteration: pass return None
def function[notification, parameter[self]]: constant[Provide access to the currently displayed notification. Returns: :py:class:`BaseNotification`: FoxPuppet BaseNotification object. ] with call[name[self].selenium.context, parameter[name[self].selenium.CONTEXT_CHROME]] begin[:] <ast.Try object at 0x7da1b24aee90> <ast.Try object at 0x7da1b24aee00> return[constant[None]]
keyword[def] identifier[notification] ( identifier[self] ): literal[string] keyword[with] identifier[self] . identifier[selenium] . identifier[context] ( identifier[self] . identifier[selenium] . identifier[CONTEXT_CHROME] ): keyword[try] : identifier[root] = identifier[self] . identifier[selenium] . identifier[find_element] (* identifier[self] . identifier[_notification_locator] ) keyword[return] identifier[BaseNotification] . identifier[create] ( identifier[self] , identifier[root] ) keyword[except] identifier[NoSuchElementException] : keyword[pass] keyword[try] : identifier[notifications] = identifier[self] . identifier[selenium] . identifier[find_elements] ( * identifier[self] . identifier[_app_menu_notification_locator] ) identifier[root] = identifier[next] ( identifier[n] keyword[for] identifier[n] keyword[in] identifier[notifications] keyword[if] identifier[n] . identifier[is_displayed] ()) keyword[return] identifier[BaseNotification] . identifier[create] ( identifier[self] , identifier[root] ) keyword[except] identifier[StopIteration] : keyword[pass] keyword[return] keyword[None]
def notification(self): """Provide access to the currently displayed notification. Returns: :py:class:`BaseNotification`: FoxPuppet BaseNotification object. """ with self.selenium.context(self.selenium.CONTEXT_CHROME): try: root = self.selenium.find_element(*self._notification_locator) return BaseNotification.create(self, root) # depends on [control=['try'], data=[]] except NoSuchElementException: pass # depends on [control=['except'], data=[]] try: notifications = self.selenium.find_elements(*self._app_menu_notification_locator) root = next((n for n in notifications if n.is_displayed())) return BaseNotification.create(self, root) # depends on [control=['try'], data=[]] except StopIteration: pass # depends on [control=['except'], data=[]] # depends on [control=['with'], data=[]] return None
def returnCSV(self, keys=None, limit=False, omitHeaderLine=False, quoteChar=None, eolChars='\r\n'): r'''Return a list of dictionaries formated as a comma seperated values (CSV) list in a string. Each entry is on one line. By default, each value is seperated by commas and each entry is followed by a newline ('\n'). By default, RFC4180 is followed. See: http://tools.ietf.org/html/rfc4180 The RFC has considerable detail describing the format. Note regarding certain RFC section 2 sub 4: there is no default behavior specified if the last field on a line is empty and thus creating a 'trailing comma'. This routine handles it by quoting the final field. The first example below demonstrates this for the entry with 'name' of 'Larry' and a missing 'income' field. The missing field is shown as "" at the end of the line. Missing keys and/or values of None are simply blank. Example of use: >>> test = [ ... {"name": "Jim, Phd", "age": 18 , "income": 93000, "order": 2}, ... {"name": "Larry", "age": None, "order": 3}, ... {"name": "Joe", "age": 20 , "income": 15000, "order": 1}, ... {"name": "B \"Zip\" O'Tool", "age": 19 , "income": 29000, "order": 4}, ... ] >>> print PLOD(test).returnCSV() # doctest: +NORMALIZE_WHITESPACE age,order,name,income 18,2,"Jim, Phd",93000 ,3,Larry,"" 20,1,Joe,15000 19,4,"B ""Zip"" O'Tool",29000 <BLANKLINE> >>> print PLOD(test).returnCSV(limit=3, omitHeaderLine=True, quoteChar="'", eolChars='\n') '18','2','Jim, Phd','93000' '','3','Larry','' '20','1','Joe','15000' <BLANKLINE> >>> print PLOD(test).returnCSV(keys=["name", "age"], quoteChar='"', eolChars='\n') "name","age" "Jim, Phd","18" "Larry","" "Joe","20" "B ""Zip"" O'Tool","19" <BLANKLINE> :param keys: If the 'keys' parameter is passed a list of keys, then only those keys are returned. The order of keys in the list is retained. If a key is not found in an entry (or in *any* entry), that is not an error condition. Those entries simply have an empty value for that position. NOTE: use this parameter if the order of the keys is critical. Order is not guaranteed otherwise. :param limit: A number limiting the quantity of entries to return. Defaults to False, which means that the full list is returned. :param omitHeaderLine: If set to True, the initial line of text listing the keys is not included. Defaults to False. :param quoteChar: If set to anything (including a single quote), then all fields will be surrounded by the quote character. :param eolChars: These are the characters inserted at the end of each line. By default they are CRLF ('\r\n') as specified in RFC4180. To be more pythonic you could change it to '\n'. :return: A string containing a formatted textual representation of the list of dictionaries. ''' result = "" if quoteChar: quoteAll=True else: quoteAll=False # we limit the table if needed used_table = self.table if limit: used_table = [] for i in range(limit): if len(self.table)>i: used_table.append(self.table[i]) # we locate all of the attributes if keys: attr_list = keys else: attr_list = [] for row in used_table: for key in row: if not key in attr_list: attr_list.append(key) # now we do the pretty print if not omitHeaderLine: if quoteAll: result += quoteChar temp = quoteChar+","+quoteChar result += temp.join(attr_list) result += quoteChar else: result += ",".join(attr_list) result += eolChars for row in used_table: ml = [] for ctr, key in enumerate(attr_list): if key in row: if row[key] is None: value = "" else: value = str(row[key]) if quoteAll: ml.append(internal.csv_quote(quoteChar,value)) else: if ('"' in value) or (',' in value): ml.append(internal.csv_quote('"', value)) else: if ((ctr+1)==len(attr_list)) and (len(value)==0): ml.append('""') else: ml.append(value) else: if quoteAll: ml.append(quoteChar+quoteChar) else: if (ctr+1)==len(attr_list): ml.append('""') else: ml.append("") result += ",".join(ml) result += eolChars return result
def function[returnCSV, parameter[self, keys, limit, omitHeaderLine, quoteChar, eolChars]]: constant[Return a list of dictionaries formated as a comma seperated values (CSV) list in a string. Each entry is on one line. By default, each value is seperated by commas and each entry is followed by a newline ('\n'). By default, RFC4180 is followed. See: http://tools.ietf.org/html/rfc4180 The RFC has considerable detail describing the format. Note regarding certain RFC section 2 sub 4: there is no default behavior specified if the last field on a line is empty and thus creating a 'trailing comma'. This routine handles it by quoting the final field. The first example below demonstrates this for the entry with 'name' of 'Larry' and a missing 'income' field. The missing field is shown as "" at the end of the line. Missing keys and/or values of None are simply blank. Example of use: >>> test = [ ... {"name": "Jim, Phd", "age": 18 , "income": 93000, "order": 2}, ... {"name": "Larry", "age": None, "order": 3}, ... {"name": "Joe", "age": 20 , "income": 15000, "order": 1}, ... {"name": "B \"Zip\" O'Tool", "age": 19 , "income": 29000, "order": 4}, ... ] >>> print PLOD(test).returnCSV() # doctest: +NORMALIZE_WHITESPACE age,order,name,income 18,2,"Jim, Phd",93000 ,3,Larry,"" 20,1,Joe,15000 19,4,"B ""Zip"" O'Tool",29000 <BLANKLINE> >>> print PLOD(test).returnCSV(limit=3, omitHeaderLine=True, quoteChar="'", eolChars='\n') '18','2','Jim, Phd','93000' '','3','Larry','' '20','1','Joe','15000' <BLANKLINE> >>> print PLOD(test).returnCSV(keys=["name", "age"], quoteChar='"', eolChars='\n') "name","age" "Jim, Phd","18" "Larry","" "Joe","20" "B ""Zip"" O'Tool","19" <BLANKLINE> :param keys: If the 'keys' parameter is passed a list of keys, then only those keys are returned. The order of keys in the list is retained. If a key is not found in an entry (or in *any* entry), that is not an error condition. Those entries simply have an empty value for that position. NOTE: use this parameter if the order of the keys is critical. Order is not guaranteed otherwise. :param limit: A number limiting the quantity of entries to return. Defaults to False, which means that the full list is returned. :param omitHeaderLine: If set to True, the initial line of text listing the keys is not included. Defaults to False. :param quoteChar: If set to anything (including a single quote), then all fields will be surrounded by the quote character. :param eolChars: These are the characters inserted at the end of each line. By default they are CRLF ('\r\n') as specified in RFC4180. To be more pythonic you could change it to '\n'. :return: A string containing a formatted textual representation of the list of dictionaries. ] variable[result] assign[=] constant[] if name[quoteChar] begin[:] variable[quoteAll] assign[=] constant[True] variable[used_table] assign[=] name[self].table if name[limit] begin[:] variable[used_table] assign[=] list[[]] for taget[name[i]] in starred[call[name[range], parameter[name[limit]]]] begin[:] if compare[call[name[len], parameter[name[self].table]] greater[>] name[i]] begin[:] call[name[used_table].append, parameter[call[name[self].table][name[i]]]] if name[keys] begin[:] variable[attr_list] assign[=] name[keys] if <ast.UnaryOp object at 0x7da2047ea050> begin[:] if name[quoteAll] begin[:] <ast.AugAssign object at 0x7da2047e9150> variable[temp] assign[=] binary_operation[binary_operation[name[quoteChar] + constant[,]] + name[quoteChar]] <ast.AugAssign object at 0x7da2047e80a0> <ast.AugAssign object at 0x7da2047ea230> <ast.AugAssign object at 0x7da2047ea470> for taget[name[row]] in starred[name[used_table]] begin[:] variable[ml] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da2047ebbe0>, <ast.Name object at 0x7da2047eb5e0>]]] in starred[call[name[enumerate], parameter[name[attr_list]]]] begin[:] if compare[name[key] in name[row]] begin[:] if compare[call[name[row]][name[key]] is constant[None]] begin[:] variable[value] assign[=] constant[] if name[quoteAll] begin[:] call[name[ml].append, parameter[call[name[internal].csv_quote, parameter[name[quoteChar], name[value]]]]] <ast.AugAssign object at 0x7da20c795a50> <ast.AugAssign object at 0x7da20c796290> return[name[result]]
keyword[def] identifier[returnCSV] ( identifier[self] , identifier[keys] = keyword[None] , identifier[limit] = keyword[False] , identifier[omitHeaderLine] = keyword[False] , identifier[quoteChar] = keyword[None] , identifier[eolChars] = literal[string] ): literal[string] identifier[result] = literal[string] keyword[if] identifier[quoteChar] : identifier[quoteAll] = keyword[True] keyword[else] : identifier[quoteAll] = keyword[False] identifier[used_table] = identifier[self] . identifier[table] keyword[if] identifier[limit] : identifier[used_table] =[] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[limit] ): keyword[if] identifier[len] ( identifier[self] . identifier[table] )> identifier[i] : identifier[used_table] . identifier[append] ( identifier[self] . identifier[table] [ identifier[i] ]) keyword[if] identifier[keys] : identifier[attr_list] = identifier[keys] keyword[else] : identifier[attr_list] =[] keyword[for] identifier[row] keyword[in] identifier[used_table] : keyword[for] identifier[key] keyword[in] identifier[row] : keyword[if] keyword[not] identifier[key] keyword[in] identifier[attr_list] : identifier[attr_list] . identifier[append] ( identifier[key] ) keyword[if] keyword[not] identifier[omitHeaderLine] : keyword[if] identifier[quoteAll] : identifier[result] += identifier[quoteChar] identifier[temp] = identifier[quoteChar] + literal[string] + identifier[quoteChar] identifier[result] += identifier[temp] . identifier[join] ( identifier[attr_list] ) identifier[result] += identifier[quoteChar] keyword[else] : identifier[result] += literal[string] . identifier[join] ( identifier[attr_list] ) identifier[result] += identifier[eolChars] keyword[for] identifier[row] keyword[in] identifier[used_table] : identifier[ml] =[] keyword[for] identifier[ctr] , identifier[key] keyword[in] identifier[enumerate] ( identifier[attr_list] ): keyword[if] identifier[key] keyword[in] identifier[row] : keyword[if] identifier[row] [ identifier[key] ] keyword[is] keyword[None] : identifier[value] = literal[string] keyword[else] : identifier[value] = identifier[str] ( identifier[row] [ identifier[key] ]) keyword[if] identifier[quoteAll] : identifier[ml] . identifier[append] ( identifier[internal] . identifier[csv_quote] ( identifier[quoteChar] , identifier[value] )) keyword[else] : keyword[if] ( literal[string] keyword[in] identifier[value] ) keyword[or] ( literal[string] keyword[in] identifier[value] ): identifier[ml] . identifier[append] ( identifier[internal] . identifier[csv_quote] ( literal[string] , identifier[value] )) keyword[else] : keyword[if] (( identifier[ctr] + literal[int] )== identifier[len] ( identifier[attr_list] )) keyword[and] ( identifier[len] ( identifier[value] )== literal[int] ): identifier[ml] . identifier[append] ( literal[string] ) keyword[else] : identifier[ml] . identifier[append] ( identifier[value] ) keyword[else] : keyword[if] identifier[quoteAll] : identifier[ml] . identifier[append] ( identifier[quoteChar] + identifier[quoteChar] ) keyword[else] : keyword[if] ( identifier[ctr] + literal[int] )== identifier[len] ( identifier[attr_list] ): identifier[ml] . identifier[append] ( literal[string] ) keyword[else] : identifier[ml] . identifier[append] ( literal[string] ) identifier[result] += literal[string] . identifier[join] ( identifier[ml] ) identifier[result] += identifier[eolChars] keyword[return] identifier[result]
def returnCSV(self, keys=None, limit=False, omitHeaderLine=False, quoteChar=None, eolChars='\r\n'): """Return a list of dictionaries formated as a comma seperated values (CSV) list in a string. Each entry is on one line. By default, each value is seperated by commas and each entry is followed by a newline ('\\n'). By default, RFC4180 is followed. See: http://tools.ietf.org/html/rfc4180 The RFC has considerable detail describing the format. Note regarding certain RFC section 2 sub 4: there is no default behavior specified if the last field on a line is empty and thus creating a 'trailing comma'. This routine handles it by quoting the final field. The first example below demonstrates this for the entry with 'name' of 'Larry' and a missing 'income' field. The missing field is shown as "" at the end of the line. Missing keys and/or values of None are simply blank. Example of use: >>> test = [ ... {"name": "Jim, Phd", "age": 18 , "income": 93000, "order": 2}, ... {"name": "Larry", "age": None, "order": 3}, ... {"name": "Joe", "age": 20 , "income": 15000, "order": 1}, ... {"name": "B \\"Zip\\" O'Tool", "age": 19 , "income": 29000, "order": 4}, ... ] >>> print PLOD(test).returnCSV() # doctest: +NORMALIZE_WHITESPACE age,order,name,income 18,2,"Jim, Phd",93000 ,3,Larry,"" 20,1,Joe,15000 19,4,"B ""Zip"" O'Tool",29000 <BLANKLINE> >>> print PLOD(test).returnCSV(limit=3, omitHeaderLine=True, quoteChar="'", eolChars='\\n') '18','2','Jim, Phd','93000' '','3','Larry','' '20','1','Joe','15000' <BLANKLINE> >>> print PLOD(test).returnCSV(keys=["name", "age"], quoteChar='"', eolChars='\\n') "name","age" "Jim, Phd","18" "Larry","" "Joe","20" "B ""Zip"" O'Tool","19" <BLANKLINE> :param keys: If the 'keys' parameter is passed a list of keys, then only those keys are returned. The order of keys in the list is retained. If a key is not found in an entry (or in *any* entry), that is not an error condition. Those entries simply have an empty value for that position. NOTE: use this parameter if the order of the keys is critical. Order is not guaranteed otherwise. :param limit: A number limiting the quantity of entries to return. Defaults to False, which means that the full list is returned. :param omitHeaderLine: If set to True, the initial line of text listing the keys is not included. Defaults to False. :param quoteChar: If set to anything (including a single quote), then all fields will be surrounded by the quote character. :param eolChars: These are the characters inserted at the end of each line. By default they are CRLF ('\\r\\n') as specified in RFC4180. To be more pythonic you could change it to '\\n'. :return: A string containing a formatted textual representation of the list of dictionaries. """ result = '' if quoteChar: quoteAll = True # depends on [control=['if'], data=[]] else: quoteAll = False # we limit the table if needed used_table = self.table if limit: used_table = [] for i in range(limit): if len(self.table) > i: used_table.append(self.table[i]) # depends on [control=['if'], data=['i']] # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]] # we locate all of the attributes if keys: attr_list = keys # depends on [control=['if'], data=[]] else: attr_list = [] for row in used_table: for key in row: if not key in attr_list: attr_list.append(key) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']] # depends on [control=['for'], data=['row']] # now we do the pretty print if not omitHeaderLine: if quoteAll: result += quoteChar temp = quoteChar + ',' + quoteChar result += temp.join(attr_list) result += quoteChar # depends on [control=['if'], data=[]] else: result += ','.join(attr_list) result += eolChars # depends on [control=['if'], data=[]] for row in used_table: ml = [] for (ctr, key) in enumerate(attr_list): if key in row: if row[key] is None: value = '' # depends on [control=['if'], data=[]] else: value = str(row[key]) if quoteAll: ml.append(internal.csv_quote(quoteChar, value)) # depends on [control=['if'], data=[]] elif '"' in value or ',' in value: ml.append(internal.csv_quote('"', value)) # depends on [control=['if'], data=[]] elif ctr + 1 == len(attr_list) and len(value) == 0: ml.append('""') # depends on [control=['if'], data=[]] else: ml.append(value) # depends on [control=['if'], data=['key', 'row']] elif quoteAll: ml.append(quoteChar + quoteChar) # depends on [control=['if'], data=[]] elif ctr + 1 == len(attr_list): ml.append('""') # depends on [control=['if'], data=[]] else: ml.append('') # depends on [control=['for'], data=[]] result += ','.join(ml) result += eolChars # depends on [control=['for'], data=['row']] return result
def getFileKeys(self): """ Retrieve a list of file keys that have been read into the database. This is a utility method that can be used to programmatically access the GsshaPy file objects. Use these keys in conjunction with the dictionary returned by the getFileObjects method. Returns: list: List of keys representing file objects that have been read into the database. """ files = self.getFileObjects() files_list = [] for key, value in files.iteritems(): if value: files_list.append(key) return files_list
def function[getFileKeys, parameter[self]]: constant[ Retrieve a list of file keys that have been read into the database. This is a utility method that can be used to programmatically access the GsshaPy file objects. Use these keys in conjunction with the dictionary returned by the getFileObjects method. Returns: list: List of keys representing file objects that have been read into the database. ] variable[files] assign[=] call[name[self].getFileObjects, parameter[]] variable[files_list] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da20c6a9d20>, <ast.Name object at 0x7da20c6a9cf0>]]] in starred[call[name[files].iteritems, parameter[]]] begin[:] if name[value] begin[:] call[name[files_list].append, parameter[name[key]]] return[name[files_list]]
keyword[def] identifier[getFileKeys] ( identifier[self] ): literal[string] identifier[files] = identifier[self] . identifier[getFileObjects] () identifier[files_list] =[] keyword[for] identifier[key] , identifier[value] keyword[in] identifier[files] . identifier[iteritems] (): keyword[if] identifier[value] : identifier[files_list] . identifier[append] ( identifier[key] ) keyword[return] identifier[files_list]
def getFileKeys(self): """ Retrieve a list of file keys that have been read into the database. This is a utility method that can be used to programmatically access the GsshaPy file objects. Use these keys in conjunction with the dictionary returned by the getFileObjects method. Returns: list: List of keys representing file objects that have been read into the database. """ files = self.getFileObjects() files_list = [] for (key, value) in files.iteritems(): if value: files_list.append(key) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return files_list
def _consume_flags(self): """Read flags until we encounter the first token that isn't a flag.""" flags = [] while self._at_flag(): flag = self._unconsumed_args.pop() if not self._check_for_help_request(flag): flags.append(flag) return flags
def function[_consume_flags, parameter[self]]: constant[Read flags until we encounter the first token that isn't a flag.] variable[flags] assign[=] list[[]] while call[name[self]._at_flag, parameter[]] begin[:] variable[flag] assign[=] call[name[self]._unconsumed_args.pop, parameter[]] if <ast.UnaryOp object at 0x7da1b1e5cc40> begin[:] call[name[flags].append, parameter[name[flag]]] return[name[flags]]
keyword[def] identifier[_consume_flags] ( identifier[self] ): literal[string] identifier[flags] =[] keyword[while] identifier[self] . identifier[_at_flag] (): identifier[flag] = identifier[self] . identifier[_unconsumed_args] . identifier[pop] () keyword[if] keyword[not] identifier[self] . identifier[_check_for_help_request] ( identifier[flag] ): identifier[flags] . identifier[append] ( identifier[flag] ) keyword[return] identifier[flags]
def _consume_flags(self): """Read flags until we encounter the first token that isn't a flag.""" flags = [] while self._at_flag(): flag = self._unconsumed_args.pop() if not self._check_for_help_request(flag): flags.append(flag) # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] return flags
def make_html_patterns(): """Strongly inspired from idlelib.ColorDelegator.make_pat """ tags = any("builtin", [r"<", r"[\?/]?>", r"(?<=<).*?(?=[ >])"]) keywords = any("keyword", [r" [\w:-]*?(?==)"]) string = any("string", [r'".*?"']) comment = any("comment", [r"<!--.*?-->"]) multiline_comment_start = any("multiline_comment_start", [r"<!--"]) multiline_comment_end = any("multiline_comment_end", [r"-->"]) return "|".join([comment, multiline_comment_start, multiline_comment_end, tags, keywords, string])
def function[make_html_patterns, parameter[]]: constant[Strongly inspired from idlelib.ColorDelegator.make_pat ] variable[tags] assign[=] call[name[any], parameter[constant[builtin], list[[<ast.Constant object at 0x7da1b26adf30>, <ast.Constant object at 0x7da1b26acc10>, <ast.Constant object at 0x7da1b26aefe0>]]]] variable[keywords] assign[=] call[name[any], parameter[constant[keyword], list[[<ast.Constant object at 0x7da1b26aff40>]]]] variable[string] assign[=] call[name[any], parameter[constant[string], list[[<ast.Constant object at 0x7da1b26ac2e0>]]]] variable[comment] assign[=] call[name[any], parameter[constant[comment], list[[<ast.Constant object at 0x7da1b26ad0c0>]]]] variable[multiline_comment_start] assign[=] call[name[any], parameter[constant[multiline_comment_start], list[[<ast.Constant object at 0x7da1b26acf10>]]]] variable[multiline_comment_end] assign[=] call[name[any], parameter[constant[multiline_comment_end], list[[<ast.Constant object at 0x7da1b26acee0>]]]] return[call[constant[|].join, parameter[list[[<ast.Name object at 0x7da1b26ac820>, <ast.Name object at 0x7da1b26ad180>, <ast.Name object at 0x7da1b26ad390>, <ast.Name object at 0x7da1b26afa30>, <ast.Name object at 0x7da1b26ad1b0>, <ast.Name object at 0x7da1b26ac9d0>]]]]]
keyword[def] identifier[make_html_patterns] (): literal[string] identifier[tags] = identifier[any] ( literal[string] ,[ literal[string] , literal[string] , literal[string] ]) identifier[keywords] = identifier[any] ( literal[string] ,[ literal[string] ]) identifier[string] = identifier[any] ( literal[string] ,[ literal[string] ]) identifier[comment] = identifier[any] ( literal[string] ,[ literal[string] ]) identifier[multiline_comment_start] = identifier[any] ( literal[string] ,[ literal[string] ]) identifier[multiline_comment_end] = identifier[any] ( literal[string] ,[ literal[string] ]) keyword[return] literal[string] . identifier[join] ([ identifier[comment] , identifier[multiline_comment_start] , identifier[multiline_comment_end] , identifier[tags] , identifier[keywords] , identifier[string] ])
def make_html_patterns(): """Strongly inspired from idlelib.ColorDelegator.make_pat """ tags = any('builtin', ['<', '[\\?/]?>', '(?<=<).*?(?=[ >])']) keywords = any('keyword', [' [\\w:-]*?(?==)']) string = any('string', ['".*?"']) comment = any('comment', ['<!--.*?-->']) multiline_comment_start = any('multiline_comment_start', ['<!--']) multiline_comment_end = any('multiline_comment_end', ['-->']) return '|'.join([comment, multiline_comment_start, multiline_comment_end, tags, keywords, string])
def setup(self): """Setup main window""" logger.info("*** Start of MainWindow setup ***") logger.info("Applying theme configuration...") ui_theme = CONF.get('appearance', 'ui_theme') color_scheme = CONF.get('appearance', 'selected') if ui_theme == 'dark': dark_qss = qdarkstyle.load_stylesheet_from_environment() self.setStyleSheet(dark_qss) self.statusBar().setStyleSheet(dark_qss) css_path = DARK_CSS_PATH elif ui_theme == 'automatic': if not is_dark_font_color(color_scheme): dark_qss = qdarkstyle.load_stylesheet_from_environment() self.setStyleSheet(dark_qss) self.statusBar().setStyleSheet(dark_qss) css_path = DARK_CSS_PATH else: css_path = CSS_PATH else: css_path = CSS_PATH logger.info("Creating core actions...") self.close_dockwidget_action = create_action( self, icon=ima.icon('close_pane'), text=_("Close current pane"), triggered=self.close_current_dockwidget, context=Qt.ApplicationShortcut ) self.register_shortcut(self.close_dockwidget_action, "_", "Close pane") self.lock_interface_action = create_action( self, _("Lock panes and toolbars"), toggled=self.toggle_lock, context=Qt.ApplicationShortcut) self.register_shortcut(self.lock_interface_action, "_", "Lock unlock panes") # custom layouts shortcuts self.toggle_next_layout_action = create_action(self, _("Use next layout"), triggered=self.toggle_next_layout, context=Qt.ApplicationShortcut) self.toggle_previous_layout_action = create_action(self, _("Use previous layout"), triggered=self.toggle_previous_layout, context=Qt.ApplicationShortcut) self.register_shortcut(self.toggle_next_layout_action, "_", "Use next layout") self.register_shortcut(self.toggle_previous_layout_action, "_", "Use previous layout") # File switcher shortcuts self.file_switcher_action = create_action( self, _('File switcher...'), icon=ima.icon('filelist'), tip=_('Fast switch between files'), triggered=self.open_fileswitcher, context=Qt.ApplicationShortcut) self.register_shortcut(self.file_switcher_action, context="_", name="File switcher") self.symbol_finder_action = create_action( self, _('Symbol finder...'), icon=ima.icon('symbol_find'), tip=_('Fast symbol search in file'), triggered=self.open_symbolfinder, context=Qt.ApplicationShortcut) self.register_shortcut(self.symbol_finder_action, context="_", name="symbol finder", add_sc_to_tip=True) self.file_toolbar_actions = [self.file_switcher_action, self.symbol_finder_action] def create_edit_action(text, tr_text, icon): textseq = text.split(' ') method_name = textseq[0].lower()+"".join(textseq[1:]) action = create_action(self, tr_text, icon=icon, triggered=self.global_callback, data=method_name, context=Qt.WidgetShortcut) self.register_shortcut(action, "Editor", text) return action self.undo_action = create_edit_action('Undo', _('Undo'), ima.icon('undo')) self.redo_action = create_edit_action('Redo', _('Redo'), ima.icon('redo')) self.copy_action = create_edit_action('Copy', _('Copy'), ima.icon('editcopy')) self.cut_action = create_edit_action('Cut', _('Cut'), ima.icon('editcut')) self.paste_action = create_edit_action('Paste', _('Paste'), ima.icon('editpaste')) self.selectall_action = create_edit_action("Select All", _("Select All"), ima.icon('selectall')) self.edit_menu_actions = [self.undo_action, self.redo_action, None, self.cut_action, self.copy_action, self.paste_action, self.selectall_action] namespace = None logger.info("Creating toolbars...") # File menu/toolbar self.file_menu = self.menuBar().addMenu(_("&File")) self.file_toolbar = self.create_toolbar(_("File toolbar"), "file_toolbar") # Edit menu/toolbar self.edit_menu = self.menuBar().addMenu(_("&Edit")) self.edit_toolbar = self.create_toolbar(_("Edit toolbar"), "edit_toolbar") # Search menu/toolbar self.search_menu = self.menuBar().addMenu(_("&Search")) self.search_toolbar = self.create_toolbar(_("Search toolbar"), "search_toolbar") # Source menu/toolbar self.source_menu = self.menuBar().addMenu(_("Sour&ce")) self.source_toolbar = self.create_toolbar(_("Source toolbar"), "source_toolbar") # Run menu/toolbar self.run_menu = self.menuBar().addMenu(_("&Run")) self.run_toolbar = self.create_toolbar(_("Run toolbar"), "run_toolbar") # Debug menu/toolbar self.debug_menu = self.menuBar().addMenu(_("&Debug")) self.debug_toolbar = self.create_toolbar(_("Debug toolbar"), "debug_toolbar") # Consoles menu/toolbar self.consoles_menu = self.menuBar().addMenu(_("C&onsoles")) self.consoles_menu.aboutToShow.connect( self.update_execution_state_kernel) # Projects menu self.projects_menu = self.menuBar().addMenu(_("&Projects")) self.projects_menu.aboutToShow.connect(self.valid_project) # Tools menu self.tools_menu = self.menuBar().addMenu(_("&Tools")) # View menu self.view_menu = self.menuBar().addMenu(_("&View")) # Help menu self.help_menu = self.menuBar().addMenu(_("&Help")) # Status bar status = self.statusBar() status.setObjectName("StatusBar") status.showMessage(_("Welcome to Spyder!"), 5000) logger.info("Creating Tools menu...") # Tools + External Tools prefs_action = create_action(self, _("Pre&ferences"), icon=ima.icon('configure'), triggered=self.edit_preferences, context=Qt.ApplicationShortcut) self.register_shortcut(prefs_action, "_", "Preferences", add_sc_to_tip=True) spyder_path_action = create_action(self, _("PYTHONPATH manager"), None, icon=ima.icon('pythonpath'), triggered=self.path_manager_callback, tip=_("Python Path Manager"), menurole=QAction.ApplicationSpecificRole) reset_spyder_action = create_action( self, _("Reset Spyder to factory defaults"), triggered=self.reset_spyder) self.tools_menu_actions = [prefs_action, spyder_path_action] if WinUserEnvDialog is not None: winenv_action = create_action(self, _("Current user environment variables..."), icon='win_env.png', tip=_("Show and edit current user environment " "variables in Windows registry " "(i.e. for all sessions)"), triggered=self.win_env) self.tools_menu_actions.append(winenv_action) self.tools_menu_actions += [MENU_SEPARATOR, reset_spyder_action] # External Tools submenu self.external_tools_menu = QMenu(_("External Tools")) self.external_tools_menu_actions = [] # WinPython control panel self.wp_action = create_action(self, _("WinPython control panel"), icon=get_icon('winpython.svg'), triggered=lambda: programs.run_python_script('winpython', 'controlpanel')) if os.name == 'nt' and is_module_installed('winpython'): self.external_tools_menu_actions.append(self.wp_action) # Qt-related tools additact = [] for name in ("designer-qt4", "designer"): qtdact = create_program_action(self, _("Qt Designer"), name) if qtdact: break for name in ("linguist-qt4", "linguist"): qtlact = create_program_action(self, _("Qt Linguist"), "linguist") if qtlact: break args = ['-no-opengl'] if os.name == 'nt' else [] for act in (qtdact, qtlact): if act: additact.append(act) if additact and is_module_installed('winpython'): self.external_tools_menu_actions += [None] + additact # Guidata and Sift logger.info("Creating guidata and sift entries...") gdgq_act = [] # Guidata and Guiqwt don't support PyQt5 yet and they fail # with an AssertionError when imported using those bindings # (see issue 2274) try: from guidata import configtools from guidata import config # analysis:ignore guidata_icon = configtools.get_icon('guidata.svg') guidata_act = create_python_script_action(self, _("guidata examples"), guidata_icon, "guidata", osp.join("tests", "__init__")) gdgq_act += [guidata_act] except: pass try: from guidata import configtools from guiqwt import config # analysis:ignore guiqwt_icon = configtools.get_icon('guiqwt.svg') guiqwt_act = create_python_script_action(self, _("guiqwt examples"), guiqwt_icon, "guiqwt", osp.join("tests", "__init__")) if guiqwt_act: gdgq_act += [guiqwt_act] sift_icon = configtools.get_icon('sift.svg') sift_act = create_python_script_action(self, _("Sift"), sift_icon, "guiqwt", osp.join("tests", "sift")) if sift_act: gdgq_act += [sift_act] except: pass if gdgq_act: self.external_tools_menu_actions += [None] + gdgq_act # Maximize current plugin self.maximize_action = create_action(self, '', triggered=self.maximize_dockwidget, context=Qt.ApplicationShortcut) self.register_shortcut(self.maximize_action, "_", "Maximize pane") self.__update_maximize_action() # Fullscreen mode self.fullscreen_action = create_action(self, _("Fullscreen mode"), triggered=self.toggle_fullscreen, context=Qt.ApplicationShortcut) self.register_shortcut(self.fullscreen_action, "_", "Fullscreen mode", add_sc_to_tip=True) # Main toolbar self.main_toolbar_actions = [self.maximize_action, self.fullscreen_action, None, prefs_action, spyder_path_action] self.main_toolbar = self.create_toolbar(_("Main toolbar"), "main_toolbar") # Internal console plugin logger.info("Loading internal console...") from spyder.plugins.console.plugin import Console self.console = Console(self, namespace, exitfunc=self.closing, profile=self.profile, multithreaded=self.multithreaded, message=_("Spyder Internal Console\n\n" "This console is used to report application\n" "internal errors and to inspect Spyder\n" "internals with the following commands:\n" " spy.app, spy.window, dir(spy)\n\n" "Please don't use it to run your code\n\n")) self.console.register_plugin() # Language Server Protocol Client initialization self.set_splash(_("Starting Language Server Protocol manager...")) from spyder.plugins.editor.lsp.manager import LSPManager self.lspmanager = LSPManager(self) # Working directory plugin logger.info("Loading working directory...") from spyder.plugins.workingdirectory.plugin import WorkingDirectory self.workingdirectory = WorkingDirectory(self, self.init_workdir, main=self) self.workingdirectory.register_plugin() self.toolbarslist.append(self.workingdirectory.toolbar) # Help plugin if CONF.get('help', 'enable'): self.set_splash(_("Loading help...")) from spyder.plugins.help.plugin import Help self.help = Help(self, css_path=css_path) self.help.register_plugin() # Outline explorer widget if CONF.get('outline_explorer', 'enable'): self.set_splash(_("Loading outline explorer...")) from spyder.plugins.outlineexplorer.plugin import OutlineExplorer self.outlineexplorer = OutlineExplorer(self) self.outlineexplorer.register_plugin() # Editor plugin self.set_splash(_("Loading editor...")) from spyder.plugins.editor.plugin import Editor self.editor = Editor(self) self.editor.register_plugin() # Start LSP client self.set_splash(_("Launching LSP Client for Python...")) self.lspmanager.start_client(language='python') # Populating file menu entries quit_action = create_action(self, _("&Quit"), icon=ima.icon('exit'), tip=_("Quit"), triggered=self.console.quit, context=Qt.ApplicationShortcut) self.register_shortcut(quit_action, "_", "Quit") restart_action = create_action(self, _("&Restart"), icon=ima.icon('restart'), tip=_("Restart"), triggered=self.restart, context=Qt.ApplicationShortcut) self.register_shortcut(restart_action, "_", "Restart") self.file_menu_actions += [self.file_switcher_action, self.symbol_finder_action, None, restart_action, quit_action] self.set_splash("") # Namespace browser self.set_splash(_("Loading namespace browser...")) from spyder.plugins.variableexplorer.plugin import VariableExplorer self.variableexplorer = VariableExplorer(self) self.variableexplorer.register_plugin() # Figure browser self.set_splash(_("Loading figure browser...")) from spyder.plugins.plots.plugin import Plots self.plots = Plots(self) self.plots.register_plugin() # History log widget if CONF.get('historylog', 'enable'): self.set_splash(_("Loading history plugin...")) from spyder.plugins.history.plugin import HistoryLog self.historylog = HistoryLog(self) self.historylog.register_plugin() # IPython console self.set_splash(_("Loading IPython console...")) from spyder.plugins.ipythonconsole.plugin import IPythonConsole self.ipyconsole = IPythonConsole(self, css_path=css_path) self.ipyconsole.register_plugin() # Explorer if CONF.get('explorer', 'enable'): self.set_splash(_("Loading file explorer...")) from spyder.plugins.explorer.plugin import Explorer self.explorer = Explorer(self) self.explorer.register_plugin() # Online help widget try: # Qt >= v4.4 from spyder.plugins.onlinehelp.plugin import OnlineHelp except ImportError: # Qt < v4.4 OnlineHelp = None # analysis:ignore if CONF.get('onlinehelp', 'enable') and OnlineHelp is not None: self.set_splash(_("Loading online help...")) self.onlinehelp = OnlineHelp(self) self.onlinehelp.register_plugin() # Project explorer widget self.set_splash(_("Loading project explorer...")) from spyder.plugins.projects.plugin import Projects self.projects = Projects(self) self.projects.register_plugin() self.project_path = self.projects.get_pythonpath(at_start=True) # Find in files if CONF.get('find_in_files', 'enable'): from spyder.plugins.findinfiles.plugin import FindInFiles self.findinfiles = FindInFiles(self) self.findinfiles.register_plugin() # Load other plugins (former external plugins) # TODO: Use this bucle to load all internall plugins and remove # duplicated code other_plugins = ['breakpoints', 'profiler', 'pylint'] for plugin_name in other_plugins: if CONF.get(plugin_name, 'enable'): module = importlib.import_module( 'spyder.plugins.{}'.format(plugin_name)) plugin = module.PLUGIN_CLASS(self) if plugin.check_compatibility()[0]: self.thirdparty_plugins.append(plugin) plugin.register_plugin() # Third-party plugins self.set_splash(_("Loading third-party plugins...")) for mod in get_spyderplugins_mods(): try: plugin = mod.PLUGIN_CLASS(self) if plugin.check_compatibility()[0]: self.thirdparty_plugins.append(plugin) plugin.register_plugin() except Exception as error: print("%s: %s" % (mod, str(error)), file=STDERR) traceback.print_exc(file=STDERR) self.set_splash(_("Setting up main window...")) # Help menu trouble_action = create_action(self, _("Troubleshooting..."), triggered=self.trouble_guide) dep_action = create_action(self, _("Dependencies..."), triggered=self.show_dependencies, icon=ima.icon('advanced')) report_action = create_action(self, _("Report issue..."), icon=ima.icon('bug'), triggered=self.report_issue) support_action = create_action(self, _("Spyder support..."), triggered=self.google_group) self.check_updates_action = create_action(self, _("Check for updates..."), triggered=self.check_updates) # Spyder documentation spyder_doc = 'https://docs.spyder-ide.org/' doc_action = create_action(self, _("Spyder documentation"), icon=ima.icon('DialogHelpButton'), triggered=lambda: programs.start_file(spyder_doc)) self.register_shortcut(doc_action, "_", "spyder documentation") if self.help is not None: tut_action = create_action(self, _("Spyder tutorial"), triggered=self.help.show_tutorial) else: tut_action = None shortcuts_action = create_action(self, _("Shortcuts Summary"), shortcut="Meta+F1", triggered=self.show_shortcuts_dialog) #----- Tours self.tour = tour.AnimatedTour(self) self.tours_menu = QMenu(_("Interactive tours"), self) self.tour_menu_actions = [] # TODO: Only show intro tour for now. When we are close to finish # 3.0, we will finish and show the other tour self.tours_available = tour.get_tours(0) for i, tour_available in enumerate(self.tours_available): self.tours_available[i]['last'] = 0 tour_name = tour_available['name'] def trigger(i=i, self=self): # closure needed! return lambda: self.show_tour(i) temp_action = create_action(self, tour_name, tip="", triggered=trigger()) self.tour_menu_actions += [temp_action] self.tours_menu.addActions(self.tour_menu_actions) self.help_menu_actions = [doc_action, tut_action, shortcuts_action, self.tours_menu, MENU_SEPARATOR, trouble_action, report_action, dep_action, self.check_updates_action, support_action, MENU_SEPARATOR] # Python documentation if get_python_doc_path() is not None: pydoc_act = create_action(self, _("Python documentation"), triggered=lambda: programs.start_file(get_python_doc_path())) self.help_menu_actions.append(pydoc_act) # IPython documentation if self.help is not None: ipython_menu = QMenu(_("IPython documentation"), self) intro_action = create_action(self, _("Intro to IPython"), triggered=self.ipyconsole.show_intro) quickref_action = create_action(self, _("Quick reference"), triggered=self.ipyconsole.show_quickref) guiref_action = create_action(self, _("Console help"), triggered=self.ipyconsole.show_guiref) add_actions(ipython_menu, (intro_action, guiref_action, quickref_action)) self.help_menu_actions.append(ipython_menu) # Windows-only: documentation located in sys.prefix/Doc ipm_actions = [] def add_ipm_action(text, path): """Add installed Python module doc action to help submenu""" # QAction.triggered works differently for PySide and PyQt path = file_uri(path) if not API == 'pyside': slot=lambda _checked, path=path: programs.start_file(path) else: slot=lambda path=path: programs.start_file(path) action = create_action(self, text, icon='%s.png' % osp.splitext(path)[1][1:], triggered=slot) ipm_actions.append(action) sysdocpth = osp.join(sys.prefix, 'Doc') if osp.isdir(sysdocpth): # exists on Windows, except frozen dist. for docfn in os.listdir(sysdocpth): pt = r'([a-zA-Z\_]*)(doc)?(-dev)?(-ref)?(-user)?.(chm|pdf)' match = re.match(pt, docfn) if match is not None: pname = match.groups()[0] if pname not in ('Python', ): add_ipm_action(pname, osp.join(sysdocpth, docfn)) # Installed Python modules submenu (Windows only) if ipm_actions: pymods_menu = QMenu(_("Installed Python modules"), self) add_actions(pymods_menu, ipm_actions) self.help_menu_actions.append(pymods_menu) # Online documentation web_resources = QMenu(_("Online documentation"), self) webres_actions = create_module_bookmark_actions(self, self.BOOKMARKS) webres_actions.insert(2, None) webres_actions.insert(5, None) webres_actions.insert(8, None) add_actions(web_resources, webres_actions) self.help_menu_actions.append(web_resources) # Qt assistant link if sys.platform.startswith('linux') and not PYQT5: qta_exe = "assistant-qt4" else: qta_exe = "assistant" qta_act = create_program_action(self, _("Qt documentation"), qta_exe) if qta_act: self.help_menu_actions += [qta_act, None] # About Spyder about_action = create_action(self, _("About %s...") % "Spyder", icon=ima.icon('MessageBoxInformation'), triggered=self.about) self.help_menu_actions += [MENU_SEPARATOR, about_action] # Status bar widgets from spyder.widgets.status import MemoryStatus, CPUStatus self.mem_status = MemoryStatus(self, status) self.cpu_status = CPUStatus(self, status) self.apply_statusbar_settings() # ----- View # View menu self.plugins_menu = QMenu(_("Panes"), self) self.toolbars_menu = QMenu(_("Toolbars"), self) self.quick_layout_menu = QMenu(_("Window layouts"), self) self.quick_layout_set_menu() self.view_menu.addMenu(self.plugins_menu) # Panes add_actions(self.view_menu, (self.lock_interface_action, self.close_dockwidget_action, self.maximize_action, MENU_SEPARATOR)) self.show_toolbars_action = create_action(self, _("Show toolbars"), triggered=self.show_toolbars, context=Qt.ApplicationShortcut) self.register_shortcut(self.show_toolbars_action, "_", "Show toolbars") self.view_menu.addMenu(self.toolbars_menu) self.view_menu.addAction(self.show_toolbars_action) add_actions(self.view_menu, (MENU_SEPARATOR, self.quick_layout_menu, self.toggle_previous_layout_action, self.toggle_next_layout_action, MENU_SEPARATOR, self.fullscreen_action)) if set_attached_console_visible is not None: cmd_act = create_action(self, _("Attached console window (debugging)"), toggled=set_attached_console_visible) cmd_act.setChecked(is_attached_console_visible()) add_actions(self.view_menu, (MENU_SEPARATOR, cmd_act)) # Adding external tools action to "Tools" menu if self.external_tools_menu_actions: external_tools_act = create_action(self, _("External Tools")) external_tools_act.setMenu(self.external_tools_menu) self.tools_menu_actions += [None, external_tools_act] # Filling out menu/toolbar entries: add_actions(self.file_menu, self.file_menu_actions) add_actions(self.edit_menu, self.edit_menu_actions) add_actions(self.search_menu, self.search_menu_actions) add_actions(self.source_menu, self.source_menu_actions) add_actions(self.run_menu, self.run_menu_actions) add_actions(self.debug_menu, self.debug_menu_actions) add_actions(self.consoles_menu, self.consoles_menu_actions) add_actions(self.projects_menu, self.projects_menu_actions) add_actions(self.tools_menu, self.tools_menu_actions) add_actions(self.external_tools_menu, self.external_tools_menu_actions) add_actions(self.help_menu, self.help_menu_actions) add_actions(self.main_toolbar, self.main_toolbar_actions) add_actions(self.file_toolbar, self.file_toolbar_actions) add_actions(self.edit_toolbar, self.edit_toolbar_actions) add_actions(self.search_toolbar, self.search_toolbar_actions) add_actions(self.source_toolbar, self.source_toolbar_actions) add_actions(self.debug_toolbar, self.debug_toolbar_actions) add_actions(self.run_toolbar, self.run_toolbar_actions) # Apply all defined shortcuts (plugins + 3rd-party plugins) self.apply_shortcuts() # Emitting the signal notifying plugins that main window menu and # toolbar actions are all defined: self.all_actions_defined.emit() # Window set-up logger.info("Setting up window...") self.setup_layout(default=False) # Show and hide shortcuts in menus for Mac. # This is a workaround because we can't disable shortcuts # by setting context=Qt.WidgetShortcut there if sys.platform == 'darwin': for name in ['file', 'edit', 'search', 'source', 'run', 'debug', 'projects', 'tools', 'plugins']: menu_object = getattr(self, name + '_menu') menu_object.aboutToShow.connect( lambda name=name: self.show_shortcuts(name)) menu_object.aboutToHide.connect( lambda name=name: self.hide_shortcuts(name)) if self.splash is not None: self.splash.hide() # Enabling tear off for all menus except help menu if CONF.get('main', 'tear_off_menus'): for child in self.menuBar().children(): if isinstance(child, QMenu) and child != self.help_menu: child.setTearOffEnabled(True) # Menu about to show for child in self.menuBar().children(): if isinstance(child, QMenu): try: child.aboutToShow.connect(self.update_edit_menu) child.aboutToShow.connect(self.update_search_menu) except TypeError: pass logger.info("*** End of MainWindow setup ***") self.is_starting_up = False
def function[setup, parameter[self]]: constant[Setup main window] call[name[logger].info, parameter[constant[*** Start of MainWindow setup ***]]] call[name[logger].info, parameter[constant[Applying theme configuration...]]] variable[ui_theme] assign[=] call[name[CONF].get, parameter[constant[appearance], constant[ui_theme]]] variable[color_scheme] assign[=] call[name[CONF].get, parameter[constant[appearance], constant[selected]]] if compare[name[ui_theme] equal[==] constant[dark]] begin[:] variable[dark_qss] assign[=] call[name[qdarkstyle].load_stylesheet_from_environment, parameter[]] call[name[self].setStyleSheet, parameter[name[dark_qss]]] call[call[name[self].statusBar, parameter[]].setStyleSheet, parameter[name[dark_qss]]] variable[css_path] assign[=] name[DARK_CSS_PATH] call[name[logger].info, parameter[constant[Creating core actions...]]] name[self].close_dockwidget_action assign[=] call[name[create_action], parameter[name[self]]] call[name[self].register_shortcut, parameter[name[self].close_dockwidget_action, constant[_], constant[Close pane]]] name[self].lock_interface_action assign[=] call[name[create_action], parameter[name[self], call[name[_], parameter[constant[Lock panes and toolbars]]]]] call[name[self].register_shortcut, parameter[name[self].lock_interface_action, constant[_], constant[Lock unlock panes]]] name[self].toggle_next_layout_action assign[=] call[name[create_action], parameter[name[self], call[name[_], parameter[constant[Use next layout]]]]] name[self].toggle_previous_layout_action assign[=] call[name[create_action], parameter[name[self], call[name[_], parameter[constant[Use previous layout]]]]] call[name[self].register_shortcut, parameter[name[self].toggle_next_layout_action, constant[_], constant[Use next layout]]] call[name[self].register_shortcut, parameter[name[self].toggle_previous_layout_action, constant[_], constant[Use previous layout]]] name[self].file_switcher_action assign[=] call[name[create_action], parameter[name[self], call[name[_], parameter[constant[File switcher...]]]]] call[name[self].register_shortcut, parameter[name[self].file_switcher_action]] name[self].symbol_finder_action assign[=] call[name[create_action], parameter[name[self], call[name[_], parameter[constant[Symbol finder...]]]]] call[name[self].register_shortcut, parameter[name[self].symbol_finder_action]] name[self].file_toolbar_actions assign[=] list[[<ast.Attribute object at 0x7da207f03520>, <ast.Attribute object at 0x7da207f01cf0>]] def function[create_edit_action, parameter[text, tr_text, icon]]: variable[textseq] assign[=] call[name[text].split, parameter[constant[ ]]] variable[method_name] assign[=] binary_operation[call[call[name[textseq]][constant[0]].lower, parameter[]] + call[constant[].join, parameter[call[name[textseq]][<ast.Slice object at 0x7da207f00b80>]]]] variable[action] assign[=] call[name[create_action], parameter[name[self], name[tr_text]]] call[name[self].register_shortcut, parameter[name[action], constant[Editor], name[text]]] return[name[action]] name[self].undo_action assign[=] call[name[create_edit_action], parameter[constant[Undo], call[name[_], parameter[constant[Undo]]], call[name[ima].icon, parameter[constant[undo]]]]] name[self].redo_action assign[=] call[name[create_edit_action], parameter[constant[Redo], call[name[_], parameter[constant[Redo]]], call[name[ima].icon, parameter[constant[redo]]]]] name[self].copy_action assign[=] call[name[create_edit_action], parameter[constant[Copy], call[name[_], parameter[constant[Copy]]], call[name[ima].icon, parameter[constant[editcopy]]]]] name[self].cut_action assign[=] call[name[create_edit_action], parameter[constant[Cut], call[name[_], parameter[constant[Cut]]], call[name[ima].icon, parameter[constant[editcut]]]]] name[self].paste_action assign[=] call[name[create_edit_action], parameter[constant[Paste], call[name[_], parameter[constant[Paste]]], call[name[ima].icon, parameter[constant[editpaste]]]]] name[self].selectall_action assign[=] call[name[create_edit_action], parameter[constant[Select All], call[name[_], parameter[constant[Select All]]], call[name[ima].icon, parameter[constant[selectall]]]]] name[self].edit_menu_actions assign[=] list[[<ast.Attribute object at 0x7da207f012a0>, <ast.Attribute object at 0x7da207f02fb0>, <ast.Constant object at 0x7da207f03d00>, <ast.Attribute object at 0x7da207f009d0>, <ast.Attribute object at 0x7da207f03a00>, <ast.Attribute object at 0x7da207f00dc0>, <ast.Attribute object at 0x7da207f017b0>]] variable[namespace] assign[=] constant[None] call[name[logger].info, parameter[constant[Creating toolbars...]]] name[self].file_menu assign[=] call[call[name[self].menuBar, parameter[]].addMenu, parameter[call[name[_], parameter[constant[&File]]]]] name[self].file_toolbar assign[=] call[name[self].create_toolbar, parameter[call[name[_], parameter[constant[File toolbar]]], constant[file_toolbar]]] name[self].edit_menu assign[=] call[call[name[self].menuBar, parameter[]].addMenu, parameter[call[name[_], parameter[constant[&Edit]]]]] name[self].edit_toolbar assign[=] call[name[self].create_toolbar, parameter[call[name[_], parameter[constant[Edit toolbar]]], constant[edit_toolbar]]] name[self].search_menu assign[=] call[call[name[self].menuBar, parameter[]].addMenu, parameter[call[name[_], parameter[constant[&Search]]]]] name[self].search_toolbar assign[=] call[name[self].create_toolbar, parameter[call[name[_], parameter[constant[Search toolbar]]], constant[search_toolbar]]] name[self].source_menu assign[=] call[call[name[self].menuBar, parameter[]].addMenu, parameter[call[name[_], parameter[constant[Sour&ce]]]]] name[self].source_toolbar assign[=] call[name[self].create_toolbar, parameter[call[name[_], parameter[constant[Source toolbar]]], constant[source_toolbar]]] name[self].run_menu assign[=] call[call[name[self].menuBar, parameter[]].addMenu, parameter[call[name[_], parameter[constant[&Run]]]]] name[self].run_toolbar assign[=] call[name[self].create_toolbar, parameter[call[name[_], parameter[constant[Run toolbar]]], constant[run_toolbar]]] name[self].debug_menu assign[=] call[call[name[self].menuBar, parameter[]].addMenu, parameter[call[name[_], parameter[constant[&Debug]]]]] name[self].debug_toolbar assign[=] call[name[self].create_toolbar, parameter[call[name[_], parameter[constant[Debug toolbar]]], constant[debug_toolbar]]] name[self].consoles_menu assign[=] call[call[name[self].menuBar, parameter[]].addMenu, parameter[call[name[_], parameter[constant[C&onsoles]]]]] call[name[self].consoles_menu.aboutToShow.connect, parameter[name[self].update_execution_state_kernel]] name[self].projects_menu assign[=] call[call[name[self].menuBar, parameter[]].addMenu, parameter[call[name[_], parameter[constant[&Projects]]]]] call[name[self].projects_menu.aboutToShow.connect, parameter[name[self].valid_project]] name[self].tools_menu assign[=] call[call[name[self].menuBar, parameter[]].addMenu, parameter[call[name[_], parameter[constant[&Tools]]]]] name[self].view_menu assign[=] call[call[name[self].menuBar, parameter[]].addMenu, parameter[call[name[_], parameter[constant[&View]]]]] name[self].help_menu assign[=] call[call[name[self].menuBar, parameter[]].addMenu, parameter[call[name[_], parameter[constant[&Help]]]]] variable[status] assign[=] call[name[self].statusBar, parameter[]] call[name[status].setObjectName, parameter[constant[StatusBar]]] call[name[status].showMessage, parameter[call[name[_], parameter[constant[Welcome to Spyder!]]], constant[5000]]] call[name[logger].info, parameter[constant[Creating Tools menu...]]] variable[prefs_action] assign[=] call[name[create_action], parameter[name[self], call[name[_], parameter[constant[Pre&ferences]]]]] call[name[self].register_shortcut, parameter[name[prefs_action], constant[_], constant[Preferences]]] variable[spyder_path_action] assign[=] call[name[create_action], parameter[name[self], call[name[_], parameter[constant[PYTHONPATH manager]]], constant[None]]] variable[reset_spyder_action] assign[=] call[name[create_action], parameter[name[self], call[name[_], parameter[constant[Reset Spyder to factory defaults]]]]] name[self].tools_menu_actions assign[=] list[[<ast.Name object at 0x7da204567490>, <ast.Name object at 0x7da204565db0>]] if compare[name[WinUserEnvDialog] is_not constant[None]] begin[:] variable[winenv_action] assign[=] call[name[create_action], parameter[name[self], call[name[_], parameter[constant[Current user environment variables...]]]]] call[name[self].tools_menu_actions.append, parameter[name[winenv_action]]] <ast.AugAssign object at 0x7da2045674c0> name[self].external_tools_menu assign[=] call[name[QMenu], parameter[call[name[_], parameter[constant[External Tools]]]]] name[self].external_tools_menu_actions assign[=] list[[]] name[self].wp_action assign[=] call[name[create_action], parameter[name[self], call[name[_], parameter[constant[WinPython control panel]]]]] if <ast.BoolOp object at 0x7da204623a00> begin[:] call[name[self].external_tools_menu_actions.append, parameter[name[self].wp_action]] variable[additact] assign[=] list[[]] for taget[name[name]] in starred[tuple[[<ast.Constant object at 0x7da2046216f0>, <ast.Constant object at 0x7da204620dc0>]]] begin[:] variable[qtdact] assign[=] call[name[create_program_action], parameter[name[self], call[name[_], parameter[constant[Qt Designer]]], name[name]]] if name[qtdact] begin[:] break for taget[name[name]] in starred[tuple[[<ast.Constant object at 0x7da1b2346620>, <ast.Constant object at 0x7da1b2345bd0>]]] begin[:] variable[qtlact] assign[=] call[name[create_program_action], parameter[name[self], call[name[_], parameter[constant[Qt Linguist]]], constant[linguist]]] if name[qtlact] begin[:] break variable[args] assign[=] <ast.IfExp object at 0x7da1b2347e50> for taget[name[act]] in starred[tuple[[<ast.Name object at 0x7da1b23456f0>, <ast.Name object at 0x7da1b2347d90>]]] begin[:] if name[act] begin[:] call[name[additact].append, parameter[name[act]]] if <ast.BoolOp object at 0x7da1b23461a0> begin[:] <ast.AugAssign object at 0x7da1b23468f0> call[name[logger].info, parameter[constant[Creating guidata and sift entries...]]] variable[gdgq_act] assign[=] list[[]] <ast.Try object at 0x7da1b2346710> <ast.Try object at 0x7da1b2344520> if name[gdgq_act] begin[:] <ast.AugAssign object at 0x7da1b23474c0> name[self].maximize_action assign[=] call[name[create_action], parameter[name[self], constant[]]] call[name[self].register_shortcut, parameter[name[self].maximize_action, constant[_], constant[Maximize pane]]] call[name[self].__update_maximize_action, parameter[]] name[self].fullscreen_action assign[=] call[name[create_action], parameter[name[self], call[name[_], parameter[constant[Fullscreen mode]]]]] call[name[self].register_shortcut, parameter[name[self].fullscreen_action, constant[_], constant[Fullscreen mode]]] name[self].main_toolbar_actions assign[=] list[[<ast.Attribute object at 0x7da1b2345ff0>, <ast.Attribute object at 0x7da1b2344400>, <ast.Constant object at 0x7da1b2346080>, <ast.Name object at 0x7da1b2346140>, <ast.Name object at 0x7da1b23471c0>]] name[self].main_toolbar assign[=] call[name[self].create_toolbar, parameter[call[name[_], parameter[constant[Main toolbar]]], constant[main_toolbar]]] call[name[logger].info, parameter[constant[Loading internal console...]]] from relative_module[spyder.plugins.console.plugin] import module[Console] name[self].console assign[=] call[name[Console], parameter[name[self], name[namespace]]] call[name[self].console.register_plugin, parameter[]] call[name[self].set_splash, parameter[call[name[_], parameter[constant[Starting Language Server Protocol manager...]]]]] from relative_module[spyder.plugins.editor.lsp.manager] import module[LSPManager] name[self].lspmanager assign[=] call[name[LSPManager], parameter[name[self]]] call[name[logger].info, parameter[constant[Loading working directory...]]] from relative_module[spyder.plugins.workingdirectory.plugin] import module[WorkingDirectory] name[self].workingdirectory assign[=] call[name[WorkingDirectory], parameter[name[self], name[self].init_workdir]] call[name[self].workingdirectory.register_plugin, parameter[]] call[name[self].toolbarslist.append, parameter[name[self].workingdirectory.toolbar]] if call[name[CONF].get, parameter[constant[help], constant[enable]]] begin[:] call[name[self].set_splash, parameter[call[name[_], parameter[constant[Loading help...]]]]] from relative_module[spyder.plugins.help.plugin] import module[Help] name[self].help assign[=] call[name[Help], parameter[name[self]]] call[name[self].help.register_plugin, parameter[]] if call[name[CONF].get, parameter[constant[outline_explorer], constant[enable]]] begin[:] call[name[self].set_splash, parameter[call[name[_], parameter[constant[Loading outline explorer...]]]]] from relative_module[spyder.plugins.outlineexplorer.plugin] import module[OutlineExplorer] name[self].outlineexplorer assign[=] call[name[OutlineExplorer], parameter[name[self]]] call[name[self].outlineexplorer.register_plugin, parameter[]] call[name[self].set_splash, parameter[call[name[_], parameter[constant[Loading editor...]]]]] from relative_module[spyder.plugins.editor.plugin] import module[Editor] name[self].editor assign[=] call[name[Editor], parameter[name[self]]] call[name[self].editor.register_plugin, parameter[]] call[name[self].set_splash, parameter[call[name[_], parameter[constant[Launching LSP Client for Python...]]]]] call[name[self].lspmanager.start_client, parameter[]] variable[quit_action] assign[=] call[name[create_action], parameter[name[self], call[name[_], parameter[constant[&Quit]]]]] call[name[self].register_shortcut, parameter[name[quit_action], constant[_], constant[Quit]]] variable[restart_action] assign[=] call[name[create_action], parameter[name[self], call[name[_], parameter[constant[&Restart]]]]] call[name[self].register_shortcut, parameter[name[restart_action], constant[_], constant[Restart]]] <ast.AugAssign object at 0x7da207f9a170> call[name[self].set_splash, parameter[constant[]]] call[name[self].set_splash, parameter[call[name[_], parameter[constant[Loading namespace browser...]]]]] from relative_module[spyder.plugins.variableexplorer.plugin] import module[VariableExplorer] name[self].variableexplorer assign[=] call[name[VariableExplorer], parameter[name[self]]] call[name[self].variableexplorer.register_plugin, parameter[]] call[name[self].set_splash, parameter[call[name[_], parameter[constant[Loading figure browser...]]]]] from relative_module[spyder.plugins.plots.plugin] import module[Plots] name[self].plots assign[=] call[name[Plots], parameter[name[self]]] call[name[self].plots.register_plugin, parameter[]] if call[name[CONF].get, parameter[constant[historylog], constant[enable]]] begin[:] call[name[self].set_splash, parameter[call[name[_], parameter[constant[Loading history plugin...]]]]] from relative_module[spyder.plugins.history.plugin] import module[HistoryLog] name[self].historylog assign[=] call[name[HistoryLog], parameter[name[self]]] call[name[self].historylog.register_plugin, parameter[]] call[name[self].set_splash, parameter[call[name[_], parameter[constant[Loading IPython console...]]]]] from relative_module[spyder.plugins.ipythonconsole.plugin] import module[IPythonConsole] name[self].ipyconsole assign[=] call[name[IPythonConsole], parameter[name[self]]] call[name[self].ipyconsole.register_plugin, parameter[]] if call[name[CONF].get, parameter[constant[explorer], constant[enable]]] begin[:] call[name[self].set_splash, parameter[call[name[_], parameter[constant[Loading file explorer...]]]]] from relative_module[spyder.plugins.explorer.plugin] import module[Explorer] name[self].explorer assign[=] call[name[Explorer], parameter[name[self]]] call[name[self].explorer.register_plugin, parameter[]] <ast.Try object at 0x7da1b21edbd0> if <ast.BoolOp object at 0x7da1b21eef80> begin[:] call[name[self].set_splash, parameter[call[name[_], parameter[constant[Loading online help...]]]]] name[self].onlinehelp assign[=] call[name[OnlineHelp], parameter[name[self]]] call[name[self].onlinehelp.register_plugin, parameter[]] call[name[self].set_splash, parameter[call[name[_], parameter[constant[Loading project explorer...]]]]] from relative_module[spyder.plugins.projects.plugin] import module[Projects] name[self].projects assign[=] call[name[Projects], parameter[name[self]]] call[name[self].projects.register_plugin, parameter[]] name[self].project_path assign[=] call[name[self].projects.get_pythonpath, parameter[]] if call[name[CONF].get, parameter[constant[find_in_files], constant[enable]]] begin[:] from relative_module[spyder.plugins.findinfiles.plugin] import module[FindInFiles] name[self].findinfiles assign[=] call[name[FindInFiles], parameter[name[self]]] call[name[self].findinfiles.register_plugin, parameter[]] variable[other_plugins] assign[=] list[[<ast.Constant object at 0x7da1b21ecd30>, <ast.Constant object at 0x7da1b21ecd60>, <ast.Constant object at 0x7da1b21ecd90>]] for taget[name[plugin_name]] in starred[name[other_plugins]] begin[:] if call[name[CONF].get, parameter[name[plugin_name], constant[enable]]] begin[:] variable[module] assign[=] call[name[importlib].import_module, parameter[call[constant[spyder.plugins.{}].format, parameter[name[plugin_name]]]]] variable[plugin] assign[=] call[name[module].PLUGIN_CLASS, parameter[name[self]]] if call[call[name[plugin].check_compatibility, parameter[]]][constant[0]] begin[:] call[name[self].thirdparty_plugins.append, parameter[name[plugin]]] call[name[plugin].register_plugin, parameter[]] call[name[self].set_splash, parameter[call[name[_], parameter[constant[Loading third-party plugins...]]]]] for taget[name[mod]] in starred[call[name[get_spyderplugins_mods], parameter[]]] begin[:] <ast.Try object at 0x7da1b21ed210> call[name[self].set_splash, parameter[call[name[_], parameter[constant[Setting up main window...]]]]] variable[trouble_action] assign[=] call[name[create_action], parameter[name[self], call[name[_], parameter[constant[Troubleshooting...]]]]] variable[dep_action] assign[=] call[name[create_action], parameter[name[self], call[name[_], parameter[constant[Dependencies...]]]]] variable[report_action] assign[=] call[name[create_action], parameter[name[self], call[name[_], parameter[constant[Report issue...]]]]] variable[support_action] assign[=] call[name[create_action], parameter[name[self], call[name[_], parameter[constant[Spyder support...]]]]] name[self].check_updates_action assign[=] call[name[create_action], parameter[name[self], call[name[_], parameter[constant[Check for updates...]]]]] variable[spyder_doc] assign[=] constant[https://docs.spyder-ide.org/] variable[doc_action] assign[=] call[name[create_action], parameter[name[self], call[name[_], parameter[constant[Spyder documentation]]]]] call[name[self].register_shortcut, parameter[name[doc_action], constant[_], constant[spyder documentation]]] if compare[name[self].help is_not constant[None]] begin[:] variable[tut_action] assign[=] call[name[create_action], parameter[name[self], call[name[_], parameter[constant[Spyder tutorial]]]]] variable[shortcuts_action] assign[=] call[name[create_action], parameter[name[self], call[name[_], parameter[constant[Shortcuts Summary]]]]] name[self].tour assign[=] call[name[tour].AnimatedTour, parameter[name[self]]] name[self].tours_menu assign[=] call[name[QMenu], parameter[call[name[_], parameter[constant[Interactive tours]]], name[self]]] name[self].tour_menu_actions assign[=] list[[]] name[self].tours_available assign[=] call[name[tour].get_tours, parameter[constant[0]]] for taget[tuple[[<ast.Name object at 0x7da1b1f676d0>, <ast.Name object at 0x7da1b1f65450>]]] in starred[call[name[enumerate], parameter[name[self].tours_available]]] begin[:] call[call[name[self].tours_available][name[i]]][constant[last]] assign[=] constant[0] variable[tour_name] assign[=] call[name[tour_available]][constant[name]] def function[trigger, parameter[i, self]]: return[<ast.Lambda object at 0x7da1b1f651e0>] variable[temp_action] assign[=] call[name[create_action], parameter[name[self], name[tour_name]]] <ast.AugAssign object at 0x7da1b1f67820> call[name[self].tours_menu.addActions, parameter[name[self].tour_menu_actions]] name[self].help_menu_actions assign[=] list[[<ast.Name object at 0x7da1b1f65030>, <ast.Name object at 0x7da1b1f64700>, <ast.Name object at 0x7da1b1f64520>, <ast.Attribute object at 0x7da1b1f649a0>, <ast.Name object at 0x7da1b1f677f0>, <ast.Name object at 0x7da1b1f67bb0>, <ast.Name object at 0x7da1b1f67b50>, <ast.Name object at 0x7da1b1f64430>, <ast.Attribute object at 0x7da1b1f67550>, <ast.Name object at 0x7da1b1f678b0>, <ast.Name object at 0x7da1b1f673a0>]] if compare[call[name[get_python_doc_path], parameter[]] is_not constant[None]] begin[:] variable[pydoc_act] assign[=] call[name[create_action], parameter[name[self], call[name[_], parameter[constant[Python documentation]]]]] call[name[self].help_menu_actions.append, parameter[name[pydoc_act]]] if compare[name[self].help is_not constant[None]] begin[:] variable[ipython_menu] assign[=] call[name[QMenu], parameter[call[name[_], parameter[constant[IPython documentation]]], name[self]]] variable[intro_action] assign[=] call[name[create_action], parameter[name[self], call[name[_], parameter[constant[Intro to IPython]]]]] variable[quickref_action] assign[=] call[name[create_action], parameter[name[self], call[name[_], parameter[constant[Quick reference]]]]] variable[guiref_action] assign[=] call[name[create_action], parameter[name[self], call[name[_], parameter[constant[Console help]]]]] call[name[add_actions], parameter[name[ipython_menu], tuple[[<ast.Name object at 0x7da1b1f65210>, <ast.Name object at 0x7da1b1f67160>, <ast.Name object at 0x7da1b1f65630>]]]] call[name[self].help_menu_actions.append, parameter[name[ipython_menu]]] variable[ipm_actions] assign[=] list[[]] def function[add_ipm_action, parameter[text, path]]: constant[Add installed Python module doc action to help submenu] variable[path] assign[=] call[name[file_uri], parameter[name[path]]] if <ast.UnaryOp object at 0x7da1b1fa44f0> begin[:] variable[slot] assign[=] <ast.Lambda object at 0x7da1b1fa7b80> variable[action] assign[=] call[name[create_action], parameter[name[self], name[text]]] call[name[ipm_actions].append, parameter[name[action]]] variable[sysdocpth] assign[=] call[name[osp].join, parameter[name[sys].prefix, constant[Doc]]] if call[name[osp].isdir, parameter[name[sysdocpth]]] begin[:] for taget[name[docfn]] in starred[call[name[os].listdir, parameter[name[sysdocpth]]]] begin[:] variable[pt] assign[=] constant[([a-zA-Z\_]*)(doc)?(-dev)?(-ref)?(-user)?.(chm|pdf)] variable[match] assign[=] call[name[re].match, parameter[name[pt], name[docfn]]] if compare[name[match] is_not constant[None]] begin[:] variable[pname] assign[=] call[call[name[match].groups, parameter[]]][constant[0]] if compare[name[pname] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da1b1fa7580>]]] begin[:] call[name[add_ipm_action], parameter[name[pname], call[name[osp].join, parameter[name[sysdocpth], name[docfn]]]]] if name[ipm_actions] begin[:] variable[pymods_menu] assign[=] call[name[QMenu], parameter[call[name[_], parameter[constant[Installed Python modules]]], name[self]]] call[name[add_actions], parameter[name[pymods_menu], name[ipm_actions]]] call[name[self].help_menu_actions.append, parameter[name[pymods_menu]]] variable[web_resources] assign[=] call[name[QMenu], parameter[call[name[_], parameter[constant[Online documentation]]], name[self]]] variable[webres_actions] assign[=] call[name[create_module_bookmark_actions], parameter[name[self], name[self].BOOKMARKS]] call[name[webres_actions].insert, parameter[constant[2], constant[None]]] call[name[webres_actions].insert, parameter[constant[5], constant[None]]] call[name[webres_actions].insert, parameter[constant[8], constant[None]]] call[name[add_actions], parameter[name[web_resources], name[webres_actions]]] call[name[self].help_menu_actions.append, parameter[name[web_resources]]] if <ast.BoolOp object at 0x7da1b1fa4ca0> begin[:] variable[qta_exe] assign[=] constant[assistant-qt4] variable[qta_act] assign[=] call[name[create_program_action], parameter[name[self], call[name[_], parameter[constant[Qt documentation]]], name[qta_exe]]] if name[qta_act] begin[:] <ast.AugAssign object at 0x7da1b1fa5c60> variable[about_action] assign[=] call[name[create_action], parameter[name[self], binary_operation[call[name[_], parameter[constant[About %s...]]] <ast.Mod object at 0x7da2590d6920> constant[Spyder]]]] <ast.AugAssign object at 0x7da1b1fa6b90> from relative_module[spyder.widgets.status] import module[MemoryStatus], module[CPUStatus] name[self].mem_status assign[=] call[name[MemoryStatus], parameter[name[self], name[status]]] name[self].cpu_status assign[=] call[name[CPUStatus], parameter[name[self], name[status]]] call[name[self].apply_statusbar_settings, parameter[]] name[self].plugins_menu assign[=] call[name[QMenu], parameter[call[name[_], parameter[constant[Panes]]], name[self]]] name[self].toolbars_menu assign[=] call[name[QMenu], parameter[call[name[_], parameter[constant[Toolbars]]], name[self]]] name[self].quick_layout_menu assign[=] call[name[QMenu], parameter[call[name[_], parameter[constant[Window layouts]]], name[self]]] call[name[self].quick_layout_set_menu, parameter[]] call[name[self].view_menu.addMenu, parameter[name[self].plugins_menu]] call[name[add_actions], parameter[name[self].view_menu, tuple[[<ast.Attribute object at 0x7da1b1fa7b50>, <ast.Attribute object at 0x7da1b1fa4610>, <ast.Attribute object at 0x7da1b1fa4d60>, <ast.Name object at 0x7da1b1fa7ca0>]]]] name[self].show_toolbars_action assign[=] call[name[create_action], parameter[name[self], call[name[_], parameter[constant[Show toolbars]]]]] call[name[self].register_shortcut, parameter[name[self].show_toolbars_action, constant[_], constant[Show toolbars]]] call[name[self].view_menu.addMenu, parameter[name[self].toolbars_menu]] call[name[self].view_menu.addAction, parameter[name[self].show_toolbars_action]] call[name[add_actions], parameter[name[self].view_menu, tuple[[<ast.Name object at 0x7da1b21f5b40>, <ast.Attribute object at 0x7da1b21f42b0>, <ast.Attribute object at 0x7da1b21f5300>, <ast.Attribute object at 0x7da1b21f41c0>, <ast.Name object at 0x7da1b21f4ee0>, <ast.Attribute object at 0x7da1b21f4e20>]]]] if compare[name[set_attached_console_visible] is_not constant[None]] begin[:] variable[cmd_act] assign[=] call[name[create_action], parameter[name[self], call[name[_], parameter[constant[Attached console window (debugging)]]]]] call[name[cmd_act].setChecked, parameter[call[name[is_attached_console_visible], parameter[]]]] call[name[add_actions], parameter[name[self].view_menu, tuple[[<ast.Name object at 0x7da1b21f4dc0>, <ast.Name object at 0x7da1b21f42e0>]]]] if name[self].external_tools_menu_actions begin[:] variable[external_tools_act] assign[=] call[name[create_action], parameter[name[self], call[name[_], parameter[constant[External Tools]]]]] call[name[external_tools_act].setMenu, parameter[name[self].external_tools_menu]] <ast.AugAssign object at 0x7da1b21f7640> call[name[add_actions], parameter[name[self].file_menu, name[self].file_menu_actions]] call[name[add_actions], parameter[name[self].edit_menu, name[self].edit_menu_actions]] call[name[add_actions], parameter[name[self].search_menu, name[self].search_menu_actions]] call[name[add_actions], parameter[name[self].source_menu, name[self].source_menu_actions]] call[name[add_actions], parameter[name[self].run_menu, name[self].run_menu_actions]] call[name[add_actions], parameter[name[self].debug_menu, name[self].debug_menu_actions]] call[name[add_actions], parameter[name[self].consoles_menu, name[self].consoles_menu_actions]] call[name[add_actions], parameter[name[self].projects_menu, name[self].projects_menu_actions]] call[name[add_actions], parameter[name[self].tools_menu, name[self].tools_menu_actions]] call[name[add_actions], parameter[name[self].external_tools_menu, name[self].external_tools_menu_actions]] call[name[add_actions], parameter[name[self].help_menu, name[self].help_menu_actions]] call[name[add_actions], parameter[name[self].main_toolbar, name[self].main_toolbar_actions]] call[name[add_actions], parameter[name[self].file_toolbar, name[self].file_toolbar_actions]] call[name[add_actions], parameter[name[self].edit_toolbar, name[self].edit_toolbar_actions]] call[name[add_actions], parameter[name[self].search_toolbar, name[self].search_toolbar_actions]] call[name[add_actions], parameter[name[self].source_toolbar, name[self].source_toolbar_actions]] call[name[add_actions], parameter[name[self].debug_toolbar, name[self].debug_toolbar_actions]] call[name[add_actions], parameter[name[self].run_toolbar, name[self].run_toolbar_actions]] call[name[self].apply_shortcuts, parameter[]] call[name[self].all_actions_defined.emit, parameter[]] call[name[logger].info, parameter[constant[Setting up window...]]] call[name[self].setup_layout, parameter[]] if compare[name[sys].platform equal[==] constant[darwin]] begin[:] for taget[name[name]] in starred[list[[<ast.Constant object at 0x7da1b1fc8700>, <ast.Constant object at 0x7da1b1fc96f0>, <ast.Constant object at 0x7da1b1fcbbe0>, <ast.Constant object at 0x7da1b1fca650>, <ast.Constant object at 0x7da1b1fca590>, <ast.Constant object at 0x7da1b1fc8640>, <ast.Constant object at 0x7da1b1fc98a0>, <ast.Constant object at 0x7da1b1fca6b0>, <ast.Constant object at 0x7da1b1fca560>]]] begin[:] variable[menu_object] assign[=] call[name[getattr], parameter[name[self], binary_operation[name[name] + constant[_menu]]]] call[name[menu_object].aboutToShow.connect, parameter[<ast.Lambda object at 0x7da1b1fc8940>]] call[name[menu_object].aboutToHide.connect, parameter[<ast.Lambda object at 0x7da1b1fcbf10>]] if compare[name[self].splash is_not constant[None]] begin[:] call[name[self].splash.hide, parameter[]] if call[name[CONF].get, parameter[constant[main], constant[tear_off_menus]]] begin[:] for taget[name[child]] in starred[call[call[name[self].menuBar, parameter[]].children, parameter[]]] begin[:] if <ast.BoolOp object at 0x7da1b1fca7d0> begin[:] call[name[child].setTearOffEnabled, parameter[constant[True]]] for taget[name[child]] in starred[call[call[name[self].menuBar, parameter[]].children, parameter[]]] begin[:] if call[name[isinstance], parameter[name[child], name[QMenu]]] begin[:] <ast.Try object at 0x7da1b1fcbcd0> call[name[logger].info, parameter[constant[*** End of MainWindow setup ***]]] name[self].is_starting_up assign[=] constant[False]
keyword[def] identifier[setup] ( identifier[self] ): literal[string] identifier[logger] . identifier[info] ( literal[string] ) identifier[logger] . identifier[info] ( literal[string] ) identifier[ui_theme] = identifier[CONF] . identifier[get] ( literal[string] , literal[string] ) identifier[color_scheme] = identifier[CONF] . identifier[get] ( literal[string] , literal[string] ) keyword[if] identifier[ui_theme] == literal[string] : identifier[dark_qss] = identifier[qdarkstyle] . identifier[load_stylesheet_from_environment] () identifier[self] . identifier[setStyleSheet] ( identifier[dark_qss] ) identifier[self] . identifier[statusBar] (). identifier[setStyleSheet] ( identifier[dark_qss] ) identifier[css_path] = identifier[DARK_CSS_PATH] keyword[elif] identifier[ui_theme] == literal[string] : keyword[if] keyword[not] identifier[is_dark_font_color] ( identifier[color_scheme] ): identifier[dark_qss] = identifier[qdarkstyle] . identifier[load_stylesheet_from_environment] () identifier[self] . identifier[setStyleSheet] ( identifier[dark_qss] ) identifier[self] . identifier[statusBar] (). identifier[setStyleSheet] ( identifier[dark_qss] ) identifier[css_path] = identifier[DARK_CSS_PATH] keyword[else] : identifier[css_path] = identifier[CSS_PATH] keyword[else] : identifier[css_path] = identifier[CSS_PATH] identifier[logger] . identifier[info] ( literal[string] ) identifier[self] . identifier[close_dockwidget_action] = identifier[create_action] ( identifier[self] , identifier[icon] = identifier[ima] . identifier[icon] ( literal[string] ), identifier[text] = identifier[_] ( literal[string] ), identifier[triggered] = identifier[self] . identifier[close_current_dockwidget] , identifier[context] = identifier[Qt] . identifier[ApplicationShortcut] ) identifier[self] . identifier[register_shortcut] ( identifier[self] . identifier[close_dockwidget_action] , literal[string] , literal[string] ) identifier[self] . identifier[lock_interface_action] = identifier[create_action] ( identifier[self] , identifier[_] ( literal[string] ), identifier[toggled] = identifier[self] . identifier[toggle_lock] , identifier[context] = identifier[Qt] . identifier[ApplicationShortcut] ) identifier[self] . identifier[register_shortcut] ( identifier[self] . identifier[lock_interface_action] , literal[string] , literal[string] ) identifier[self] . identifier[toggle_next_layout_action] = identifier[create_action] ( identifier[self] , identifier[_] ( literal[string] ), identifier[triggered] = identifier[self] . identifier[toggle_next_layout] , identifier[context] = identifier[Qt] . identifier[ApplicationShortcut] ) identifier[self] . identifier[toggle_previous_layout_action] = identifier[create_action] ( identifier[self] , identifier[_] ( literal[string] ), identifier[triggered] = identifier[self] . identifier[toggle_previous_layout] , identifier[context] = identifier[Qt] . identifier[ApplicationShortcut] ) identifier[self] . identifier[register_shortcut] ( identifier[self] . identifier[toggle_next_layout_action] , literal[string] , literal[string] ) identifier[self] . identifier[register_shortcut] ( identifier[self] . identifier[toggle_previous_layout_action] , literal[string] , literal[string] ) identifier[self] . identifier[file_switcher_action] = identifier[create_action] ( identifier[self] , identifier[_] ( literal[string] ), identifier[icon] = identifier[ima] . identifier[icon] ( literal[string] ), identifier[tip] = identifier[_] ( literal[string] ), identifier[triggered] = identifier[self] . identifier[open_fileswitcher] , identifier[context] = identifier[Qt] . identifier[ApplicationShortcut] ) identifier[self] . identifier[register_shortcut] ( identifier[self] . identifier[file_switcher_action] , identifier[context] = literal[string] , identifier[name] = literal[string] ) identifier[self] . identifier[symbol_finder_action] = identifier[create_action] ( identifier[self] , identifier[_] ( literal[string] ), identifier[icon] = identifier[ima] . identifier[icon] ( literal[string] ), identifier[tip] = identifier[_] ( literal[string] ), identifier[triggered] = identifier[self] . identifier[open_symbolfinder] , identifier[context] = identifier[Qt] . identifier[ApplicationShortcut] ) identifier[self] . identifier[register_shortcut] ( identifier[self] . identifier[symbol_finder_action] , identifier[context] = literal[string] , identifier[name] = literal[string] , identifier[add_sc_to_tip] = keyword[True] ) identifier[self] . identifier[file_toolbar_actions] =[ identifier[self] . identifier[file_switcher_action] , identifier[self] . identifier[symbol_finder_action] ] keyword[def] identifier[create_edit_action] ( identifier[text] , identifier[tr_text] , identifier[icon] ): identifier[textseq] = identifier[text] . identifier[split] ( literal[string] ) identifier[method_name] = identifier[textseq] [ literal[int] ]. identifier[lower] ()+ literal[string] . identifier[join] ( identifier[textseq] [ literal[int] :]) identifier[action] = identifier[create_action] ( identifier[self] , identifier[tr_text] , identifier[icon] = identifier[icon] , identifier[triggered] = identifier[self] . identifier[global_callback] , identifier[data] = identifier[method_name] , identifier[context] = identifier[Qt] . identifier[WidgetShortcut] ) identifier[self] . identifier[register_shortcut] ( identifier[action] , literal[string] , identifier[text] ) keyword[return] identifier[action] identifier[self] . identifier[undo_action] = identifier[create_edit_action] ( literal[string] , identifier[_] ( literal[string] ), identifier[ima] . identifier[icon] ( literal[string] )) identifier[self] . identifier[redo_action] = identifier[create_edit_action] ( literal[string] , identifier[_] ( literal[string] ), identifier[ima] . identifier[icon] ( literal[string] )) identifier[self] . identifier[copy_action] = identifier[create_edit_action] ( literal[string] , identifier[_] ( literal[string] ), identifier[ima] . identifier[icon] ( literal[string] )) identifier[self] . identifier[cut_action] = identifier[create_edit_action] ( literal[string] , identifier[_] ( literal[string] ), identifier[ima] . identifier[icon] ( literal[string] )) identifier[self] . identifier[paste_action] = identifier[create_edit_action] ( literal[string] , identifier[_] ( literal[string] ), identifier[ima] . identifier[icon] ( literal[string] )) identifier[self] . identifier[selectall_action] = identifier[create_edit_action] ( literal[string] , identifier[_] ( literal[string] ), identifier[ima] . identifier[icon] ( literal[string] )) identifier[self] . identifier[edit_menu_actions] =[ identifier[self] . identifier[undo_action] , identifier[self] . identifier[redo_action] , keyword[None] , identifier[self] . identifier[cut_action] , identifier[self] . identifier[copy_action] , identifier[self] . identifier[paste_action] , identifier[self] . identifier[selectall_action] ] identifier[namespace] = keyword[None] identifier[logger] . identifier[info] ( literal[string] ) identifier[self] . identifier[file_menu] = identifier[self] . identifier[menuBar] (). identifier[addMenu] ( identifier[_] ( literal[string] )) identifier[self] . identifier[file_toolbar] = identifier[self] . identifier[create_toolbar] ( identifier[_] ( literal[string] ), literal[string] ) identifier[self] . identifier[edit_menu] = identifier[self] . identifier[menuBar] (). identifier[addMenu] ( identifier[_] ( literal[string] )) identifier[self] . identifier[edit_toolbar] = identifier[self] . identifier[create_toolbar] ( identifier[_] ( literal[string] ), literal[string] ) identifier[self] . identifier[search_menu] = identifier[self] . identifier[menuBar] (). identifier[addMenu] ( identifier[_] ( literal[string] )) identifier[self] . identifier[search_toolbar] = identifier[self] . identifier[create_toolbar] ( identifier[_] ( literal[string] ), literal[string] ) identifier[self] . identifier[source_menu] = identifier[self] . identifier[menuBar] (). identifier[addMenu] ( identifier[_] ( literal[string] )) identifier[self] . identifier[source_toolbar] = identifier[self] . identifier[create_toolbar] ( identifier[_] ( literal[string] ), literal[string] ) identifier[self] . identifier[run_menu] = identifier[self] . identifier[menuBar] (). identifier[addMenu] ( identifier[_] ( literal[string] )) identifier[self] . identifier[run_toolbar] = identifier[self] . identifier[create_toolbar] ( identifier[_] ( literal[string] ), literal[string] ) identifier[self] . identifier[debug_menu] = identifier[self] . identifier[menuBar] (). identifier[addMenu] ( identifier[_] ( literal[string] )) identifier[self] . identifier[debug_toolbar] = identifier[self] . identifier[create_toolbar] ( identifier[_] ( literal[string] ), literal[string] ) identifier[self] . identifier[consoles_menu] = identifier[self] . identifier[menuBar] (). identifier[addMenu] ( identifier[_] ( literal[string] )) identifier[self] . identifier[consoles_menu] . identifier[aboutToShow] . identifier[connect] ( identifier[self] . identifier[update_execution_state_kernel] ) identifier[self] . identifier[projects_menu] = identifier[self] . identifier[menuBar] (). identifier[addMenu] ( identifier[_] ( literal[string] )) identifier[self] . identifier[projects_menu] . identifier[aboutToShow] . identifier[connect] ( identifier[self] . identifier[valid_project] ) identifier[self] . identifier[tools_menu] = identifier[self] . identifier[menuBar] (). identifier[addMenu] ( identifier[_] ( literal[string] )) identifier[self] . identifier[view_menu] = identifier[self] . identifier[menuBar] (). identifier[addMenu] ( identifier[_] ( literal[string] )) identifier[self] . identifier[help_menu] = identifier[self] . identifier[menuBar] (). identifier[addMenu] ( identifier[_] ( literal[string] )) identifier[status] = identifier[self] . identifier[statusBar] () identifier[status] . identifier[setObjectName] ( literal[string] ) identifier[status] . identifier[showMessage] ( identifier[_] ( literal[string] ), literal[int] ) identifier[logger] . identifier[info] ( literal[string] ) identifier[prefs_action] = identifier[create_action] ( identifier[self] , identifier[_] ( literal[string] ), identifier[icon] = identifier[ima] . identifier[icon] ( literal[string] ), identifier[triggered] = identifier[self] . identifier[edit_preferences] , identifier[context] = identifier[Qt] . identifier[ApplicationShortcut] ) identifier[self] . identifier[register_shortcut] ( identifier[prefs_action] , literal[string] , literal[string] , identifier[add_sc_to_tip] = keyword[True] ) identifier[spyder_path_action] = identifier[create_action] ( identifier[self] , identifier[_] ( literal[string] ), keyword[None] , identifier[icon] = identifier[ima] . identifier[icon] ( literal[string] ), identifier[triggered] = identifier[self] . identifier[path_manager_callback] , identifier[tip] = identifier[_] ( literal[string] ), identifier[menurole] = identifier[QAction] . identifier[ApplicationSpecificRole] ) identifier[reset_spyder_action] = identifier[create_action] ( identifier[self] , identifier[_] ( literal[string] ), identifier[triggered] = identifier[self] . identifier[reset_spyder] ) identifier[self] . identifier[tools_menu_actions] =[ identifier[prefs_action] , identifier[spyder_path_action] ] keyword[if] identifier[WinUserEnvDialog] keyword[is] keyword[not] keyword[None] : identifier[winenv_action] = identifier[create_action] ( identifier[self] , identifier[_] ( literal[string] ), identifier[icon] = literal[string] , identifier[tip] = identifier[_] ( literal[string] literal[string] literal[string] ), identifier[triggered] = identifier[self] . identifier[win_env] ) identifier[self] . identifier[tools_menu_actions] . identifier[append] ( identifier[winenv_action] ) identifier[self] . identifier[tools_menu_actions] +=[ identifier[MENU_SEPARATOR] , identifier[reset_spyder_action] ] identifier[self] . identifier[external_tools_menu] = identifier[QMenu] ( identifier[_] ( literal[string] )) identifier[self] . identifier[external_tools_menu_actions] =[] identifier[self] . identifier[wp_action] = identifier[create_action] ( identifier[self] , identifier[_] ( literal[string] ), identifier[icon] = identifier[get_icon] ( literal[string] ), identifier[triggered] = keyword[lambda] : identifier[programs] . identifier[run_python_script] ( literal[string] , literal[string] )) keyword[if] identifier[os] . identifier[name] == literal[string] keyword[and] identifier[is_module_installed] ( literal[string] ): identifier[self] . identifier[external_tools_menu_actions] . identifier[append] ( identifier[self] . identifier[wp_action] ) identifier[additact] =[] keyword[for] identifier[name] keyword[in] ( literal[string] , literal[string] ): identifier[qtdact] = identifier[create_program_action] ( identifier[self] , identifier[_] ( literal[string] ), identifier[name] ) keyword[if] identifier[qtdact] : keyword[break] keyword[for] identifier[name] keyword[in] ( literal[string] , literal[string] ): identifier[qtlact] = identifier[create_program_action] ( identifier[self] , identifier[_] ( literal[string] ), literal[string] ) keyword[if] identifier[qtlact] : keyword[break] identifier[args] =[ literal[string] ] keyword[if] identifier[os] . identifier[name] == literal[string] keyword[else] [] keyword[for] identifier[act] keyword[in] ( identifier[qtdact] , identifier[qtlact] ): keyword[if] identifier[act] : identifier[additact] . identifier[append] ( identifier[act] ) keyword[if] identifier[additact] keyword[and] identifier[is_module_installed] ( literal[string] ): identifier[self] . identifier[external_tools_menu_actions] +=[ keyword[None] ]+ identifier[additact] identifier[logger] . identifier[info] ( literal[string] ) identifier[gdgq_act] =[] keyword[try] : keyword[from] identifier[guidata] keyword[import] identifier[configtools] keyword[from] identifier[guidata] keyword[import] identifier[config] identifier[guidata_icon] = identifier[configtools] . identifier[get_icon] ( literal[string] ) identifier[guidata_act] = identifier[create_python_script_action] ( identifier[self] , identifier[_] ( literal[string] ), identifier[guidata_icon] , literal[string] , identifier[osp] . identifier[join] ( literal[string] , literal[string] )) identifier[gdgq_act] +=[ identifier[guidata_act] ] keyword[except] : keyword[pass] keyword[try] : keyword[from] identifier[guidata] keyword[import] identifier[configtools] keyword[from] identifier[guiqwt] keyword[import] identifier[config] identifier[guiqwt_icon] = identifier[configtools] . identifier[get_icon] ( literal[string] ) identifier[guiqwt_act] = identifier[create_python_script_action] ( identifier[self] , identifier[_] ( literal[string] ), identifier[guiqwt_icon] , literal[string] , identifier[osp] . identifier[join] ( literal[string] , literal[string] )) keyword[if] identifier[guiqwt_act] : identifier[gdgq_act] +=[ identifier[guiqwt_act] ] identifier[sift_icon] = identifier[configtools] . identifier[get_icon] ( literal[string] ) identifier[sift_act] = identifier[create_python_script_action] ( identifier[self] , identifier[_] ( literal[string] ), identifier[sift_icon] , literal[string] , identifier[osp] . identifier[join] ( literal[string] , literal[string] )) keyword[if] identifier[sift_act] : identifier[gdgq_act] +=[ identifier[sift_act] ] keyword[except] : keyword[pass] keyword[if] identifier[gdgq_act] : identifier[self] . identifier[external_tools_menu_actions] +=[ keyword[None] ]+ identifier[gdgq_act] identifier[self] . identifier[maximize_action] = identifier[create_action] ( identifier[self] , literal[string] , identifier[triggered] = identifier[self] . identifier[maximize_dockwidget] , identifier[context] = identifier[Qt] . identifier[ApplicationShortcut] ) identifier[self] . identifier[register_shortcut] ( identifier[self] . identifier[maximize_action] , literal[string] , literal[string] ) identifier[self] . identifier[__update_maximize_action] () identifier[self] . identifier[fullscreen_action] = identifier[create_action] ( identifier[self] , identifier[_] ( literal[string] ), identifier[triggered] = identifier[self] . identifier[toggle_fullscreen] , identifier[context] = identifier[Qt] . identifier[ApplicationShortcut] ) identifier[self] . identifier[register_shortcut] ( identifier[self] . identifier[fullscreen_action] , literal[string] , literal[string] , identifier[add_sc_to_tip] = keyword[True] ) identifier[self] . identifier[main_toolbar_actions] =[ identifier[self] . identifier[maximize_action] , identifier[self] . identifier[fullscreen_action] , keyword[None] , identifier[prefs_action] , identifier[spyder_path_action] ] identifier[self] . identifier[main_toolbar] = identifier[self] . identifier[create_toolbar] ( identifier[_] ( literal[string] ), literal[string] ) identifier[logger] . identifier[info] ( literal[string] ) keyword[from] identifier[spyder] . identifier[plugins] . identifier[console] . identifier[plugin] keyword[import] identifier[Console] identifier[self] . identifier[console] = identifier[Console] ( identifier[self] , identifier[namespace] , identifier[exitfunc] = identifier[self] . identifier[closing] , identifier[profile] = identifier[self] . identifier[profile] , identifier[multithreaded] = identifier[self] . identifier[multithreaded] , identifier[message] = identifier[_] ( literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] )) identifier[self] . identifier[console] . identifier[register_plugin] () identifier[self] . identifier[set_splash] ( identifier[_] ( literal[string] )) keyword[from] identifier[spyder] . identifier[plugins] . identifier[editor] . identifier[lsp] . identifier[manager] keyword[import] identifier[LSPManager] identifier[self] . identifier[lspmanager] = identifier[LSPManager] ( identifier[self] ) identifier[logger] . identifier[info] ( literal[string] ) keyword[from] identifier[spyder] . identifier[plugins] . identifier[workingdirectory] . identifier[plugin] keyword[import] identifier[WorkingDirectory] identifier[self] . identifier[workingdirectory] = identifier[WorkingDirectory] ( identifier[self] , identifier[self] . identifier[init_workdir] , identifier[main] = identifier[self] ) identifier[self] . identifier[workingdirectory] . identifier[register_plugin] () identifier[self] . identifier[toolbarslist] . identifier[append] ( identifier[self] . identifier[workingdirectory] . identifier[toolbar] ) keyword[if] identifier[CONF] . identifier[get] ( literal[string] , literal[string] ): identifier[self] . identifier[set_splash] ( identifier[_] ( literal[string] )) keyword[from] identifier[spyder] . identifier[plugins] . identifier[help] . identifier[plugin] keyword[import] identifier[Help] identifier[self] . identifier[help] = identifier[Help] ( identifier[self] , identifier[css_path] = identifier[css_path] ) identifier[self] . identifier[help] . identifier[register_plugin] () keyword[if] identifier[CONF] . identifier[get] ( literal[string] , literal[string] ): identifier[self] . identifier[set_splash] ( identifier[_] ( literal[string] )) keyword[from] identifier[spyder] . identifier[plugins] . identifier[outlineexplorer] . identifier[plugin] keyword[import] identifier[OutlineExplorer] identifier[self] . identifier[outlineexplorer] = identifier[OutlineExplorer] ( identifier[self] ) identifier[self] . identifier[outlineexplorer] . identifier[register_plugin] () identifier[self] . identifier[set_splash] ( identifier[_] ( literal[string] )) keyword[from] identifier[spyder] . identifier[plugins] . identifier[editor] . identifier[plugin] keyword[import] identifier[Editor] identifier[self] . identifier[editor] = identifier[Editor] ( identifier[self] ) identifier[self] . identifier[editor] . identifier[register_plugin] () identifier[self] . identifier[set_splash] ( identifier[_] ( literal[string] )) identifier[self] . identifier[lspmanager] . identifier[start_client] ( identifier[language] = literal[string] ) identifier[quit_action] = identifier[create_action] ( identifier[self] , identifier[_] ( literal[string] ), identifier[icon] = identifier[ima] . identifier[icon] ( literal[string] ), identifier[tip] = identifier[_] ( literal[string] ), identifier[triggered] = identifier[self] . identifier[console] . identifier[quit] , identifier[context] = identifier[Qt] . identifier[ApplicationShortcut] ) identifier[self] . identifier[register_shortcut] ( identifier[quit_action] , literal[string] , literal[string] ) identifier[restart_action] = identifier[create_action] ( identifier[self] , identifier[_] ( literal[string] ), identifier[icon] = identifier[ima] . identifier[icon] ( literal[string] ), identifier[tip] = identifier[_] ( literal[string] ), identifier[triggered] = identifier[self] . identifier[restart] , identifier[context] = identifier[Qt] . identifier[ApplicationShortcut] ) identifier[self] . identifier[register_shortcut] ( identifier[restart_action] , literal[string] , literal[string] ) identifier[self] . identifier[file_menu_actions] +=[ identifier[self] . identifier[file_switcher_action] , identifier[self] . identifier[symbol_finder_action] , keyword[None] , identifier[restart_action] , identifier[quit_action] ] identifier[self] . identifier[set_splash] ( literal[string] ) identifier[self] . identifier[set_splash] ( identifier[_] ( literal[string] )) keyword[from] identifier[spyder] . identifier[plugins] . identifier[variableexplorer] . identifier[plugin] keyword[import] identifier[VariableExplorer] identifier[self] . identifier[variableexplorer] = identifier[VariableExplorer] ( identifier[self] ) identifier[self] . identifier[variableexplorer] . identifier[register_plugin] () identifier[self] . identifier[set_splash] ( identifier[_] ( literal[string] )) keyword[from] identifier[spyder] . identifier[plugins] . identifier[plots] . identifier[plugin] keyword[import] identifier[Plots] identifier[self] . identifier[plots] = identifier[Plots] ( identifier[self] ) identifier[self] . identifier[plots] . identifier[register_plugin] () keyword[if] identifier[CONF] . identifier[get] ( literal[string] , literal[string] ): identifier[self] . identifier[set_splash] ( identifier[_] ( literal[string] )) keyword[from] identifier[spyder] . identifier[plugins] . identifier[history] . identifier[plugin] keyword[import] identifier[HistoryLog] identifier[self] . identifier[historylog] = identifier[HistoryLog] ( identifier[self] ) identifier[self] . identifier[historylog] . identifier[register_plugin] () identifier[self] . identifier[set_splash] ( identifier[_] ( literal[string] )) keyword[from] identifier[spyder] . identifier[plugins] . identifier[ipythonconsole] . identifier[plugin] keyword[import] identifier[IPythonConsole] identifier[self] . identifier[ipyconsole] = identifier[IPythonConsole] ( identifier[self] , identifier[css_path] = identifier[css_path] ) identifier[self] . identifier[ipyconsole] . identifier[register_plugin] () keyword[if] identifier[CONF] . identifier[get] ( literal[string] , literal[string] ): identifier[self] . identifier[set_splash] ( identifier[_] ( literal[string] )) keyword[from] identifier[spyder] . identifier[plugins] . identifier[explorer] . identifier[plugin] keyword[import] identifier[Explorer] identifier[self] . identifier[explorer] = identifier[Explorer] ( identifier[self] ) identifier[self] . identifier[explorer] . identifier[register_plugin] () keyword[try] : keyword[from] identifier[spyder] . identifier[plugins] . identifier[onlinehelp] . identifier[plugin] keyword[import] identifier[OnlineHelp] keyword[except] identifier[ImportError] : identifier[OnlineHelp] = keyword[None] keyword[if] identifier[CONF] . identifier[get] ( literal[string] , literal[string] ) keyword[and] identifier[OnlineHelp] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[set_splash] ( identifier[_] ( literal[string] )) identifier[self] . identifier[onlinehelp] = identifier[OnlineHelp] ( identifier[self] ) identifier[self] . identifier[onlinehelp] . identifier[register_plugin] () identifier[self] . identifier[set_splash] ( identifier[_] ( literal[string] )) keyword[from] identifier[spyder] . identifier[plugins] . identifier[projects] . identifier[plugin] keyword[import] identifier[Projects] identifier[self] . identifier[projects] = identifier[Projects] ( identifier[self] ) identifier[self] . identifier[projects] . identifier[register_plugin] () identifier[self] . identifier[project_path] = identifier[self] . identifier[projects] . identifier[get_pythonpath] ( identifier[at_start] = keyword[True] ) keyword[if] identifier[CONF] . identifier[get] ( literal[string] , literal[string] ): keyword[from] identifier[spyder] . identifier[plugins] . identifier[findinfiles] . identifier[plugin] keyword[import] identifier[FindInFiles] identifier[self] . identifier[findinfiles] = identifier[FindInFiles] ( identifier[self] ) identifier[self] . identifier[findinfiles] . identifier[register_plugin] () identifier[other_plugins] =[ literal[string] , literal[string] , literal[string] ] keyword[for] identifier[plugin_name] keyword[in] identifier[other_plugins] : keyword[if] identifier[CONF] . identifier[get] ( identifier[plugin_name] , literal[string] ): identifier[module] = identifier[importlib] . identifier[import_module] ( literal[string] . identifier[format] ( identifier[plugin_name] )) identifier[plugin] = identifier[module] . identifier[PLUGIN_CLASS] ( identifier[self] ) keyword[if] identifier[plugin] . identifier[check_compatibility] ()[ literal[int] ]: identifier[self] . identifier[thirdparty_plugins] . identifier[append] ( identifier[plugin] ) identifier[plugin] . identifier[register_plugin] () identifier[self] . identifier[set_splash] ( identifier[_] ( literal[string] )) keyword[for] identifier[mod] keyword[in] identifier[get_spyderplugins_mods] (): keyword[try] : identifier[plugin] = identifier[mod] . identifier[PLUGIN_CLASS] ( identifier[self] ) keyword[if] identifier[plugin] . identifier[check_compatibility] ()[ literal[int] ]: identifier[self] . identifier[thirdparty_plugins] . identifier[append] ( identifier[plugin] ) identifier[plugin] . identifier[register_plugin] () keyword[except] identifier[Exception] keyword[as] identifier[error] : identifier[print] ( literal[string] %( identifier[mod] , identifier[str] ( identifier[error] )), identifier[file] = identifier[STDERR] ) identifier[traceback] . identifier[print_exc] ( identifier[file] = identifier[STDERR] ) identifier[self] . identifier[set_splash] ( identifier[_] ( literal[string] )) identifier[trouble_action] = identifier[create_action] ( identifier[self] , identifier[_] ( literal[string] ), identifier[triggered] = identifier[self] . identifier[trouble_guide] ) identifier[dep_action] = identifier[create_action] ( identifier[self] , identifier[_] ( literal[string] ), identifier[triggered] = identifier[self] . identifier[show_dependencies] , identifier[icon] = identifier[ima] . identifier[icon] ( literal[string] )) identifier[report_action] = identifier[create_action] ( identifier[self] , identifier[_] ( literal[string] ), identifier[icon] = identifier[ima] . identifier[icon] ( literal[string] ), identifier[triggered] = identifier[self] . identifier[report_issue] ) identifier[support_action] = identifier[create_action] ( identifier[self] , identifier[_] ( literal[string] ), identifier[triggered] = identifier[self] . identifier[google_group] ) identifier[self] . identifier[check_updates_action] = identifier[create_action] ( identifier[self] , identifier[_] ( literal[string] ), identifier[triggered] = identifier[self] . identifier[check_updates] ) identifier[spyder_doc] = literal[string] identifier[doc_action] = identifier[create_action] ( identifier[self] , identifier[_] ( literal[string] ), identifier[icon] = identifier[ima] . identifier[icon] ( literal[string] ), identifier[triggered] = keyword[lambda] : identifier[programs] . identifier[start_file] ( identifier[spyder_doc] )) identifier[self] . identifier[register_shortcut] ( identifier[doc_action] , literal[string] , literal[string] ) keyword[if] identifier[self] . identifier[help] keyword[is] keyword[not] keyword[None] : identifier[tut_action] = identifier[create_action] ( identifier[self] , identifier[_] ( literal[string] ), identifier[triggered] = identifier[self] . identifier[help] . identifier[show_tutorial] ) keyword[else] : identifier[tut_action] = keyword[None] identifier[shortcuts_action] = identifier[create_action] ( identifier[self] , identifier[_] ( literal[string] ), identifier[shortcut] = literal[string] , identifier[triggered] = identifier[self] . identifier[show_shortcuts_dialog] ) identifier[self] . identifier[tour] = identifier[tour] . identifier[AnimatedTour] ( identifier[self] ) identifier[self] . identifier[tours_menu] = identifier[QMenu] ( identifier[_] ( literal[string] ), identifier[self] ) identifier[self] . identifier[tour_menu_actions] =[] identifier[self] . identifier[tours_available] = identifier[tour] . identifier[get_tours] ( literal[int] ) keyword[for] identifier[i] , identifier[tour_available] keyword[in] identifier[enumerate] ( identifier[self] . identifier[tours_available] ): identifier[self] . identifier[tours_available] [ identifier[i] ][ literal[string] ]= literal[int] identifier[tour_name] = identifier[tour_available] [ literal[string] ] keyword[def] identifier[trigger] ( identifier[i] = identifier[i] , identifier[self] = identifier[self] ): keyword[return] keyword[lambda] : identifier[self] . identifier[show_tour] ( identifier[i] ) identifier[temp_action] = identifier[create_action] ( identifier[self] , identifier[tour_name] , identifier[tip] = literal[string] , identifier[triggered] = identifier[trigger] ()) identifier[self] . identifier[tour_menu_actions] +=[ identifier[temp_action] ] identifier[self] . identifier[tours_menu] . identifier[addActions] ( identifier[self] . identifier[tour_menu_actions] ) identifier[self] . identifier[help_menu_actions] =[ identifier[doc_action] , identifier[tut_action] , identifier[shortcuts_action] , identifier[self] . identifier[tours_menu] , identifier[MENU_SEPARATOR] , identifier[trouble_action] , identifier[report_action] , identifier[dep_action] , identifier[self] . identifier[check_updates_action] , identifier[support_action] , identifier[MENU_SEPARATOR] ] keyword[if] identifier[get_python_doc_path] () keyword[is] keyword[not] keyword[None] : identifier[pydoc_act] = identifier[create_action] ( identifier[self] , identifier[_] ( literal[string] ), identifier[triggered] = keyword[lambda] : identifier[programs] . identifier[start_file] ( identifier[get_python_doc_path] ())) identifier[self] . identifier[help_menu_actions] . identifier[append] ( identifier[pydoc_act] ) keyword[if] identifier[self] . identifier[help] keyword[is] keyword[not] keyword[None] : identifier[ipython_menu] = identifier[QMenu] ( identifier[_] ( literal[string] ), identifier[self] ) identifier[intro_action] = identifier[create_action] ( identifier[self] , identifier[_] ( literal[string] ), identifier[triggered] = identifier[self] . identifier[ipyconsole] . identifier[show_intro] ) identifier[quickref_action] = identifier[create_action] ( identifier[self] , identifier[_] ( literal[string] ), identifier[triggered] = identifier[self] . identifier[ipyconsole] . identifier[show_quickref] ) identifier[guiref_action] = identifier[create_action] ( identifier[self] , identifier[_] ( literal[string] ), identifier[triggered] = identifier[self] . identifier[ipyconsole] . identifier[show_guiref] ) identifier[add_actions] ( identifier[ipython_menu] ,( identifier[intro_action] , identifier[guiref_action] , identifier[quickref_action] )) identifier[self] . identifier[help_menu_actions] . identifier[append] ( identifier[ipython_menu] ) identifier[ipm_actions] =[] keyword[def] identifier[add_ipm_action] ( identifier[text] , identifier[path] ): literal[string] identifier[path] = identifier[file_uri] ( identifier[path] ) keyword[if] keyword[not] identifier[API] == literal[string] : identifier[slot] = keyword[lambda] identifier[_checked] , identifier[path] = identifier[path] : identifier[programs] . identifier[start_file] ( identifier[path] ) keyword[else] : identifier[slot] = keyword[lambda] identifier[path] = identifier[path] : identifier[programs] . identifier[start_file] ( identifier[path] ) identifier[action] = identifier[create_action] ( identifier[self] , identifier[text] , identifier[icon] = literal[string] % identifier[osp] . identifier[splitext] ( identifier[path] )[ literal[int] ][ literal[int] :], identifier[triggered] = identifier[slot] ) identifier[ipm_actions] . identifier[append] ( identifier[action] ) identifier[sysdocpth] = identifier[osp] . identifier[join] ( identifier[sys] . identifier[prefix] , literal[string] ) keyword[if] identifier[osp] . identifier[isdir] ( identifier[sysdocpth] ): keyword[for] identifier[docfn] keyword[in] identifier[os] . identifier[listdir] ( identifier[sysdocpth] ): identifier[pt] = literal[string] identifier[match] = identifier[re] . identifier[match] ( identifier[pt] , identifier[docfn] ) keyword[if] identifier[match] keyword[is] keyword[not] keyword[None] : identifier[pname] = identifier[match] . identifier[groups] ()[ literal[int] ] keyword[if] identifier[pname] keyword[not] keyword[in] ( literal[string] ,): identifier[add_ipm_action] ( identifier[pname] , identifier[osp] . identifier[join] ( identifier[sysdocpth] , identifier[docfn] )) keyword[if] identifier[ipm_actions] : identifier[pymods_menu] = identifier[QMenu] ( identifier[_] ( literal[string] ), identifier[self] ) identifier[add_actions] ( identifier[pymods_menu] , identifier[ipm_actions] ) identifier[self] . identifier[help_menu_actions] . identifier[append] ( identifier[pymods_menu] ) identifier[web_resources] = identifier[QMenu] ( identifier[_] ( literal[string] ), identifier[self] ) identifier[webres_actions] = identifier[create_module_bookmark_actions] ( identifier[self] , identifier[self] . identifier[BOOKMARKS] ) identifier[webres_actions] . identifier[insert] ( literal[int] , keyword[None] ) identifier[webres_actions] . identifier[insert] ( literal[int] , keyword[None] ) identifier[webres_actions] . identifier[insert] ( literal[int] , keyword[None] ) identifier[add_actions] ( identifier[web_resources] , identifier[webres_actions] ) identifier[self] . identifier[help_menu_actions] . identifier[append] ( identifier[web_resources] ) keyword[if] identifier[sys] . identifier[platform] . identifier[startswith] ( literal[string] ) keyword[and] keyword[not] identifier[PYQT5] : identifier[qta_exe] = literal[string] keyword[else] : identifier[qta_exe] = literal[string] identifier[qta_act] = identifier[create_program_action] ( identifier[self] , identifier[_] ( literal[string] ), identifier[qta_exe] ) keyword[if] identifier[qta_act] : identifier[self] . identifier[help_menu_actions] +=[ identifier[qta_act] , keyword[None] ] identifier[about_action] = identifier[create_action] ( identifier[self] , identifier[_] ( literal[string] )% literal[string] , identifier[icon] = identifier[ima] . identifier[icon] ( literal[string] ), identifier[triggered] = identifier[self] . identifier[about] ) identifier[self] . identifier[help_menu_actions] +=[ identifier[MENU_SEPARATOR] , identifier[about_action] ] keyword[from] identifier[spyder] . identifier[widgets] . identifier[status] keyword[import] identifier[MemoryStatus] , identifier[CPUStatus] identifier[self] . identifier[mem_status] = identifier[MemoryStatus] ( identifier[self] , identifier[status] ) identifier[self] . identifier[cpu_status] = identifier[CPUStatus] ( identifier[self] , identifier[status] ) identifier[self] . identifier[apply_statusbar_settings] () identifier[self] . identifier[plugins_menu] = identifier[QMenu] ( identifier[_] ( literal[string] ), identifier[self] ) identifier[self] . identifier[toolbars_menu] = identifier[QMenu] ( identifier[_] ( literal[string] ), identifier[self] ) identifier[self] . identifier[quick_layout_menu] = identifier[QMenu] ( identifier[_] ( literal[string] ), identifier[self] ) identifier[self] . identifier[quick_layout_set_menu] () identifier[self] . identifier[view_menu] . identifier[addMenu] ( identifier[self] . identifier[plugins_menu] ) identifier[add_actions] ( identifier[self] . identifier[view_menu] ,( identifier[self] . identifier[lock_interface_action] , identifier[self] . identifier[close_dockwidget_action] , identifier[self] . identifier[maximize_action] , identifier[MENU_SEPARATOR] )) identifier[self] . identifier[show_toolbars_action] = identifier[create_action] ( identifier[self] , identifier[_] ( literal[string] ), identifier[triggered] = identifier[self] . identifier[show_toolbars] , identifier[context] = identifier[Qt] . identifier[ApplicationShortcut] ) identifier[self] . identifier[register_shortcut] ( identifier[self] . identifier[show_toolbars_action] , literal[string] , literal[string] ) identifier[self] . identifier[view_menu] . identifier[addMenu] ( identifier[self] . identifier[toolbars_menu] ) identifier[self] . identifier[view_menu] . identifier[addAction] ( identifier[self] . identifier[show_toolbars_action] ) identifier[add_actions] ( identifier[self] . identifier[view_menu] ,( identifier[MENU_SEPARATOR] , identifier[self] . identifier[quick_layout_menu] , identifier[self] . identifier[toggle_previous_layout_action] , identifier[self] . identifier[toggle_next_layout_action] , identifier[MENU_SEPARATOR] , identifier[self] . identifier[fullscreen_action] )) keyword[if] identifier[set_attached_console_visible] keyword[is] keyword[not] keyword[None] : identifier[cmd_act] = identifier[create_action] ( identifier[self] , identifier[_] ( literal[string] ), identifier[toggled] = identifier[set_attached_console_visible] ) identifier[cmd_act] . identifier[setChecked] ( identifier[is_attached_console_visible] ()) identifier[add_actions] ( identifier[self] . identifier[view_menu] ,( identifier[MENU_SEPARATOR] , identifier[cmd_act] )) keyword[if] identifier[self] . identifier[external_tools_menu_actions] : identifier[external_tools_act] = identifier[create_action] ( identifier[self] , identifier[_] ( literal[string] )) identifier[external_tools_act] . identifier[setMenu] ( identifier[self] . identifier[external_tools_menu] ) identifier[self] . identifier[tools_menu_actions] +=[ keyword[None] , identifier[external_tools_act] ] identifier[add_actions] ( identifier[self] . identifier[file_menu] , identifier[self] . identifier[file_menu_actions] ) identifier[add_actions] ( identifier[self] . identifier[edit_menu] , identifier[self] . identifier[edit_menu_actions] ) identifier[add_actions] ( identifier[self] . identifier[search_menu] , identifier[self] . identifier[search_menu_actions] ) identifier[add_actions] ( identifier[self] . identifier[source_menu] , identifier[self] . identifier[source_menu_actions] ) identifier[add_actions] ( identifier[self] . identifier[run_menu] , identifier[self] . identifier[run_menu_actions] ) identifier[add_actions] ( identifier[self] . identifier[debug_menu] , identifier[self] . identifier[debug_menu_actions] ) identifier[add_actions] ( identifier[self] . identifier[consoles_menu] , identifier[self] . identifier[consoles_menu_actions] ) identifier[add_actions] ( identifier[self] . identifier[projects_menu] , identifier[self] . identifier[projects_menu_actions] ) identifier[add_actions] ( identifier[self] . identifier[tools_menu] , identifier[self] . identifier[tools_menu_actions] ) identifier[add_actions] ( identifier[self] . identifier[external_tools_menu] , identifier[self] . identifier[external_tools_menu_actions] ) identifier[add_actions] ( identifier[self] . identifier[help_menu] , identifier[self] . identifier[help_menu_actions] ) identifier[add_actions] ( identifier[self] . identifier[main_toolbar] , identifier[self] . identifier[main_toolbar_actions] ) identifier[add_actions] ( identifier[self] . identifier[file_toolbar] , identifier[self] . identifier[file_toolbar_actions] ) identifier[add_actions] ( identifier[self] . identifier[edit_toolbar] , identifier[self] . identifier[edit_toolbar_actions] ) identifier[add_actions] ( identifier[self] . identifier[search_toolbar] , identifier[self] . identifier[search_toolbar_actions] ) identifier[add_actions] ( identifier[self] . identifier[source_toolbar] , identifier[self] . identifier[source_toolbar_actions] ) identifier[add_actions] ( identifier[self] . identifier[debug_toolbar] , identifier[self] . identifier[debug_toolbar_actions] ) identifier[add_actions] ( identifier[self] . identifier[run_toolbar] , identifier[self] . identifier[run_toolbar_actions] ) identifier[self] . identifier[apply_shortcuts] () identifier[self] . identifier[all_actions_defined] . identifier[emit] () identifier[logger] . identifier[info] ( literal[string] ) identifier[self] . identifier[setup_layout] ( identifier[default] = keyword[False] ) keyword[if] identifier[sys] . identifier[platform] == literal[string] : keyword[for] identifier[name] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]: identifier[menu_object] = identifier[getattr] ( identifier[self] , identifier[name] + literal[string] ) identifier[menu_object] . identifier[aboutToShow] . identifier[connect] ( keyword[lambda] identifier[name] = identifier[name] : identifier[self] . identifier[show_shortcuts] ( identifier[name] )) identifier[menu_object] . identifier[aboutToHide] . identifier[connect] ( keyword[lambda] identifier[name] = identifier[name] : identifier[self] . identifier[hide_shortcuts] ( identifier[name] )) keyword[if] identifier[self] . identifier[splash] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[splash] . identifier[hide] () keyword[if] identifier[CONF] . identifier[get] ( literal[string] , literal[string] ): keyword[for] identifier[child] keyword[in] identifier[self] . identifier[menuBar] (). identifier[children] (): keyword[if] identifier[isinstance] ( identifier[child] , identifier[QMenu] ) keyword[and] identifier[child] != identifier[self] . identifier[help_menu] : identifier[child] . identifier[setTearOffEnabled] ( keyword[True] ) keyword[for] identifier[child] keyword[in] identifier[self] . identifier[menuBar] (). identifier[children] (): keyword[if] identifier[isinstance] ( identifier[child] , identifier[QMenu] ): keyword[try] : identifier[child] . identifier[aboutToShow] . identifier[connect] ( identifier[self] . identifier[update_edit_menu] ) identifier[child] . identifier[aboutToShow] . identifier[connect] ( identifier[self] . identifier[update_search_menu] ) keyword[except] identifier[TypeError] : keyword[pass] identifier[logger] . identifier[info] ( literal[string] ) identifier[self] . identifier[is_starting_up] = keyword[False]
def setup(self): """Setup main window""" logger.info('*** Start of MainWindow setup ***') logger.info('Applying theme configuration...') ui_theme = CONF.get('appearance', 'ui_theme') color_scheme = CONF.get('appearance', 'selected') if ui_theme == 'dark': dark_qss = qdarkstyle.load_stylesheet_from_environment() self.setStyleSheet(dark_qss) self.statusBar().setStyleSheet(dark_qss) css_path = DARK_CSS_PATH # depends on [control=['if'], data=[]] elif ui_theme == 'automatic': if not is_dark_font_color(color_scheme): dark_qss = qdarkstyle.load_stylesheet_from_environment() self.setStyleSheet(dark_qss) self.statusBar().setStyleSheet(dark_qss) css_path = DARK_CSS_PATH # depends on [control=['if'], data=[]] else: css_path = CSS_PATH # depends on [control=['if'], data=[]] else: css_path = CSS_PATH logger.info('Creating core actions...') self.close_dockwidget_action = create_action(self, icon=ima.icon('close_pane'), text=_('Close current pane'), triggered=self.close_current_dockwidget, context=Qt.ApplicationShortcut) self.register_shortcut(self.close_dockwidget_action, '_', 'Close pane') self.lock_interface_action = create_action(self, _('Lock panes and toolbars'), toggled=self.toggle_lock, context=Qt.ApplicationShortcut) self.register_shortcut(self.lock_interface_action, '_', 'Lock unlock panes') # custom layouts shortcuts self.toggle_next_layout_action = create_action(self, _('Use next layout'), triggered=self.toggle_next_layout, context=Qt.ApplicationShortcut) self.toggle_previous_layout_action = create_action(self, _('Use previous layout'), triggered=self.toggle_previous_layout, context=Qt.ApplicationShortcut) self.register_shortcut(self.toggle_next_layout_action, '_', 'Use next layout') self.register_shortcut(self.toggle_previous_layout_action, '_', 'Use previous layout') # File switcher shortcuts self.file_switcher_action = create_action(self, _('File switcher...'), icon=ima.icon('filelist'), tip=_('Fast switch between files'), triggered=self.open_fileswitcher, context=Qt.ApplicationShortcut) self.register_shortcut(self.file_switcher_action, context='_', name='File switcher') self.symbol_finder_action = create_action(self, _('Symbol finder...'), icon=ima.icon('symbol_find'), tip=_('Fast symbol search in file'), triggered=self.open_symbolfinder, context=Qt.ApplicationShortcut) self.register_shortcut(self.symbol_finder_action, context='_', name='symbol finder', add_sc_to_tip=True) self.file_toolbar_actions = [self.file_switcher_action, self.symbol_finder_action] def create_edit_action(text, tr_text, icon): textseq = text.split(' ') method_name = textseq[0].lower() + ''.join(textseq[1:]) action = create_action(self, tr_text, icon=icon, triggered=self.global_callback, data=method_name, context=Qt.WidgetShortcut) self.register_shortcut(action, 'Editor', text) return action self.undo_action = create_edit_action('Undo', _('Undo'), ima.icon('undo')) self.redo_action = create_edit_action('Redo', _('Redo'), ima.icon('redo')) self.copy_action = create_edit_action('Copy', _('Copy'), ima.icon('editcopy')) self.cut_action = create_edit_action('Cut', _('Cut'), ima.icon('editcut')) self.paste_action = create_edit_action('Paste', _('Paste'), ima.icon('editpaste')) self.selectall_action = create_edit_action('Select All', _('Select All'), ima.icon('selectall')) self.edit_menu_actions = [self.undo_action, self.redo_action, None, self.cut_action, self.copy_action, self.paste_action, self.selectall_action] namespace = None logger.info('Creating toolbars...') # File menu/toolbar self.file_menu = self.menuBar().addMenu(_('&File')) self.file_toolbar = self.create_toolbar(_('File toolbar'), 'file_toolbar') # Edit menu/toolbar self.edit_menu = self.menuBar().addMenu(_('&Edit')) self.edit_toolbar = self.create_toolbar(_('Edit toolbar'), 'edit_toolbar') # Search menu/toolbar self.search_menu = self.menuBar().addMenu(_('&Search')) self.search_toolbar = self.create_toolbar(_('Search toolbar'), 'search_toolbar') # Source menu/toolbar self.source_menu = self.menuBar().addMenu(_('Sour&ce')) self.source_toolbar = self.create_toolbar(_('Source toolbar'), 'source_toolbar') # Run menu/toolbar self.run_menu = self.menuBar().addMenu(_('&Run')) self.run_toolbar = self.create_toolbar(_('Run toolbar'), 'run_toolbar') # Debug menu/toolbar self.debug_menu = self.menuBar().addMenu(_('&Debug')) self.debug_toolbar = self.create_toolbar(_('Debug toolbar'), 'debug_toolbar') # Consoles menu/toolbar self.consoles_menu = self.menuBar().addMenu(_('C&onsoles')) self.consoles_menu.aboutToShow.connect(self.update_execution_state_kernel) # Projects menu self.projects_menu = self.menuBar().addMenu(_('&Projects')) self.projects_menu.aboutToShow.connect(self.valid_project) # Tools menu self.tools_menu = self.menuBar().addMenu(_('&Tools')) # View menu self.view_menu = self.menuBar().addMenu(_('&View')) # Help menu self.help_menu = self.menuBar().addMenu(_('&Help')) # Status bar status = self.statusBar() status.setObjectName('StatusBar') status.showMessage(_('Welcome to Spyder!'), 5000) logger.info('Creating Tools menu...') # Tools + External Tools prefs_action = create_action(self, _('Pre&ferences'), icon=ima.icon('configure'), triggered=self.edit_preferences, context=Qt.ApplicationShortcut) self.register_shortcut(prefs_action, '_', 'Preferences', add_sc_to_tip=True) spyder_path_action = create_action(self, _('PYTHONPATH manager'), None, icon=ima.icon('pythonpath'), triggered=self.path_manager_callback, tip=_('Python Path Manager'), menurole=QAction.ApplicationSpecificRole) reset_spyder_action = create_action(self, _('Reset Spyder to factory defaults'), triggered=self.reset_spyder) self.tools_menu_actions = [prefs_action, spyder_path_action] if WinUserEnvDialog is not None: winenv_action = create_action(self, _('Current user environment variables...'), icon='win_env.png', tip=_('Show and edit current user environment variables in Windows registry (i.e. for all sessions)'), triggered=self.win_env) self.tools_menu_actions.append(winenv_action) # depends on [control=['if'], data=[]] self.tools_menu_actions += [MENU_SEPARATOR, reset_spyder_action] # External Tools submenu self.external_tools_menu = QMenu(_('External Tools')) self.external_tools_menu_actions = [] # WinPython control panel self.wp_action = create_action(self, _('WinPython control panel'), icon=get_icon('winpython.svg'), triggered=lambda : programs.run_python_script('winpython', 'controlpanel')) if os.name == 'nt' and is_module_installed('winpython'): self.external_tools_menu_actions.append(self.wp_action) # depends on [control=['if'], data=[]] # Qt-related tools additact = [] for name in ('designer-qt4', 'designer'): qtdact = create_program_action(self, _('Qt Designer'), name) if qtdact: break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['name']] for name in ('linguist-qt4', 'linguist'): qtlact = create_program_action(self, _('Qt Linguist'), 'linguist') if qtlact: break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] args = ['-no-opengl'] if os.name == 'nt' else [] for act in (qtdact, qtlact): if act: additact.append(act) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['act']] if additact and is_module_installed('winpython'): self.external_tools_menu_actions += [None] + additact # depends on [control=['if'], data=[]] # Guidata and Sift logger.info('Creating guidata and sift entries...') gdgq_act = [] # Guidata and Guiqwt don't support PyQt5 yet and they fail # with an AssertionError when imported using those bindings # (see issue 2274) try: from guidata import configtools from guidata import config # analysis:ignore guidata_icon = configtools.get_icon('guidata.svg') guidata_act = create_python_script_action(self, _('guidata examples'), guidata_icon, 'guidata', osp.join('tests', '__init__')) gdgq_act += [guidata_act] # depends on [control=['try'], data=[]] except: pass # depends on [control=['except'], data=[]] try: from guidata import configtools from guiqwt import config # analysis:ignore guiqwt_icon = configtools.get_icon('guiqwt.svg') guiqwt_act = create_python_script_action(self, _('guiqwt examples'), guiqwt_icon, 'guiqwt', osp.join('tests', '__init__')) if guiqwt_act: gdgq_act += [guiqwt_act] # depends on [control=['if'], data=[]] sift_icon = configtools.get_icon('sift.svg') sift_act = create_python_script_action(self, _('Sift'), sift_icon, 'guiqwt', osp.join('tests', 'sift')) if sift_act: gdgq_act += [sift_act] # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except: pass # depends on [control=['except'], data=[]] if gdgq_act: self.external_tools_menu_actions += [None] + gdgq_act # depends on [control=['if'], data=[]] # Maximize current plugin self.maximize_action = create_action(self, '', triggered=self.maximize_dockwidget, context=Qt.ApplicationShortcut) self.register_shortcut(self.maximize_action, '_', 'Maximize pane') self.__update_maximize_action() # Fullscreen mode self.fullscreen_action = create_action(self, _('Fullscreen mode'), triggered=self.toggle_fullscreen, context=Qt.ApplicationShortcut) self.register_shortcut(self.fullscreen_action, '_', 'Fullscreen mode', add_sc_to_tip=True) # Main toolbar self.main_toolbar_actions = [self.maximize_action, self.fullscreen_action, None, prefs_action, spyder_path_action] self.main_toolbar = self.create_toolbar(_('Main toolbar'), 'main_toolbar') # Internal console plugin logger.info('Loading internal console...') from spyder.plugins.console.plugin import Console self.console = Console(self, namespace, exitfunc=self.closing, profile=self.profile, multithreaded=self.multithreaded, message=_("Spyder Internal Console\n\nThis console is used to report application\ninternal errors and to inspect Spyder\ninternals with the following commands:\n spy.app, spy.window, dir(spy)\n\nPlease don't use it to run your code\n\n")) self.console.register_plugin() # Language Server Protocol Client initialization self.set_splash(_('Starting Language Server Protocol manager...')) from spyder.plugins.editor.lsp.manager import LSPManager self.lspmanager = LSPManager(self) # Working directory plugin logger.info('Loading working directory...') from spyder.plugins.workingdirectory.plugin import WorkingDirectory self.workingdirectory = WorkingDirectory(self, self.init_workdir, main=self) self.workingdirectory.register_plugin() self.toolbarslist.append(self.workingdirectory.toolbar) # Help plugin if CONF.get('help', 'enable'): self.set_splash(_('Loading help...')) from spyder.plugins.help.plugin import Help self.help = Help(self, css_path=css_path) self.help.register_plugin() # depends on [control=['if'], data=[]] # Outline explorer widget if CONF.get('outline_explorer', 'enable'): self.set_splash(_('Loading outline explorer...')) from spyder.plugins.outlineexplorer.plugin import OutlineExplorer self.outlineexplorer = OutlineExplorer(self) self.outlineexplorer.register_plugin() # depends on [control=['if'], data=[]] # Editor plugin self.set_splash(_('Loading editor...')) from spyder.plugins.editor.plugin import Editor self.editor = Editor(self) self.editor.register_plugin() # Start LSP client self.set_splash(_('Launching LSP Client for Python...')) self.lspmanager.start_client(language='python') # Populating file menu entries quit_action = create_action(self, _('&Quit'), icon=ima.icon('exit'), tip=_('Quit'), triggered=self.console.quit, context=Qt.ApplicationShortcut) self.register_shortcut(quit_action, '_', 'Quit') restart_action = create_action(self, _('&Restart'), icon=ima.icon('restart'), tip=_('Restart'), triggered=self.restart, context=Qt.ApplicationShortcut) self.register_shortcut(restart_action, '_', 'Restart') self.file_menu_actions += [self.file_switcher_action, self.symbol_finder_action, None, restart_action, quit_action] self.set_splash('') # Namespace browser self.set_splash(_('Loading namespace browser...')) from spyder.plugins.variableexplorer.plugin import VariableExplorer self.variableexplorer = VariableExplorer(self) self.variableexplorer.register_plugin() # Figure browser self.set_splash(_('Loading figure browser...')) from spyder.plugins.plots.plugin import Plots self.plots = Plots(self) self.plots.register_plugin() # History log widget if CONF.get('historylog', 'enable'): self.set_splash(_('Loading history plugin...')) from spyder.plugins.history.plugin import HistoryLog self.historylog = HistoryLog(self) self.historylog.register_plugin() # depends on [control=['if'], data=[]] # IPython console self.set_splash(_('Loading IPython console...')) from spyder.plugins.ipythonconsole.plugin import IPythonConsole self.ipyconsole = IPythonConsole(self, css_path=css_path) self.ipyconsole.register_plugin() # Explorer if CONF.get('explorer', 'enable'): self.set_splash(_('Loading file explorer...')) from spyder.plugins.explorer.plugin import Explorer self.explorer = Explorer(self) self.explorer.register_plugin() # depends on [control=['if'], data=[]] # Online help widget try: # Qt >= v4.4 from spyder.plugins.onlinehelp.plugin import OnlineHelp # depends on [control=['try'], data=[]] except ImportError: # Qt < v4.4 OnlineHelp = None # analysis:ignore # depends on [control=['except'], data=[]] if CONF.get('onlinehelp', 'enable') and OnlineHelp is not None: self.set_splash(_('Loading online help...')) self.onlinehelp = OnlineHelp(self) self.onlinehelp.register_plugin() # depends on [control=['if'], data=[]] # Project explorer widget self.set_splash(_('Loading project explorer...')) from spyder.plugins.projects.plugin import Projects self.projects = Projects(self) self.projects.register_plugin() self.project_path = self.projects.get_pythonpath(at_start=True) # Find in files if CONF.get('find_in_files', 'enable'): from spyder.plugins.findinfiles.plugin import FindInFiles self.findinfiles = FindInFiles(self) self.findinfiles.register_plugin() # depends on [control=['if'], data=[]] # Load other plugins (former external plugins) # TODO: Use this bucle to load all internall plugins and remove # duplicated code other_plugins = ['breakpoints', 'profiler', 'pylint'] for plugin_name in other_plugins: if CONF.get(plugin_name, 'enable'): module = importlib.import_module('spyder.plugins.{}'.format(plugin_name)) plugin = module.PLUGIN_CLASS(self) if plugin.check_compatibility()[0]: self.thirdparty_plugins.append(plugin) plugin.register_plugin() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['plugin_name']] # Third-party plugins self.set_splash(_('Loading third-party plugins...')) for mod in get_spyderplugins_mods(): try: plugin = mod.PLUGIN_CLASS(self) if plugin.check_compatibility()[0]: self.thirdparty_plugins.append(plugin) plugin.register_plugin() # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except Exception as error: print('%s: %s' % (mod, str(error)), file=STDERR) traceback.print_exc(file=STDERR) # depends on [control=['except'], data=['error']] # depends on [control=['for'], data=['mod']] self.set_splash(_('Setting up main window...')) # Help menu trouble_action = create_action(self, _('Troubleshooting...'), triggered=self.trouble_guide) dep_action = create_action(self, _('Dependencies...'), triggered=self.show_dependencies, icon=ima.icon('advanced')) report_action = create_action(self, _('Report issue...'), icon=ima.icon('bug'), triggered=self.report_issue) support_action = create_action(self, _('Spyder support...'), triggered=self.google_group) self.check_updates_action = create_action(self, _('Check for updates...'), triggered=self.check_updates) # Spyder documentation spyder_doc = 'https://docs.spyder-ide.org/' doc_action = create_action(self, _('Spyder documentation'), icon=ima.icon('DialogHelpButton'), triggered=lambda : programs.start_file(spyder_doc)) self.register_shortcut(doc_action, '_', 'spyder documentation') if self.help is not None: tut_action = create_action(self, _('Spyder tutorial'), triggered=self.help.show_tutorial) # depends on [control=['if'], data=[]] else: tut_action = None shortcuts_action = create_action(self, _('Shortcuts Summary'), shortcut='Meta+F1', triggered=self.show_shortcuts_dialog) #----- Tours self.tour = tour.AnimatedTour(self) self.tours_menu = QMenu(_('Interactive tours'), self) self.tour_menu_actions = [] # TODO: Only show intro tour for now. When we are close to finish # 3.0, we will finish and show the other tour self.tours_available = tour.get_tours(0) for (i, tour_available) in enumerate(self.tours_available): self.tours_available[i]['last'] = 0 tour_name = tour_available['name'] def trigger(i=i, self=self): # closure needed! return lambda : self.show_tour(i) temp_action = create_action(self, tour_name, tip='', triggered=trigger()) self.tour_menu_actions += [temp_action] # depends on [control=['for'], data=[]] self.tours_menu.addActions(self.tour_menu_actions) self.help_menu_actions = [doc_action, tut_action, shortcuts_action, self.tours_menu, MENU_SEPARATOR, trouble_action, report_action, dep_action, self.check_updates_action, support_action, MENU_SEPARATOR] # Python documentation if get_python_doc_path() is not None: pydoc_act = create_action(self, _('Python documentation'), triggered=lambda : programs.start_file(get_python_doc_path())) self.help_menu_actions.append(pydoc_act) # depends on [control=['if'], data=[]] # IPython documentation if self.help is not None: ipython_menu = QMenu(_('IPython documentation'), self) intro_action = create_action(self, _('Intro to IPython'), triggered=self.ipyconsole.show_intro) quickref_action = create_action(self, _('Quick reference'), triggered=self.ipyconsole.show_quickref) guiref_action = create_action(self, _('Console help'), triggered=self.ipyconsole.show_guiref) add_actions(ipython_menu, (intro_action, guiref_action, quickref_action)) self.help_menu_actions.append(ipython_menu) # depends on [control=['if'], data=[]] # Windows-only: documentation located in sys.prefix/Doc ipm_actions = [] def add_ipm_action(text, path): """Add installed Python module doc action to help submenu""" # QAction.triggered works differently for PySide and PyQt path = file_uri(path) if not API == 'pyside': slot = lambda _checked, path=path: programs.start_file(path) # depends on [control=['if'], data=[]] else: slot = lambda path=path: programs.start_file(path) action = create_action(self, text, icon='%s.png' % osp.splitext(path)[1][1:], triggered=slot) ipm_actions.append(action) sysdocpth = osp.join(sys.prefix, 'Doc') if osp.isdir(sysdocpth): # exists on Windows, except frozen dist. for docfn in os.listdir(sysdocpth): pt = '([a-zA-Z\\_]*)(doc)?(-dev)?(-ref)?(-user)?.(chm|pdf)' match = re.match(pt, docfn) if match is not None: pname = match.groups()[0] if pname not in ('Python',): add_ipm_action(pname, osp.join(sysdocpth, docfn)) # depends on [control=['if'], data=['pname']] # depends on [control=['if'], data=['match']] # depends on [control=['for'], data=['docfn']] # depends on [control=['if'], data=[]] # Installed Python modules submenu (Windows only) if ipm_actions: pymods_menu = QMenu(_('Installed Python modules'), self) add_actions(pymods_menu, ipm_actions) self.help_menu_actions.append(pymods_menu) # depends on [control=['if'], data=[]] # Online documentation web_resources = QMenu(_('Online documentation'), self) webres_actions = create_module_bookmark_actions(self, self.BOOKMARKS) webres_actions.insert(2, None) webres_actions.insert(5, None) webres_actions.insert(8, None) add_actions(web_resources, webres_actions) self.help_menu_actions.append(web_resources) # Qt assistant link if sys.platform.startswith('linux') and (not PYQT5): qta_exe = 'assistant-qt4' # depends on [control=['if'], data=[]] else: qta_exe = 'assistant' qta_act = create_program_action(self, _('Qt documentation'), qta_exe) if qta_act: self.help_menu_actions += [qta_act, None] # depends on [control=['if'], data=[]] # About Spyder about_action = create_action(self, _('About %s...') % 'Spyder', icon=ima.icon('MessageBoxInformation'), triggered=self.about) self.help_menu_actions += [MENU_SEPARATOR, about_action] # Status bar widgets from spyder.widgets.status import MemoryStatus, CPUStatus self.mem_status = MemoryStatus(self, status) self.cpu_status = CPUStatus(self, status) self.apply_statusbar_settings() # ----- View # View menu self.plugins_menu = QMenu(_('Panes'), self) self.toolbars_menu = QMenu(_('Toolbars'), self) self.quick_layout_menu = QMenu(_('Window layouts'), self) self.quick_layout_set_menu() self.view_menu.addMenu(self.plugins_menu) # Panes add_actions(self.view_menu, (self.lock_interface_action, self.close_dockwidget_action, self.maximize_action, MENU_SEPARATOR)) self.show_toolbars_action = create_action(self, _('Show toolbars'), triggered=self.show_toolbars, context=Qt.ApplicationShortcut) self.register_shortcut(self.show_toolbars_action, '_', 'Show toolbars') self.view_menu.addMenu(self.toolbars_menu) self.view_menu.addAction(self.show_toolbars_action) add_actions(self.view_menu, (MENU_SEPARATOR, self.quick_layout_menu, self.toggle_previous_layout_action, self.toggle_next_layout_action, MENU_SEPARATOR, self.fullscreen_action)) if set_attached_console_visible is not None: cmd_act = create_action(self, _('Attached console window (debugging)'), toggled=set_attached_console_visible) cmd_act.setChecked(is_attached_console_visible()) add_actions(self.view_menu, (MENU_SEPARATOR, cmd_act)) # depends on [control=['if'], data=['set_attached_console_visible']] # Adding external tools action to "Tools" menu if self.external_tools_menu_actions: external_tools_act = create_action(self, _('External Tools')) external_tools_act.setMenu(self.external_tools_menu) self.tools_menu_actions += [None, external_tools_act] # depends on [control=['if'], data=[]] # Filling out menu/toolbar entries: add_actions(self.file_menu, self.file_menu_actions) add_actions(self.edit_menu, self.edit_menu_actions) add_actions(self.search_menu, self.search_menu_actions) add_actions(self.source_menu, self.source_menu_actions) add_actions(self.run_menu, self.run_menu_actions) add_actions(self.debug_menu, self.debug_menu_actions) add_actions(self.consoles_menu, self.consoles_menu_actions) add_actions(self.projects_menu, self.projects_menu_actions) add_actions(self.tools_menu, self.tools_menu_actions) add_actions(self.external_tools_menu, self.external_tools_menu_actions) add_actions(self.help_menu, self.help_menu_actions) add_actions(self.main_toolbar, self.main_toolbar_actions) add_actions(self.file_toolbar, self.file_toolbar_actions) add_actions(self.edit_toolbar, self.edit_toolbar_actions) add_actions(self.search_toolbar, self.search_toolbar_actions) add_actions(self.source_toolbar, self.source_toolbar_actions) add_actions(self.debug_toolbar, self.debug_toolbar_actions) add_actions(self.run_toolbar, self.run_toolbar_actions) # Apply all defined shortcuts (plugins + 3rd-party plugins) self.apply_shortcuts() # Emitting the signal notifying plugins that main window menu and # toolbar actions are all defined: self.all_actions_defined.emit() # Window set-up logger.info('Setting up window...') self.setup_layout(default=False) # Show and hide shortcuts in menus for Mac. # This is a workaround because we can't disable shortcuts # by setting context=Qt.WidgetShortcut there if sys.platform == 'darwin': for name in ['file', 'edit', 'search', 'source', 'run', 'debug', 'projects', 'tools', 'plugins']: menu_object = getattr(self, name + '_menu') menu_object.aboutToShow.connect(lambda name=name: self.show_shortcuts(name)) menu_object.aboutToHide.connect(lambda name=name: self.hide_shortcuts(name)) # depends on [control=['for'], data=['name']] # depends on [control=['if'], data=[]] if self.splash is not None: self.splash.hide() # depends on [control=['if'], data=[]] # Enabling tear off for all menus except help menu if CONF.get('main', 'tear_off_menus'): for child in self.menuBar().children(): if isinstance(child, QMenu) and child != self.help_menu: child.setTearOffEnabled(True) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['child']] # depends on [control=['if'], data=[]] # Menu about to show for child in self.menuBar().children(): if isinstance(child, QMenu): try: child.aboutToShow.connect(self.update_edit_menu) child.aboutToShow.connect(self.update_search_menu) # depends on [control=['try'], data=[]] except TypeError: pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['child']] logger.info('*** End of MainWindow setup ***') self.is_starting_up = False
def fix_worksheet_status_inconsistencies(portal): """Walks through open worksheets and transition them to 'verified' or 'to_be_verified' if all their analyses are not in an open status """ logger.info("Fixing worksheet inconsistencies ...") query = dict(portal_type="Worksheet", review_state=["open", "to_be_verified"]) brains = api.search(query, CATALOG_WORKSHEET_LISTING) total = len(brains) for num, brain in enumerate(brains): success = False if num % 100 == 0: logger.info("Fixing worksheet inconsistencies: {}/{}" .format(num, total)) # Note we don't check anything, WS guards for "submit" and "verify" # will take care of checking if the status of contained analyses allows # the transition. worksheet = api.get_object(brain) if api.get_workflow_status_of(worksheet) == "open": success, msg = do_action_for(worksheet, "submit") elif api.get_workflow_status_of(worksheet) == "to_be_verified": success, msg = do_action_for(worksheet, "verify") if success: logger.info("Worksheet {} transitioned to 'to_be_verified'" .format(worksheet.getId())) success, msg = do_action_for(worksheet, "verify") if success: logger.info("Worksheet {} transitioned to 'verified'" .format(worksheet.getId())) commit_transaction(portal)
def function[fix_worksheet_status_inconsistencies, parameter[portal]]: constant[Walks through open worksheets and transition them to 'verified' or 'to_be_verified' if all their analyses are not in an open status ] call[name[logger].info, parameter[constant[Fixing worksheet inconsistencies ...]]] variable[query] assign[=] call[name[dict], parameter[]] variable[brains] assign[=] call[name[api].search, parameter[name[query], name[CATALOG_WORKSHEET_LISTING]]] variable[total] assign[=] call[name[len], parameter[name[brains]]] for taget[tuple[[<ast.Name object at 0x7da1b2347040>, <ast.Name object at 0x7da1b2346530>]]] in starred[call[name[enumerate], parameter[name[brains]]]] begin[:] variable[success] assign[=] constant[False] if compare[binary_operation[name[num] <ast.Mod object at 0x7da2590d6920> constant[100]] equal[==] constant[0]] begin[:] call[name[logger].info, parameter[call[constant[Fixing worksheet inconsistencies: {}/{}].format, parameter[name[num], name[total]]]]] variable[worksheet] assign[=] call[name[api].get_object, parameter[name[brain]]] if compare[call[name[api].get_workflow_status_of, parameter[name[worksheet]]] equal[==] constant[open]] begin[:] <ast.Tuple object at 0x7da204345c00> assign[=] call[name[do_action_for], parameter[name[worksheet], constant[submit]]] if name[success] begin[:] call[name[logger].info, parameter[call[constant[Worksheet {} transitioned to 'to_be_verified'].format, parameter[call[name[worksheet].getId, parameter[]]]]]] <ast.Tuple object at 0x7da204347a30> assign[=] call[name[do_action_for], parameter[name[worksheet], constant[verify]]] if name[success] begin[:] call[name[logger].info, parameter[call[constant[Worksheet {} transitioned to 'verified'].format, parameter[call[name[worksheet].getId, parameter[]]]]]] call[name[commit_transaction], parameter[name[portal]]]
keyword[def] identifier[fix_worksheet_status_inconsistencies] ( identifier[portal] ): literal[string] identifier[logger] . identifier[info] ( literal[string] ) identifier[query] = identifier[dict] ( identifier[portal_type] = literal[string] , identifier[review_state] =[ literal[string] , literal[string] ]) identifier[brains] = identifier[api] . identifier[search] ( identifier[query] , identifier[CATALOG_WORKSHEET_LISTING] ) identifier[total] = identifier[len] ( identifier[brains] ) keyword[for] identifier[num] , identifier[brain] keyword[in] identifier[enumerate] ( identifier[brains] ): identifier[success] = keyword[False] keyword[if] identifier[num] % literal[int] == literal[int] : identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[num] , identifier[total] )) identifier[worksheet] = identifier[api] . identifier[get_object] ( identifier[brain] ) keyword[if] identifier[api] . identifier[get_workflow_status_of] ( identifier[worksheet] )== literal[string] : identifier[success] , identifier[msg] = identifier[do_action_for] ( identifier[worksheet] , literal[string] ) keyword[elif] identifier[api] . identifier[get_workflow_status_of] ( identifier[worksheet] )== literal[string] : identifier[success] , identifier[msg] = identifier[do_action_for] ( identifier[worksheet] , literal[string] ) keyword[if] identifier[success] : identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[worksheet] . identifier[getId] ())) identifier[success] , identifier[msg] = identifier[do_action_for] ( identifier[worksheet] , literal[string] ) keyword[if] identifier[success] : identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[worksheet] . identifier[getId] ())) identifier[commit_transaction] ( identifier[portal] )
def fix_worksheet_status_inconsistencies(portal): """Walks through open worksheets and transition them to 'verified' or 'to_be_verified' if all their analyses are not in an open status """ logger.info('Fixing worksheet inconsistencies ...') query = dict(portal_type='Worksheet', review_state=['open', 'to_be_verified']) brains = api.search(query, CATALOG_WORKSHEET_LISTING) total = len(brains) for (num, brain) in enumerate(brains): success = False if num % 100 == 0: logger.info('Fixing worksheet inconsistencies: {}/{}'.format(num, total)) # depends on [control=['if'], data=[]] # Note we don't check anything, WS guards for "submit" and "verify" # will take care of checking if the status of contained analyses allows # the transition. worksheet = api.get_object(brain) if api.get_workflow_status_of(worksheet) == 'open': (success, msg) = do_action_for(worksheet, 'submit') # depends on [control=['if'], data=[]] elif api.get_workflow_status_of(worksheet) == 'to_be_verified': (success, msg) = do_action_for(worksheet, 'verify') # depends on [control=['if'], data=[]] if success: logger.info("Worksheet {} transitioned to 'to_be_verified'".format(worksheet.getId())) (success, msg) = do_action_for(worksheet, 'verify') if success: logger.info("Worksheet {} transitioned to 'verified'".format(worksheet.getId())) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] commit_transaction(portal)
def label(x, gr, preferred_languages=None): """ @param x : graph entity @param gr (Graph): RDF graph @param preferred_languages (iterable) Return the best available label in the graph for the passed entity. If a set of preferred languages is given, try them in order. If none is found, an arbitrary language will be chosen """ # Find all labels & their language labels = { l.language : l for labelProp in LABEL_PROPERTIES for l in gr.objects(x,labelProp) } if labels: #return repr(preferred_languages) + repr(labels) #return u'|'.join(preferred_languages) + u' -> ' + u'/'.join( u'{}:{}'.format(*i) for i in labels.items() ) if preferred_languages is not None: for l in preferred_languages: if l in labels: return labels[l] return labels.itervalues().next() # No labels available. Try to generate a QNAME, or else, the string itself try: return gr.namespace_manager.compute_qname(x)[2].replace('_',' ') except: # Attempt to extract the trailing part of an URI m = re.search( '([^/]+)$', x ) return m.group(1).replace('_',' ') if m else x
def function[label, parameter[x, gr, preferred_languages]]: constant[ @param x : graph entity @param gr (Graph): RDF graph @param preferred_languages (iterable) Return the best available label in the graph for the passed entity. If a set of preferred languages is given, try them in order. If none is found, an arbitrary language will be chosen ] variable[labels] assign[=] <ast.DictComp object at 0x7da20c7cb790> if name[labels] begin[:] if compare[name[preferred_languages] is_not constant[None]] begin[:] for taget[name[l]] in starred[name[preferred_languages]] begin[:] if compare[name[l] in name[labels]] begin[:] return[call[name[labels]][name[l]]] return[call[call[name[labels].itervalues, parameter[]].next, parameter[]]] <ast.Try object at 0x7da20c7c8940>
keyword[def] identifier[label] ( identifier[x] , identifier[gr] , identifier[preferred_languages] = keyword[None] ): literal[string] identifier[labels] ={ identifier[l] . identifier[language] : identifier[l] keyword[for] identifier[labelProp] keyword[in] identifier[LABEL_PROPERTIES] keyword[for] identifier[l] keyword[in] identifier[gr] . identifier[objects] ( identifier[x] , identifier[labelProp] )} keyword[if] identifier[labels] : keyword[if] identifier[preferred_languages] keyword[is] keyword[not] keyword[None] : keyword[for] identifier[l] keyword[in] identifier[preferred_languages] : keyword[if] identifier[l] keyword[in] identifier[labels] : keyword[return] identifier[labels] [ identifier[l] ] keyword[return] identifier[labels] . identifier[itervalues] (). identifier[next] () keyword[try] : keyword[return] identifier[gr] . identifier[namespace_manager] . identifier[compute_qname] ( identifier[x] )[ literal[int] ]. identifier[replace] ( literal[string] , literal[string] ) keyword[except] : identifier[m] = identifier[re] . identifier[search] ( literal[string] , identifier[x] ) keyword[return] identifier[m] . identifier[group] ( literal[int] ). identifier[replace] ( literal[string] , literal[string] ) keyword[if] identifier[m] keyword[else] identifier[x]
def label(x, gr, preferred_languages=None): """ @param x : graph entity @param gr (Graph): RDF graph @param preferred_languages (iterable) Return the best available label in the graph for the passed entity. If a set of preferred languages is given, try them in order. If none is found, an arbitrary language will be chosen """ # Find all labels & their language labels = {l.language: l for labelProp in LABEL_PROPERTIES for l in gr.objects(x, labelProp)} if labels: #return repr(preferred_languages) + repr(labels) #return u'|'.join(preferred_languages) + u' -> ' + u'/'.join( u'{}:{}'.format(*i) for i in labels.items() ) if preferred_languages is not None: for l in preferred_languages: if l in labels: return labels[l] # depends on [control=['if'], data=['l', 'labels']] # depends on [control=['for'], data=['l']] # depends on [control=['if'], data=['preferred_languages']] return labels.itervalues().next() # depends on [control=['if'], data=[]] # No labels available. Try to generate a QNAME, or else, the string itself try: return gr.namespace_manager.compute_qname(x)[2].replace('_', ' ') # depends on [control=['try'], data=[]] except: # Attempt to extract the trailing part of an URI m = re.search('([^/]+)$', x) return m.group(1).replace('_', ' ') if m else x # depends on [control=['except'], data=[]]
def ParseNetworkDataUsage( self, parser_mediator, cache=None, database=None, table=None, **unused_kwargs): """Parses the network data usage monitor table. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. cache (Optional[ESEDBCache]): cache, which contains information about the identifiers stored in the SruDbIdMapTable table. database (Optional[pyesedb.file]): ESE database. table (Optional[pyesedb.table]): table. """ self._ParseGUIDTable( parser_mediator, cache, database, table, self._NETWORK_DATA_USAGE_VALUES_MAP, SRUMNetworkDataUsageEventData)
def function[ParseNetworkDataUsage, parameter[self, parser_mediator, cache, database, table]]: constant[Parses the network data usage monitor table. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. cache (Optional[ESEDBCache]): cache, which contains information about the identifiers stored in the SruDbIdMapTable table. database (Optional[pyesedb.file]): ESE database. table (Optional[pyesedb.table]): table. ] call[name[self]._ParseGUIDTable, parameter[name[parser_mediator], name[cache], name[database], name[table], name[self]._NETWORK_DATA_USAGE_VALUES_MAP, name[SRUMNetworkDataUsageEventData]]]
keyword[def] identifier[ParseNetworkDataUsage] ( identifier[self] , identifier[parser_mediator] , identifier[cache] = keyword[None] , identifier[database] = keyword[None] , identifier[table] = keyword[None] , ** identifier[unused_kwargs] ): literal[string] identifier[self] . identifier[_ParseGUIDTable] ( identifier[parser_mediator] , identifier[cache] , identifier[database] , identifier[table] , identifier[self] . identifier[_NETWORK_DATA_USAGE_VALUES_MAP] , identifier[SRUMNetworkDataUsageEventData] )
def ParseNetworkDataUsage(self, parser_mediator, cache=None, database=None, table=None, **unused_kwargs): """Parses the network data usage monitor table. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. cache (Optional[ESEDBCache]): cache, which contains information about the identifiers stored in the SruDbIdMapTable table. database (Optional[pyesedb.file]): ESE database. table (Optional[pyesedb.table]): table. """ self._ParseGUIDTable(parser_mediator, cache, database, table, self._NETWORK_DATA_USAGE_VALUES_MAP, SRUMNetworkDataUsageEventData)
def remove_nesting(dom, tag_name): """ Unwrap items in the node list that have ancestors with the same tag. """ for node in dom.getElementsByTagName(tag_name): for ancestor in ancestors(node): if ancestor is node: continue if ancestor is dom.documentElement: break if ancestor.tagName == tag_name: unwrap(node) break
def function[remove_nesting, parameter[dom, tag_name]]: constant[ Unwrap items in the node list that have ancestors with the same tag. ] for taget[name[node]] in starred[call[name[dom].getElementsByTagName, parameter[name[tag_name]]]] begin[:] for taget[name[ancestor]] in starred[call[name[ancestors], parameter[name[node]]]] begin[:] if compare[name[ancestor] is name[node]] begin[:] continue if compare[name[ancestor] is name[dom].documentElement] begin[:] break if compare[name[ancestor].tagName equal[==] name[tag_name]] begin[:] call[name[unwrap], parameter[name[node]]] break
keyword[def] identifier[remove_nesting] ( identifier[dom] , identifier[tag_name] ): literal[string] keyword[for] identifier[node] keyword[in] identifier[dom] . identifier[getElementsByTagName] ( identifier[tag_name] ): keyword[for] identifier[ancestor] keyword[in] identifier[ancestors] ( identifier[node] ): keyword[if] identifier[ancestor] keyword[is] identifier[node] : keyword[continue] keyword[if] identifier[ancestor] keyword[is] identifier[dom] . identifier[documentElement] : keyword[break] keyword[if] identifier[ancestor] . identifier[tagName] == identifier[tag_name] : identifier[unwrap] ( identifier[node] ) keyword[break]
def remove_nesting(dom, tag_name): """ Unwrap items in the node list that have ancestors with the same tag. """ for node in dom.getElementsByTagName(tag_name): for ancestor in ancestors(node): if ancestor is node: continue # depends on [control=['if'], data=[]] if ancestor is dom.documentElement: break # depends on [control=['if'], data=[]] if ancestor.tagName == tag_name: unwrap(node) break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['ancestor']] # depends on [control=['for'], data=['node']]
def normalizeX(value): """ Normalizes x coordinate. * **value** must be an :ref:`type-int-float`. * Returned value is the same type as the input value. """ if not isinstance(value, (int, float)): raise TypeError("X coordinates must be instances of " ":ref:`type-int-float`, not %s." % type(value).__name__) return value
def function[normalizeX, parameter[value]]: constant[ Normalizes x coordinate. * **value** must be an :ref:`type-int-float`. * Returned value is the same type as the input value. ] if <ast.UnaryOp object at 0x7da2041d9bd0> begin[:] <ast.Raise object at 0x7da2041d8c70> return[name[value]]
keyword[def] identifier[normalizeX] ( identifier[value] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[value] ,( identifier[int] , identifier[float] )): keyword[raise] identifier[TypeError] ( literal[string] literal[string] % identifier[type] ( identifier[value] ). identifier[__name__] ) keyword[return] identifier[value]
def normalizeX(value): """ Normalizes x coordinate. * **value** must be an :ref:`type-int-float`. * Returned value is the same type as the input value. """ if not isinstance(value, (int, float)): raise TypeError('X coordinates must be instances of :ref:`type-int-float`, not %s.' % type(value).__name__) # depends on [control=['if'], data=[]] return value
def Extract_Checkpoints(self): ''' Extract the checkpoints and store in self.tracking_data ''' # Make sure page is available if self.page is None: raise Exception("The HTML data was not fetched due to some reasons") soup = BeautifulSoup(self.page,'html.parser') if 'Delivery information not found' in self.page: raise ValueError('The Tracking number is invalid/Tracking number is over 45 days old.') # Assign the current status of the shipment if 'Delivered on' in self.page: self.status = 'C' else: # The shipment is in Transit self.status = 'T' # Checkpoints extraction begins here table = soup.findAll('table',{'cellpadding':'1','cellspacing':'1','border':'1','align':'center','style':"width:800px;border-color:#034291;"})[1] rows = table.findAll('tr')[1:] for row in rows: ''' Each row will have 3 columns: Date--Location--Status ''' row_cells = row.findAll('td') date = row_cells[0].string.strip() date = datetime.strptime(date,"%A, %B %d, %Y") location = row_cells[1].find('a').string.strip() if location is '': # ignore the days which are holidays continue status = row_cells[2].text.strip() self.tracking_data.append({'status':status,'date':date,'location':location}) # Sort the checkpoints based on Date and Time --- this is important self.tracking_data = sorted(self.tracking_data, key=lambda k: k['date'])
def function[Extract_Checkpoints, parameter[self]]: constant[ Extract the checkpoints and store in self.tracking_data ] if compare[name[self].page is constant[None]] begin[:] <ast.Raise object at 0x7da204623fd0> variable[soup] assign[=] call[name[BeautifulSoup], parameter[name[self].page, constant[html.parser]]] if compare[constant[Delivery information not found] in name[self].page] begin[:] <ast.Raise object at 0x7da2046236d0> if compare[constant[Delivered on] in name[self].page] begin[:] name[self].status assign[=] constant[C] variable[table] assign[=] call[call[name[soup].findAll, parameter[constant[table], dictionary[[<ast.Constant object at 0x7da204620f70>, <ast.Constant object at 0x7da204623e20>, <ast.Constant object at 0x7da204621db0>, <ast.Constant object at 0x7da204621450>, <ast.Constant object at 0x7da2046202e0>], [<ast.Constant object at 0x7da204622f50>, <ast.Constant object at 0x7da2046234f0>, <ast.Constant object at 0x7da204622b00>, <ast.Constant object at 0x7da204620580>, <ast.Constant object at 0x7da1b0a62f50>]]]]][constant[1]] variable[rows] assign[=] call[call[name[table].findAll, parameter[constant[tr]]]][<ast.Slice object at 0x7da1b0a601c0>] for taget[name[row]] in starred[name[rows]] begin[:] constant[ Each row will have 3 columns: Date--Location--Status ] variable[row_cells] assign[=] call[name[row].findAll, parameter[constant[td]]] variable[date] assign[=] call[call[name[row_cells]][constant[0]].string.strip, parameter[]] variable[date] assign[=] call[name[datetime].strptime, parameter[name[date], constant[%A, %B %d, %Y]]] variable[location] assign[=] call[call[call[name[row_cells]][constant[1]].find, parameter[constant[a]]].string.strip, parameter[]] if compare[name[location] is constant[]] begin[:] continue variable[status] assign[=] call[call[name[row_cells]][constant[2]].text.strip, parameter[]] call[name[self].tracking_data.append, parameter[dictionary[[<ast.Constant object at 0x7da1b0a21690>, <ast.Constant object at 0x7da1b0a21a80>, <ast.Constant object at 0x7da1b0a221a0>], [<ast.Name object at 0x7da1b0a23ca0>, <ast.Name object at 0x7da1b0a221d0>, <ast.Name object at 0x7da1b0a21b10>]]]] name[self].tracking_data assign[=] call[name[sorted], parameter[name[self].tracking_data]]
keyword[def] identifier[Extract_Checkpoints] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[page] keyword[is] keyword[None] : keyword[raise] identifier[Exception] ( literal[string] ) identifier[soup] = identifier[BeautifulSoup] ( identifier[self] . identifier[page] , literal[string] ) keyword[if] literal[string] keyword[in] identifier[self] . identifier[page] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] literal[string] keyword[in] identifier[self] . identifier[page] : identifier[self] . identifier[status] = literal[string] keyword[else] : identifier[self] . identifier[status] = literal[string] identifier[table] = identifier[soup] . identifier[findAll] ( literal[string] ,{ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] })[ literal[int] ] identifier[rows] = identifier[table] . identifier[findAll] ( literal[string] )[ literal[int] :] keyword[for] identifier[row] keyword[in] identifier[rows] : literal[string] identifier[row_cells] = identifier[row] . identifier[findAll] ( literal[string] ) identifier[date] = identifier[row_cells] [ literal[int] ]. identifier[string] . identifier[strip] () identifier[date] = identifier[datetime] . identifier[strptime] ( identifier[date] , literal[string] ) identifier[location] = identifier[row_cells] [ literal[int] ]. identifier[find] ( literal[string] ). identifier[string] . identifier[strip] () keyword[if] identifier[location] keyword[is] literal[string] : keyword[continue] identifier[status] = identifier[row_cells] [ literal[int] ]. identifier[text] . identifier[strip] () identifier[self] . identifier[tracking_data] . identifier[append] ({ literal[string] : identifier[status] , literal[string] : identifier[date] , literal[string] : identifier[location] }) identifier[self] . identifier[tracking_data] = identifier[sorted] ( identifier[self] . identifier[tracking_data] , identifier[key] = keyword[lambda] identifier[k] : identifier[k] [ literal[string] ])
def Extract_Checkpoints(self): """ Extract the checkpoints and store in self.tracking_data """ # Make sure page is available if self.page is None: raise Exception('The HTML data was not fetched due to some reasons') # depends on [control=['if'], data=[]] soup = BeautifulSoup(self.page, 'html.parser') if 'Delivery information not found' in self.page: raise ValueError('The Tracking number is invalid/Tracking number is over 45 days old.') # depends on [control=['if'], data=[]] # Assign the current status of the shipment if 'Delivered on' in self.page: self.status = 'C' # depends on [control=['if'], data=[]] else: # The shipment is in Transit self.status = 'T' # Checkpoints extraction begins here table = soup.findAll('table', {'cellpadding': '1', 'cellspacing': '1', 'border': '1', 'align': 'center', 'style': 'width:800px;border-color:#034291;'})[1] rows = table.findAll('tr')[1:] for row in rows: '\n\t\t\t\tEach row will have 3 columns: Date--Location--Status\n\t\t\t' row_cells = row.findAll('td') date = row_cells[0].string.strip() date = datetime.strptime(date, '%A, %B %d, %Y') location = row_cells[1].find('a').string.strip() if location is '': # ignore the days which are holidays continue # depends on [control=['if'], data=[]] status = row_cells[2].text.strip() self.tracking_data.append({'status': status, 'date': date, 'location': location}) # depends on [control=['for'], data=['row']] # Sort the checkpoints based on Date and Time --- this is important self.tracking_data = sorted(self.tracking_data, key=lambda k: k['date'])
def add_nio(self, nio, port_number): """ Adds a NIO as new port on Ethernet switch. :param nio: NIO instance to add :param port_number: port to allocate for the NIO """ if port_number in self._nios: raise DynamipsError("Port {} isn't free".format(port_number)) yield from self._hypervisor.send('ethsw add_nio "{name}" {nio}'.format(name=self._name, nio=nio)) log.info('Ethernet switch "{name}" [{id}]: NIO {nio} bound to port {port}'.format(name=self._name, id=self._id, nio=nio, port=port_number)) self._nios[port_number] = nio for port_settings in self._ports: if port_settings["port_number"] == port_number: yield from self.set_port_settings(port_number, port_settings) break
def function[add_nio, parameter[self, nio, port_number]]: constant[ Adds a NIO as new port on Ethernet switch. :param nio: NIO instance to add :param port_number: port to allocate for the NIO ] if compare[name[port_number] in name[self]._nios] begin[:] <ast.Raise object at 0x7da2044c3f70> <ast.YieldFrom object at 0x7da2044c2dd0> call[name[log].info, parameter[call[constant[Ethernet switch "{name}" [{id}]: NIO {nio} bound to port {port}].format, parameter[]]]] call[name[self]._nios][name[port_number]] assign[=] name[nio] for taget[name[port_settings]] in starred[name[self]._ports] begin[:] if compare[call[name[port_settings]][constant[port_number]] equal[==] name[port_number]] begin[:] <ast.YieldFrom object at 0x7da2044c2920> break
keyword[def] identifier[add_nio] ( identifier[self] , identifier[nio] , identifier[port_number] ): literal[string] keyword[if] identifier[port_number] keyword[in] identifier[self] . identifier[_nios] : keyword[raise] identifier[DynamipsError] ( literal[string] . identifier[format] ( identifier[port_number] )) keyword[yield] keyword[from] identifier[self] . identifier[_hypervisor] . identifier[send] ( literal[string] . identifier[format] ( identifier[name] = identifier[self] . identifier[_name] , identifier[nio] = identifier[nio] )) identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[name] = identifier[self] . identifier[_name] , identifier[id] = identifier[self] . identifier[_id] , identifier[nio] = identifier[nio] , identifier[port] = identifier[port_number] )) identifier[self] . identifier[_nios] [ identifier[port_number] ]= identifier[nio] keyword[for] identifier[port_settings] keyword[in] identifier[self] . identifier[_ports] : keyword[if] identifier[port_settings] [ literal[string] ]== identifier[port_number] : keyword[yield] keyword[from] identifier[self] . identifier[set_port_settings] ( identifier[port_number] , identifier[port_settings] ) keyword[break]
def add_nio(self, nio, port_number): """ Adds a NIO as new port on Ethernet switch. :param nio: NIO instance to add :param port_number: port to allocate for the NIO """ if port_number in self._nios: raise DynamipsError("Port {} isn't free".format(port_number)) # depends on [control=['if'], data=['port_number']] yield from self._hypervisor.send('ethsw add_nio "{name}" {nio}'.format(name=self._name, nio=nio)) log.info('Ethernet switch "{name}" [{id}]: NIO {nio} bound to port {port}'.format(name=self._name, id=self._id, nio=nio, port=port_number)) self._nios[port_number] = nio for port_settings in self._ports: if port_settings['port_number'] == port_number: yield from self.set_port_settings(port_number, port_settings) break # depends on [control=['if'], data=['port_number']] # depends on [control=['for'], data=['port_settings']]
def _convert_volume_from(self, volume_from): """ :param volume_from: :return: """ if ':' in volume_from: container, permissions = volume_from.split(':') else: container = volume_from permissions = 'rw' if permissions not in ('ro', 'rw'): raise ValueError("only permissions supported for volumes_from are rw and ro.") return "{0}:{1}".format(container, permissions)
def function[_convert_volume_from, parameter[self, volume_from]]: constant[ :param volume_from: :return: ] if compare[constant[:] in name[volume_from]] begin[:] <ast.Tuple object at 0x7da204622260> assign[=] call[name[volume_from].split, parameter[constant[:]]] if compare[name[permissions] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da2046222c0>, <ast.Constant object at 0x7da204620cd0>]]] begin[:] <ast.Raise object at 0x7da204621d80> return[call[constant[{0}:{1}].format, parameter[name[container], name[permissions]]]]
keyword[def] identifier[_convert_volume_from] ( identifier[self] , identifier[volume_from] ): literal[string] keyword[if] literal[string] keyword[in] identifier[volume_from] : identifier[container] , identifier[permissions] = identifier[volume_from] . identifier[split] ( literal[string] ) keyword[else] : identifier[container] = identifier[volume_from] identifier[permissions] = literal[string] keyword[if] identifier[permissions] keyword[not] keyword[in] ( literal[string] , literal[string] ): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[return] literal[string] . identifier[format] ( identifier[container] , identifier[permissions] )
def _convert_volume_from(self, volume_from): """ :param volume_from: :return: """ if ':' in volume_from: (container, permissions) = volume_from.split(':') # depends on [control=['if'], data=['volume_from']] else: container = volume_from permissions = 'rw' if permissions not in ('ro', 'rw'): raise ValueError('only permissions supported for volumes_from are rw and ro.') # depends on [control=['if'], data=[]] return '{0}:{1}'.format(container, permissions)
def _upsert_persons(cursor, person_ids, lookup_func): """Upsert's user info into the database. The model contains the user info as part of the role values. """ person_ids = list(set(person_ids)) # cleanse data # Check for existing records to update. cursor.execute("SELECT personid from persons where personid = ANY (%s)", (person_ids,)) existing_person_ids = [x[0] for x in cursor.fetchall()] new_person_ids = [p for p in person_ids if p not in existing_person_ids] # Update existing records. for person_id in existing_person_ids: # TODO only update based on a delta against the 'updated' column. person_info = lookup_func(person_id) cursor.execute("""\ UPDATE persons SET (personid, firstname, surname, fullname) = ( %(username)s, %(first_name)s, %(last_name)s, %(full_name)s) WHERE personid = %(username)s""", person_info) # Insert new records. # Email is an empty string because # accounts no longer gives out user # email info but a string datatype # is still needed for legacy to # properly process the persons table for person_id in new_person_ids: person_info = lookup_func(person_id) cursor.execute("""\ INSERT INTO persons (personid, firstname, surname, fullname, email) VALUES (%(username)s, %(first_name)s, %(last_name)s, %(full_name)s, '')""", person_info)
def function[_upsert_persons, parameter[cursor, person_ids, lookup_func]]: constant[Upsert's user info into the database. The model contains the user info as part of the role values. ] variable[person_ids] assign[=] call[name[list], parameter[call[name[set], parameter[name[person_ids]]]]] call[name[cursor].execute, parameter[constant[SELECT personid from persons where personid = ANY (%s)], tuple[[<ast.Name object at 0x7da1b003d870>]]]] variable[existing_person_ids] assign[=] <ast.ListComp object at 0x7da1b003ed70> variable[new_person_ids] assign[=] <ast.ListComp object at 0x7da1b003cbb0> for taget[name[person_id]] in starred[name[existing_person_ids]] begin[:] variable[person_info] assign[=] call[name[lookup_func], parameter[name[person_id]]] call[name[cursor].execute, parameter[constant[UPDATE persons SET (personid, firstname, surname, fullname) = ( %(username)s, %(first_name)s, %(last_name)s, %(full_name)s) WHERE personid = %(username)s], name[person_info]]] for taget[name[person_id]] in starred[name[new_person_ids]] begin[:] variable[person_info] assign[=] call[name[lookup_func], parameter[name[person_id]]] call[name[cursor].execute, parameter[constant[INSERT INTO persons (personid, firstname, surname, fullname, email) VALUES (%(username)s, %(first_name)s, %(last_name)s, %(full_name)s, '')], name[person_info]]]
keyword[def] identifier[_upsert_persons] ( identifier[cursor] , identifier[person_ids] , identifier[lookup_func] ): literal[string] identifier[person_ids] = identifier[list] ( identifier[set] ( identifier[person_ids] )) identifier[cursor] . identifier[execute] ( literal[string] , ( identifier[person_ids] ,)) identifier[existing_person_ids] =[ identifier[x] [ literal[int] ] keyword[for] identifier[x] keyword[in] identifier[cursor] . identifier[fetchall] ()] identifier[new_person_ids] =[ identifier[p] keyword[for] identifier[p] keyword[in] identifier[person_ids] keyword[if] identifier[p] keyword[not] keyword[in] identifier[existing_person_ids] ] keyword[for] identifier[person_id] keyword[in] identifier[existing_person_ids] : identifier[person_info] = identifier[lookup_func] ( identifier[person_id] ) identifier[cursor] . identifier[execute] ( literal[string] , identifier[person_info] ) keyword[for] identifier[person_id] keyword[in] identifier[new_person_ids] : identifier[person_info] = identifier[lookup_func] ( identifier[person_id] ) identifier[cursor] . identifier[execute] ( literal[string] , identifier[person_info] )
def _upsert_persons(cursor, person_ids, lookup_func): """Upsert's user info into the database. The model contains the user info as part of the role values. """ person_ids = list(set(person_ids)) # cleanse data # Check for existing records to update. cursor.execute('SELECT personid from persons where personid = ANY (%s)', (person_ids,)) existing_person_ids = [x[0] for x in cursor.fetchall()] new_person_ids = [p for p in person_ids if p not in existing_person_ids] # Update existing records. for person_id in existing_person_ids: # TODO only update based on a delta against the 'updated' column. person_info = lookup_func(person_id) cursor.execute('UPDATE persons\nSET (personid, firstname, surname, fullname) =\n ( %(username)s, %(first_name)s, %(last_name)s,\n %(full_name)s)\nWHERE personid = %(username)s', person_info) # depends on [control=['for'], data=['person_id']] # Insert new records. # Email is an empty string because # accounts no longer gives out user # email info but a string datatype # is still needed for legacy to # properly process the persons table for person_id in new_person_ids: person_info = lookup_func(person_id) cursor.execute("INSERT INTO persons\n(personid, firstname, surname, fullname, email)\nVALUES\n(%(username)s, %(first_name)s,\n%(last_name)s, %(full_name)s, '')", person_info) # depends on [control=['for'], data=['person_id']]
def _remove_whitespace(text): """Remove excess whitespace from the ends of a given input string.""" # while True: # old_text = text # text = text.replace(' ', ' ') # if text == old_text: # return text non_spaces = re.finditer(r'[^ ]', text) if not non_spaces: return text first_non_space = non_spaces.next() first_non_space = first_non_space.start() last_non_space = None for item in non_spaces: last_non_space = item if not last_non_space: return text[first_non_space:] else: last_non_space = last_non_space.end() return text[first_non_space:last_non_space]
def function[_remove_whitespace, parameter[text]]: constant[Remove excess whitespace from the ends of a given input string.] variable[non_spaces] assign[=] call[name[re].finditer, parameter[constant[[^ ]], name[text]]] if <ast.UnaryOp object at 0x7da1b0473490> begin[:] return[name[text]] variable[first_non_space] assign[=] call[name[non_spaces].next, parameter[]] variable[first_non_space] assign[=] call[name[first_non_space].start, parameter[]] variable[last_non_space] assign[=] constant[None] for taget[name[item]] in starred[name[non_spaces]] begin[:] variable[last_non_space] assign[=] name[item] if <ast.UnaryOp object at 0x7da1b04735e0> begin[:] return[call[name[text]][<ast.Slice object at 0x7da1b0470d60>]]
keyword[def] identifier[_remove_whitespace] ( identifier[text] ): literal[string] identifier[non_spaces] = identifier[re] . identifier[finditer] ( literal[string] , identifier[text] ) keyword[if] keyword[not] identifier[non_spaces] : keyword[return] identifier[text] identifier[first_non_space] = identifier[non_spaces] . identifier[next] () identifier[first_non_space] = identifier[first_non_space] . identifier[start] () identifier[last_non_space] = keyword[None] keyword[for] identifier[item] keyword[in] identifier[non_spaces] : identifier[last_non_space] = identifier[item] keyword[if] keyword[not] identifier[last_non_space] : keyword[return] identifier[text] [ identifier[first_non_space] :] keyword[else] : identifier[last_non_space] = identifier[last_non_space] . identifier[end] () keyword[return] identifier[text] [ identifier[first_non_space] : identifier[last_non_space] ]
def _remove_whitespace(text): """Remove excess whitespace from the ends of a given input string.""" # while True: # old_text = text # text = text.replace(' ', ' ') # if text == old_text: # return text non_spaces = re.finditer('[^ ]', text) if not non_spaces: return text # depends on [control=['if'], data=[]] first_non_space = non_spaces.next() first_non_space = first_non_space.start() last_non_space = None for item in non_spaces: last_non_space = item # depends on [control=['for'], data=['item']] if not last_non_space: return text[first_non_space:] # depends on [control=['if'], data=[]] else: last_non_space = last_non_space.end() return text[first_non_space:last_non_space]
def unserialize(wd: WordDictionary, text: Dict): """ Transforms back a serialized value of `serialize()` """ if not isinstance(text, Mapping): raise ValueError('Text has not the right format') try: t = text['type'] if t == 'string': return text['value'] elif t == 'trans': if not isinstance(text['params'], Mapping): raise ValueError('Params should be a dictionary') for param in text['params']: if not isinstance(param, str): raise ValueError('Params are not all text-keys') return StringToTranslate( wd=wd, key=text['key'], count=text['count'], params=text['params'], ) else: raise ValueError('Unknown type "{}"'.format(t)) except KeyError: raise ValueError('Not enough information to unserialize')
def function[unserialize, parameter[wd, text]]: constant[ Transforms back a serialized value of `serialize()` ] if <ast.UnaryOp object at 0x7da20c7c9cc0> begin[:] <ast.Raise object at 0x7da20c7cba90> <ast.Try object at 0x7da20c7c8d00>
keyword[def] identifier[unserialize] ( identifier[wd] : identifier[WordDictionary] , identifier[text] : identifier[Dict] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[text] , identifier[Mapping] ): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[try] : identifier[t] = identifier[text] [ literal[string] ] keyword[if] identifier[t] == literal[string] : keyword[return] identifier[text] [ literal[string] ] keyword[elif] identifier[t] == literal[string] : keyword[if] keyword[not] identifier[isinstance] ( identifier[text] [ literal[string] ], identifier[Mapping] ): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[for] identifier[param] keyword[in] identifier[text] [ literal[string] ]: keyword[if] keyword[not] identifier[isinstance] ( identifier[param] , identifier[str] ): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[return] identifier[StringToTranslate] ( identifier[wd] = identifier[wd] , identifier[key] = identifier[text] [ literal[string] ], identifier[count] = identifier[text] [ literal[string] ], identifier[params] = identifier[text] [ literal[string] ], ) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[t] )) keyword[except] identifier[KeyError] : keyword[raise] identifier[ValueError] ( literal[string] )
def unserialize(wd: WordDictionary, text: Dict): """ Transforms back a serialized value of `serialize()` """ if not isinstance(text, Mapping): raise ValueError('Text has not the right format') # depends on [control=['if'], data=[]] try: t = text['type'] if t == 'string': return text['value'] # depends on [control=['if'], data=[]] elif t == 'trans': if not isinstance(text['params'], Mapping): raise ValueError('Params should be a dictionary') # depends on [control=['if'], data=[]] for param in text['params']: if not isinstance(param, str): raise ValueError('Params are not all text-keys') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['param']] return StringToTranslate(wd=wd, key=text['key'], count=text['count'], params=text['params']) # depends on [control=['if'], data=[]] else: raise ValueError('Unknown type "{}"'.format(t)) # depends on [control=['try'], data=[]] except KeyError: raise ValueError('Not enough information to unserialize') # depends on [control=['except'], data=[]]
def get_command_history(self, issued_command): """ Gets locally cached CommandHistory for the specified command. :param .IssuedCommand issued_command: object representing a previously issued command. :rtype: .CommandHistory """ #pylint: disable=protected-access entry = issued_command._proto.commandQueueEntry key = self._cache_key(entry.cmdId) if key in self._cache: return self._cache[key] return None
def function[get_command_history, parameter[self, issued_command]]: constant[ Gets locally cached CommandHistory for the specified command. :param .IssuedCommand issued_command: object representing a previously issued command. :rtype: .CommandHistory ] variable[entry] assign[=] name[issued_command]._proto.commandQueueEntry variable[key] assign[=] call[name[self]._cache_key, parameter[name[entry].cmdId]] if compare[name[key] in name[self]._cache] begin[:] return[call[name[self]._cache][name[key]]] return[constant[None]]
keyword[def] identifier[get_command_history] ( identifier[self] , identifier[issued_command] ): literal[string] identifier[entry] = identifier[issued_command] . identifier[_proto] . identifier[commandQueueEntry] identifier[key] = identifier[self] . identifier[_cache_key] ( identifier[entry] . identifier[cmdId] ) keyword[if] identifier[key] keyword[in] identifier[self] . identifier[_cache] : keyword[return] identifier[self] . identifier[_cache] [ identifier[key] ] keyword[return] keyword[None]
def get_command_history(self, issued_command): """ Gets locally cached CommandHistory for the specified command. :param .IssuedCommand issued_command: object representing a previously issued command. :rtype: .CommandHistory """ #pylint: disable=protected-access entry = issued_command._proto.commandQueueEntry key = self._cache_key(entry.cmdId) if key in self._cache: return self._cache[key] # depends on [control=['if'], data=['key']] return None
def parse(self, stride=None): """Read and cache the file as a numpy array. Store every *stride* line of data; if ``None`` then the class default is used. The array is returned with column-first indexing, i.e. for a data file with columns X Y1 Y2 Y3 ... the array a will be a[0] = X, a[1] = Y1, ... . """ if stride is None: stride = self.stride self.corrupted_lineno = [] irow = 0 # count rows of data # cannot use numpy.loadtxt() because xvg can have two types of 'comment' lines with utilities.openany(self.real_filename) as xvg: rows = [] ncol = None for lineno,line in enumerate(xvg): line = line.strip() if len(line) == 0: continue if "label" in line and "xaxis" in line: self.xaxis = line.split('"')[-2] if "label" in line and "yaxis" in line: self.yaxis = line.split('"')[-2] if line.startswith("@ legend"): if not "legend" in self.metadata: self.metadata["legend"] = [] self.metadata["legend"].append(line.split("legend ")[-1]) if line.startswith("@ s") and "subtitle" not in line: name = line.split("legend ")[-1].replace('"','').strip() self.names.append(name) if line.startswith(('#', '@')) : continue if line.startswith('&'): raise NotImplementedError('{0!s}: Multi-data not supported, only simple NXY format.'.format(self.real_filename)) # parse line as floats try: row = [float(el) for el in line.split()] except: if self.permissive: self.logger.warn("%s: SKIPPING unparsable line %d: %r", self.real_filename, lineno+1, line) self.corrupted_lineno.append(lineno+1) continue self.logger.error("%s: Cannot parse line %d: %r", self.real_filename, lineno+1, line) raise # check for same number of columns as in previous step if ncol is not None and len(row) != ncol: if self.permissive: self.logger.warn("%s: SKIPPING line %d with wrong number of columns: %r", self.real_filename, lineno+1, line) self.corrupted_lineno.append(lineno+1) continue errmsg = "{0!s}: Wrong number of columns in line {1:d}: {2!r}".format(self.real_filename, lineno+1, line) self.logger.error(errmsg) raise IOError(errno.ENODATA, errmsg, self.real_filename) # finally: a good line if irow % stride == 0: ncol = len(row) rows.append(row) irow += 1 try: self.__array = numpy.array(rows).transpose() # cache result except: self.logger.error("%s: Failed reading XVG file, possibly data corrupted. " "Check the last line of the file...", self.real_filename) raise finally: del rows
def function[parse, parameter[self, stride]]: constant[Read and cache the file as a numpy array. Store every *stride* line of data; if ``None`` then the class default is used. The array is returned with column-first indexing, i.e. for a data file with columns X Y1 Y2 Y3 ... the array a will be a[0] = X, a[1] = Y1, ... . ] if compare[name[stride] is constant[None]] begin[:] variable[stride] assign[=] name[self].stride name[self].corrupted_lineno assign[=] list[[]] variable[irow] assign[=] constant[0] with call[name[utilities].openany, parameter[name[self].real_filename]] begin[:] variable[rows] assign[=] list[[]] variable[ncol] assign[=] constant[None] for taget[tuple[[<ast.Name object at 0x7da207f98bb0>, <ast.Name object at 0x7da207f9b640>]]] in starred[call[name[enumerate], parameter[name[xvg]]]] begin[:] variable[line] assign[=] call[name[line].strip, parameter[]] if compare[call[name[len], parameter[name[line]]] equal[==] constant[0]] begin[:] continue if <ast.BoolOp object at 0x7da207f98f70> begin[:] name[self].xaxis assign[=] call[call[name[line].split, parameter[constant["]]]][<ast.UnaryOp object at 0x7da207f9b8e0>] if <ast.BoolOp object at 0x7da207f9aec0> begin[:] name[self].yaxis assign[=] call[call[name[line].split, parameter[constant["]]]][<ast.UnaryOp object at 0x7da20c6a8760>] if call[name[line].startswith, parameter[constant[@ legend]]] begin[:] if <ast.UnaryOp object at 0x7da20c6aa2c0> begin[:] call[name[self].metadata][constant[legend]] assign[=] list[[]] call[call[name[self].metadata][constant[legend]].append, parameter[call[call[name[line].split, parameter[constant[legend ]]]][<ast.UnaryOp object at 0x7da20c6a8910>]]] if <ast.BoolOp object at 0x7da20c6a99c0> begin[:] variable[name] assign[=] call[call[call[call[name[line].split, parameter[constant[legend ]]]][<ast.UnaryOp object at 0x7da20c6a80d0>].replace, parameter[constant["], constant[]]].strip, parameter[]] call[name[self].names.append, parameter[name[name]]] if call[name[line].startswith, parameter[tuple[[<ast.Constant object at 0x7da20c6ab1c0>, <ast.Constant object at 0x7da20c6abd00>]]]] begin[:] continue if call[name[line].startswith, parameter[constant[&]]] begin[:] <ast.Raise object at 0x7da20c6a9c90> <ast.Try object at 0x7da20c6aa590> if <ast.BoolOp object at 0x7da204567520> begin[:] if name[self].permissive begin[:] call[name[self].logger.warn, parameter[constant[%s: SKIPPING line %d with wrong number of columns: %r], name[self].real_filename, binary_operation[name[lineno] + constant[1]], name[line]]] call[name[self].corrupted_lineno.append, parameter[binary_operation[name[lineno] + constant[1]]]] continue variable[errmsg] assign[=] call[constant[{0!s}: Wrong number of columns in line {1:d}: {2!r}].format, parameter[name[self].real_filename, binary_operation[name[lineno] + constant[1]], name[line]]] call[name[self].logger.error, parameter[name[errmsg]]] <ast.Raise object at 0x7da204564700> if compare[binary_operation[name[irow] <ast.Mod object at 0x7da2590d6920> name[stride]] equal[==] constant[0]] begin[:] variable[ncol] assign[=] call[name[len], parameter[name[row]]] call[name[rows].append, parameter[name[row]]] <ast.AugAssign object at 0x7da2045644c0> <ast.Try object at 0x7da204566bc0>
keyword[def] identifier[parse] ( identifier[self] , identifier[stride] = keyword[None] ): literal[string] keyword[if] identifier[stride] keyword[is] keyword[None] : identifier[stride] = identifier[self] . identifier[stride] identifier[self] . identifier[corrupted_lineno] =[] identifier[irow] = literal[int] keyword[with] identifier[utilities] . identifier[openany] ( identifier[self] . identifier[real_filename] ) keyword[as] identifier[xvg] : identifier[rows] =[] identifier[ncol] = keyword[None] keyword[for] identifier[lineno] , identifier[line] keyword[in] identifier[enumerate] ( identifier[xvg] ): identifier[line] = identifier[line] . identifier[strip] () keyword[if] identifier[len] ( identifier[line] )== literal[int] : keyword[continue] keyword[if] literal[string] keyword[in] identifier[line] keyword[and] literal[string] keyword[in] identifier[line] : identifier[self] . identifier[xaxis] = identifier[line] . identifier[split] ( literal[string] )[- literal[int] ] keyword[if] literal[string] keyword[in] identifier[line] keyword[and] literal[string] keyword[in] identifier[line] : identifier[self] . identifier[yaxis] = identifier[line] . identifier[split] ( literal[string] )[- literal[int] ] keyword[if] identifier[line] . identifier[startswith] ( literal[string] ): keyword[if] keyword[not] literal[string] keyword[in] identifier[self] . identifier[metadata] : identifier[self] . identifier[metadata] [ literal[string] ]=[] identifier[self] . identifier[metadata] [ literal[string] ]. identifier[append] ( identifier[line] . identifier[split] ( literal[string] )[- literal[int] ]) keyword[if] identifier[line] . identifier[startswith] ( literal[string] ) keyword[and] literal[string] keyword[not] keyword[in] identifier[line] : identifier[name] = identifier[line] . identifier[split] ( literal[string] )[- literal[int] ]. identifier[replace] ( literal[string] , literal[string] ). identifier[strip] () identifier[self] . identifier[names] . identifier[append] ( identifier[name] ) keyword[if] identifier[line] . identifier[startswith] (( literal[string] , literal[string] )): keyword[continue] keyword[if] identifier[line] . identifier[startswith] ( literal[string] ): keyword[raise] identifier[NotImplementedError] ( literal[string] . identifier[format] ( identifier[self] . identifier[real_filename] )) keyword[try] : identifier[row] =[ identifier[float] ( identifier[el] ) keyword[for] identifier[el] keyword[in] identifier[line] . identifier[split] ()] keyword[except] : keyword[if] identifier[self] . identifier[permissive] : identifier[self] . identifier[logger] . identifier[warn] ( literal[string] , identifier[self] . identifier[real_filename] , identifier[lineno] + literal[int] , identifier[line] ) identifier[self] . identifier[corrupted_lineno] . identifier[append] ( identifier[lineno] + literal[int] ) keyword[continue] identifier[self] . identifier[logger] . identifier[error] ( literal[string] , identifier[self] . identifier[real_filename] , identifier[lineno] + literal[int] , identifier[line] ) keyword[raise] keyword[if] identifier[ncol] keyword[is] keyword[not] keyword[None] keyword[and] identifier[len] ( identifier[row] )!= identifier[ncol] : keyword[if] identifier[self] . identifier[permissive] : identifier[self] . identifier[logger] . identifier[warn] ( literal[string] , identifier[self] . identifier[real_filename] , identifier[lineno] + literal[int] , identifier[line] ) identifier[self] . identifier[corrupted_lineno] . identifier[append] ( identifier[lineno] + literal[int] ) keyword[continue] identifier[errmsg] = literal[string] . identifier[format] ( identifier[self] . identifier[real_filename] , identifier[lineno] + literal[int] , identifier[line] ) identifier[self] . identifier[logger] . identifier[error] ( identifier[errmsg] ) keyword[raise] identifier[IOError] ( identifier[errno] . identifier[ENODATA] , identifier[errmsg] , identifier[self] . identifier[real_filename] ) keyword[if] identifier[irow] % identifier[stride] == literal[int] : identifier[ncol] = identifier[len] ( identifier[row] ) identifier[rows] . identifier[append] ( identifier[row] ) identifier[irow] += literal[int] keyword[try] : identifier[self] . identifier[__array] = identifier[numpy] . identifier[array] ( identifier[rows] ). identifier[transpose] () keyword[except] : identifier[self] . identifier[logger] . identifier[error] ( literal[string] literal[string] , identifier[self] . identifier[real_filename] ) keyword[raise] keyword[finally] : keyword[del] identifier[rows]
def parse(self, stride=None): """Read and cache the file as a numpy array. Store every *stride* line of data; if ``None`` then the class default is used. The array is returned with column-first indexing, i.e. for a data file with columns X Y1 Y2 Y3 ... the array a will be a[0] = X, a[1] = Y1, ... . """ if stride is None: stride = self.stride # depends on [control=['if'], data=['stride']] self.corrupted_lineno = [] irow = 0 # count rows of data # cannot use numpy.loadtxt() because xvg can have two types of 'comment' lines with utilities.openany(self.real_filename) as xvg: rows = [] ncol = None for (lineno, line) in enumerate(xvg): line = line.strip() if len(line) == 0: continue # depends on [control=['if'], data=[]] if 'label' in line and 'xaxis' in line: self.xaxis = line.split('"')[-2] # depends on [control=['if'], data=[]] if 'label' in line and 'yaxis' in line: self.yaxis = line.split('"')[-2] # depends on [control=['if'], data=[]] if line.startswith('@ legend'): if not 'legend' in self.metadata: self.metadata['legend'] = [] # depends on [control=['if'], data=[]] self.metadata['legend'].append(line.split('legend ')[-1]) # depends on [control=['if'], data=[]] if line.startswith('@ s') and 'subtitle' not in line: name = line.split('legend ')[-1].replace('"', '').strip() self.names.append(name) # depends on [control=['if'], data=[]] if line.startswith(('#', '@')): continue # depends on [control=['if'], data=[]] if line.startswith('&'): raise NotImplementedError('{0!s}: Multi-data not supported, only simple NXY format.'.format(self.real_filename)) # depends on [control=['if'], data=[]] # parse line as floats try: row = [float(el) for el in line.split()] # depends on [control=['try'], data=[]] except: if self.permissive: self.logger.warn('%s: SKIPPING unparsable line %d: %r', self.real_filename, lineno + 1, line) self.corrupted_lineno.append(lineno + 1) continue # depends on [control=['if'], data=[]] self.logger.error('%s: Cannot parse line %d: %r', self.real_filename, lineno + 1, line) raise # depends on [control=['except'], data=[]] # check for same number of columns as in previous step if ncol is not None and len(row) != ncol: if self.permissive: self.logger.warn('%s: SKIPPING line %d with wrong number of columns: %r', self.real_filename, lineno + 1, line) self.corrupted_lineno.append(lineno + 1) continue # depends on [control=['if'], data=[]] errmsg = '{0!s}: Wrong number of columns in line {1:d}: {2!r}'.format(self.real_filename, lineno + 1, line) self.logger.error(errmsg) raise IOError(errno.ENODATA, errmsg, self.real_filename) # depends on [control=['if'], data=[]] # finally: a good line if irow % stride == 0: ncol = len(row) rows.append(row) # depends on [control=['if'], data=[]] irow += 1 # depends on [control=['for'], data=[]] # depends on [control=['with'], data=['xvg']] try: self.__array = numpy.array(rows).transpose() # cache result # depends on [control=['try'], data=[]] except: self.logger.error('%s: Failed reading XVG file, possibly data corrupted. Check the last line of the file...', self.real_filename) raise # depends on [control=['except'], data=[]] finally: del rows
def parse(filename): """Parses file content into events stream""" for event, elt in et.iterparse(filename, events= ('start', 'end', 'comment', 'pi'), huge_tree=True): if event == 'start': obj = _elt2obj(elt) obj['type'] = ENTER yield obj if elt.text: yield {'type': TEXT, 'text': elt.text} elif event == 'end': yield {'type': EXIT} if elt.tail: yield {'type': TEXT, 'text': elt.tail} elt.clear() elif event == 'comment': yield {'type': COMMENT, 'text': elt.text} elif event == 'pi': yield {'type': PI, 'text': elt.text} else: assert False, (event, elt)
def function[parse, parameter[filename]]: constant[Parses file content into events stream] for taget[tuple[[<ast.Name object at 0x7da1b00230d0>, <ast.Name object at 0x7da1b0021420>]]] in starred[call[name[et].iterparse, parameter[name[filename]]]] begin[:] if compare[name[event] equal[==] constant[start]] begin[:] variable[obj] assign[=] call[name[_elt2obj], parameter[name[elt]]] call[name[obj]][constant[type]] assign[=] name[ENTER] <ast.Yield object at 0x7da1b008f8e0> if name[elt].text begin[:] <ast.Yield object at 0x7da1b008e5c0>
keyword[def] identifier[parse] ( identifier[filename] ): literal[string] keyword[for] identifier[event] , identifier[elt] keyword[in] identifier[et] . identifier[iterparse] ( identifier[filename] , identifier[events] =( literal[string] , literal[string] , literal[string] , literal[string] ), identifier[huge_tree] = keyword[True] ): keyword[if] identifier[event] == literal[string] : identifier[obj] = identifier[_elt2obj] ( identifier[elt] ) identifier[obj] [ literal[string] ]= identifier[ENTER] keyword[yield] identifier[obj] keyword[if] identifier[elt] . identifier[text] : keyword[yield] { literal[string] : identifier[TEXT] , literal[string] : identifier[elt] . identifier[text] } keyword[elif] identifier[event] == literal[string] : keyword[yield] { literal[string] : identifier[EXIT] } keyword[if] identifier[elt] . identifier[tail] : keyword[yield] { literal[string] : identifier[TEXT] , literal[string] : identifier[elt] . identifier[tail] } identifier[elt] . identifier[clear] () keyword[elif] identifier[event] == literal[string] : keyword[yield] { literal[string] : identifier[COMMENT] , literal[string] : identifier[elt] . identifier[text] } keyword[elif] identifier[event] == literal[string] : keyword[yield] { literal[string] : identifier[PI] , literal[string] : identifier[elt] . identifier[text] } keyword[else] : keyword[assert] keyword[False] ,( identifier[event] , identifier[elt] )
def parse(filename): """Parses file content into events stream""" for (event, elt) in et.iterparse(filename, events=('start', 'end', 'comment', 'pi'), huge_tree=True): if event == 'start': obj = _elt2obj(elt) obj['type'] = ENTER yield obj if elt.text: yield {'type': TEXT, 'text': elt.text} # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif event == 'end': yield {'type': EXIT} if elt.tail: yield {'type': TEXT, 'text': elt.tail} # depends on [control=['if'], data=[]] elt.clear() # depends on [control=['if'], data=[]] elif event == 'comment': yield {'type': COMMENT, 'text': elt.text} # depends on [control=['if'], data=[]] elif event == 'pi': yield {'type': PI, 'text': elt.text} # depends on [control=['if'], data=[]] else: assert False, (event, elt) # depends on [control=['for'], data=[]]
def zip(self, store=False, store_params=None): """ Returns a zip file of the current transformation. This is different from the zip function that lives on the Filestack Client *returns* [Filestack.Transform] """ params = locals() params.pop('store') params.pop('store_params') new_transform = self.add_transform_task('zip', params) if store: return new_transform.store(**store_params) if store_params else new_transform.store() return utils.make_call(CDN_URL, 'get', transform_url=new_transform.url)
def function[zip, parameter[self, store, store_params]]: constant[ Returns a zip file of the current transformation. This is different from the zip function that lives on the Filestack Client *returns* [Filestack.Transform] ] variable[params] assign[=] call[name[locals], parameter[]] call[name[params].pop, parameter[constant[store]]] call[name[params].pop, parameter[constant[store_params]]] variable[new_transform] assign[=] call[name[self].add_transform_task, parameter[constant[zip], name[params]]] if name[store] begin[:] return[<ast.IfExp object at 0x7da1b0c88dc0>] return[call[name[utils].make_call, parameter[name[CDN_URL], constant[get]]]]
keyword[def] identifier[zip] ( identifier[self] , identifier[store] = keyword[False] , identifier[store_params] = keyword[None] ): literal[string] identifier[params] = identifier[locals] () identifier[params] . identifier[pop] ( literal[string] ) identifier[params] . identifier[pop] ( literal[string] ) identifier[new_transform] = identifier[self] . identifier[add_transform_task] ( literal[string] , identifier[params] ) keyword[if] identifier[store] : keyword[return] identifier[new_transform] . identifier[store] (** identifier[store_params] ) keyword[if] identifier[store_params] keyword[else] identifier[new_transform] . identifier[store] () keyword[return] identifier[utils] . identifier[make_call] ( identifier[CDN_URL] , literal[string] , identifier[transform_url] = identifier[new_transform] . identifier[url] )
def zip(self, store=False, store_params=None): """ Returns a zip file of the current transformation. This is different from the zip function that lives on the Filestack Client *returns* [Filestack.Transform] """ params = locals() params.pop('store') params.pop('store_params') new_transform = self.add_transform_task('zip', params) if store: return new_transform.store(**store_params) if store_params else new_transform.store() # depends on [control=['if'], data=[]] return utils.make_call(CDN_URL, 'get', transform_url=new_transform.url)
def inject(self, inst, **renames): """Injects dependencies and propagates dependency injector""" if renames: di = self.clone(**renames) else: di = self pro = di._provides inst.__injections_source__ = di deps = getattr(inst, '__injections__', None) if deps: for attr, dep in deps.items(): val = pro.get(dep.name) if val is None: raise MissingDependencyError(dep.name) if not isinstance(val, dep.type): raise TypeError("Wrong provider for {!r}".format(val)) setattr(inst, attr, val) meth = getattr(inst, '__injected__', None) if meth is not None: meth() return inst
def function[inject, parameter[self, inst]]: constant[Injects dependencies and propagates dependency injector] if name[renames] begin[:] variable[di] assign[=] call[name[self].clone, parameter[]] variable[pro] assign[=] name[di]._provides name[inst].__injections_source__ assign[=] name[di] variable[deps] assign[=] call[name[getattr], parameter[name[inst], constant[__injections__], constant[None]]] if name[deps] begin[:] for taget[tuple[[<ast.Name object at 0x7da1b26178b0>, <ast.Name object at 0x7da1b2617a60>]]] in starred[call[name[deps].items, parameter[]]] begin[:] variable[val] assign[=] call[name[pro].get, parameter[name[dep].name]] if compare[name[val] is constant[None]] begin[:] <ast.Raise object at 0x7da1b26179d0> if <ast.UnaryOp object at 0x7da1b2615330> begin[:] <ast.Raise object at 0x7da1b2614490> call[name[setattr], parameter[name[inst], name[attr], name[val]]] variable[meth] assign[=] call[name[getattr], parameter[name[inst], constant[__injected__], constant[None]]] if compare[name[meth] is_not constant[None]] begin[:] call[name[meth], parameter[]] return[name[inst]]
keyword[def] identifier[inject] ( identifier[self] , identifier[inst] ,** identifier[renames] ): literal[string] keyword[if] identifier[renames] : identifier[di] = identifier[self] . identifier[clone] (** identifier[renames] ) keyword[else] : identifier[di] = identifier[self] identifier[pro] = identifier[di] . identifier[_provides] identifier[inst] . identifier[__injections_source__] = identifier[di] identifier[deps] = identifier[getattr] ( identifier[inst] , literal[string] , keyword[None] ) keyword[if] identifier[deps] : keyword[for] identifier[attr] , identifier[dep] keyword[in] identifier[deps] . identifier[items] (): identifier[val] = identifier[pro] . identifier[get] ( identifier[dep] . identifier[name] ) keyword[if] identifier[val] keyword[is] keyword[None] : keyword[raise] identifier[MissingDependencyError] ( identifier[dep] . identifier[name] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[val] , identifier[dep] . identifier[type] ): keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] ( identifier[val] )) identifier[setattr] ( identifier[inst] , identifier[attr] , identifier[val] ) identifier[meth] = identifier[getattr] ( identifier[inst] , literal[string] , keyword[None] ) keyword[if] identifier[meth] keyword[is] keyword[not] keyword[None] : identifier[meth] () keyword[return] identifier[inst]
def inject(self, inst, **renames): """Injects dependencies and propagates dependency injector""" if renames: di = self.clone(**renames) # depends on [control=['if'], data=[]] else: di = self pro = di._provides inst.__injections_source__ = di deps = getattr(inst, '__injections__', None) if deps: for (attr, dep) in deps.items(): val = pro.get(dep.name) if val is None: raise MissingDependencyError(dep.name) # depends on [control=['if'], data=[]] if not isinstance(val, dep.type): raise TypeError('Wrong provider for {!r}'.format(val)) # depends on [control=['if'], data=[]] setattr(inst, attr, val) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] meth = getattr(inst, '__injected__', None) if meth is not None: meth() # depends on [control=['if'], data=['meth']] return inst
def union_update(self, *others): r"""Update the multiset, adding elements from all others using the maximum multiplicity. >>> ms = Multiset('aab') >>> ms.union_update('bc') >>> sorted(ms) ['a', 'a', 'b', 'c'] You can also use the ``|=`` operator for the same effect. However, the operator version will only accept a set as other operator, not any iterable, to avoid errors. >>> ms = Multiset('aab') >>> ms |= Multiset('bccd') >>> sorted(ms) ['a', 'a', 'b', 'c', 'c', 'd'] For a variant of the operation which does not modify the multiset, but returns a new multiset instead see :meth:`union`. Args: others: The other sets to union this multiset with. Can also be any :class:`~typing.Iterable`\[~T] or :class:`~typing.Mapping`\[~T, :class:`int`] which are then converted to :class:`Multiset`\[~T]. """ _elements = self._elements _total = self._total for other in map(self._as_mapping, others): for element, multiplicity in other.items(): old_multiplicity = _elements.get(element, 0) if multiplicity > old_multiplicity: _elements[element] = multiplicity _total += multiplicity - old_multiplicity self._total = _total
def function[union_update, parameter[self]]: constant[Update the multiset, adding elements from all others using the maximum multiplicity. >>> ms = Multiset('aab') >>> ms.union_update('bc') >>> sorted(ms) ['a', 'a', 'b', 'c'] You can also use the ``|=`` operator for the same effect. However, the operator version will only accept a set as other operator, not any iterable, to avoid errors. >>> ms = Multiset('aab') >>> ms |= Multiset('bccd') >>> sorted(ms) ['a', 'a', 'b', 'c', 'c', 'd'] For a variant of the operation which does not modify the multiset, but returns a new multiset instead see :meth:`union`. Args: others: The other sets to union this multiset with. Can also be any :class:`~typing.Iterable`\[~T] or :class:`~typing.Mapping`\[~T, :class:`int`] which are then converted to :class:`Multiset`\[~T]. ] variable[_elements] assign[=] name[self]._elements variable[_total] assign[=] name[self]._total for taget[name[other]] in starred[call[name[map], parameter[name[self]._as_mapping, name[others]]]] begin[:] for taget[tuple[[<ast.Name object at 0x7da18dc07b80>, <ast.Name object at 0x7da18dc07dc0>]]] in starred[call[name[other].items, parameter[]]] begin[:] variable[old_multiplicity] assign[=] call[name[_elements].get, parameter[name[element], constant[0]]] if compare[name[multiplicity] greater[>] name[old_multiplicity]] begin[:] call[name[_elements]][name[element]] assign[=] name[multiplicity] <ast.AugAssign object at 0x7da20c9918d0> name[self]._total assign[=] name[_total]
keyword[def] identifier[union_update] ( identifier[self] ,* identifier[others] ): literal[string] identifier[_elements] = identifier[self] . identifier[_elements] identifier[_total] = identifier[self] . identifier[_total] keyword[for] identifier[other] keyword[in] identifier[map] ( identifier[self] . identifier[_as_mapping] , identifier[others] ): keyword[for] identifier[element] , identifier[multiplicity] keyword[in] identifier[other] . identifier[items] (): identifier[old_multiplicity] = identifier[_elements] . identifier[get] ( identifier[element] , literal[int] ) keyword[if] identifier[multiplicity] > identifier[old_multiplicity] : identifier[_elements] [ identifier[element] ]= identifier[multiplicity] identifier[_total] += identifier[multiplicity] - identifier[old_multiplicity] identifier[self] . identifier[_total] = identifier[_total]
def union_update(self, *others): """Update the multiset, adding elements from all others using the maximum multiplicity. >>> ms = Multiset('aab') >>> ms.union_update('bc') >>> sorted(ms) ['a', 'a', 'b', 'c'] You can also use the ``|=`` operator for the same effect. However, the operator version will only accept a set as other operator, not any iterable, to avoid errors. >>> ms = Multiset('aab') >>> ms |= Multiset('bccd') >>> sorted(ms) ['a', 'a', 'b', 'c', 'c', 'd'] For a variant of the operation which does not modify the multiset, but returns a new multiset instead see :meth:`union`. Args: others: The other sets to union this multiset with. Can also be any :class:`~typing.Iterable`\\[~T] or :class:`~typing.Mapping`\\[~T, :class:`int`] which are then converted to :class:`Multiset`\\[~T]. """ _elements = self._elements _total = self._total for other in map(self._as_mapping, others): for (element, multiplicity) in other.items(): old_multiplicity = _elements.get(element, 0) if multiplicity > old_multiplicity: _elements[element] = multiplicity _total += multiplicity - old_multiplicity # depends on [control=['if'], data=['multiplicity', 'old_multiplicity']] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['other']] self._total = _total
def extend(self, items): """ Adds @items to the end of the list -> #int length of list after operation """ if items: if self.serialized: items = list(map(self._dumps, items)) self._client.rpush(self.key_prefix, *items)
def function[extend, parameter[self, items]]: constant[ Adds @items to the end of the list -> #int length of list after operation ] if name[items] begin[:] if name[self].serialized begin[:] variable[items] assign[=] call[name[list], parameter[call[name[map], parameter[name[self]._dumps, name[items]]]]] call[name[self]._client.rpush, parameter[name[self].key_prefix, <ast.Starred object at 0x7da1b28a8d00>]]
keyword[def] identifier[extend] ( identifier[self] , identifier[items] ): literal[string] keyword[if] identifier[items] : keyword[if] identifier[self] . identifier[serialized] : identifier[items] = identifier[list] ( identifier[map] ( identifier[self] . identifier[_dumps] , identifier[items] )) identifier[self] . identifier[_client] . identifier[rpush] ( identifier[self] . identifier[key_prefix] ,* identifier[items] )
def extend(self, items): """ Adds @items to the end of the list -> #int length of list after operation """ if items: if self.serialized: items = list(map(self._dumps, items)) # depends on [control=['if'], data=[]] self._client.rpush(self.key_prefix, *items) # depends on [control=['if'], data=[]]
def remove(self, id): """ Remove a prefix. """ # find prefix c.prefix = Prefix.get(int(id)) if 'confirmed' not in request.params: return render('/prefix_remove_confirm.html') c.prefix.remove() redirect(url(controller='prefix', action='list'))
def function[remove, parameter[self, id]]: constant[ Remove a prefix. ] name[c].prefix assign[=] call[name[Prefix].get, parameter[call[name[int], parameter[name[id]]]]] if compare[constant[confirmed] <ast.NotIn object at 0x7da2590d7190> name[request].params] begin[:] return[call[name[render], parameter[constant[/prefix_remove_confirm.html]]]] call[name[c].prefix.remove, parameter[]] call[name[redirect], parameter[call[name[url], parameter[]]]]
keyword[def] identifier[remove] ( identifier[self] , identifier[id] ): literal[string] identifier[c] . identifier[prefix] = identifier[Prefix] . identifier[get] ( identifier[int] ( identifier[id] )) keyword[if] literal[string] keyword[not] keyword[in] identifier[request] . identifier[params] : keyword[return] identifier[render] ( literal[string] ) identifier[c] . identifier[prefix] . identifier[remove] () identifier[redirect] ( identifier[url] ( identifier[controller] = literal[string] , identifier[action] = literal[string] ))
def remove(self, id): """ Remove a prefix. """ # find prefix c.prefix = Prefix.get(int(id)) if 'confirmed' not in request.params: return render('/prefix_remove_confirm.html') # depends on [control=['if'], data=[]] c.prefix.remove() redirect(url(controller='prefix', action='list'))
def __get_supported_file_types_string(self): """ Returns the supported file types dialog string. """ languages = ["All Files (*)"] for language in self.__languages_model.languages: languages.append("{0} Files ({1})".format(language.name, " ".join(language.extensions.split("|")).replace("\\", "*"))) return ";;".join(languages)
def function[__get_supported_file_types_string, parameter[self]]: constant[ Returns the supported file types dialog string. ] variable[languages] assign[=] list[[<ast.Constant object at 0x7da1b0911870>]] for taget[name[language]] in starred[name[self].__languages_model.languages] begin[:] call[name[languages].append, parameter[call[constant[{0} Files ({1})].format, parameter[name[language].name, call[call[constant[ ].join, parameter[call[name[language].extensions.split, parameter[constant[|]]]]].replace, parameter[constant[\], constant[*]]]]]]] return[call[constant[;;].join, parameter[name[languages]]]]
keyword[def] identifier[__get_supported_file_types_string] ( identifier[self] ): literal[string] identifier[languages] =[ literal[string] ] keyword[for] identifier[language] keyword[in] identifier[self] . identifier[__languages_model] . identifier[languages] : identifier[languages] . identifier[append] ( literal[string] . identifier[format] ( identifier[language] . identifier[name] , literal[string] . identifier[join] ( identifier[language] . identifier[extensions] . identifier[split] ( literal[string] )). identifier[replace] ( literal[string] , literal[string] ))) keyword[return] literal[string] . identifier[join] ( identifier[languages] )
def __get_supported_file_types_string(self): """ Returns the supported file types dialog string. """ languages = ['All Files (*)'] for language in self.__languages_model.languages: languages.append('{0} Files ({1})'.format(language.name, ' '.join(language.extensions.split('|')).replace('\\', '*'))) # depends on [control=['for'], data=['language']] return ';;'.join(languages)
def seq_sha512(seq, normalize=True): """returns unicode sequence sha512 hexdigest for sequence `seq`. >>> seq_sha512('') 'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e' >>> seq_sha512('ACGT') '68a178f7c740c5c240aa67ba41843b119d3bf9f8b0f0ac36cf701d26672964efbd536d197f51ce634fc70634d1eefe575bec34c83247abc52010f6e2bbdb8253' >>> seq_sha512('acgt') '68a178f7c740c5c240aa67ba41843b119d3bf9f8b0f0ac36cf701d26672964efbd536d197f51ce634fc70634d1eefe575bec34c83247abc52010f6e2bbdb8253' >>> seq_sha512('acgt', normalize=False) '785c1ac071dd89b69904372cf645b7826df587534d25c41edb2862e54fb2940d697218f2883d2bf1a11cdaee658c7f7ab945a1cfd08eb26cbce57ee88790250a' """ seq = normalize_sequence(seq) if normalize else seq bseq = seq.encode("ascii") return hashlib.sha512(bseq).hexdigest()
def function[seq_sha512, parameter[seq, normalize]]: constant[returns unicode sequence sha512 hexdigest for sequence `seq`. >>> seq_sha512('') 'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e' >>> seq_sha512('ACGT') '68a178f7c740c5c240aa67ba41843b119d3bf9f8b0f0ac36cf701d26672964efbd536d197f51ce634fc70634d1eefe575bec34c83247abc52010f6e2bbdb8253' >>> seq_sha512('acgt') '68a178f7c740c5c240aa67ba41843b119d3bf9f8b0f0ac36cf701d26672964efbd536d197f51ce634fc70634d1eefe575bec34c83247abc52010f6e2bbdb8253' >>> seq_sha512('acgt', normalize=False) '785c1ac071dd89b69904372cf645b7826df587534d25c41edb2862e54fb2940d697218f2883d2bf1a11cdaee658c7f7ab945a1cfd08eb26cbce57ee88790250a' ] variable[seq] assign[=] <ast.IfExp object at 0x7da1b0ca6dd0> variable[bseq] assign[=] call[name[seq].encode, parameter[constant[ascii]]] return[call[call[name[hashlib].sha512, parameter[name[bseq]]].hexdigest, parameter[]]]
keyword[def] identifier[seq_sha512] ( identifier[seq] , identifier[normalize] = keyword[True] ): literal[string] identifier[seq] = identifier[normalize_sequence] ( identifier[seq] ) keyword[if] identifier[normalize] keyword[else] identifier[seq] identifier[bseq] = identifier[seq] . identifier[encode] ( literal[string] ) keyword[return] identifier[hashlib] . identifier[sha512] ( identifier[bseq] ). identifier[hexdigest] ()
def seq_sha512(seq, normalize=True): """returns unicode sequence sha512 hexdigest for sequence `seq`. >>> seq_sha512('') 'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e' >>> seq_sha512('ACGT') '68a178f7c740c5c240aa67ba41843b119d3bf9f8b0f0ac36cf701d26672964efbd536d197f51ce634fc70634d1eefe575bec34c83247abc52010f6e2bbdb8253' >>> seq_sha512('acgt') '68a178f7c740c5c240aa67ba41843b119d3bf9f8b0f0ac36cf701d26672964efbd536d197f51ce634fc70634d1eefe575bec34c83247abc52010f6e2bbdb8253' >>> seq_sha512('acgt', normalize=False) '785c1ac071dd89b69904372cf645b7826df587534d25c41edb2862e54fb2940d697218f2883d2bf1a11cdaee658c7f7ab945a1cfd08eb26cbce57ee88790250a' """ seq = normalize_sequence(seq) if normalize else seq bseq = seq.encode('ascii') return hashlib.sha512(bseq).hexdigest()
def __getNetworkStateDirectory(self, extraDataDir): """ extraDataDir: Model's extra data directory path Returns: Absolute directory path for saving CLA Network """ if self.__restoringFromV1: if self.getInferenceType() == InferenceType.TemporalNextStep: leafName = 'temporal'+ "-network.nta" else: leafName = 'nonTemporal'+ "-network.nta" else: leafName = InferenceType.getLabel(self.getInferenceType()) + "-network.nta" path = os.path.join(extraDataDir, leafName) path = os.path.abspath(path) return path
def function[__getNetworkStateDirectory, parameter[self, extraDataDir]]: constant[ extraDataDir: Model's extra data directory path Returns: Absolute directory path for saving CLA Network ] if name[self].__restoringFromV1 begin[:] if compare[call[name[self].getInferenceType, parameter[]] equal[==] name[InferenceType].TemporalNextStep] begin[:] variable[leafName] assign[=] binary_operation[constant[temporal] + constant[-network.nta]] variable[path] assign[=] call[name[os].path.join, parameter[name[extraDataDir], name[leafName]]] variable[path] assign[=] call[name[os].path.abspath, parameter[name[path]]] return[name[path]]
keyword[def] identifier[__getNetworkStateDirectory] ( identifier[self] , identifier[extraDataDir] ): literal[string] keyword[if] identifier[self] . identifier[__restoringFromV1] : keyword[if] identifier[self] . identifier[getInferenceType] ()== identifier[InferenceType] . identifier[TemporalNextStep] : identifier[leafName] = literal[string] + literal[string] keyword[else] : identifier[leafName] = literal[string] + literal[string] keyword[else] : identifier[leafName] = identifier[InferenceType] . identifier[getLabel] ( identifier[self] . identifier[getInferenceType] ())+ literal[string] identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[extraDataDir] , identifier[leafName] ) identifier[path] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[path] ) keyword[return] identifier[path]
def __getNetworkStateDirectory(self, extraDataDir): """ extraDataDir: Model's extra data directory path Returns: Absolute directory path for saving CLA Network """ if self.__restoringFromV1: if self.getInferenceType() == InferenceType.TemporalNextStep: leafName = 'temporal' + '-network.nta' # depends on [control=['if'], data=[]] else: leafName = 'nonTemporal' + '-network.nta' # depends on [control=['if'], data=[]] else: leafName = InferenceType.getLabel(self.getInferenceType()) + '-network.nta' path = os.path.join(extraDataDir, leafName) path = os.path.abspath(path) return path
def to_query(self): """ Returns a json-serializable representation. """ return { self.name: { 'lang': self.lang, 'script': self.script, 'params': self.script_params } }
def function[to_query, parameter[self]]: constant[ Returns a json-serializable representation. ] return[dictionary[[<ast.Attribute object at 0x7da20c7cb1f0>], [<ast.Dict object at 0x7da20c7caa70>]]]
keyword[def] identifier[to_query] ( identifier[self] ): literal[string] keyword[return] { identifier[self] . identifier[name] :{ literal[string] : identifier[self] . identifier[lang] , literal[string] : identifier[self] . identifier[script] , literal[string] : identifier[self] . identifier[script_params] } }
def to_query(self): """ Returns a json-serializable representation. """ return {self.name: {'lang': self.lang, 'script': self.script, 'params': self.script_params}}
def put_conf(self, configuration, test=False): """Send the configuration to the satellite HTTP request to the satellite (POST /push_configuration) If test is True, store the configuration internally :param configuration: The conf to send (data depend on the satellite) :type configuration: :return: None """ logger.debug("Sending configuration to %s, %s %s", self.name, self.alive, self.reachable) # ---------- if test: setattr(self, 'unit_test_pushed_configuration', configuration) # print("*** unit tests - sent configuration %s: %s" % (self.name, configuration)) return True # ---------- return self.con.post('_push_configuration', {'conf': configuration}, wait=True)
def function[put_conf, parameter[self, configuration, test]]: constant[Send the configuration to the satellite HTTP request to the satellite (POST /push_configuration) If test is True, store the configuration internally :param configuration: The conf to send (data depend on the satellite) :type configuration: :return: None ] call[name[logger].debug, parameter[constant[Sending configuration to %s, %s %s], name[self].name, name[self].alive, name[self].reachable]] if name[test] begin[:] call[name[setattr], parameter[name[self], constant[unit_test_pushed_configuration], name[configuration]]] return[constant[True]] return[call[name[self].con.post, parameter[constant[_push_configuration], dictionary[[<ast.Constant object at 0x7da20c6a9420>], [<ast.Name object at 0x7da20c6abcd0>]]]]]
keyword[def] identifier[put_conf] ( identifier[self] , identifier[configuration] , identifier[test] = keyword[False] ): literal[string] identifier[logger] . identifier[debug] ( literal[string] , identifier[self] . identifier[name] , identifier[self] . identifier[alive] , identifier[self] . identifier[reachable] ) keyword[if] identifier[test] : identifier[setattr] ( identifier[self] , literal[string] , identifier[configuration] ) keyword[return] keyword[True] keyword[return] identifier[self] . identifier[con] . identifier[post] ( literal[string] ,{ literal[string] : identifier[configuration] }, identifier[wait] = keyword[True] )
def put_conf(self, configuration, test=False): """Send the configuration to the satellite HTTP request to the satellite (POST /push_configuration) If test is True, store the configuration internally :param configuration: The conf to send (data depend on the satellite) :type configuration: :return: None """ logger.debug('Sending configuration to %s, %s %s', self.name, self.alive, self.reachable) # ---------- if test: setattr(self, 'unit_test_pushed_configuration', configuration) # print("*** unit tests - sent configuration %s: %s" % (self.name, configuration)) return True # depends on [control=['if'], data=[]] # ---------- return self.con.post('_push_configuration', {'conf': configuration}, wait=True)
def _get_adjtime_timezone(): ''' Return the timezone in /etc/adjtime of the system clock ''' adjtime_file = '/etc/adjtime' if os.path.exists(adjtime_file): cmd = ['tail', '-n', '1', adjtime_file] return __salt__['cmd.run'](cmd, python_shell=False) elif os.path.exists('/dev/rtc'): raise CommandExecutionError( 'Unable to get hwclock timezone from ' + adjtime_file ) else: # There is no RTC. return None
def function[_get_adjtime_timezone, parameter[]]: constant[ Return the timezone in /etc/adjtime of the system clock ] variable[adjtime_file] assign[=] constant[/etc/adjtime] if call[name[os].path.exists, parameter[name[adjtime_file]]] begin[:] variable[cmd] assign[=] list[[<ast.Constant object at 0x7da207f02170>, <ast.Constant object at 0x7da207f02fb0>, <ast.Constant object at 0x7da207f01a80>, <ast.Name object at 0x7da207f02ec0>]] return[call[call[name[__salt__]][constant[cmd.run]], parameter[name[cmd]]]]
keyword[def] identifier[_get_adjtime_timezone] (): literal[string] identifier[adjtime_file] = literal[string] keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[adjtime_file] ): identifier[cmd] =[ literal[string] , literal[string] , literal[string] , identifier[adjtime_file] ] keyword[return] identifier[__salt__] [ literal[string] ]( identifier[cmd] , identifier[python_shell] = keyword[False] ) keyword[elif] identifier[os] . identifier[path] . identifier[exists] ( literal[string] ): keyword[raise] identifier[CommandExecutionError] ( literal[string] + identifier[adjtime_file] ) keyword[else] : keyword[return] keyword[None]
def _get_adjtime_timezone(): """ Return the timezone in /etc/adjtime of the system clock """ adjtime_file = '/etc/adjtime' if os.path.exists(adjtime_file): cmd = ['tail', '-n', '1', adjtime_file] return __salt__['cmd.run'](cmd, python_shell=False) # depends on [control=['if'], data=[]] elif os.path.exists('/dev/rtc'): raise CommandExecutionError('Unable to get hwclock timezone from ' + adjtime_file) # depends on [control=['if'], data=[]] else: # There is no RTC. return None
def get_params(self, deep=True): """Get parameters for this estimator. Parameters ---------- deep : bool, optional (default=True) If True, will return the parameters for this estimator and contained subobjects that are estimators. Returns ------- params : dict Parameter names mapped to their values. """ params = super(LGBMModel, self).get_params(deep=deep) params.update(self._other_params) return params
def function[get_params, parameter[self, deep]]: constant[Get parameters for this estimator. Parameters ---------- deep : bool, optional (default=True) If True, will return the parameters for this estimator and contained subobjects that are estimators. Returns ------- params : dict Parameter names mapped to their values. ] variable[params] assign[=] call[call[name[super], parameter[name[LGBMModel], name[self]]].get_params, parameter[]] call[name[params].update, parameter[name[self]._other_params]] return[name[params]]
keyword[def] identifier[get_params] ( identifier[self] , identifier[deep] = keyword[True] ): literal[string] identifier[params] = identifier[super] ( identifier[LGBMModel] , identifier[self] ). identifier[get_params] ( identifier[deep] = identifier[deep] ) identifier[params] . identifier[update] ( identifier[self] . identifier[_other_params] ) keyword[return] identifier[params]
def get_params(self, deep=True): """Get parameters for this estimator. Parameters ---------- deep : bool, optional (default=True) If True, will return the parameters for this estimator and contained subobjects that are estimators. Returns ------- params : dict Parameter names mapped to their values. """ params = super(LGBMModel, self).get_params(deep=deep) params.update(self._other_params) return params
def Fold(seglist1, seglist2): """ An iterator that generates the results of taking the intersection of seglist1 with each segment in seglist2 in turn. In each result, the segment start and stop values are adjusted to be with respect to the start of the corresponding segment in seglist2. See also the segmentlist_range() function. This has use in applications that wish to convert ranges of values to ranges relative to epoch boundaries. Below, a list of time intervals in hours is converted to a sequence of daily interval lists with times relative to midnight. Example: >>> from pycbc_glue.segments import * >>> x = segmentlist([segment(0, 13), segment(14, 20), segment(22, 36)]) >>> for y in Fold(x, segmentlist_range(0, 48, 24)): print y ... [segment(0, 13), segment(14, 20), segment(22, 24)] [segment(0, 12)] """ for seg in seglist2: yield (seglist1 & segments.segmentlist([seg])).shift(-seg[0])
def function[Fold, parameter[seglist1, seglist2]]: constant[ An iterator that generates the results of taking the intersection of seglist1 with each segment in seglist2 in turn. In each result, the segment start and stop values are adjusted to be with respect to the start of the corresponding segment in seglist2. See also the segmentlist_range() function. This has use in applications that wish to convert ranges of values to ranges relative to epoch boundaries. Below, a list of time intervals in hours is converted to a sequence of daily interval lists with times relative to midnight. Example: >>> from pycbc_glue.segments import * >>> x = segmentlist([segment(0, 13), segment(14, 20), segment(22, 36)]) >>> for y in Fold(x, segmentlist_range(0, 48, 24)): print y ... [segment(0, 13), segment(14, 20), segment(22, 24)] [segment(0, 12)] ] for taget[name[seg]] in starred[name[seglist2]] begin[:] <ast.Yield object at 0x7da18f00c220>
keyword[def] identifier[Fold] ( identifier[seglist1] , identifier[seglist2] ): literal[string] keyword[for] identifier[seg] keyword[in] identifier[seglist2] : keyword[yield] ( identifier[seglist1] & identifier[segments] . identifier[segmentlist] ([ identifier[seg] ])). identifier[shift] (- identifier[seg] [ literal[int] ])
def Fold(seglist1, seglist2): """ An iterator that generates the results of taking the intersection of seglist1 with each segment in seglist2 in turn. In each result, the segment start and stop values are adjusted to be with respect to the start of the corresponding segment in seglist2. See also the segmentlist_range() function. This has use in applications that wish to convert ranges of values to ranges relative to epoch boundaries. Below, a list of time intervals in hours is converted to a sequence of daily interval lists with times relative to midnight. Example: >>> from pycbc_glue.segments import * >>> x = segmentlist([segment(0, 13), segment(14, 20), segment(22, 36)]) >>> for y in Fold(x, segmentlist_range(0, 48, 24)): print y ... [segment(0, 13), segment(14, 20), segment(22, 24)] [segment(0, 12)] """ for seg in seglist2: yield (seglist1 & segments.segmentlist([seg])).shift(-seg[0]) # depends on [control=['for'], data=['seg']]
def Chueh_Prausnitz_Vc(zs, Vcs, nus): r'''Calculates critical volume of a mixture according to mixing rules in [1]_ with an interaction parameter. .. math:: V_{cm} = \sum_i^n \theta_i V_{ci} + \sum_i^n\sum_j^n(\theta_i \theta_j \nu_{ij})V_{ref} \theta = \frac{x_i V_{ci}^{2/3}}{\sum_{j=1}^n x_j V_{cj}^{2/3}} Parameters ---------- zs : float Mole fractions of all components Vcs : float Critical volumes of all components, [m^3/mol] nus : matrix Interaction parameters, [cm^3/mol] Returns ------- Vcm : float Critical volume of the mixture, [m^3/mol] Notes ----- All parameters, even if zero, must be given to this function. nu parameters are in cm^3/mol, but are converted to m^3/mol inside the function Examples -------- 1-butanol/benzene 0.4271/0.5729 mixture, Vcm = 268.096 mL/mol. >>> Chueh_Prausnitz_Vc([0.4271, 0.5729], [0.000273, 0.000256], [[0, 5.61847], [5.61847, 0]]) 0.00026620503424517445 References ---------- .. [1] Chueh, P. L., and J. M. Prausnitz. "Vapor-Liquid Equilibria at High Pressures: Calculation of Critical Temperatures, Volumes, and Pressures of Nonpolar Mixtures." AIChE Journal 13, no. 6 (November 1, 1967): 1107-13. doi:10.1002/aic.690130613. .. [2] Najafi, Hamidreza, Babak Maghbooli, and Mohammad Amin Sobati. "Prediction of True Critical Volume of Multi-Component Mixtures: Extending Fast Estimation Methods." Fluid Phase Equilibria 386 (January 25, 2015): 13-29. doi:10.1016/j.fluid.2014.11.008. ''' if not none_and_length_check([zs, Vcs]): # check same-length inputs raise Exception('Function inputs are incorrect format') denominator = sum(zs[i]*Vcs[i]**(2/3.) for i in range(len(zs))) Vcm = 0 for i in range(len(zs)): Vcm += zs[i]*Vcs[i]**(2/3.)*Vcs[i]/denominator for j in range(len(zs)): Vcm += (zs[i]*Vcs[i]**(2/3.)/denominator)*(zs[j]*Vcs[j]**(2/3.)/denominator)*nus[i][j]/1E6 return Vcm
def function[Chueh_Prausnitz_Vc, parameter[zs, Vcs, nus]]: constant[Calculates critical volume of a mixture according to mixing rules in [1]_ with an interaction parameter. .. math:: V_{cm} = \sum_i^n \theta_i V_{ci} + \sum_i^n\sum_j^n(\theta_i \theta_j \nu_{ij})V_{ref} \theta = \frac{x_i V_{ci}^{2/3}}{\sum_{j=1}^n x_j V_{cj}^{2/3}} Parameters ---------- zs : float Mole fractions of all components Vcs : float Critical volumes of all components, [m^3/mol] nus : matrix Interaction parameters, [cm^3/mol] Returns ------- Vcm : float Critical volume of the mixture, [m^3/mol] Notes ----- All parameters, even if zero, must be given to this function. nu parameters are in cm^3/mol, but are converted to m^3/mol inside the function Examples -------- 1-butanol/benzene 0.4271/0.5729 mixture, Vcm = 268.096 mL/mol. >>> Chueh_Prausnitz_Vc([0.4271, 0.5729], [0.000273, 0.000256], [[0, 5.61847], [5.61847, 0]]) 0.00026620503424517445 References ---------- .. [1] Chueh, P. L., and J. M. Prausnitz. "Vapor-Liquid Equilibria at High Pressures: Calculation of Critical Temperatures, Volumes, and Pressures of Nonpolar Mixtures." AIChE Journal 13, no. 6 (November 1, 1967): 1107-13. doi:10.1002/aic.690130613. .. [2] Najafi, Hamidreza, Babak Maghbooli, and Mohammad Amin Sobati. "Prediction of True Critical Volume of Multi-Component Mixtures: Extending Fast Estimation Methods." Fluid Phase Equilibria 386 (January 25, 2015): 13-29. doi:10.1016/j.fluid.2014.11.008. ] if <ast.UnaryOp object at 0x7da18f00d000> begin[:] <ast.Raise object at 0x7da18f00dc30> variable[denominator] assign[=] call[name[sum], parameter[<ast.GeneratorExp object at 0x7da18f00e6b0>]] variable[Vcm] assign[=] constant[0] for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[zs]]]]]] begin[:] <ast.AugAssign object at 0x7da18f00f580> for taget[name[j]] in starred[call[name[range], parameter[call[name[len], parameter[name[zs]]]]]] begin[:] <ast.AugAssign object at 0x7da18f00efb0> return[name[Vcm]]
keyword[def] identifier[Chueh_Prausnitz_Vc] ( identifier[zs] , identifier[Vcs] , identifier[nus] ): literal[string] keyword[if] keyword[not] identifier[none_and_length_check] ([ identifier[zs] , identifier[Vcs] ]): keyword[raise] identifier[Exception] ( literal[string] ) identifier[denominator] = identifier[sum] ( identifier[zs] [ identifier[i] ]* identifier[Vcs] [ identifier[i] ]**( literal[int] / literal[int] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[zs] ))) identifier[Vcm] = literal[int] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[zs] )): identifier[Vcm] += identifier[zs] [ identifier[i] ]* identifier[Vcs] [ identifier[i] ]**( literal[int] / literal[int] )* identifier[Vcs] [ identifier[i] ]/ identifier[denominator] keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[len] ( identifier[zs] )): identifier[Vcm] +=( identifier[zs] [ identifier[i] ]* identifier[Vcs] [ identifier[i] ]**( literal[int] / literal[int] )/ identifier[denominator] )*( identifier[zs] [ identifier[j] ]* identifier[Vcs] [ identifier[j] ]**( literal[int] / literal[int] )/ identifier[denominator] )* identifier[nus] [ identifier[i] ][ identifier[j] ]/ literal[int] keyword[return] identifier[Vcm]
def Chueh_Prausnitz_Vc(zs, Vcs, nus): """Calculates critical volume of a mixture according to mixing rules in [1]_ with an interaction parameter. .. math:: V_{cm} = \\sum_i^n \\theta_i V_{ci} + \\sum_i^n\\sum_j^n(\\theta_i \\theta_j \\nu_{ij})V_{ref} \\theta = \\frac{x_i V_{ci}^{2/3}}{\\sum_{j=1}^n x_j V_{cj}^{2/3}} Parameters ---------- zs : float Mole fractions of all components Vcs : float Critical volumes of all components, [m^3/mol] nus : matrix Interaction parameters, [cm^3/mol] Returns ------- Vcm : float Critical volume of the mixture, [m^3/mol] Notes ----- All parameters, even if zero, must be given to this function. nu parameters are in cm^3/mol, but are converted to m^3/mol inside the function Examples -------- 1-butanol/benzene 0.4271/0.5729 mixture, Vcm = 268.096 mL/mol. >>> Chueh_Prausnitz_Vc([0.4271, 0.5729], [0.000273, 0.000256], [[0, 5.61847], [5.61847, 0]]) 0.00026620503424517445 References ---------- .. [1] Chueh, P. L., and J. M. Prausnitz. "Vapor-Liquid Equilibria at High Pressures: Calculation of Critical Temperatures, Volumes, and Pressures of Nonpolar Mixtures." AIChE Journal 13, no. 6 (November 1, 1967): 1107-13. doi:10.1002/aic.690130613. .. [2] Najafi, Hamidreza, Babak Maghbooli, and Mohammad Amin Sobati. "Prediction of True Critical Volume of Multi-Component Mixtures: Extending Fast Estimation Methods." Fluid Phase Equilibria 386 (January 25, 2015): 13-29. doi:10.1016/j.fluid.2014.11.008. """ if not none_and_length_check([zs, Vcs]): # check same-length inputs raise Exception('Function inputs are incorrect format') # depends on [control=['if'], data=[]] denominator = sum((zs[i] * Vcs[i] ** (2 / 3.0) for i in range(len(zs)))) Vcm = 0 for i in range(len(zs)): Vcm += zs[i] * Vcs[i] ** (2 / 3.0) * Vcs[i] / denominator for j in range(len(zs)): Vcm += zs[i] * Vcs[i] ** (2 / 3.0) / denominator * (zs[j] * Vcs[j] ** (2 / 3.0) / denominator) * nus[i][j] / 1000000.0 # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']] return Vcm
def parse_headers(fp, _class=http.client.HTTPMessage): """Parses only RFC2822 headers from a file pointer. email Parser wants to see strings rather than bytes. But a TextIOWrapper around self.rfile would buffer too many bytes from the stream, bytes which we later need to read as bytes. So we read the correct bytes here, as bytes, for email Parser to parse. Note: Monkey-patched version to try to more intelligently determine header encoding """ headers = [] while True: line = fp.readline(http.client._MAXLINE + 1) if len(line) > http.client._MAXLINE: raise http.client.LineTooLong("header line") headers.append(line) if len(headers) > http.client._MAXHEADERS: raise HTTPException("got more than %d headers" % http.client._MAXHEADERS) if line in (b'\r\n', b'\n', b''): break decoded_headers = decode_headers(headers) hstring = ''.join(decoded_headers) return email.parser.Parser(_class=_class).parsestr(hstring)
def function[parse_headers, parameter[fp, _class]]: constant[Parses only RFC2822 headers from a file pointer. email Parser wants to see strings rather than bytes. But a TextIOWrapper around self.rfile would buffer too many bytes from the stream, bytes which we later need to read as bytes. So we read the correct bytes here, as bytes, for email Parser to parse. Note: Monkey-patched version to try to more intelligently determine header encoding ] variable[headers] assign[=] list[[]] while constant[True] begin[:] variable[line] assign[=] call[name[fp].readline, parameter[binary_operation[name[http].client._MAXLINE + constant[1]]]] if compare[call[name[len], parameter[name[line]]] greater[>] name[http].client._MAXLINE] begin[:] <ast.Raise object at 0x7da18f09eec0> call[name[headers].append, parameter[name[line]]] if compare[call[name[len], parameter[name[headers]]] greater[>] name[http].client._MAXHEADERS] begin[:] <ast.Raise object at 0x7da18f09d8a0> if compare[name[line] in tuple[[<ast.Constant object at 0x7da18f09dbd0>, <ast.Constant object at 0x7da18f09fd30>, <ast.Constant object at 0x7da18f09ddb0>]]] begin[:] break variable[decoded_headers] assign[=] call[name[decode_headers], parameter[name[headers]]] variable[hstring] assign[=] call[constant[].join, parameter[name[decoded_headers]]] return[call[call[name[email].parser.Parser, parameter[]].parsestr, parameter[name[hstring]]]]
keyword[def] identifier[parse_headers] ( identifier[fp] , identifier[_class] = identifier[http] . identifier[client] . identifier[HTTPMessage] ): literal[string] identifier[headers] =[] keyword[while] keyword[True] : identifier[line] = identifier[fp] . identifier[readline] ( identifier[http] . identifier[client] . identifier[_MAXLINE] + literal[int] ) keyword[if] identifier[len] ( identifier[line] )> identifier[http] . identifier[client] . identifier[_MAXLINE] : keyword[raise] identifier[http] . identifier[client] . identifier[LineTooLong] ( literal[string] ) identifier[headers] . identifier[append] ( identifier[line] ) keyword[if] identifier[len] ( identifier[headers] )> identifier[http] . identifier[client] . identifier[_MAXHEADERS] : keyword[raise] identifier[HTTPException] ( literal[string] % identifier[http] . identifier[client] . identifier[_MAXHEADERS] ) keyword[if] identifier[line] keyword[in] ( literal[string] , literal[string] , literal[string] ): keyword[break] identifier[decoded_headers] = identifier[decode_headers] ( identifier[headers] ) identifier[hstring] = literal[string] . identifier[join] ( identifier[decoded_headers] ) keyword[return] identifier[email] . identifier[parser] . identifier[Parser] ( identifier[_class] = identifier[_class] ). identifier[parsestr] ( identifier[hstring] )
def parse_headers(fp, _class=http.client.HTTPMessage): """Parses only RFC2822 headers from a file pointer. email Parser wants to see strings rather than bytes. But a TextIOWrapper around self.rfile would buffer too many bytes from the stream, bytes which we later need to read as bytes. So we read the correct bytes here, as bytes, for email Parser to parse. Note: Monkey-patched version to try to more intelligently determine header encoding """ headers = [] while True: line = fp.readline(http.client._MAXLINE + 1) if len(line) > http.client._MAXLINE: raise http.client.LineTooLong('header line') # depends on [control=['if'], data=[]] headers.append(line) if len(headers) > http.client._MAXHEADERS: raise HTTPException('got more than %d headers' % http.client._MAXHEADERS) # depends on [control=['if'], data=[]] if line in (b'\r\n', b'\n', b''): break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] decoded_headers = decode_headers(headers) hstring = ''.join(decoded_headers) return email.parser.Parser(_class=_class).parsestr(hstring)
def sentences(self, nb=3, ext_word_list=None): """ Generate an array of sentences :example ['Lorem ipsum dolor sit amet.', 'Consectetur adipisicing eli.'] Keyword arguments: :param nb: how many sentences to return :param ext_word_list: a list of words you would like to have instead of 'Lorem ipsum'. :rtype: list """ return [self.sentence(ext_word_list=ext_word_list) for _ in range(0, nb)]
def function[sentences, parameter[self, nb, ext_word_list]]: constant[ Generate an array of sentences :example ['Lorem ipsum dolor sit amet.', 'Consectetur adipisicing eli.'] Keyword arguments: :param nb: how many sentences to return :param ext_word_list: a list of words you would like to have instead of 'Lorem ipsum'. :rtype: list ] return[<ast.ListComp object at 0x7da207f99c00>]
keyword[def] identifier[sentences] ( identifier[self] , identifier[nb] = literal[int] , identifier[ext_word_list] = keyword[None] ): literal[string] keyword[return] [ identifier[self] . identifier[sentence] ( identifier[ext_word_list] = identifier[ext_word_list] ) keyword[for] identifier[_] keyword[in] identifier[range] ( literal[int] , identifier[nb] )]
def sentences(self, nb=3, ext_word_list=None): """ Generate an array of sentences :example ['Lorem ipsum dolor sit amet.', 'Consectetur adipisicing eli.'] Keyword arguments: :param nb: how many sentences to return :param ext_word_list: a list of words you would like to have instead of 'Lorem ipsum'. :rtype: list """ return [self.sentence(ext_word_list=ext_word_list) for _ in range(0, nb)]
def substring_index(str, delim, count): """ Returns the substring from string str before count occurrences of the delimiter delim. If count is positive, everything the left of the final delimiter (counting from left) is returned. If count is negative, every to the right of the final delimiter (counting from the right) is returned. substring_index performs a case-sensitive match when searching for delim. >>> df = spark.createDataFrame([('a.b.c.d',)], ['s']) >>> df.select(substring_index(df.s, '.', 2).alias('s')).collect() [Row(s=u'a.b')] >>> df.select(substring_index(df.s, '.', -3).alias('s')).collect() [Row(s=u'b.c.d')] """ sc = SparkContext._active_spark_context return Column(sc._jvm.functions.substring_index(_to_java_column(str), delim, count))
def function[substring_index, parameter[str, delim, count]]: constant[ Returns the substring from string str before count occurrences of the delimiter delim. If count is positive, everything the left of the final delimiter (counting from left) is returned. If count is negative, every to the right of the final delimiter (counting from the right) is returned. substring_index performs a case-sensitive match when searching for delim. >>> df = spark.createDataFrame([('a.b.c.d',)], ['s']) >>> df.select(substring_index(df.s, '.', 2).alias('s')).collect() [Row(s=u'a.b')] >>> df.select(substring_index(df.s, '.', -3).alias('s')).collect() [Row(s=u'b.c.d')] ] variable[sc] assign[=] name[SparkContext]._active_spark_context return[call[name[Column], parameter[call[name[sc]._jvm.functions.substring_index, parameter[call[name[_to_java_column], parameter[name[str]]], name[delim], name[count]]]]]]
keyword[def] identifier[substring_index] ( identifier[str] , identifier[delim] , identifier[count] ): literal[string] identifier[sc] = identifier[SparkContext] . identifier[_active_spark_context] keyword[return] identifier[Column] ( identifier[sc] . identifier[_jvm] . identifier[functions] . identifier[substring_index] ( identifier[_to_java_column] ( identifier[str] ), identifier[delim] , identifier[count] ))
def substring_index(str, delim, count): """ Returns the substring from string str before count occurrences of the delimiter delim. If count is positive, everything the left of the final delimiter (counting from left) is returned. If count is negative, every to the right of the final delimiter (counting from the right) is returned. substring_index performs a case-sensitive match when searching for delim. >>> df = spark.createDataFrame([('a.b.c.d',)], ['s']) >>> df.select(substring_index(df.s, '.', 2).alias('s')).collect() [Row(s=u'a.b')] >>> df.select(substring_index(df.s, '.', -3).alias('s')).collect() [Row(s=u'b.c.d')] """ sc = SparkContext._active_spark_context return Column(sc._jvm.functions.substring_index(_to_java_column(str), delim, count))
def fix_music(file_name): ''' Searches for '.mp3' files in directory (optionally recursive) and checks whether they already contain album art and album name tags or not. ''' setup() if not Py3: file_name = file_name.encode('utf-8') tags = File(file_name) log.log(file_name) log.log('> Adding metadata') try: artist, album, song_name, lyrics, match_bool, score = get_details_spotify( file_name) # Try finding details through spotify except Exception: artist, album, song_name, lyrics, match_bool, score = get_details_letssingit( file_name) # Use bad scraping method as last resort try: log.log_indented('* Trying to extract album art from Google.com') albumart = albumsearch.img_search_google(artist+' '+album) except Exception: log.log_indented('* Trying to extract album art from Bing.com') albumart = albumsearch.img_search_bing(artist+' '+album) if match_bool: add_albumart(albumart, file_name) add_details(file_name, song_name, artist, album, lyrics) try: rename(file_name, artist+' - '+song_name+'.mp3') except Exception: log.log_error("Couldn't rename file") pass else: log.log_error( "* Couldn't find appropriate details of your song", indented=True) log.log("Match score: %s/10.0" % round(score * 10, 1)) log.log(LOG_LINE_SEPERATOR) log.log_success()
def function[fix_music, parameter[file_name]]: constant[ Searches for '.mp3' files in directory (optionally recursive) and checks whether they already contain album art and album name tags or not. ] call[name[setup], parameter[]] if <ast.UnaryOp object at 0x7da1b27bb520> begin[:] variable[file_name] assign[=] call[name[file_name].encode, parameter[constant[utf-8]]] variable[tags] assign[=] call[name[File], parameter[name[file_name]]] call[name[log].log, parameter[name[file_name]]] call[name[log].log, parameter[constant[> Adding metadata]]] <ast.Try object at 0x7da1b27bafb0> <ast.Try object at 0x7da1b27bb9d0> if name[match_bool] begin[:] call[name[add_albumart], parameter[name[albumart], name[file_name]]] call[name[add_details], parameter[name[file_name], name[song_name], name[artist], name[album], name[lyrics]]] <ast.Try object at 0x7da20e9b07c0> call[name[log].log, parameter[binary_operation[constant[Match score: %s/10.0] <ast.Mod object at 0x7da2590d6920> call[name[round], parameter[binary_operation[name[score] * constant[10]], constant[1]]]]]] call[name[log].log, parameter[name[LOG_LINE_SEPERATOR]]] call[name[log].log_success, parameter[]]
keyword[def] identifier[fix_music] ( identifier[file_name] ): literal[string] identifier[setup] () keyword[if] keyword[not] identifier[Py3] : identifier[file_name] = identifier[file_name] . identifier[encode] ( literal[string] ) identifier[tags] = identifier[File] ( identifier[file_name] ) identifier[log] . identifier[log] ( identifier[file_name] ) identifier[log] . identifier[log] ( literal[string] ) keyword[try] : identifier[artist] , identifier[album] , identifier[song_name] , identifier[lyrics] , identifier[match_bool] , identifier[score] = identifier[get_details_spotify] ( identifier[file_name] ) keyword[except] identifier[Exception] : identifier[artist] , identifier[album] , identifier[song_name] , identifier[lyrics] , identifier[match_bool] , identifier[score] = identifier[get_details_letssingit] ( identifier[file_name] ) keyword[try] : identifier[log] . identifier[log_indented] ( literal[string] ) identifier[albumart] = identifier[albumsearch] . identifier[img_search_google] ( identifier[artist] + literal[string] + identifier[album] ) keyword[except] identifier[Exception] : identifier[log] . identifier[log_indented] ( literal[string] ) identifier[albumart] = identifier[albumsearch] . identifier[img_search_bing] ( identifier[artist] + literal[string] + identifier[album] ) keyword[if] identifier[match_bool] : identifier[add_albumart] ( identifier[albumart] , identifier[file_name] ) identifier[add_details] ( identifier[file_name] , identifier[song_name] , identifier[artist] , identifier[album] , identifier[lyrics] ) keyword[try] : identifier[rename] ( identifier[file_name] , identifier[artist] + literal[string] + identifier[song_name] + literal[string] ) keyword[except] identifier[Exception] : identifier[log] . identifier[log_error] ( literal[string] ) keyword[pass] keyword[else] : identifier[log] . identifier[log_error] ( literal[string] , identifier[indented] = keyword[True] ) identifier[log] . identifier[log] ( literal[string] % identifier[round] ( identifier[score] * literal[int] , literal[int] )) identifier[log] . identifier[log] ( identifier[LOG_LINE_SEPERATOR] ) identifier[log] . identifier[log_success] ()
def fix_music(file_name): """ Searches for '.mp3' files in directory (optionally recursive) and checks whether they already contain album art and album name tags or not. """ setup() if not Py3: file_name = file_name.encode('utf-8') # depends on [control=['if'], data=[]] tags = File(file_name) log.log(file_name) log.log('> Adding metadata') try: (artist, album, song_name, lyrics, match_bool, score) = get_details_spotify(file_name) # Try finding details through spotify # depends on [control=['try'], data=[]] except Exception: (artist, album, song_name, lyrics, match_bool, score) = get_details_letssingit(file_name) # Use bad scraping method as last resort # depends on [control=['except'], data=[]] try: log.log_indented('* Trying to extract album art from Google.com') albumart = albumsearch.img_search_google(artist + ' ' + album) # depends on [control=['try'], data=[]] except Exception: log.log_indented('* Trying to extract album art from Bing.com') albumart = albumsearch.img_search_bing(artist + ' ' + album) # depends on [control=['except'], data=[]] if match_bool: add_albumart(albumart, file_name) add_details(file_name, song_name, artist, album, lyrics) try: rename(file_name, artist + ' - ' + song_name + '.mp3') # depends on [control=['try'], data=[]] except Exception: log.log_error("Couldn't rename file") pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] else: log.log_error("* Couldn't find appropriate details of your song", indented=True) log.log('Match score: %s/10.0' % round(score * 10, 1)) log.log(LOG_LINE_SEPERATOR) log.log_success()
def update_pop(self): """Assigns fitnesses to particles that are within bounds.""" valid_particles = [] invalid_particles = [] for part in self.population: if any(x > 1 or x < -1 for x in part): invalid_particles.append(part) else: valid_particles.append(part) self._params['model_count'] += len(valid_particles) for part in valid_particles: self.update_particle(part) self.assign_fitnesses(valid_particles) for part in valid_particles: if part.fitness > part.best.fitness: part.best = creator.Particle(part) part.best.fitness = part.fitness for part in invalid_particles: self.update_particle(part) self.population[:] = valid_particles + invalid_particles self.population.sort(key=lambda x: x.ident)
def function[update_pop, parameter[self]]: constant[Assigns fitnesses to particles that are within bounds.] variable[valid_particles] assign[=] list[[]] variable[invalid_particles] assign[=] list[[]] for taget[name[part]] in starred[name[self].population] begin[:] if call[name[any], parameter[<ast.GeneratorExp object at 0x7da1b2626860>]] begin[:] call[name[invalid_particles].append, parameter[name[part]]] <ast.AugAssign object at 0x7da1b2624d60> for taget[name[part]] in starred[name[valid_particles]] begin[:] call[name[self].update_particle, parameter[name[part]]] call[name[self].assign_fitnesses, parameter[name[valid_particles]]] for taget[name[part]] in starred[name[valid_particles]] begin[:] if compare[name[part].fitness greater[>] name[part].best.fitness] begin[:] name[part].best assign[=] call[name[creator].Particle, parameter[name[part]]] name[part].best.fitness assign[=] name[part].fitness for taget[name[part]] in starred[name[invalid_particles]] begin[:] call[name[self].update_particle, parameter[name[part]]] call[name[self].population][<ast.Slice object at 0x7da1b2625030>] assign[=] binary_operation[name[valid_particles] + name[invalid_particles]] call[name[self].population.sort, parameter[]]
keyword[def] identifier[update_pop] ( identifier[self] ): literal[string] identifier[valid_particles] =[] identifier[invalid_particles] =[] keyword[for] identifier[part] keyword[in] identifier[self] . identifier[population] : keyword[if] identifier[any] ( identifier[x] > literal[int] keyword[or] identifier[x] <- literal[int] keyword[for] identifier[x] keyword[in] identifier[part] ): identifier[invalid_particles] . identifier[append] ( identifier[part] ) keyword[else] : identifier[valid_particles] . identifier[append] ( identifier[part] ) identifier[self] . identifier[_params] [ literal[string] ]+= identifier[len] ( identifier[valid_particles] ) keyword[for] identifier[part] keyword[in] identifier[valid_particles] : identifier[self] . identifier[update_particle] ( identifier[part] ) identifier[self] . identifier[assign_fitnesses] ( identifier[valid_particles] ) keyword[for] identifier[part] keyword[in] identifier[valid_particles] : keyword[if] identifier[part] . identifier[fitness] > identifier[part] . identifier[best] . identifier[fitness] : identifier[part] . identifier[best] = identifier[creator] . identifier[Particle] ( identifier[part] ) identifier[part] . identifier[best] . identifier[fitness] = identifier[part] . identifier[fitness] keyword[for] identifier[part] keyword[in] identifier[invalid_particles] : identifier[self] . identifier[update_particle] ( identifier[part] ) identifier[self] . identifier[population] [:]= identifier[valid_particles] + identifier[invalid_particles] identifier[self] . identifier[population] . identifier[sort] ( identifier[key] = keyword[lambda] identifier[x] : identifier[x] . identifier[ident] )
def update_pop(self): """Assigns fitnesses to particles that are within bounds.""" valid_particles = [] invalid_particles = [] for part in self.population: if any((x > 1 or x < -1 for x in part)): invalid_particles.append(part) # depends on [control=['if'], data=[]] else: valid_particles.append(part) # depends on [control=['for'], data=['part']] self._params['model_count'] += len(valid_particles) for part in valid_particles: self.update_particle(part) # depends on [control=['for'], data=['part']] self.assign_fitnesses(valid_particles) for part in valid_particles: if part.fitness > part.best.fitness: part.best = creator.Particle(part) part.best.fitness = part.fitness # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['part']] for part in invalid_particles: self.update_particle(part) # depends on [control=['for'], data=['part']] self.population[:] = valid_particles + invalid_particles self.population.sort(key=lambda x: x.ident)
def _st_decode(self, msg): """ST: Temperature update.""" group = int(msg[4:5]) temperature = int(msg[7:10]) if group == 0: temperature -= 60 elif group == 1: temperature -= 40 return {'group': group, 'device': int(msg[5:7])-1, 'temperature': temperature}
def function[_st_decode, parameter[self, msg]]: constant[ST: Temperature update.] variable[group] assign[=] call[name[int], parameter[call[name[msg]][<ast.Slice object at 0x7da18eb577c0>]]] variable[temperature] assign[=] call[name[int], parameter[call[name[msg]][<ast.Slice object at 0x7da18eb54490>]]] if compare[name[group] equal[==] constant[0]] begin[:] <ast.AugAssign object at 0x7da18eb57640> return[dictionary[[<ast.Constant object at 0x7da18eb54d00>, <ast.Constant object at 0x7da18eb56e60>, <ast.Constant object at 0x7da18eb57a30>], [<ast.Name object at 0x7da18eb55f90>, <ast.BinOp object at 0x7da18eb55ab0>, <ast.Name object at 0x7da18eb54eb0>]]]
keyword[def] identifier[_st_decode] ( identifier[self] , identifier[msg] ): literal[string] identifier[group] = identifier[int] ( identifier[msg] [ literal[int] : literal[int] ]) identifier[temperature] = identifier[int] ( identifier[msg] [ literal[int] : literal[int] ]) keyword[if] identifier[group] == literal[int] : identifier[temperature] -= literal[int] keyword[elif] identifier[group] == literal[int] : identifier[temperature] -= literal[int] keyword[return] { literal[string] : identifier[group] , literal[string] : identifier[int] ( identifier[msg] [ literal[int] : literal[int] ])- literal[int] , literal[string] : identifier[temperature] }
def _st_decode(self, msg): """ST: Temperature update.""" group = int(msg[4:5]) temperature = int(msg[7:10]) if group == 0: temperature -= 60 # depends on [control=['if'], data=[]] elif group == 1: temperature -= 40 # depends on [control=['if'], data=[]] return {'group': group, 'device': int(msg[5:7]) - 1, 'temperature': temperature}
def run(self): """ Actual run method that starts the processing of jobs and initiates the status polling, or performs job cancelling or cleaning, depending on the task parameters. """ task = self.task self._outputs = self.output() # create the job dashboard interface self.dashboard = task.create_job_dashboard() or NoJobDashboard() # read submission data and reset some values submitted = not task.ignore_submission and self._outputs["submission"].exists() if submitted: self.submission_data.update(self._outputs["submission"].load(formatter="json")) task.tasks_per_job = self.submission_data.tasks_per_job self.dashboard.apply_config(self.submission_data.dashboard_config) # when the branch outputs, i.e. the "collection" exists, just create dummy control outputs if "collection" in self._outputs and self._outputs["collection"].exists(): self.touch_control_outputs() # cancel jobs? elif self._cancel_jobs: if submitted: self.cancel() # cleanup jobs? elif self._cleanup_jobs: if submitted: self.cleanup() # submit and/or wait while polling else: # maybe set a tracking url tracking_url = self.dashboard.create_tracking_url() if tracking_url: task.set_tracking_url(tracking_url) # ensure the output directory exists if not submitted: self._outputs["submission"].parent.touch() # at this point, when the status file exists, it is considered outdated if "status" in self._outputs: self._outputs["status"].remove() try: # instantiate the configured job file factory, not kwargs yet self.job_file_factory = self.create_job_file_factory() # submit if not submitted: # set the initial list of unsubmitted jobs branches = sorted(task.branch_map.keys()) branch_chunks = list(iter_chunks(branches, task.tasks_per_job)) self.submission_data.unsubmitted_jobs = OrderedDict( (i + 1, branches) for i, branches in enumerate(branch_chunks) ) self.submit() # sleep once to give the job interface time to register the jobs post_submit_delay = self._get_task_attribute("post_submit_delay")() if post_submit_delay: time.sleep(post_submit_delay) # start status polling when a) no_poll is not set, or b) the jobs were already # submitted so that failed jobs are resubmitted after a single polling iteration if not task.no_poll or submitted: self.poll() finally: # in any event, cleanup the job file if self.job_file_factory: self.job_file_factory.cleanup_dir(force=False)
def function[run, parameter[self]]: constant[ Actual run method that starts the processing of jobs and initiates the status polling, or performs job cancelling or cleaning, depending on the task parameters. ] variable[task] assign[=] name[self].task name[self]._outputs assign[=] call[name[self].output, parameter[]] name[self].dashboard assign[=] <ast.BoolOp object at 0x7da1b0557040> variable[submitted] assign[=] <ast.BoolOp object at 0x7da1b05575b0> if name[submitted] begin[:] call[name[self].submission_data.update, parameter[call[call[name[self]._outputs][constant[submission]].load, parameter[]]]] name[task].tasks_per_job assign[=] name[self].submission_data.tasks_per_job call[name[self].dashboard.apply_config, parameter[name[self].submission_data.dashboard_config]] if <ast.BoolOp object at 0x7da1b0555e10> begin[:] call[name[self].touch_control_outputs, parameter[]]
keyword[def] identifier[run] ( identifier[self] ): literal[string] identifier[task] = identifier[self] . identifier[task] identifier[self] . identifier[_outputs] = identifier[self] . identifier[output] () identifier[self] . identifier[dashboard] = identifier[task] . identifier[create_job_dashboard] () keyword[or] identifier[NoJobDashboard] () identifier[submitted] = keyword[not] identifier[task] . identifier[ignore_submission] keyword[and] identifier[self] . identifier[_outputs] [ literal[string] ]. identifier[exists] () keyword[if] identifier[submitted] : identifier[self] . identifier[submission_data] . identifier[update] ( identifier[self] . identifier[_outputs] [ literal[string] ]. identifier[load] ( identifier[formatter] = literal[string] )) identifier[task] . identifier[tasks_per_job] = identifier[self] . identifier[submission_data] . identifier[tasks_per_job] identifier[self] . identifier[dashboard] . identifier[apply_config] ( identifier[self] . identifier[submission_data] . identifier[dashboard_config] ) keyword[if] literal[string] keyword[in] identifier[self] . identifier[_outputs] keyword[and] identifier[self] . identifier[_outputs] [ literal[string] ]. identifier[exists] (): identifier[self] . identifier[touch_control_outputs] () keyword[elif] identifier[self] . identifier[_cancel_jobs] : keyword[if] identifier[submitted] : identifier[self] . identifier[cancel] () keyword[elif] identifier[self] . identifier[_cleanup_jobs] : keyword[if] identifier[submitted] : identifier[self] . identifier[cleanup] () keyword[else] : identifier[tracking_url] = identifier[self] . identifier[dashboard] . identifier[create_tracking_url] () keyword[if] identifier[tracking_url] : identifier[task] . identifier[set_tracking_url] ( identifier[tracking_url] ) keyword[if] keyword[not] identifier[submitted] : identifier[self] . identifier[_outputs] [ literal[string] ]. identifier[parent] . identifier[touch] () keyword[if] literal[string] keyword[in] identifier[self] . identifier[_outputs] : identifier[self] . identifier[_outputs] [ literal[string] ]. identifier[remove] () keyword[try] : identifier[self] . identifier[job_file_factory] = identifier[self] . identifier[create_job_file_factory] () keyword[if] keyword[not] identifier[submitted] : identifier[branches] = identifier[sorted] ( identifier[task] . identifier[branch_map] . identifier[keys] ()) identifier[branch_chunks] = identifier[list] ( identifier[iter_chunks] ( identifier[branches] , identifier[task] . identifier[tasks_per_job] )) identifier[self] . identifier[submission_data] . identifier[unsubmitted_jobs] = identifier[OrderedDict] ( ( identifier[i] + literal[int] , identifier[branches] ) keyword[for] identifier[i] , identifier[branches] keyword[in] identifier[enumerate] ( identifier[branch_chunks] ) ) identifier[self] . identifier[submit] () identifier[post_submit_delay] = identifier[self] . identifier[_get_task_attribute] ( literal[string] )() keyword[if] identifier[post_submit_delay] : identifier[time] . identifier[sleep] ( identifier[post_submit_delay] ) keyword[if] keyword[not] identifier[task] . identifier[no_poll] keyword[or] identifier[submitted] : identifier[self] . identifier[poll] () keyword[finally] : keyword[if] identifier[self] . identifier[job_file_factory] : identifier[self] . identifier[job_file_factory] . identifier[cleanup_dir] ( identifier[force] = keyword[False] )
def run(self): """ Actual run method that starts the processing of jobs and initiates the status polling, or performs job cancelling or cleaning, depending on the task parameters. """ task = self.task self._outputs = self.output() # create the job dashboard interface self.dashboard = task.create_job_dashboard() or NoJobDashboard() # read submission data and reset some values submitted = not task.ignore_submission and self._outputs['submission'].exists() if submitted: self.submission_data.update(self._outputs['submission'].load(formatter='json')) task.tasks_per_job = self.submission_data.tasks_per_job self.dashboard.apply_config(self.submission_data.dashboard_config) # depends on [control=['if'], data=[]] # when the branch outputs, i.e. the "collection" exists, just create dummy control outputs if 'collection' in self._outputs and self._outputs['collection'].exists(): self.touch_control_outputs() # depends on [control=['if'], data=[]] # cancel jobs? elif self._cancel_jobs: if submitted: self.cancel() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # cleanup jobs? elif self._cleanup_jobs: if submitted: self.cleanup() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: # submit and/or wait while polling # maybe set a tracking url tracking_url = self.dashboard.create_tracking_url() if tracking_url: task.set_tracking_url(tracking_url) # depends on [control=['if'], data=[]] # ensure the output directory exists if not submitted: self._outputs['submission'].parent.touch() # depends on [control=['if'], data=[]] # at this point, when the status file exists, it is considered outdated if 'status' in self._outputs: self._outputs['status'].remove() # depends on [control=['if'], data=[]] try: # instantiate the configured job file factory, not kwargs yet self.job_file_factory = self.create_job_file_factory() # submit if not submitted: # set the initial list of unsubmitted jobs branches = sorted(task.branch_map.keys()) branch_chunks = list(iter_chunks(branches, task.tasks_per_job)) self.submission_data.unsubmitted_jobs = OrderedDict(((i + 1, branches) for (i, branches) in enumerate(branch_chunks))) self.submit() # sleep once to give the job interface time to register the jobs post_submit_delay = self._get_task_attribute('post_submit_delay')() if post_submit_delay: time.sleep(post_submit_delay) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # start status polling when a) no_poll is not set, or b) the jobs were already # submitted so that failed jobs are resubmitted after a single polling iteration if not task.no_poll or submitted: self.poll() # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] finally: # in any event, cleanup the job file if self.job_file_factory: self.job_file_factory.cleanup_dir(force=False) # depends on [control=['if'], data=[]]