Dataset columns: code (string, 75 to 104k chars), code_sememe (string, 47 to 309k chars), token_type (string, 215 to 214k chars), code_dependency (string, 75 to 155k chars).
def deserialize(cls, value):
    """
    Creates a new Node instance via a JSON map string.

    Note that `port` and `ip` are required keys for the JSON map,
    `peer` and `host` are optional.

    If `peer` is not present, the new Node instance will use the
    current peer. If `host` is not present, the hostname of the
    given `ip` is looked up.
    """
    if getattr(value, "decode", None):
        value = value.decode()

    logger.debug("Deserializing node data: '%s'", value)
    parsed = json.loads(value)

    if "port" not in parsed:
        raise ValueError("No port defined for node.")
    if "ip" not in parsed:
        raise ValueError("No IP address defined for node.")
    if "host" not in parsed:
        host, aliases, ip_list = socket.gethostbyaddr(parsed["ip"])
        parsed["host"] = socket.getfqdn(host)

    if "peer" in parsed:
        peer = Peer.deserialize(parsed["peer"])
    else:
        peer = None

    return cls(
        parsed["host"], parsed["ip"], parsed["port"],
        peer=peer, metadata=parsed.get("metadata")
    )
def function[deserialize, parameter[cls, value]]: constant[ Creates a new Node instance via a JSON map string. Note that `port` and `ip` and are required keys for the JSON map, `peer` and `host` are optional. If `peer` is not present, the new Node instance will use the current peer. If `host` is not present, the hostname of the given `ip` is looked up. ] if call[name[getattr], parameter[name[value], constant[decode], constant[None]]] begin[:] variable[value] assign[=] call[name[value].decode, parameter[]] call[name[logger].debug, parameter[constant[Deserializing node data: '%s'], name[value]]] variable[parsed] assign[=] call[name[json].loads, parameter[name[value]]] if compare[constant[port] <ast.NotIn object at 0x7da2590d7190> name[parsed]] begin[:] <ast.Raise object at 0x7da2049621d0> if compare[constant[ip] <ast.NotIn object at 0x7da2590d7190> name[parsed]] begin[:] <ast.Raise object at 0x7da2049610f0> if compare[constant[host] <ast.NotIn object at 0x7da2590d7190> name[parsed]] begin[:] <ast.Tuple object at 0x7da204961120> assign[=] call[name[socket].gethostbyaddr, parameter[call[name[parsed]][constant[ip]]]] call[name[parsed]][constant[host]] assign[=] call[name[socket].get_fqdn, parameter[name[host]]] if compare[constant[peer] in name[parsed]] begin[:] variable[peer] assign[=] call[name[Peer].deserialize, parameter[call[name[parsed]][constant[peer]]]] return[call[name[cls], parameter[call[name[parsed]][constant[host]], call[name[parsed]][constant[ip]], call[name[parsed]][constant[port]]]]]
keyword[def] identifier[deserialize] ( identifier[cls] , identifier[value] ): literal[string] keyword[if] identifier[getattr] ( identifier[value] , literal[string] , keyword[None] ): identifier[value] = identifier[value] . identifier[decode] () identifier[logger] . identifier[debug] ( literal[string] , identifier[value] ) identifier[parsed] = identifier[json] . identifier[loads] ( identifier[value] ) keyword[if] literal[string] keyword[not] keyword[in] identifier[parsed] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] literal[string] keyword[not] keyword[in] identifier[parsed] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] literal[string] keyword[not] keyword[in] identifier[parsed] : identifier[host] , identifier[aliases] , identifier[ip_list] = identifier[socket] . identifier[gethostbyaddr] ( identifier[parsed] [ literal[string] ]) identifier[parsed] [ literal[string] ]= identifier[socket] . identifier[get_fqdn] ( identifier[host] ) keyword[if] literal[string] keyword[in] identifier[parsed] : identifier[peer] = identifier[Peer] . identifier[deserialize] ( identifier[parsed] [ literal[string] ]) keyword[else] : identifier[peer] = keyword[None] keyword[return] identifier[cls] ( identifier[parsed] [ literal[string] ], identifier[parsed] [ literal[string] ], identifier[parsed] [ literal[string] ], identifier[peer] = identifier[peer] , identifier[metadata] = identifier[parsed] . identifier[get] ( literal[string] ) )
def deserialize(cls, value): """ Creates a new Node instance via a JSON map string. Note that `port` and `ip` and are required keys for the JSON map, `peer` and `host` are optional. If `peer` is not present, the new Node instance will use the current peer. If `host` is not present, the hostname of the given `ip` is looked up. """ if getattr(value, 'decode', None): value = value.decode() # depends on [control=['if'], data=[]] logger.debug("Deserializing node data: '%s'", value) parsed = json.loads(value) if 'port' not in parsed: raise ValueError('No port defined for node.') # depends on [control=['if'], data=[]] if 'ip' not in parsed: raise ValueError('No IP address defined for node.') # depends on [control=['if'], data=[]] if 'host' not in parsed: (host, aliases, ip_list) = socket.gethostbyaddr(parsed['ip']) parsed['host'] = socket.get_fqdn(host) # depends on [control=['if'], data=['parsed']] if 'peer' in parsed: peer = Peer.deserialize(parsed['peer']) # depends on [control=['if'], data=['parsed']] else: peer = None return cls(parsed['host'], parsed['ip'], parsed['port'], peer=peer, metadata=parsed.get('metadata'))
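For reference, a minimal usage sketch of the `deserialize` entry above. It assumes the function is a classmethod on a `Node` class and that `json`, `socket`, `logger`, and `Peer` are available in that module; none of that context is part of this row:

    payload = '{"host": "node1.example.com", "ip": "10.0.0.5", "port": 7000}'
    node = Node.deserialize(payload)           # "host" given, so no reverse DNS lookup
    node = Node.deserialize(payload.encode())  # bytes input is decoded before parsing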
def camel_to_underscore(name):
    """Convert camel case name to underscore name.

    Examples::

        >>> camel_to_underscore('HttpRequest')
        'http_request'
        >>> camel_to_underscore('httpRequest')
        'http_request'
        >>> camel_to_underscore('HTTPRequest')
        'http_request'
        >>> camel_to_underscore('myHTTPRequest')
        'my_http_request'
        >>> camel_to_underscore('MyHTTPRequest')
        'my_http_request'
        >>> camel_to_underscore('my_http_request')
        'my_http_request'
        >>> camel_to_underscore('MyHTTPRequestXYZ')
        'my_http_request_xyz'
        >>> camel_to_underscore('_HTTPRequest')
        '_http_request'
        >>> camel_to_underscore('Request')
        'request'
        >>> camel_to_underscore('REQUEST')
        'request'
        >>> camel_to_underscore('_Request')
        '_request'
        >>> camel_to_underscore('__Request')
        '__request'
        >>> camel_to_underscore('_request')
        '_request'
        >>> camel_to_underscore('Request_')
        'request_'
    """
    name = re.sub(r'(?<!\b)(?<!_)([A-Z][a-z])', r'_\1', name)
    name = re.sub(r'(?<!\b)(?<!_)([a-z])([A-Z])', r'\1_\2', name)
    name = name.lower()
    return name
def function[camel_to_underscore, parameter[name]]: constant[Convert camel case name to underscore name. Examples:: >>> camel_to_underscore('HttpRequest') 'http_request' >>> camel_to_underscore('httpRequest') 'http_request' >>> camel_to_underscore('HTTPRequest') 'http_request' >>> camel_to_underscore('myHTTPRequest') 'my_http_request' >>> camel_to_underscore('MyHTTPRequest') 'my_http_request' >>> camel_to_underscore('my_http_request') 'my_http_request' >>> camel_to_underscore('MyHTTPRequestXYZ') 'my_http_request_xyz' >>> camel_to_underscore('_HTTPRequest') '_http_request' >>> camel_to_underscore('Request') 'request' >>> camel_to_underscore('REQUEST') 'request' >>> camel_to_underscore('_Request') '_request' >>> camel_to_underscore('__Request') '__request' >>> camel_to_underscore('_request') '_request' >>> camel_to_underscore('Request_') 'request_' ] variable[name] assign[=] call[name[re].sub, parameter[constant[(?<!\b)(?<!_)([A-Z][a-z])], constant[_\1], name[name]]] variable[name] assign[=] call[name[re].sub, parameter[constant[(?<!\b)(?<!_)([a-z])([A-Z])], constant[\1_\2], name[name]]] variable[name] assign[=] call[name[name].lower, parameter[]] return[name[name]]
keyword[def] identifier[camel_to_underscore] ( identifier[name] ): literal[string] identifier[name] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[name] ) identifier[name] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[name] ) identifier[name] = identifier[name] . identifier[lower] () keyword[return] identifier[name]
def camel_to_underscore(name): """Convert camel case name to underscore name. Examples:: >>> camel_to_underscore('HttpRequest') 'http_request' >>> camel_to_underscore('httpRequest') 'http_request' >>> camel_to_underscore('HTTPRequest') 'http_request' >>> camel_to_underscore('myHTTPRequest') 'my_http_request' >>> camel_to_underscore('MyHTTPRequest') 'my_http_request' >>> camel_to_underscore('my_http_request') 'my_http_request' >>> camel_to_underscore('MyHTTPRequestXYZ') 'my_http_request_xyz' >>> camel_to_underscore('_HTTPRequest') '_http_request' >>> camel_to_underscore('Request') 'request' >>> camel_to_underscore('REQUEST') 'request' >>> camel_to_underscore('_Request') '_request' >>> camel_to_underscore('__Request') '__request' >>> camel_to_underscore('_request') '_request' >>> camel_to_underscore('Request_') 'request_' """ name = re.sub('(?<!\\b)(?<!_)([A-Z][a-z])', '_\\1', name) name = re.sub('(?<!\\b)(?<!_)([a-z])([A-Z])', '\\1_\\2', name) name = name.lower() return name
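The two substitutions divide the work: the first pass splits an acronym from a following capitalized word, the second splits lowercase-to-uppercase transitions, and the lookbehinds keep word starts and existing underscores intact. A minimal trace using the same patterns:

    import re

    name = 'MyHTTPRequestXYZ'
    step1 = re.sub(r'(?<!\b)(?<!_)([A-Z][a-z])', r'_\1', name)       # 'MyHTTP_RequestXYZ'
    step2 = re.sub(r'(?<!\b)(?<!_)([a-z])([A-Z])', r'\1_\2', step1)  # 'My_HTTP_Request_XYZ'
    print(step2.lower())                                             # my_http_request_xyz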
def persist(name, value, config=None):
    '''
    Assign and persist a simple sysctl parameter for this minion. If ``config``
    is not specified, a sensible default will be chosen using
    :mod:`sysctl.default_config <salt.modules.linux_sysctl.default_config>`.

    CLI Example:

    .. code-block:: bash

        salt '*' sysctl.persist net.ipv4.ip_forward 1
    '''
    if config is None:
        config = default_config()
    edited = False
    # If the sysctl.conf is not present, add it
    if not os.path.isfile(config):
        sysctl_dir = os.path.dirname(config)
        if not os.path.exists(sysctl_dir):
            os.makedirs(sysctl_dir)
        try:
            with salt.utils.files.fopen(config, 'w+') as _fh:
                _fh.write('#\n# Kernel sysctl configuration\n#\n')
        except (IOError, OSError):
            msg = 'Could not write to file: {0}'
            raise CommandExecutionError(msg.format(config))
    # Read the existing sysctl.conf
    nlines = []
    try:
        with salt.utils.files.fopen(config, 'r') as _fh:
            # Use readlines because this should be a small file
            # and it seems unnecessary to indent the below for
            # loop since it is a fairly large block of code.
            config_data = salt.utils.data.decode(_fh.readlines())
    except (IOError, OSError):
        msg = 'Could not read from file: {0}'
        raise CommandExecutionError(msg.format(config))
    for line in config_data:
        if line.startswith('#'):
            nlines.append(line)
            continue
        if '=' not in line:
            nlines.append(line)
            continue
        # Strip trailing whitespace and split the k,v
        comps = [i.strip() for i in line.split('=', 1)]
        # On Linux procfs, files such as /proc/sys/net/ipv4/tcp_rmem or any
        # other sysctl with whitespace in it consistently uses 1 tab. Let's
        # allow our users to put a space or tab between multi-value sysctls
        # and have salt not try to set it every single time.
        if isinstance(comps[1], string_types) and ' ' in comps[1]:
            comps[1] = re.sub(r'\s+', '\t', comps[1])
        # Do the same thing for the value 'just in case'
        if isinstance(value, string_types) and ' ' in value:
            value = re.sub(r'\s+', '\t', value)
        if len(comps) < 2:
            nlines.append(line)
            continue
        if name == comps[0]:
            # This is the line to edit
            if six.text_type(comps[1]) == six.text_type(value):
                # It is correct in the config, check if it is correct in /proc
                if six.text_type(get(name)) != six.text_type(value):
                    assign(name, value)
                    return 'Updated'
                else:
                    return 'Already set'
            nlines.append('{0} = {1}\n'.format(name, value))
            edited = True
            continue
        else:
            nlines.append(line)
    if not edited:
        nlines.append('{0} = {1}\n'.format(name, value))
    try:
        with salt.utils.files.fopen(config, 'wb') as _fh:
            _fh.writelines(salt.utils.data.encode(nlines))
    except (IOError, OSError):
        msg = 'Could not write to file: {0}'
        raise CommandExecutionError(msg.format(config))
    assign(name, value)
    return 'Updated'
def function[persist, parameter[name, value, config]]: constant[ Assign and persist a simple sysctl parameter for this minion. If ``config`` is not specified, a sensible default will be chosen using :mod:`sysctl.default_config <salt.modules.linux_sysctl.default_config>`. CLI Example: .. code-block:: bash salt '*' sysctl.persist net.ipv4.ip_forward 1 ] if compare[name[config] is constant[None]] begin[:] variable[config] assign[=] call[name[default_config], parameter[]] variable[edited] assign[=] constant[False] if <ast.UnaryOp object at 0x7da18ede5930> begin[:] variable[sysctl_dir] assign[=] call[name[os].path.dirname, parameter[name[config]]] if <ast.UnaryOp object at 0x7da18ede4700> begin[:] call[name[os].makedirs, parameter[name[sysctl_dir]]] <ast.Try object at 0x7da18ede7610> variable[nlines] assign[=] list[[]] <ast.Try object at 0x7da18ede5cc0> for taget[name[line]] in starred[name[config_data]] begin[:] if call[name[line].startswith, parameter[constant[#]]] begin[:] call[name[nlines].append, parameter[name[line]]] continue if compare[constant[=] <ast.NotIn object at 0x7da2590d7190> name[line]] begin[:] call[name[nlines].append, parameter[name[line]]] continue variable[comps] assign[=] <ast.ListComp object at 0x7da1b1f492d0> if <ast.BoolOp object at 0x7da1b1f49420> begin[:] call[name[comps]][constant[1]] assign[=] call[name[re].sub, parameter[constant[\s+], constant[ ], call[name[comps]][constant[1]]]] if <ast.BoolOp object at 0x7da1b1f48e20> begin[:] variable[value] assign[=] call[name[re].sub, parameter[constant[\s+], constant[ ], name[value]]] if compare[call[name[len], parameter[name[comps]]] less[<] constant[2]] begin[:] call[name[nlines].append, parameter[name[line]]] continue if compare[name[name] equal[==] call[name[comps]][constant[0]]] begin[:] if compare[call[name[six].text_type, parameter[call[name[comps]][constant[1]]]] equal[==] call[name[six].text_type, parameter[name[value]]]] begin[:] if compare[call[name[six].text_type, parameter[call[name[get], parameter[name[name]]]]] not_equal[!=] call[name[six].text_type, parameter[name[value]]]] begin[:] call[name[assign], parameter[name[name], name[value]]] return[constant[Updated]] call[name[nlines].append, parameter[call[constant[{0} = {1} ].format, parameter[name[name], name[value]]]]] variable[edited] assign[=] constant[True] continue if <ast.UnaryOp object at 0x7da18ede57e0> begin[:] call[name[nlines].append, parameter[call[constant[{0} = {1} ].format, parameter[name[name], name[value]]]]] <ast.Try object at 0x7da18ede77c0> call[name[assign], parameter[name[name], name[value]]] return[constant[Updated]]
keyword[def] identifier[persist] ( identifier[name] , identifier[value] , identifier[config] = keyword[None] ): literal[string] keyword[if] identifier[config] keyword[is] keyword[None] : identifier[config] = identifier[default_config] () identifier[edited] = keyword[False] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[config] ): identifier[sysctl_dir] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[config] ) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[sysctl_dir] ): identifier[os] . identifier[makedirs] ( identifier[sysctl_dir] ) keyword[try] : keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fopen] ( identifier[config] , literal[string] ) keyword[as] identifier[_fh] : identifier[_fh] . identifier[write] ( literal[string] ) keyword[except] ( identifier[IOError] , identifier[OSError] ): identifier[msg] = literal[string] keyword[raise] identifier[CommandExecutionError] ( identifier[msg] . identifier[format] ( identifier[config] )) identifier[nlines] =[] keyword[try] : keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fopen] ( identifier[config] , literal[string] ) keyword[as] identifier[_fh] : identifier[config_data] = identifier[salt] . identifier[utils] . identifier[data] . identifier[decode] ( identifier[_fh] . identifier[readlines] ()) keyword[except] ( identifier[IOError] , identifier[OSError] ): identifier[msg] = literal[string] keyword[raise] identifier[CommandExecutionError] ( identifier[msg] . identifier[format] ( identifier[config] )) keyword[for] identifier[line] keyword[in] identifier[config_data] : keyword[if] identifier[line] . identifier[startswith] ( literal[string] ): identifier[nlines] . identifier[append] ( identifier[line] ) keyword[continue] keyword[if] literal[string] keyword[not] keyword[in] identifier[line] : identifier[nlines] . identifier[append] ( identifier[line] ) keyword[continue] identifier[comps] =[ identifier[i] . identifier[strip] () keyword[for] identifier[i] keyword[in] identifier[line] . identifier[split] ( literal[string] , literal[int] )] keyword[if] identifier[isinstance] ( identifier[comps] [ literal[int] ], identifier[string_types] ) keyword[and] literal[string] keyword[in] identifier[comps] [ literal[int] ]: identifier[comps] [ literal[int] ]= identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[comps] [ literal[int] ]) keyword[if] identifier[isinstance] ( identifier[value] , identifier[string_types] ) keyword[and] literal[string] keyword[in] identifier[value] : identifier[value] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[value] ) keyword[if] identifier[len] ( identifier[comps] )< literal[int] : identifier[nlines] . identifier[append] ( identifier[line] ) keyword[continue] keyword[if] identifier[name] == identifier[comps] [ literal[int] ]: keyword[if] identifier[six] . identifier[text_type] ( identifier[comps] [ literal[int] ])== identifier[six] . identifier[text_type] ( identifier[value] ): keyword[if] identifier[six] . identifier[text_type] ( identifier[get] ( identifier[name] ))!= identifier[six] . identifier[text_type] ( identifier[value] ): identifier[assign] ( identifier[name] , identifier[value] ) keyword[return] literal[string] keyword[else] : keyword[return] literal[string] identifier[nlines] . identifier[append] ( literal[string] . identifier[format] ( identifier[name] , identifier[value] )) identifier[edited] = keyword[True] keyword[continue] keyword[else] : identifier[nlines] . identifier[append] ( identifier[line] ) keyword[if] keyword[not] identifier[edited] : identifier[nlines] . identifier[append] ( literal[string] . identifier[format] ( identifier[name] , identifier[value] )) keyword[try] : keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fopen] ( identifier[config] , literal[string] ) keyword[as] identifier[_fh] : identifier[_fh] . identifier[writelines] ( identifier[salt] . identifier[utils] . identifier[data] . identifier[encode] ( identifier[nlines] )) keyword[except] ( identifier[IOError] , identifier[OSError] ): identifier[msg] = literal[string] keyword[raise] identifier[CommandExecutionError] ( identifier[msg] . identifier[format] ( identifier[config] )) identifier[assign] ( identifier[name] , identifier[value] ) keyword[return] literal[string]
def persist(name, value, config=None): """ Assign and persist a simple sysctl parameter for this minion. If ``config`` is not specified, a sensible default will be chosen using :mod:`sysctl.default_config <salt.modules.linux_sysctl.default_config>`. CLI Example: .. code-block:: bash salt '*' sysctl.persist net.ipv4.ip_forward 1 """ if config is None: config = default_config() # depends on [control=['if'], data=['config']] edited = False # If the sysctl.conf is not present, add it if not os.path.isfile(config): sysctl_dir = os.path.dirname(config) if not os.path.exists(sysctl_dir): os.makedirs(sysctl_dir) # depends on [control=['if'], data=[]] try: with salt.utils.files.fopen(config, 'w+') as _fh: _fh.write('#\n# Kernel sysctl configuration\n#\n') # depends on [control=['with'], data=['_fh']] # depends on [control=['try'], data=[]] except (IOError, OSError): msg = 'Could not write to file: {0}' raise CommandExecutionError(msg.format(config)) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # Read the existing sysctl.conf nlines = [] try: with salt.utils.files.fopen(config, 'r') as _fh: # Use readlines because this should be a small file # and it seems unnecessary to indent the below for # loop since it is a fairly large block of code. config_data = salt.utils.data.decode(_fh.readlines()) # depends on [control=['with'], data=['_fh']] # depends on [control=['try'], data=[]] except (IOError, OSError): msg = 'Could not read from file: {0}' raise CommandExecutionError(msg.format(config)) # depends on [control=['except'], data=[]] for line in config_data: if line.startswith('#'): nlines.append(line) continue # depends on [control=['if'], data=[]] if '=' not in line: nlines.append(line) continue # depends on [control=['if'], data=['line']] # Strip trailing whitespace and split the k,v comps = [i.strip() for i in line.split('=', 1)] # On Linux procfs, files such as /proc/sys/net/ipv4/tcp_rmem or any # other sysctl with whitespace in it consistently uses 1 tab. Lets # allow our users to put a space or tab between multi-value sysctls # and have salt not try to set it every single time. if isinstance(comps[1], string_types) and ' ' in comps[1]: comps[1] = re.sub('\\s+', '\t', comps[1]) # depends on [control=['if'], data=[]] # Do the same thing for the value 'just in case' if isinstance(value, string_types) and ' ' in value: value = re.sub('\\s+', '\t', value) # depends on [control=['if'], data=[]] if len(comps) < 2: nlines.append(line) continue # depends on [control=['if'], data=[]] if name == comps[0]: # This is the line to edit if six.text_type(comps[1]) == six.text_type(value): # It is correct in the config, check if it is correct in /proc if six.text_type(get(name)) != six.text_type(value): assign(name, value) return 'Updated' # depends on [control=['if'], data=[]] else: return 'Already set' # depends on [control=['if'], data=[]] nlines.append('{0} = {1}\n'.format(name, value)) edited = True continue # depends on [control=['if'], data=['name']] else: nlines.append(line) # depends on [control=['for'], data=['line']] if not edited: nlines.append('{0} = {1}\n'.format(name, value)) # depends on [control=['if'], data=[]] try: with salt.utils.files.fopen(config, 'wb') as _fh: _fh.writelines(salt.utils.data.encode(nlines)) # depends on [control=['with'], data=['_fh']] # depends on [control=['try'], data=[]] except (IOError, OSError): msg = 'Could not write to file: {0}' raise CommandExecutionError(msg.format(config)) # depends on [control=['except'], data=[]] assign(name, value) return 'Updated'
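The whitespace normalization in `persist` is what lets a configured multi-value sysctl compare equal to its /proc counterpart, which separates fields with a single tab. The substitution in isolation:

    import re

    # e.g. net.ipv4.tcp_rmem, whose /proc value is tab-separated
    print(re.sub(r'\s+', '\t', '4096 87380 6291456'))  # '4096\t87380\t6291456'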
def autosummary_table_visit_html(self, node):
    """Make the first column of the table non-breaking."""
    try:
        tbody = node[0][0][-1]
        for row in tbody:
            col1_entry = row[0]
            par = col1_entry[0]
            for j, subnode in enumerate(list(par)):
                if isinstance(subnode, nodes.Text):
                    new_text = unicode(subnode.astext())
                    new_text = new_text.replace(u" ", u"\u00a0")
                    par[j] = nodes.Text(new_text)
    except IndexError:
        pass
def function[autosummary_table_visit_html, parameter[self, node]]: constant[Make the first column of the table non-breaking.] <ast.Try object at 0x7da20c6abf10>
keyword[def] identifier[autosummary_table_visit_html] ( identifier[self] , identifier[node] ): literal[string] keyword[try] : identifier[tbody] = identifier[node] [ literal[int] ][ literal[int] ][- literal[int] ] keyword[for] identifier[row] keyword[in] identifier[tbody] : identifier[col1_entry] = identifier[row] [ literal[int] ] identifier[par] = identifier[col1_entry] [ literal[int] ] keyword[for] identifier[j] , identifier[subnode] keyword[in] identifier[enumerate] ( identifier[list] ( identifier[par] )): keyword[if] identifier[isinstance] ( identifier[subnode] , identifier[nodes] . identifier[Text] ): identifier[new_text] = identifier[unicode] ( identifier[subnode] . identifier[astext] ()) identifier[new_text] = identifier[new_text] . identifier[replace] ( literal[string] , literal[string] ) identifier[par] [ identifier[j] ]= identifier[nodes] . identifier[Text] ( identifier[new_text] ) keyword[except] identifier[IndexError] : keyword[pass]
def autosummary_table_visit_html(self, node): """Make the first column of the table non-breaking.""" try: tbody = node[0][0][-1] for row in tbody: col1_entry = row[0] par = col1_entry[0] for (j, subnode) in enumerate(list(par)): if isinstance(subnode, nodes.Text): new_text = unicode(subnode.astext()) new_text = new_text.replace(u' ', u'\xa0') par[j] = nodes.Text(new_text) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['row']] # depends on [control=['try'], data=[]] except IndexError: pass # depends on [control=['except'], data=[]]
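The substitution above swaps ordinary spaces for U+00A0 (NO-BREAK SPACE) so the first-column entry cannot wrap in the rendered HTML. In isolation (the `docutils` import is assumed, matching the `nodes` reference above):

    from docutils import nodes

    text = nodes.Text(u'my function'.replace(u' ', u'\u00a0'))
    print(repr(text.astext()))  # 'my\xa0function'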
def get_filters(self):
    """ Coroutine based filters for render pipeline. """
    return [
        self.compute_style_filter,
        self.render_filter,
        self.calc_widths_filter,
        self.format_row_filter,
        self.align_rows_filter,
    ]
def function[get_filters, parameter[self]]: constant[ Coroutine based filters for render pipeline. ] return[list[[<ast.Attribute object at 0x7da207f98790>, <ast.Attribute object at 0x7da207f9ab30>, <ast.Attribute object at 0x7da207f98400>, <ast.Attribute object at 0x7da207f991e0>, <ast.Attribute object at 0x7da207f98c10>]]]
keyword[def] identifier[get_filters] ( identifier[self] ): literal[string] keyword[return] [ identifier[self] . identifier[compute_style_filter] , identifier[self] . identifier[render_filter] , identifier[self] . identifier[calc_widths_filter] , identifier[self] . identifier[format_row_filter] , identifier[self] . identifier[align_rows_filter] , ]
def get_filters(self): """ Coroutine based filters for render pipeline. """ return [self.compute_style_filter, self.render_filter, self.calc_widths_filter, self.format_row_filter, self.align_rows_filter]
def constant_pad(X, multiple_of, up_down_rule='even', left_right_rule='even', pad_value=0):
    """Function pads an image of shape (rows, columns, channels) with a constant
    value (``pad_value``; zeros by default).

    It pads an image so that the shape becomes
    (rows + padded_rows, columns + padded_columns, channels), where

    padded_rows = (int(rows/multiple_of[0]) + 1) * multiple_of[0] - rows

    Same rule is applied to columns.

    :type X: array of shape (rows, columns, channels) or (rows, columns)
    :param multiple_of: make X' rows and columns multiple of this tuple
    :type multiple_of: tuple (rows, columns)
    :param up_down_rule: Add padded rows evenly to the top/bottom of the image,
        or up (top) / down (bottom) only
    :type up_down_rule: string, (even, up, down)
    :param left_right_rule: Add padded columns evenly to the left/right of the
        image, or left / right only
    :type left_right_rule: string, (even, left, right)
    :param pad_value: Value to be assigned to padded rows and columns
    :type pad_value: int
    """
    # pylint: disable=invalid-name
    shape = X.shape
    row_padding, col_padding = 0, 0
    if shape[0] % multiple_of[0]:
        row_padding = (int(shape[0] / multiple_of[0]) + 1) * multiple_of[0] - shape[0]
    if shape[1] % multiple_of[1]:
        col_padding = (int(shape[1] / multiple_of[1]) + 1) * multiple_of[1] - shape[1]
    row_padding_up, row_padding_down, col_padding_left, col_padding_right = 0, 0, 0, 0
    if row_padding > 0:
        if up_down_rule == 'up':
            row_padding_up = row_padding
        elif up_down_rule == 'down':
            row_padding_down = row_padding
        elif up_down_rule == 'even':
            row_padding_up = int(row_padding / 2)
            row_padding_down = row_padding_up + (row_padding % 2)
        else:
            raise ValueError('Padding rule for rows not supported. Choose between even, down or up!')
    if col_padding > 0:
        if left_right_rule == 'left':
            col_padding_left = col_padding
        elif left_right_rule == 'right':
            col_padding_right = col_padding
        elif left_right_rule == 'even':
            col_padding_left = int(col_padding / 2)
            col_padding_right = col_padding_left + (col_padding % 2)
        else:
            raise ValueError('Padding rule for columns not supported. Choose between even, left or right!')
    return np.lib.pad(X, ((row_padding_up, row_padding_down), (col_padding_left, col_padding_right)),
                      'constant', constant_values=((pad_value, pad_value), (pad_value, pad_value)))
def function[constant_pad, parameter[X, multiple_of, up_down_rule, left_right_rule, pad_value]]: constant[Function pads an image of shape (rows, columns, channels) with zeros. It pads an image so that the shape becomes (rows + padded_rows, columns + padded_columns, channels), where padded_rows = (int(rows/multiple_of[0]) + 1) * multiple_of[0] - rows Same rule is applied to columns. :type X: array of shape (rows, columns, channels) or (rows, columns) :param multiple_of: make X' rows and columns multiple of this tuple :type multiple_of: tuple (rows, columns) :param up_down_rule: Add padded rows evenly to the top/bottom of the image, or up (top) / down (bottom) only :type up_down_rule: up_down_rule: string, (even, up, down) :param up_down_rule: Add padded columns evenly to the left/right of the image, or left / right only :type up_down_rule: up_down_rule: string, (even, left, right) :param pad_value: Value to be assigned to padded rows and columns :type pad_value: int ] variable[shape] assign[=] name[X].shape <ast.Tuple object at 0x7da18f00e530> assign[=] tuple[[<ast.Constant object at 0x7da18f00d330>, <ast.Constant object at 0x7da18f00c5b0>]] if binary_operation[call[name[shape]][constant[0]] <ast.Mod object at 0x7da2590d6920> call[name[multiple_of]][constant[0]]] begin[:] variable[row_padding] assign[=] binary_operation[binary_operation[binary_operation[call[name[int], parameter[binary_operation[call[name[shape]][constant[0]] / call[name[multiple_of]][constant[0]]]]] + constant[1]] * call[name[multiple_of]][constant[0]]] - call[name[shape]][constant[0]]] if binary_operation[call[name[shape]][constant[1]] <ast.Mod object at 0x7da2590d6920> call[name[multiple_of]][constant[1]]] begin[:] variable[col_padding] assign[=] binary_operation[binary_operation[binary_operation[call[name[int], parameter[binary_operation[call[name[shape]][constant[1]] / call[name[multiple_of]][constant[1]]]]] + constant[1]] * call[name[multiple_of]][constant[1]]] - call[name[shape]][constant[1]]] <ast.Tuple object at 0x7da18f00dea0> assign[=] tuple[[<ast.Constant object at 0x7da18f00d1b0>, <ast.Constant object at 0x7da18f00e1a0>, <ast.Constant object at 0x7da18f00c8b0>, <ast.Constant object at 0x7da18f00e170>]] if compare[name[row_padding] greater[>] constant[0]] begin[:] if compare[name[up_down_rule] equal[==] constant[up]] begin[:] variable[row_padding_up] assign[=] name[row_padding] if compare[name[col_padding] greater[>] constant[0]] begin[:] if compare[name[left_right_rule] equal[==] constant[left]] begin[:] variable[col_padding_left] assign[=] name[col_padding] return[call[name[np].lib.pad, parameter[name[X], tuple[[<ast.Tuple object at 0x7da18f00dc90>, <ast.Tuple object at 0x7da18f00fa90>]], constant[constant]]]]
keyword[def] identifier[constant_pad] ( identifier[X] , identifier[multiple_of] , identifier[up_down_rule] = literal[string] , identifier[left_right_rule] = literal[string] , identifier[pad_value] = literal[int] ): literal[string] identifier[shape] = identifier[X] . identifier[shape] identifier[row_padding] , identifier[col_padding] = literal[int] , literal[int] keyword[if] identifier[shape] [ literal[int] ]% identifier[multiple_of] [ literal[int] ]: identifier[row_padding] =( identifier[int] ( identifier[shape] [ literal[int] ]/ identifier[multiple_of] [ literal[int] ])+ literal[int] )* identifier[multiple_of] [ literal[int] ]- identifier[shape] [ literal[int] ] keyword[if] identifier[shape] [ literal[int] ]% identifier[multiple_of] [ literal[int] ]: identifier[col_padding] =( identifier[int] ( identifier[shape] [ literal[int] ]/ identifier[multiple_of] [ literal[int] ])+ literal[int] )* identifier[multiple_of] [ literal[int] ]- identifier[shape] [ literal[int] ] identifier[row_padding_up] , identifier[row_padding_down] , identifier[col_padding_left] , identifier[col_padding_right] = literal[int] , literal[int] , literal[int] , literal[int] keyword[if] identifier[row_padding] > literal[int] : keyword[if] identifier[up_down_rule] == literal[string] : identifier[row_padding_up] = identifier[row_padding] keyword[elif] identifier[up_down_rule] == literal[string] : identifier[row_padding_down] = identifier[row_padding] keyword[elif] identifier[up_down_rule] == literal[string] : identifier[row_padding_up] = identifier[int] ( identifier[row_padding] / literal[int] ) identifier[row_padding_down] = identifier[row_padding_up] +( identifier[row_padding] % literal[int] ) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[col_padding] > literal[int] : keyword[if] identifier[left_right_rule] == literal[string] : identifier[col_padding_left] = identifier[col_padding] keyword[elif] identifier[left_right_rule] == literal[string] : identifier[col_padding_right] = identifier[col_padding] keyword[elif] identifier[left_right_rule] == literal[string] : identifier[col_padding_left] = identifier[int] ( identifier[col_padding] / literal[int] ) identifier[col_padding_right] = identifier[col_padding_left] +( identifier[col_padding] % literal[int] ) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[return] identifier[np] . identifier[lib] . identifier[pad] ( identifier[X] ,(( identifier[row_padding_up] , identifier[row_padding_down] ),( identifier[col_padding_left] , identifier[col_padding_right] )), literal[string] , identifier[constant_values] =(( identifier[pad_value] , identifier[pad_value] ),( identifier[pad_value] , identifier[pad_value] )))
def constant_pad(X, multiple_of, up_down_rule='even', left_right_rule='even', pad_value=0): """Function pads an image of shape (rows, columns, channels) with zeros. It pads an image so that the shape becomes (rows + padded_rows, columns + padded_columns, channels), where padded_rows = (int(rows/multiple_of[0]) + 1) * multiple_of[0] - rows Same rule is applied to columns. :type X: array of shape (rows, columns, channels) or (rows, columns) :param multiple_of: make X' rows and columns multiple of this tuple :type multiple_of: tuple (rows, columns) :param up_down_rule: Add padded rows evenly to the top/bottom of the image, or up (top) / down (bottom) only :type up_down_rule: up_down_rule: string, (even, up, down) :param up_down_rule: Add padded columns evenly to the left/right of the image, or left / right only :type up_down_rule: up_down_rule: string, (even, left, right) :param pad_value: Value to be assigned to padded rows and columns :type pad_value: int """ # pylint: disable=invalid-name shape = X.shape (row_padding, col_padding) = (0, 0) if shape[0] % multiple_of[0]: row_padding = (int(shape[0] / multiple_of[0]) + 1) * multiple_of[0] - shape[0] # depends on [control=['if'], data=[]] if shape[1] % multiple_of[1]: col_padding = (int(shape[1] / multiple_of[1]) + 1) * multiple_of[1] - shape[1] # depends on [control=['if'], data=[]] (row_padding_up, row_padding_down, col_padding_left, col_padding_right) = (0, 0, 0, 0) if row_padding > 0: if up_down_rule == 'up': row_padding_up = row_padding # depends on [control=['if'], data=[]] elif up_down_rule == 'down': row_padding_down = row_padding # depends on [control=['if'], data=[]] elif up_down_rule == 'even': row_padding_up = int(row_padding / 2) row_padding_down = row_padding_up + row_padding % 2 # depends on [control=['if'], data=[]] else: raise ValueError('Padding rule for rows not supported. Choose beteen even, down or up!') # depends on [control=['if'], data=['row_padding']] if col_padding > 0: if left_right_rule == 'left': col_padding_left = col_padding # depends on [control=['if'], data=[]] elif left_right_rule == 'right': col_padding_right = col_padding # depends on [control=['if'], data=[]] elif left_right_rule == 'even': col_padding_left = int(col_padding / 2) col_padding_right = col_padding_left + col_padding % 2 # depends on [control=['if'], data=[]] else: raise ValueError('Padding rule for columns not supported. Choose beteen even, left or right!') # depends on [control=['if'], data=['col_padding']] return np.lib.pad(X, ((row_padding_up, row_padding_down), (col_padding_left, col_padding_right)), 'constant', constant_values=((pad_value, pad_value), (pad_value, pad_value)))
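A quick check of the padding arithmetic in `constant_pad`, assuming `numpy` is imported as `np` and the function above is in scope:

    import numpy as np

    X = np.ones((5, 7))
    padded = constant_pad(X, (4, 4))
    # row_padding = (5 // 4 + 1) * 4 - 5 = 3, col_padding = (7 // 4 + 1) * 4 - 7 = 1
    print(padded.shape)  # (8, 8); 'even' puts 1 padded row on top and 2 on the bottom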
def GetBatchJobHelper(self, version=sorted(_SERVICE_MAP.keys())[-1], server=None):
    """Returns a BatchJobHelper to work with the BatchJobService.

    This is a convenience method. It is functionally identical to calling
    BatchJobHelper(adwords_client, version).

    Args:
      [optional]
      version: A string identifying the AdWords version to connect to. This
        defaults to what is currently the latest version. This will be updated
        in future releases to point to what is then the latest version.
      server: A string identifying the webserver hosting the AdWords API.

    Returns:
      An initialized BatchJobHelper tied to this client.
    """
    if not server:
        server = _DEFAULT_ENDPOINT

    request_builder = BatchJobHelper.GetRequestBuilder(
        self, version=version, server=server)
    response_parser = BatchJobHelper.GetResponseParser()

    return BatchJobHelper(request_builder, response_parser)
def function[GetBatchJobHelper, parameter[self, version, server]]: constant[Returns a BatchJobHelper to work with the BatchJobService. This is a convenience method. It is functionally identical to calling BatchJobHelper(adwords_client, version). Args: [optional] version: A string identifying the AdWords version to connect to. This defaults to what is currently the latest version. This will be updated in future releases to point to what is then the latest version. server: A string identifying the webserver hosting the AdWords API. Returns: An initialized BatchJobHelper tied to this client. ] if <ast.UnaryOp object at 0x7da1b1baed10> begin[:] variable[server] assign[=] name[_DEFAULT_ENDPOINT] variable[request_builder] assign[=] call[name[BatchJobHelper].GetRequestBuilder, parameter[name[self]]] variable[response_parser] assign[=] call[name[BatchJobHelper].GetResponseParser, parameter[]] return[call[name[BatchJobHelper], parameter[name[request_builder], name[response_parser]]]]
keyword[def] identifier[GetBatchJobHelper] ( identifier[self] , identifier[version] = identifier[sorted] ( identifier[_SERVICE_MAP] . identifier[keys] ())[- literal[int] ], identifier[server] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[server] : identifier[server] = identifier[_DEFAULT_ENDPOINT] identifier[request_builder] = identifier[BatchJobHelper] . identifier[GetRequestBuilder] ( identifier[self] , identifier[version] = identifier[version] , identifier[server] = identifier[server] ) identifier[response_parser] = identifier[BatchJobHelper] . identifier[GetResponseParser] () keyword[return] identifier[BatchJobHelper] ( identifier[request_builder] , identifier[response_parser] )
def GetBatchJobHelper(self, version=sorted(_SERVICE_MAP.keys())[-1], server=None): """Returns a BatchJobHelper to work with the BatchJobService. This is a convenience method. It is functionally identical to calling BatchJobHelper(adwords_client, version). Args: [optional] version: A string identifying the AdWords version to connect to. This defaults to what is currently the latest version. This will be updated in future releases to point to what is then the latest version. server: A string identifying the webserver hosting the AdWords API. Returns: An initialized BatchJobHelper tied to this client. """ if not server: server = _DEFAULT_ENDPOINT # depends on [control=['if'], data=[]] request_builder = BatchJobHelper.GetRequestBuilder(self, version=version, server=server) response_parser = BatchJobHelper.GetResponseParser() return BatchJobHelper(request_builder, response_parser)
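A typical call site, assuming an `AdWordsClient` bootstrapped the standard `googleads` way (the client setup and credentials file are not part of this row):

    from googleads import adwords

    client = adwords.AdWordsClient.LoadFromStorage('googleads.yaml')
    batch_job_helper = client.GetBatchJobHelper()  # latest version, default endpoint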
def create_cvmfs_persistent_volume_claim(cvmfs_volume):
    """Create CVMFS persistent volume claim."""
    from kubernetes.client.rest import ApiException
    from reana_commons.k8s.api_client import current_k8s_corev1_api_client
    try:
        current_k8s_corev1_api_client.create_namespaced_persistent_volume_claim(
            "default",
            render_cvmfs_pvc(cvmfs_volume)
        )
    except ApiException as e:
        if e.status != 409:
            raise e
def function[create_cvmfs_persistent_volume_claim, parameter[cvmfs_volume]]: constant[Create CVMFS persistent volume claim.] from relative_module[kubernetes.client.rest] import module[ApiException] from relative_module[reana_commons.k8s.api_client] import module[current_k8s_corev1_api_client] <ast.Try object at 0x7da1b04019f0>
keyword[def] identifier[create_cvmfs_persistent_volume_claim] ( identifier[cvmfs_volume] ): literal[string] keyword[from] identifier[kubernetes] . identifier[client] . identifier[rest] keyword[import] identifier[ApiException] keyword[from] identifier[reana_commons] . identifier[k8s] . identifier[api_client] keyword[import] identifier[current_k8s_corev1_api_client] keyword[try] : identifier[current_k8s_corev1_api_client] . identifier[create_namespaced_persistent_volume_claim] ( literal[string] , identifier[render_cvmfs_pvc] ( identifier[cvmfs_volume] ) ) keyword[except] identifier[ApiException] keyword[as] identifier[e] : keyword[if] identifier[e] . identifier[status] != literal[int] : keyword[raise] identifier[e]
def create_cvmfs_persistent_volume_claim(cvmfs_volume): """Create CVMFS persistent volume claim.""" from kubernetes.client.rest import ApiException from reana_commons.k8s.api_client import current_k8s_corev1_api_client try: current_k8s_corev1_api_client.create_namespaced_persistent_volume_claim('default', render_cvmfs_pvc(cvmfs_volume)) # depends on [control=['try'], data=[]] except ApiException as e: if e.status != 409: raise e # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['e']]
def is_all_field_none(self):
    """
    :rtype: bool
    """
    if self._monetary_account_id is not None:
        return False

    if self._alias is not None:
        return False

    if self._counterparty_alias is not None:
        return False

    if self._amount_guaranteed is not None:
        return False

    if self._amount_requested is not None:
        return False

    if self._expiration is not None:
        return False

    if self._issuer is not None:
        return False

    if self._issuer_name is not None:
        return False

    if self._issuer_authentication_url is not None:
        return False

    if self._purchase_identifier is not None:
        return False

    if self._status is not None:
        return False

    if self._status_timestamp is not None:
        return False

    if self._transaction_identifier is not None:
        return False

    if self._allow_chat is not None:
        return False

    return True
def function[is_all_field_none, parameter[self]]: constant[ :rtype: bool ] if compare[name[self]._monetary_account_id is_not constant[None]] begin[:] return[constant[False]] if compare[name[self]._alias is_not constant[None]] begin[:] return[constant[False]] if compare[name[self]._counterparty_alias is_not constant[None]] begin[:] return[constant[False]] if compare[name[self]._amount_guaranteed is_not constant[None]] begin[:] return[constant[False]] if compare[name[self]._amount_requested is_not constant[None]] begin[:] return[constant[False]] if compare[name[self]._expiration is_not constant[None]] begin[:] return[constant[False]] if compare[name[self]._issuer is_not constant[None]] begin[:] return[constant[False]] if compare[name[self]._issuer_name is_not constant[None]] begin[:] return[constant[False]] if compare[name[self]._issuer_authentication_url is_not constant[None]] begin[:] return[constant[False]] if compare[name[self]._purchase_identifier is_not constant[None]] begin[:] return[constant[False]] if compare[name[self]._status is_not constant[None]] begin[:] return[constant[False]] if compare[name[self]._status_timestamp is_not constant[None]] begin[:] return[constant[False]] if compare[name[self]._transaction_identifier is_not constant[None]] begin[:] return[constant[False]] if compare[name[self]._allow_chat is_not constant[None]] begin[:] return[constant[False]] return[constant[True]]
keyword[def] identifier[is_all_field_none] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_monetary_account_id] keyword[is] keyword[not] keyword[None] : keyword[return] keyword[False] keyword[if] identifier[self] . identifier[_alias] keyword[is] keyword[not] keyword[None] : keyword[return] keyword[False] keyword[if] identifier[self] . identifier[_counterparty_alias] keyword[is] keyword[not] keyword[None] : keyword[return] keyword[False] keyword[if] identifier[self] . identifier[_amount_guaranteed] keyword[is] keyword[not] keyword[None] : keyword[return] keyword[False] keyword[if] identifier[self] . identifier[_amount_requested] keyword[is] keyword[not] keyword[None] : keyword[return] keyword[False] keyword[if] identifier[self] . identifier[_expiration] keyword[is] keyword[not] keyword[None] : keyword[return] keyword[False] keyword[if] identifier[self] . identifier[_issuer] keyword[is] keyword[not] keyword[None] : keyword[return] keyword[False] keyword[if] identifier[self] . identifier[_issuer_name] keyword[is] keyword[not] keyword[None] : keyword[return] keyword[False] keyword[if] identifier[self] . identifier[_issuer_authentication_url] keyword[is] keyword[not] keyword[None] : keyword[return] keyword[False] keyword[if] identifier[self] . identifier[_purchase_identifier] keyword[is] keyword[not] keyword[None] : keyword[return] keyword[False] keyword[if] identifier[self] . identifier[_status] keyword[is] keyword[not] keyword[None] : keyword[return] keyword[False] keyword[if] identifier[self] . identifier[_status_timestamp] keyword[is] keyword[not] keyword[None] : keyword[return] keyword[False] keyword[if] identifier[self] . identifier[_transaction_identifier] keyword[is] keyword[not] keyword[None] : keyword[return] keyword[False] keyword[if] identifier[self] . identifier[_allow_chat] keyword[is] keyword[not] keyword[None] : keyword[return] keyword[False] keyword[return] keyword[True]
def is_all_field_none(self): """ :rtype: bool """ if self._monetary_account_id is not None: return False # depends on [control=['if'], data=[]] if self._alias is not None: return False # depends on [control=['if'], data=[]] if self._counterparty_alias is not None: return False # depends on [control=['if'], data=[]] if self._amount_guaranteed is not None: return False # depends on [control=['if'], data=[]] if self._amount_requested is not None: return False # depends on [control=['if'], data=[]] if self._expiration is not None: return False # depends on [control=['if'], data=[]] if self._issuer is not None: return False # depends on [control=['if'], data=[]] if self._issuer_name is not None: return False # depends on [control=['if'], data=[]] if self._issuer_authentication_url is not None: return False # depends on [control=['if'], data=[]] if self._purchase_identifier is not None: return False # depends on [control=['if'], data=[]] if self._status is not None: return False # depends on [control=['if'], data=[]] if self._status_timestamp is not None: return False # depends on [control=['if'], data=[]] if self._transaction_identifier is not None: return False # depends on [control=['if'], data=[]] if self._allow_chat is not None: return False # depends on [control=['if'], data=[]] return True
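Since every check in `is_all_field_none` follows the same pattern, an equivalent formulation folds them into a single `all()`; this is a sketch, not the library's actual code:

    def is_all_field_none(self):
        """
        :rtype: bool
        """
        fields = (
            self._monetary_account_id, self._alias, self._counterparty_alias,
            self._amount_guaranteed, self._amount_requested, self._expiration,
            self._issuer, self._issuer_name, self._issuer_authentication_url,
            self._purchase_identifier, self._status, self._status_timestamp,
            self._transaction_identifier, self._allow_chat,
        )
        return all(field is None for field in fields)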
def listdir(self, name):
    """
    TODO collect directories
    """
    return [], [obj.filename for obj in cloudstorage.listbucket(self.path(name))]
def function[listdir, parameter[self, name]]: constant[ TODO collect directories ] return[tuple[[<ast.List object at 0x7da20e9b1fc0>, <ast.ListComp object at 0x7da20e9b33d0>]]]
keyword[def] identifier[listdir] ( identifier[self] , identifier[name] ): literal[string] keyword[return] [],[ identifier[obj] . identifier[filename] keyword[for] identifier[obj] keyword[in] identifier[cloudstorage] . identifier[listbucket] ( identifier[self] . identifier[path] ( identifier[name] ))]
def listdir(self, name): """ TODO collect directories """ return ([], [obj.filename for obj in cloudstorage.listbucket(self.path(name))])
def roc(args):
    """ Calculate ROC_AUC and other metrics and optionally plot ROC curve."""
    outputfile = args.outfile
    # Default extension for image
    if outputfile and not outputfile.endswith(".png"):
        outputfile += ".png"

    motifs = read_motifs(args.pwmfile, fmt="pwm")

    ids = []
    if args.ids:
        ids = args.ids.split(",")
    else:
        ids = [m.id for m in motifs]
    motifs = [m for m in motifs if (m.id in ids)]

    stats = [
        "phyper_at_fpr",
        "roc_auc",
        "pr_auc",
        "enr_at_fpr",
        "recall_at_fdr",
        "roc_values",
        "matches_at_fpr",
    ]

    plot_x = []
    plot_y = []
    legend = []

    f_out = sys.stdout
    if args.outdir:
        if not os.path.exists(args.outdir):
            os.makedirs(args.outdir)
        f_out = open(args.outdir + "/gimme.roc.report.txt", "w")

    # Print the metrics
    f_out.write("Motif\t# matches\t# matches background\tP-value\tlog10 P-value\tROC AUC\tPR AUC\tEnr. at 1% FPR\tRecall at 10% FDR\n")

    for motif_stats in calc_stats_iterator(motifs, args.sample, args.background,
                                           genome=args.genome, stats=stats, ncpus=args.ncpus):
        for motif in motifs:
            if str(motif) in motif_stats:
                if outputfile:
                    x, y = motif_stats[str(motif)]["roc_values"]
                    plot_x.append(x)
                    plot_y.append(y)
                    legend.append(motif.id)
                log_pvalue = np.inf
                if motif_stats[str(motif)]["phyper_at_fpr"] > 0:
                    log_pvalue = -np.log10(motif_stats[str(motif)]["phyper_at_fpr"])
                f_out.write("{}\t{:d}\t{:d}\t{:.2e}\t{:.3f}\t{:.3f}\t{:.3f}\t{:.2f}\t{:0.4f}\n".format(
                    motif.id,
                    motif_stats[str(motif)]["matches_at_fpr"][0],
                    motif_stats[str(motif)]["matches_at_fpr"][1],
                    motif_stats[str(motif)]["phyper_at_fpr"],
                    log_pvalue,
                    motif_stats[str(motif)]["roc_auc"],
                    motif_stats[str(motif)]["pr_auc"],
                    motif_stats[str(motif)]["enr_at_fpr"],
                    motif_stats[str(motif)]["recall_at_fdr"],
                ))
    f_out.close()

    if args.outdir:
        html_report(
            args.outdir,
            args.outdir + "/gimme.roc.report.txt",
            args.pwmfile,
            0.01,
        )

    # Plot the ROC curve
    if outputfile:
        roc_plot(outputfile, plot_x, plot_y, ids=legend)
def function[roc, parameter[args]]: constant[ Calculate ROC_AUC and other metrics and optionally plot ROC curve.] variable[outputfile] assign[=] name[args].outfile if <ast.BoolOp object at 0x7da2044c30d0> begin[:] <ast.AugAssign object at 0x7da2044c25f0> variable[motifs] assign[=] call[name[read_motifs], parameter[name[args].pwmfile]] variable[ids] assign[=] list[[]] if name[args].ids begin[:] variable[ids] assign[=] call[name[args].ids.split, parameter[constant[,]]] variable[motifs] assign[=] <ast.ListComp object at 0x7da2044c2110> variable[stats] assign[=] list[[<ast.Constant object at 0x7da2044c0a30>, <ast.Constant object at 0x7da2044c08b0>, <ast.Constant object at 0x7da2044c2b30>, <ast.Constant object at 0x7da2044c03a0>, <ast.Constant object at 0x7da2044c3af0>, <ast.Constant object at 0x7da2044c3730>, <ast.Constant object at 0x7da2044c27a0>]] variable[plot_x] assign[=] list[[]] variable[plot_y] assign[=] list[[]] variable[legend] assign[=] list[[]] variable[f_out] assign[=] name[sys].stdout if name[args].outdir begin[:] if <ast.UnaryOp object at 0x7da2044c2230> begin[:] call[name[os].makedirs, parameter[name[args].outdir]] variable[f_out] assign[=] call[name[open], parameter[binary_operation[name[args].outdir + constant[/gimme.roc.report.txt]], constant[w]]] call[name[f_out].write, parameter[constant[Motif # matches # matches background P-value log10 P-value ROC AUC PR AUC Enr. at 1% FPR Recall at 10% FDR ]]] for taget[name[motif_stats]] in starred[call[name[calc_stats_iterator], parameter[name[motifs], name[args].sample, name[args].background]]] begin[:] for taget[name[motif]] in starred[name[motifs]] begin[:] if compare[call[name[str], parameter[name[motif]]] in name[motif_stats]] begin[:] if name[outputfile] begin[:] <ast.Tuple object at 0x7da2044c0c70> assign[=] call[call[name[motif_stats]][call[name[str], parameter[name[motif]]]]][constant[roc_values]] call[name[plot_x].append, parameter[name[x]]] call[name[plot_y].append, parameter[name[y]]] call[name[legend].append, parameter[name[motif].id]] variable[log_pvalue] assign[=] name[np].inf if compare[call[call[name[motif_stats]][call[name[str], parameter[name[motif]]]]][constant[phyper_at_fpr]] greater[>] constant[0]] begin[:] variable[log_pvalue] assign[=] <ast.UnaryOp object at 0x7da1b0fe8d00> call[name[f_out].write, parameter[call[constant[{} {:d} {:d} {:.2e} {:.3f} {:.3f} {:.3f} {:.2f} {:0.4f} ].format, parameter[name[motif].id, call[call[call[name[motif_stats]][call[name[str], parameter[name[motif]]]]][constant[matches_at_fpr]]][constant[0]], call[call[call[name[motif_stats]][call[name[str], parameter[name[motif]]]]][constant[matches_at_fpr]]][constant[1]], call[call[name[motif_stats]][call[name[str], parameter[name[motif]]]]][constant[phyper_at_fpr]], name[log_pvalue], call[call[name[motif_stats]][call[name[str], parameter[name[motif]]]]][constant[roc_auc]], call[call[name[motif_stats]][call[name[str], parameter[name[motif]]]]][constant[pr_auc]], call[call[name[motif_stats]][call[name[str], parameter[name[motif]]]]][constant[enr_at_fpr]], call[call[name[motif_stats]][call[name[str], parameter[name[motif]]]]][constant[recall_at_fdr]]]]]] call[name[f_out].close, parameter[]] if name[args].outdir begin[:] call[name[html_report], parameter[name[args].outdir, binary_operation[name[args].outdir + constant[/gimme.roc.report.txt]], name[args].pwmfile, constant[0.01]]] if name[outputfile] begin[:] call[name[roc_plot], parameter[name[outputfile], name[plot_x], name[plot_y]]]
keyword[def] identifier[roc] ( identifier[args] ): literal[string] identifier[outputfile] = identifier[args] . identifier[outfile] keyword[if] identifier[outputfile] keyword[and] keyword[not] identifier[outputfile] . identifier[endswith] ( literal[string] ): identifier[outputfile] += literal[string] identifier[motifs] = identifier[read_motifs] ( identifier[args] . identifier[pwmfile] , identifier[fmt] = literal[string] ) identifier[ids] =[] keyword[if] identifier[args] . identifier[ids] : identifier[ids] = identifier[args] . identifier[ids] . identifier[split] ( literal[string] ) keyword[else] : identifier[ids] =[ identifier[m] . identifier[id] keyword[for] identifier[m] keyword[in] identifier[motifs] ] identifier[motifs] =[ identifier[m] keyword[for] identifier[m] keyword[in] identifier[motifs] keyword[if] ( identifier[m] . identifier[id] keyword[in] identifier[ids] )] identifier[stats] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , ] identifier[plot_x] =[] identifier[plot_y] =[] identifier[legend] =[] identifier[f_out] = identifier[sys] . identifier[stdout] keyword[if] identifier[args] . identifier[outdir] : keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[args] . identifier[outdir] ): identifier[os] . identifier[makedirs] ( identifier[args] . identifier[outdir] ) identifier[f_out] = identifier[open] ( identifier[args] . identifier[outdir] + literal[string] , literal[string] ) identifier[f_out] . identifier[write] ( literal[string] ) keyword[for] identifier[motif_stats] keyword[in] identifier[calc_stats_iterator] ( identifier[motifs] , identifier[args] . identifier[sample] , identifier[args] . identifier[background] , identifier[genome] = identifier[args] . identifier[genome] , identifier[stats] = identifier[stats] , identifier[ncpus] = identifier[args] . identifier[ncpus] ): keyword[for] identifier[motif] keyword[in] identifier[motifs] : keyword[if] identifier[str] ( identifier[motif] ) keyword[in] identifier[motif_stats] : keyword[if] identifier[outputfile] : identifier[x] , identifier[y] = identifier[motif_stats] [ identifier[str] ( identifier[motif] )][ literal[string] ] identifier[plot_x] . identifier[append] ( identifier[x] ) identifier[plot_y] . identifier[append] ( identifier[y] ) identifier[legend] . identifier[append] ( identifier[motif] . identifier[id] ) identifier[log_pvalue] = identifier[np] . identifier[inf] keyword[if] identifier[motif_stats] [ identifier[str] ( identifier[motif] )][ literal[string] ]> literal[int] : identifier[log_pvalue] =- identifier[np] . identifier[log10] ( identifier[motif_stats] [ identifier[str] ( identifier[motif] )][ literal[string] ]) identifier[f_out] . identifier[write] ( literal[string] . identifier[format] ( identifier[motif] . identifier[id] , identifier[motif_stats] [ identifier[str] ( identifier[motif] )][ literal[string] ][ literal[int] ], identifier[motif_stats] [ identifier[str] ( identifier[motif] )][ literal[string] ][ literal[int] ], identifier[motif_stats] [ identifier[str] ( identifier[motif] )][ literal[string] ], identifier[log_pvalue] , identifier[motif_stats] [ identifier[str] ( identifier[motif] )][ literal[string] ], identifier[motif_stats] [ identifier[str] ( identifier[motif] )][ literal[string] ], identifier[motif_stats] [ identifier[str] ( identifier[motif] )][ literal[string] ], identifier[motif_stats] [ identifier[str] ( identifier[motif] )][ literal[string] ], )) identifier[f_out] . identifier[close] () keyword[if] identifier[args] . identifier[outdir] : identifier[html_report] ( identifier[args] . identifier[outdir] , identifier[args] . identifier[outdir] + literal[string] , identifier[args] . identifier[pwmfile] , literal[int] , ) keyword[if] identifier[outputfile] : identifier[roc_plot] ( identifier[outputfile] , identifier[plot_x] , identifier[plot_y] , identifier[ids] = identifier[legend] )
def roc(args): """ Calculate ROC_AUC and other metrics and optionally plot ROC curve.""" outputfile = args.outfile # Default extension for image if outputfile and (not outputfile.endswith('.png')): outputfile += '.png' # depends on [control=['if'], data=[]] motifs = read_motifs(args.pwmfile, fmt='pwm') ids = [] if args.ids: ids = args.ids.split(',') # depends on [control=['if'], data=[]] else: ids = [m.id for m in motifs] motifs = [m for m in motifs if m.id in ids] stats = ['phyper_at_fpr', 'roc_auc', 'pr_auc', 'enr_at_fpr', 'recall_at_fdr', 'roc_values', 'matches_at_fpr'] plot_x = [] plot_y = [] legend = [] f_out = sys.stdout if args.outdir: if not os.path.exists(args.outdir): os.makedirs(args.outdir) # depends on [control=['if'], data=[]] f_out = open(args.outdir + '/gimme.roc.report.txt', 'w') # depends on [control=['if'], data=[]] # Print the metrics f_out.write('Motif\t# matches\t# matches background\tP-value\tlog10 P-value\tROC AUC\tPR AUC\tEnr. at 1% FPR\tRecall at 10% FDR\n') for motif_stats in calc_stats_iterator(motifs, args.sample, args.background, genome=args.genome, stats=stats, ncpus=args.ncpus): for motif in motifs: if str(motif) in motif_stats: if outputfile: (x, y) = motif_stats[str(motif)]['roc_values'] plot_x.append(x) plot_y.append(y) legend.append(motif.id) # depends on [control=['if'], data=[]] log_pvalue = np.inf if motif_stats[str(motif)]['phyper_at_fpr'] > 0: log_pvalue = -np.log10(motif_stats[str(motif)]['phyper_at_fpr']) # depends on [control=['if'], data=[]] f_out.write('{}\t{:d}\t{:d}\t{:.2e}\t{:.3f}\t{:.3f}\t{:.3f}\t{:.2f}\t{:0.4f}\n'.format(motif.id, motif_stats[str(motif)]['matches_at_fpr'][0], motif_stats[str(motif)]['matches_at_fpr'][1], motif_stats[str(motif)]['phyper_at_fpr'], log_pvalue, motif_stats[str(motif)]['roc_auc'], motif_stats[str(motif)]['pr_auc'], motif_stats[str(motif)]['enr_at_fpr'], motif_stats[str(motif)]['recall_at_fdr'])) # depends on [control=['if'], data=['motif_stats']] # depends on [control=['for'], data=['motif']] # depends on [control=['for'], data=['motif_stats']] f_out.close() if args.outdir: html_report(args.outdir, args.outdir + '/gimme.roc.report.txt', args.pwmfile, 0.01) # depends on [control=['if'], data=[]] # Plot the ROC curve if outputfile: roc_plot(outputfile, plot_x, plot_y, ids=legend) # depends on [control=['if'], data=[]]
def compile_state_usage(self): ''' Return all used and unused states for the minion based on the top match data ''' err = [] top = self.get_top() err += self.verify_tops(top) if err: return err matches = self.top_matches(top) state_usage = {} for saltenv, states in self.avail.items(): env_usage = { 'used': [], 'unused': [], 'count_all': 0, 'count_used': 0, 'count_unused': 0 } env_matches = matches.get(saltenv) for state in states: env_usage['count_all'] += 1 if state in env_matches: env_usage['count_used'] += 1 env_usage['used'].append(state) else: env_usage['count_unused'] += 1 env_usage['unused'].append(state) state_usage[saltenv] = env_usage return state_usage
def function[compile_state_usage, parameter[self]]: constant[ Return all used and unused states for the minion based on the top match data ] variable[err] assign[=] list[[]] variable[top] assign[=] call[name[self].get_top, parameter[]] <ast.AugAssign object at 0x7da18fe91540> if name[err] begin[:] return[name[err]] variable[matches] assign[=] call[name[self].top_matches, parameter[name[top]]] variable[state_usage] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da18fe912a0>, <ast.Name object at 0x7da18fe91870>]]] in starred[call[name[self].avail.items, parameter[]]] begin[:] variable[env_usage] assign[=] dictionary[[<ast.Constant object at 0x7da18fe92470>, <ast.Constant object at 0x7da18fe928f0>, <ast.Constant object at 0x7da18fe92e60>, <ast.Constant object at 0x7da18fe91ea0>, <ast.Constant object at 0x7da2041db490>], [<ast.List object at 0x7da2041d8760>, <ast.List object at 0x7da2041d8700>, <ast.Constant object at 0x7da2041daef0>, <ast.Constant object at 0x7da2041daa40>, <ast.Constant object at 0x7da2041d95d0>]] variable[env_matches] assign[=] call[name[matches].get, parameter[name[saltenv]]] for taget[name[state]] in starred[name[states]] begin[:] <ast.AugAssign object at 0x7da2041d9780> if compare[name[state] in name[env_matches]] begin[:] <ast.AugAssign object at 0x7da2041da1a0> call[call[name[env_usage]][constant[used]].append, parameter[name[state]]] call[name[state_usage]][name[saltenv]] assign[=] name[env_usage] return[name[state_usage]]
keyword[def] identifier[compile_state_usage] ( identifier[self] ): literal[string] identifier[err] =[] identifier[top] = identifier[self] . identifier[get_top] () identifier[err] += identifier[self] . identifier[verify_tops] ( identifier[top] ) keyword[if] identifier[err] : keyword[return] identifier[err] identifier[matches] = identifier[self] . identifier[top_matches] ( identifier[top] ) identifier[state_usage] ={} keyword[for] identifier[saltenv] , identifier[states] keyword[in] identifier[self] . identifier[avail] . identifier[items] (): identifier[env_usage] ={ literal[string] :[], literal[string] :[], literal[string] : literal[int] , literal[string] : literal[int] , literal[string] : literal[int] } identifier[env_matches] = identifier[matches] . identifier[get] ( identifier[saltenv] ) keyword[for] identifier[state] keyword[in] identifier[states] : identifier[env_usage] [ literal[string] ]+= literal[int] keyword[if] identifier[state] keyword[in] identifier[env_matches] : identifier[env_usage] [ literal[string] ]+= literal[int] identifier[env_usage] [ literal[string] ]. identifier[append] ( identifier[state] ) keyword[else] : identifier[env_usage] [ literal[string] ]+= literal[int] identifier[env_usage] [ literal[string] ]. identifier[append] ( identifier[state] ) identifier[state_usage] [ identifier[saltenv] ]= identifier[env_usage] keyword[return] identifier[state_usage]
def compile_state_usage(self): """ Return all used and unused states for the minion based on the top match data """ err = [] top = self.get_top() err += self.verify_tops(top) if err: return err # depends on [control=['if'], data=[]] matches = self.top_matches(top) state_usage = {} for (saltenv, states) in self.avail.items(): env_usage = {'used': [], 'unused': [], 'count_all': 0, 'count_used': 0, 'count_unused': 0} env_matches = matches.get(saltenv) for state in states: env_usage['count_all'] += 1 if state in env_matches: env_usage['count_used'] += 1 env_usage['used'].append(state) # depends on [control=['if'], data=['state']] else: env_usage['count_unused'] += 1 env_usage['unused'].append(state) # depends on [control=['for'], data=['state']] state_usage[saltenv] = env_usage # depends on [control=['for'], data=[]] return state_usage
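As a quick illustration of the used/unused bookkeeping in compile_state_usage above, here is a minimal self-contained sketch; the environment names, state lists, and matches are made-up stand-ins for Salt's self.avail and top-file match data:

# Hypothetical stand-ins for self.avail and the top-file matches.
avail = {'base': ['apache', 'nginx', 'users'], 'dev': ['debugtools']}
matches = {'base': ['apache', 'users'], 'dev': []}

state_usage = {}
for saltenv, states in avail.items():
    env_matches = set(matches.get(saltenv, []))
    used = [s for s in states if s in env_matches]
    unused = [s for s in states if s not in env_matches]
    state_usage[saltenv] = {
        'used': used,
        'unused': unused,
        'count_all': len(states),
        'count_used': len(used),
        'count_unused': len(unused),
    }

print(state_usage['base'])
# -> {'used': ['apache', 'users'], 'unused': ['nginx'], 'count_all': 3,
#     'count_used': 2, 'count_unused': 1}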
def server(name='proxy-server', headers_middleware=None, server_software=None, **kwargs): '''Function to Create a WSGI Proxy Server.''' if headers_middleware is None: headers_middleware = [x_forwarded_for] wsgi_proxy = ProxyServerWsgiHandler(headers_middleware) kwargs['server_software'] = server_software or SERVER_SOFTWARE return wsgi.WSGIServer(wsgi_proxy, name=name, **kwargs)
def function[server, parameter[name, headers_middleware, server_software]]: constant[Function to Create a WSGI Proxy Server.] if compare[name[headers_middleware] is constant[None]] begin[:] variable[headers_middleware] assign[=] list[[<ast.Name object at 0x7da18eb55750>]] variable[wsgi_proxy] assign[=] call[name[ProxyServerWsgiHandler], parameter[name[headers_middleware]]] call[name[kwargs]][constant[server_software]] assign[=] <ast.BoolOp object at 0x7da18eb578b0> return[call[name[wsgi].WSGIServer, parameter[name[wsgi_proxy]]]]
keyword[def] identifier[server] ( identifier[name] = literal[string] , identifier[headers_middleware] = keyword[None] , identifier[server_software] = keyword[None] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[headers_middleware] keyword[is] keyword[None] : identifier[headers_middleware] =[ identifier[x_forwarded_for] ] identifier[wsgi_proxy] = identifier[ProxyServerWsgiHandler] ( identifier[headers_middleware] ) identifier[kwargs] [ literal[string] ]= identifier[server_software] keyword[or] identifier[SERVER_SOFTWARE] keyword[return] identifier[wsgi] . identifier[WSGIServer] ( identifier[wsgi_proxy] , identifier[name] = identifier[name] ,** identifier[kwargs] )
def server(name='proxy-server', headers_middleware=None, server_software=None, **kwargs): """Function to Create a WSGI Proxy Server.""" if headers_middleware is None: headers_middleware = [x_forwarded_for] # depends on [control=['if'], data=['headers_middleware']] wsgi_proxy = ProxyServerWsgiHandler(headers_middleware) kwargs['server_software'] = server_software or SERVER_SOFTWARE return wsgi.WSGIServer(wsgi_proxy, name=name, **kwargs)
def gamma(phi1,phi2,theta1,theta2): """ calculate third rotation angle inputs are angles from 2 pulsars returns the angle. """ if phi1 == phi2 and theta1 == theta2: gamma = 0 else: gamma = atan( sin(theta2)*sin(phi2-phi1) / \ (cos(theta1)*sin(theta2)*cos(phi1-phi2) - \ sin(theta1)*cos(theta2)) ) dummy_arg = (cos(gamma)*cos(theta1)*sin(theta2)*cos(phi1-phi2) + \ sin(gamma)*sin(theta2)*sin(phi2-phi1) - \ cos(gamma)*sin(theta1)*cos(theta2)) if dummy_arg >= 0: return gamma else: return pi + gamma
def function[gamma, parameter[phi1, phi2, theta1, theta2]]: constant[ calculate third rotation angle inputs are angles from 2 pulsars returns the angle. ] if <ast.BoolOp object at 0x7da1b0549690> begin[:] variable[gamma] assign[=] constant[0] variable[dummy_arg] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[call[name[cos], parameter[name[gamma]]] * call[name[cos], parameter[name[theta1]]]] * call[name[sin], parameter[name[theta2]]]] * call[name[cos], parameter[binary_operation[name[phi1] - name[phi2]]]]] + binary_operation[binary_operation[call[name[sin], parameter[name[gamma]]] * call[name[sin], parameter[name[theta2]]]] * call[name[sin], parameter[binary_operation[name[phi2] - name[phi1]]]]]] - binary_operation[binary_operation[call[name[cos], parameter[name[gamma]]] * call[name[sin], parameter[name[theta1]]]] * call[name[cos], parameter[name[theta2]]]]] if compare[name[dummy_arg] greater_or_equal[>=] constant[0]] begin[:] return[name[gamma]]
keyword[def] identifier[gamma] ( identifier[phi1] , identifier[phi2] , identifier[theta1] , identifier[theta2] ): literal[string] keyword[if] identifier[phi1] == identifier[phi2] keyword[and] identifier[theta1] == identifier[theta2] : identifier[gamma] = literal[int] keyword[else] : identifier[gamma] = identifier[atan] ( identifier[sin] ( identifier[theta2] )* identifier[sin] ( identifier[phi2] - identifier[phi1] )/( identifier[cos] ( identifier[theta1] )* identifier[sin] ( identifier[theta2] )* identifier[cos] ( identifier[phi1] - identifier[phi2] )- identifier[sin] ( identifier[theta1] )* identifier[cos] ( identifier[theta2] ))) identifier[dummy_arg] =( identifier[cos] ( identifier[gamma] )* identifier[cos] ( identifier[theta1] )* identifier[sin] ( identifier[theta2] )* identifier[cos] ( identifier[phi1] - identifier[phi2] )+ identifier[sin] ( identifier[gamma] )* identifier[sin] ( identifier[theta2] )* identifier[sin] ( identifier[phi2] - identifier[phi1] )- identifier[cos] ( identifier[gamma] )* identifier[sin] ( identifier[theta1] )* identifier[cos] ( identifier[theta2] )) keyword[if] identifier[dummy_arg] >= literal[int] : keyword[return] identifier[gamma] keyword[else] : keyword[return] identifier[pi] + identifier[gamma]
def gamma(phi1, phi2, theta1, theta2): """ calculate third rotation angle inputs are angles from 2 pulsars returns the angle. """ if phi1 == phi2 and theta1 == theta2: gamma = 0 # depends on [control=['if'], data=[]] else: gamma = atan(sin(theta2) * sin(phi2 - phi1) / (cos(theta1) * sin(theta2) * cos(phi1 - phi2) - sin(theta1) * cos(theta2))) dummy_arg = cos(gamma) * cos(theta1) * sin(theta2) * cos(phi1 - phi2) + sin(gamma) * sin(theta2) * sin(phi2 - phi1) - cos(gamma) * sin(theta1) * cos(theta2) if dummy_arg >= 0: return gamma # depends on [control=['if'], data=[]] else: return pi + gamma
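A standalone, hedged sketch of the gamma computation above, with explicit math imports so the quadrant correction can be exercised; the helper name and the input angles are arbitrary test values, not from the source:

from math import atan, sin, cos, pi

def third_rotation_angle(phi1, phi2, theta1, theta2):
    # Coincident pulsars: the original sets gamma = 0, and its branch test
    # then also returns 0, so an early return is behaviour-equivalent.
    if phi1 == phi2 and theta1 == theta2:
        return 0.0
    g = atan(sin(theta2) * sin(phi2 - phi1) /
             (cos(theta1) * sin(theta2) * cos(phi1 - phi2) -
              sin(theta1) * cos(theta2)))
    # atan only fixes the angle modulo pi; the sign of this expression
    # selects the correct branch, exactly as dummy_arg does above.
    branch = (cos(g) * cos(theta1) * sin(theta2) * cos(phi1 - phi2) +
              sin(g) * sin(theta2) * sin(phi2 - phi1) -
              cos(g) * sin(theta1) * cos(theta2))
    return g if branch >= 0 else pi + g

print(third_rotation_angle(0.1, 1.2, 0.4, 0.9))  # arbitrary radians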
def add_triple(self, subj, pred, obj):
        ''' Adds an entity property to an existing entity '''

        subj_data, pred_data, obj_data = self.are_ilx([subj, pred, obj])

        # RELATIONSHIP PROPERTY
        if subj_data.get('id') and pred_data.get('id') and obj_data.get('id'):
            if pred_data['type'] != 'relationship':
                return self.test_check('Adding a relationship as format \
                                        "term1_ilx relationship_ilx term2_ilx"')
            return self.add_relationship(term1=subj_data,
                                         relationship=pred_data,
                                         term2=obj_data)

        # ANNOTATION PROPERTY
        elif subj_data.get('id') and pred_data.get('id'):
            if pred_data['type'] != 'annotation':
                return self.test_check('Adding an annotation as format \
                                        "term_ilx annotation_ilx value"')
            return self.add_annotation(entity=subj_data,
                                       annotation=pred_data,
                                       value=obj)

        # UPDATE ENTITY
        elif subj_data.get('id'):
            data = subj_data
            _pred = self.ttl2sci_map.get(pred)
            if not _pred:
                error = pred + " does not have a correct RDF format or is not an option"
                return self.test_check(error)
            data = self.custom_update(data, _pred, obj)
            if data == 'failed':  # for debugging custom_update
                return data
            data = superclasses_bug_fix(data)
            url_base = self.base_path + 'term/edit/{id}'
            url = url_base.format(id=data['id'])
            return self.post(url, data)

        else:
            return self.test_check('The ILX ID(s) provided do not exist')
def function[add_triple, parameter[self, subj, pred, obj]]: constant[ Adds an entity property to an existing entity ] <ast.Tuple object at 0x7da1b1b378e0> assign[=] call[name[self].are_ilx, parameter[list[[<ast.Name object at 0x7da1b1b34040>, <ast.Name object at 0x7da1b1b35900>, <ast.Name object at 0x7da1b1b37250>]]]] if <ast.BoolOp object at 0x7da1b1b36680> begin[:] if compare[call[name[pred_data]][constant[type]] not_equal[!=] constant[relationship]] begin[:] return[call[name[self].test_check, parameter[constant[Adding a relationship as format "term1_ilx relationship_ilx term2_ilx"]]]] return[call[name[self].add_relationship, parameter[]]]
keyword[def] identifier[add_triple] ( identifier[self] , identifier[subj] , identifier[pred] , identifier[obj] ): literal[string] identifier[subj_data] , identifier[pred_data] , identifier[obj_data] = identifier[self] . identifier[are_ilx] ([ identifier[subj] , identifier[pred] , identifier[obj] ]) keyword[if] identifier[subj_data] . identifier[get] ( literal[string] ) keyword[and] identifier[pred_data] . identifier[get] ( literal[string] ) keyword[and] identifier[obj_data] . identifier[get] ( literal[string] ): keyword[if] identifier[pred_data] [ literal[string] ]!= literal[string] : keyword[return] identifier[self] . identifier[test_check] ( literal[string] ) keyword[return] identifier[self] . identifier[add_relationship] ( identifier[term1] = identifier[subj_data] , identifier[relationship] = identifier[pred_data] , identifier[term2] = identifier[obj_data] ) keyword[elif] identifier[subj_data] . identifier[get] ( literal[string] ) keyword[and] identifier[pred_data] . identifier[get] ( literal[string] ): keyword[if] identifier[pred_data] [ literal[string] ]!= literal[string] : keyword[return] identifier[self] . identifier[test_check] ( literal[string] ) keyword[return] identifier[self] . identifier[add_annotation] ( identifier[entity] = identifier[subj_data] , identifier[annotation] = identifier[pred_data] , identifier[value] = identifier[obj] ) keyword[elif] identifier[subj_data] . identifier[get] ( literal[string] ): identifier[data] = identifier[subj_data] identifier[_pred] = identifier[self] . identifier[ttl2sci_map] . identifier[get] ( identifier[pred] ) keyword[if] keyword[not] identifier[_pred] : identifier[error] = identifier[pred] + literal[string] keyword[return] identifier[self] . identifier[test_check] ( identifier[error] ) identifier[data] = identifier[self] . identifier[custom_update] ( identifier[data] , identifier[_pred] , identifier[obj] ) keyword[if] identifier[data] == literal[string] : keyword[return] identifier[data] identifier[data] = identifier[superclasses_bug_fix] ( identifier[data] ) identifier[url_base] = identifier[self] . identifier[base_path] + literal[string] identifier[url] = identifier[url_base] . identifier[format] ( identifier[id] = identifier[data] [ literal[string] ]) keyword[return] identifier[self] . identifier[post] ( identifier[url] , identifier[data] ) keyword[else] : keyword[return] identifier[self] . identifier[test_check] ( literal[string] )
def add_triple(self, subj, pred, obj):
    """ Adds an entity property to an existing entity """
    (subj_data, pred_data, obj_data) = self.are_ilx([subj, pred, obj])
    # RELATIONSHIP PROPERTY
    if subj_data.get('id') and pred_data.get('id') and obj_data.get('id'):
        if pred_data['type'] != 'relationship':
            return self.test_check('Adding a relationship as format "term1_ilx relationship_ilx term2_ilx"') # depends on [control=['if'], data=[]]
        return self.add_relationship(term1=subj_data, relationship=pred_data, term2=obj_data) # depends on [control=['if'], data=[]]
    # ANNOTATION PROPERTY
    elif subj_data.get('id') and pred_data.get('id'):
        if pred_data['type'] != 'annotation':
            return self.test_check('Adding an annotation as format "term_ilx annotation_ilx value"') # depends on [control=['if'], data=[]]
        return self.add_annotation(entity=subj_data, annotation=pred_data, value=obj) # depends on [control=['if'], data=[]]
    # UPDATE ENTITY
    elif subj_data.get('id'):
        data = subj_data
        _pred = self.ttl2sci_map.get(pred)
        if not _pred:
            error = pred + ' does not have a correct RDF format or is not an option'
            return self.test_check(error) # depends on [control=['if'], data=[]]
        data = self.custom_update(data, _pred, obj)
        if data == 'failed': # for debugging custom_update
            return data # depends on [control=['if'], data=['data']]
        data = superclasses_bug_fix(data)
        url_base = self.base_path + 'term/edit/{id}'
        url = url_base.format(id=data['id'])
        return self.post(url, data) # depends on [control=['if'], data=[]]
    else:
        return self.test_check('The ILX ID(s) provided do not exist')
def sigma(self): """ This method returns the sigma value of the gb. If using 'quick_gen' to generate GB, this value is not valid. """ return int(round(self.oriented_unit_cell.volume / self.init_cell.volume))
def function[sigma, parameter[self]]: constant[ This method returns the sigma value of the gb. If using 'quick_gen' to generate GB, this value is not valid. ] return[call[name[int], parameter[call[name[round], parameter[binary_operation[name[self].oriented_unit_cell.volume / name[self].init_cell.volume]]]]]]
keyword[def] identifier[sigma] ( identifier[self] ): literal[string] keyword[return] identifier[int] ( identifier[round] ( identifier[self] . identifier[oriented_unit_cell] . identifier[volume] / identifier[self] . identifier[init_cell] . identifier[volume] ))
def sigma(self): """ This method returns the sigma value of the gb. If using 'quick_gen' to generate GB, this value is not valid. """ return int(round(self.oriented_unit_cell.volume / self.init_cell.volume))
def generate_trivial_layout(*regs): """ Creates a trivial ("one-to-one") Layout with the registers in `regs`. Args: *regs (Registers): registers to include in the layout. Returns: Layout: A layout with all the `regs` in the given order. """ layout = Layout() for reg in regs: layout.add_register(reg) return layout
def function[generate_trivial_layout, parameter[]]: constant[ Creates a trivial ("one-to-one") Layout with the registers in `regs`. Args: *regs (Registers): registers to include in the layout. Returns: Layout: A layout with all the `regs` in the given order. ] variable[layout] assign[=] call[name[Layout], parameter[]] for taget[name[reg]] in starred[name[regs]] begin[:] call[name[layout].add_register, parameter[name[reg]]] return[name[layout]]
keyword[def] identifier[generate_trivial_layout] (* identifier[regs] ): literal[string] identifier[layout] = identifier[Layout] () keyword[for] identifier[reg] keyword[in] identifier[regs] : identifier[layout] . identifier[add_register] ( identifier[reg] ) keyword[return] identifier[layout]
def generate_trivial_layout(*regs): """ Creates a trivial ("one-to-one") Layout with the registers in `regs`. Args: *regs (Registers): registers to include in the layout. Returns: Layout: A layout with all the `regs` in the given order. """ layout = Layout() for reg in regs: layout.add_register(reg) # depends on [control=['for'], data=['reg']] return layout
def refresh_stack(self): """ Recompute the stack after e.g. show_hidden_frames has been modified """ self.stack, _ = self.compute_stack(self.fullstack) # find the current frame in the new stack for i, (frame, _) in enumerate(self.stack): if frame is self.curframe: self.curindex = i break else: self.curindex = len(self.stack)-1 self.curframe = self.stack[-1][0] self.print_current_stack_entry()
def function[refresh_stack, parameter[self]]: constant[ Recompute the stack after e.g. show_hidden_frames has been modified ] <ast.Tuple object at 0x7da20e749540> assign[=] call[name[self].compute_stack, parameter[name[self].fullstack]] for taget[tuple[[<ast.Name object at 0x7da20e74be20>, <ast.Tuple object at 0x7da20e74b670>]]] in starred[call[name[enumerate], parameter[name[self].stack]]] begin[:] if compare[name[frame] is name[self].curframe] begin[:] name[self].curindex assign[=] name[i] break
keyword[def] identifier[refresh_stack] ( identifier[self] ): literal[string] identifier[self] . identifier[stack] , identifier[_] = identifier[self] . identifier[compute_stack] ( identifier[self] . identifier[fullstack] ) keyword[for] identifier[i] ,( identifier[frame] , identifier[_] ) keyword[in] identifier[enumerate] ( identifier[self] . identifier[stack] ): keyword[if] identifier[frame] keyword[is] identifier[self] . identifier[curframe] : identifier[self] . identifier[curindex] = identifier[i] keyword[break] keyword[else] : identifier[self] . identifier[curindex] = identifier[len] ( identifier[self] . identifier[stack] )- literal[int] identifier[self] . identifier[curframe] = identifier[self] . identifier[stack] [- literal[int] ][ literal[int] ] identifier[self] . identifier[print_current_stack_entry] ()
def refresh_stack(self): """ Recompute the stack after e.g. show_hidden_frames has been modified """ (self.stack, _) = self.compute_stack(self.fullstack) # find the current frame in the new stack for (i, (frame, _)) in enumerate(self.stack): if frame is self.curframe: self.curindex = i break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] else: self.curindex = len(self.stack) - 1 self.curframe = self.stack[-1][0] self.print_current_stack_entry()
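refresh_stack relies on Python's for/else idiom: the else clause runs only when the loop finishes without hitting break. A tiny self-contained demonstration of that fallback (the frame names are made up):

stack = ['frame_a', 'frame_b', 'frame_c']
curframe = 'frame_x'  # not in the stack, so the else branch fires

for i, frame in enumerate(stack):
    if frame == curframe:
        curindex = i
        break
else:
    # No break happened: fall back to the newest frame, as above.
    curindex = len(stack) - 1
    curframe = stack[-1]

print(curindex, curframe)  # -> 2 frame_c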
def check_dependencies(self):
        "Checks if the test program is available in the python environment"
        if self.test_program == 'nose':
            try:
                import nose
            except ImportError:
                sys.exit('Nosetests is not available on your system. Please install it and try to run it again')
        if self.test_program == 'py':
            try:
                import py
            except:
                sys.exit('py.test is not available on your system. Please install it and try to run it again')
        if self.test_program == 'django':
            try:
                import django
            except:
                sys.exit('django is not available on your system. Please install it and try to run it again')
        if self.test_program == 'phpunit':
            try:
                process = subprocess.check_call(['phpunit','--version'])
            except:
                sys.exit('phpunit is not available on your system. Please install it and try to run it again')
        if self.test_program == 'tox':
            try:
                import tox
            except ImportError:
                sys.exit('tox is not available on your system. Please install it and try to run it again')
def function[check_dependencies, parameter[self]]: constant[Checks if the test program is available in the python environment] if compare[name[self].test_program equal[==] constant[nose]] begin[:] <ast.Try object at 0x7da1b25d1510> if compare[name[self].test_program equal[==] constant[py]] begin[:] <ast.Try object at 0x7da1b25d10c0> if compare[name[self].test_program equal[==] constant[django]] begin[:] <ast.Try object at 0x7da1b25d3610> if compare[name[self].test_program equal[==] constant[phpunit]] begin[:] <ast.Try object at 0x7da1b25d0e80> if compare[name[self].test_program equal[==] constant[tox]] begin[:] <ast.Try object at 0x7da1b25d1a50>
keyword[def] identifier[check_dependencies] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[test_program] == literal[string] : keyword[try] : keyword[import] identifier[nose] keyword[except] identifier[ImportError] : identifier[sys] . identifier[exit] ( literal[string] ) keyword[if] identifier[self] . identifier[test_program] == literal[string] : keyword[try] : keyword[import] identifier[py] keyword[except] : identifier[sys] . identifier[exit] ( literal[string] ) keyword[if] identifier[self] . identifier[test_program] == literal[string] : keyword[try] : keyword[import] identifier[django] keyword[except] : identifier[sys] . identifier[exit] ( literal[string] ) keyword[if] identifier[self] . identifier[test_program] == literal[string] : keyword[try] : identifier[process] = identifier[subprocess] . identifier[check_call] ([ literal[string] , literal[string] ]) keyword[except] : identifier[sys] . identifier[exit] ( literal[string] ) keyword[if] identifier[self] . identifier[test_program] == literal[string] : keyword[try] : keyword[import] identifier[tox] keyword[except] identifier[ImportError] : identifier[sys] . identifier[exit] ( literal[string] )
def check_dependencies(self):
    """Checks if the test program is available in the python environment"""
    if self.test_program == 'nose':
        try:
            import nose # depends on [control=['try'], data=[]]
        except ImportError:
            sys.exit('Nosetests is not available on your system. Please install it and try to run it again') # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
    if self.test_program == 'py':
        try:
            import py # depends on [control=['try'], data=[]]
        except:
            sys.exit('py.test is not available on your system. Please install it and try to run it again') # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
    if self.test_program == 'django':
        try:
            import django # depends on [control=['try'], data=[]]
        except:
            sys.exit('django is not available on your system. Please install it and try to run it again') # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
    if self.test_program == 'phpunit':
        try:
            process = subprocess.check_call(['phpunit', '--version']) # depends on [control=['try'], data=[]]
        except:
            sys.exit('phpunit is not available on your system. Please install it and try to run it again') # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
    if self.test_program == 'tox':
        try:
            import tox # depends on [control=['try'], data=[]]
        except ImportError:
            sys.exit('tox is not available on your system. Please install it and try to run it again') # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
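The try/import pattern in check_dependencies can also be expressed without importing at all, using importlib.util.find_spec. A hedged generalization of the same availability check; the helper name and hint text are invented for illustration:

import importlib.util
import sys

def require(module_name, hint):
    # find_spec returns None when the module cannot be located.
    if importlib.util.find_spec(module_name) is None:
        sys.exit('%s is not available on your system. %s' % (module_name, hint))

require('json', 'json ships with CPython, so this should never trigger.')
print('all dependencies present')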
def setParameter(self, name, index, value): """ Overrides :meth:`nupic.bindings.regions.PyRegion.PyRegion.setParameter`. """ if name == "learningMode": self.learningMode = bool(int(value)) elif name == "inferenceMode": self.inferenceMode = bool(int(value)) else: return PyRegion.setParameter(self, name, index, value)
def function[setParameter, parameter[self, name, index, value]]: constant[ Overrides :meth:`nupic.bindings.regions.PyRegion.PyRegion.setParameter`. ] if compare[name[name] equal[==] constant[learningMode]] begin[:] name[self].learningMode assign[=] call[name[bool], parameter[call[name[int], parameter[name[value]]]]]
keyword[def] identifier[setParameter] ( identifier[self] , identifier[name] , identifier[index] , identifier[value] ): literal[string] keyword[if] identifier[name] == literal[string] : identifier[self] . identifier[learningMode] = identifier[bool] ( identifier[int] ( identifier[value] )) keyword[elif] identifier[name] == literal[string] : identifier[self] . identifier[inferenceMode] = identifier[bool] ( identifier[int] ( identifier[value] )) keyword[else] : keyword[return] identifier[PyRegion] . identifier[setParameter] ( identifier[self] , identifier[name] , identifier[index] , identifier[value] )
def setParameter(self, name, index, value): """ Overrides :meth:`nupic.bindings.regions.PyRegion.PyRegion.setParameter`. """ if name == 'learningMode': self.learningMode = bool(int(value)) # depends on [control=['if'], data=[]] elif name == 'inferenceMode': self.inferenceMode = bool(int(value)) # depends on [control=['if'], data=[]] else: return PyRegion.setParameter(self, name, index, value)
def allow_client_incoming(self, client_name): """ Allow the user of this token to accept incoming connections. :param str client_name: Client name to accept calls from """ self.client_name = client_name self.capabilities['incoming'] = ScopeURI('client', 'incoming', {'clientName': client_name})
def function[allow_client_incoming, parameter[self, client_name]]: constant[ Allow the user of this token to accept incoming connections. :param str client_name: Client name to accept calls from ] name[self].client_name assign[=] name[client_name] call[name[self].capabilities][constant[incoming]] assign[=] call[name[ScopeURI], parameter[constant[client], constant[incoming], dictionary[[<ast.Constant object at 0x7da2054a4070>], [<ast.Name object at 0x7da2054a6ef0>]]]]
keyword[def] identifier[allow_client_incoming] ( identifier[self] , identifier[client_name] ): literal[string] identifier[self] . identifier[client_name] = identifier[client_name] identifier[self] . identifier[capabilities] [ literal[string] ]= identifier[ScopeURI] ( literal[string] , literal[string] ,{ literal[string] : identifier[client_name] })
def allow_client_incoming(self, client_name): """ Allow the user of this token to accept incoming connections. :param str client_name: Client name to accept calls from """ self.client_name = client_name self.capabilities['incoming'] = ScopeURI('client', 'incoming', {'clientName': client_name})
def _add_tag_manifest_file(zip_file, dir_name, tag_info_list): """Generate the tag manifest file and add it to the zip.""" _add_tag_file( zip_file, dir_name, tag_info_list, _gen_tag_manifest_file_tup(tag_info_list) )
def function[_add_tag_manifest_file, parameter[zip_file, dir_name, tag_info_list]]: constant[Generate the tag manifest file and add it to the zip.] call[name[_add_tag_file], parameter[name[zip_file], name[dir_name], name[tag_info_list], call[name[_gen_tag_manifest_file_tup], parameter[name[tag_info_list]]]]]
keyword[def] identifier[_add_tag_manifest_file] ( identifier[zip_file] , identifier[dir_name] , identifier[tag_info_list] ): literal[string] identifier[_add_tag_file] ( identifier[zip_file] , identifier[dir_name] , identifier[tag_info_list] , identifier[_gen_tag_manifest_file_tup] ( identifier[tag_info_list] ) )
def _add_tag_manifest_file(zip_file, dir_name, tag_info_list): """Generate the tag manifest file and add it to the zip.""" _add_tag_file(zip_file, dir_name, tag_info_list, _gen_tag_manifest_file_tup(tag_info_list))
def leave_multicast(self, universe: int) -> None:
        """
        Try to leave the multicast group with the specified universe. This does not throw any exception if the group
        could not be left.
        :param universe: the universe to leave the multicast group.
        The network hardware has to support the multicast feature!
        """
        try:
            self.sock.setsockopt(socket.SOL_IP, socket.IP_DROP_MEMBERSHIP,
                                 socket.inet_aton(calculate_multicast_addr(universe)) +
                                 socket.inet_aton(self._bindAddress))
        except:  # try to leave the multicast group for the universe
            pass
def function[leave_multicast, parameter[self, universe]]: constant[ Try to leave the multicast group with the specified universe. This does not throw any exception if the group could not be left. :param universe: the universe to leave the multicast group. The network hardware has to support the multicast feature! ] <ast.Try object at 0x7da18f722290>
keyword[def] identifier[leave_multicast] ( identifier[self] , identifier[universe] : identifier[int] )-> keyword[None] : literal[string] keyword[try] : identifier[self] . identifier[sock] . identifier[setsockopt] ( identifier[socket] . identifier[SOL_IP] , identifier[socket] . identifier[IP_DROP_MEMBERSHIP] , identifier[socket] . identifier[inet_aton] ( identifier[calculate_multicast_addr] ( identifier[universe] ))+ identifier[socket] . identifier[inet_aton] ( identifier[self] . identifier[_bindAddress] )) keyword[except] : keyword[pass]
def leave_multicast(self, universe: int) -> None:
    """
    Try to leave the multicast group with the specified universe. This does not throw any exception if the group
    could not be left.
    :param universe: the universe to leave the multicast group.
    The network hardware has to support the multicast feature!
    """
    try:
        self.sock.setsockopt(socket.SOL_IP, socket.IP_DROP_MEMBERSHIP, socket.inet_aton(calculate_multicast_addr(universe)) + socket.inet_aton(self._bindAddress)) # depends on [control=['try'], data=[]]
    except:
        # try to leave the multicast group for the universe
        pass # depends on [control=['except'], data=[]]
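For context, the join that pairs with the leave above uses the same packed group-plus-interface payload. A minimal hedged sketch; the group address is hypothetical, and the portable IPPROTO_IP constant stands in for the source's Linux-oriented SOL_IP:

import socket

GROUP = '239.255.0.1'  # hypothetical multicast group, not from the source
IFACE = '0.0.0.0'      # default interface

sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
# Option payload: 4-byte group address followed by 4-byte interface
# address, the same layout built with inet_aton in leave_multicast.
mreq = socket.inet_aton(GROUP) + socket.inet_aton(IFACE)
sock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)   # join
sock.setsockopt(socket.IPPROTO_IP, socket.IP_DROP_MEMBERSHIP, mreq)  # leave
sock.close()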
def _begin_stream(self, command: Command): '''Start data stream transfer.''' begin_reply = yield from self._commander.begin_stream(command) self._response.reply = begin_reply self.event_dispatcher.notify(self.Event.begin_transfer, self._response)
def function[_begin_stream, parameter[self, command]]: constant[Start data stream transfer.] variable[begin_reply] assign[=] <ast.YieldFrom object at 0x7da1b2344b20> name[self]._response.reply assign[=] name[begin_reply] call[name[self].event_dispatcher.notify, parameter[name[self].Event.begin_transfer, name[self]._response]]
keyword[def] identifier[_begin_stream] ( identifier[self] , identifier[command] : identifier[Command] ): literal[string] identifier[begin_reply] = keyword[yield] keyword[from] identifier[self] . identifier[_commander] . identifier[begin_stream] ( identifier[command] ) identifier[self] . identifier[_response] . identifier[reply] = identifier[begin_reply] identifier[self] . identifier[event_dispatcher] . identifier[notify] ( identifier[self] . identifier[Event] . identifier[begin_transfer] , identifier[self] . identifier[_response] )
def _begin_stream(self, command: Command): """Start data stream transfer.""" begin_reply = (yield from self._commander.begin_stream(command)) self._response.reply = begin_reply self.event_dispatcher.notify(self.Event.begin_transfer, self._response)
def _send_register_payload(self, websocket): """Send the register payload.""" file = os.path.join(os.path.dirname(__file__), HANDSHAKE_FILE_NAME) data = codecs.open(file, 'r', 'utf-8') raw_handshake = data.read() handshake = json.loads(raw_handshake) handshake['payload']['client-key'] = self.client_key yield from websocket.send(json.dumps(handshake)) raw_response = yield from websocket.recv() response = json.loads(raw_response) if response['type'] == 'response' and \ response['payload']['pairingType'] == 'PROMPT': raw_response = yield from websocket.recv() response = json.loads(raw_response) if response['type'] == 'registered': self.client_key = response['payload']['client-key'] self.save_key_file()
def function[_send_register_payload, parameter[self, websocket]]: constant[Send the register payload.] variable[file] assign[=] call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[name[__file__]]], name[HANDSHAKE_FILE_NAME]]] variable[data] assign[=] call[name[codecs].open, parameter[name[file], constant[r], constant[utf-8]]] variable[raw_handshake] assign[=] call[name[data].read, parameter[]] variable[handshake] assign[=] call[name[json].loads, parameter[name[raw_handshake]]] call[call[name[handshake]][constant[payload]]][constant[client-key]] assign[=] name[self].client_key <ast.YieldFrom object at 0x7da18c4cdd80> variable[raw_response] assign[=] <ast.YieldFrom object at 0x7da18c4cc1c0> variable[response] assign[=] call[name[json].loads, parameter[name[raw_response]]] if <ast.BoolOp object at 0x7da18eb54640> begin[:] variable[raw_response] assign[=] <ast.YieldFrom object at 0x7da18eb565c0> variable[response] assign[=] call[name[json].loads, parameter[name[raw_response]]] if compare[call[name[response]][constant[type]] equal[==] constant[registered]] begin[:] name[self].client_key assign[=] call[call[name[response]][constant[payload]]][constant[client-key]] call[name[self].save_key_file, parameter[]]
keyword[def] identifier[_send_register_payload] ( identifier[self] , identifier[websocket] ): literal[string] identifier[file] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[__file__] ), identifier[HANDSHAKE_FILE_NAME] ) identifier[data] = identifier[codecs] . identifier[open] ( identifier[file] , literal[string] , literal[string] ) identifier[raw_handshake] = identifier[data] . identifier[read] () identifier[handshake] = identifier[json] . identifier[loads] ( identifier[raw_handshake] ) identifier[handshake] [ literal[string] ][ literal[string] ]= identifier[self] . identifier[client_key] keyword[yield] keyword[from] identifier[websocket] . identifier[send] ( identifier[json] . identifier[dumps] ( identifier[handshake] )) identifier[raw_response] = keyword[yield] keyword[from] identifier[websocket] . identifier[recv] () identifier[response] = identifier[json] . identifier[loads] ( identifier[raw_response] ) keyword[if] identifier[response] [ literal[string] ]== literal[string] keyword[and] identifier[response] [ literal[string] ][ literal[string] ]== literal[string] : identifier[raw_response] = keyword[yield] keyword[from] identifier[websocket] . identifier[recv] () identifier[response] = identifier[json] . identifier[loads] ( identifier[raw_response] ) keyword[if] identifier[response] [ literal[string] ]== literal[string] : identifier[self] . identifier[client_key] = identifier[response] [ literal[string] ][ literal[string] ] identifier[self] . identifier[save_key_file] ()
def _send_register_payload(self, websocket): """Send the register payload.""" file = os.path.join(os.path.dirname(__file__), HANDSHAKE_FILE_NAME) data = codecs.open(file, 'r', 'utf-8') raw_handshake = data.read() handshake = json.loads(raw_handshake) handshake['payload']['client-key'] = self.client_key yield from websocket.send(json.dumps(handshake)) raw_response = (yield from websocket.recv()) response = json.loads(raw_response) if response['type'] == 'response' and response['payload']['pairingType'] == 'PROMPT': raw_response = (yield from websocket.recv()) response = json.loads(raw_response) if response['type'] == 'registered': self.client_key = response['payload']['client-key'] self.save_key_file() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
def stop(self):
        """ Stops the video stream and resets the clock. """

        logger.debug("Stopping playback")
        # Stop the clock
        self.clock.stop()
        # Set player status to ready
        self.status = READY
def function[stop, parameter[self]]: constant[ Stops the video stream and resets the clock. ] call[name[logger].debug, parameter[constant[Stopping playback]]] call[name[self].clock.stop, parameter[]] name[self].status assign[=] name[READY]
keyword[def] identifier[stop] ( identifier[self] ): literal[string] identifier[logger] . identifier[debug] ( literal[string] ) identifier[self] . identifier[clock] . identifier[stop] () identifier[self] . identifier[status] = identifier[READY]
def stop(self):
    """ Stops the video stream and resets the clock. """
    logger.debug('Stopping playback')
    # Stop the clock
    self.clock.stop()
    # Set player status to ready
    self.status = READY
def load(fname: str) -> 'ParallelDataSet': """ Loads a dataset from a binary .npy file. """ data = mx.nd.load(fname) n = len(data) // 3 source = data[:n] target = data[n:2 * n] label = data[2 * n:] assert len(source) == len(target) == len(label) return ParallelDataSet(source, target, label)
def function[load, parameter[fname]]: constant[ Loads a dataset from a binary .npy file. ] variable[data] assign[=] call[name[mx].nd.load, parameter[name[fname]]] variable[n] assign[=] binary_operation[call[name[len], parameter[name[data]]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[3]] variable[source] assign[=] call[name[data]][<ast.Slice object at 0x7da20e954ee0>] variable[target] assign[=] call[name[data]][<ast.Slice object at 0x7da1b1ddf7f0>] variable[label] assign[=] call[name[data]][<ast.Slice object at 0x7da1b1ddfeb0>] assert[compare[call[name[len], parameter[name[source]]] equal[==] call[name[len], parameter[name[target]]]]] return[call[name[ParallelDataSet], parameter[name[source], name[target], name[label]]]]
keyword[def] identifier[load] ( identifier[fname] : identifier[str] )-> literal[string] : literal[string] identifier[data] = identifier[mx] . identifier[nd] . identifier[load] ( identifier[fname] ) identifier[n] = identifier[len] ( identifier[data] )// literal[int] identifier[source] = identifier[data] [: identifier[n] ] identifier[target] = identifier[data] [ identifier[n] : literal[int] * identifier[n] ] identifier[label] = identifier[data] [ literal[int] * identifier[n] :] keyword[assert] identifier[len] ( identifier[source] )== identifier[len] ( identifier[target] )== identifier[len] ( identifier[label] ) keyword[return] identifier[ParallelDataSet] ( identifier[source] , identifier[target] , identifier[label] )
def load(fname: str) -> 'ParallelDataSet': """ Loads a dataset from a binary .npy file. """ data = mx.nd.load(fname) n = len(data) // 3 source = data[:n] target = data[n:2 * n] label = data[2 * n:] assert len(source) == len(target) == len(label) return ParallelDataSet(source, target, label)
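load above assumes the file holds the source, target, and label arrays concatenated in equal thirds; the matching writer would plausibly be mx.nd.save(fname, source + target + label), though that helper is an assumption, not shown in the source. The slicing itself is easy to sanity-check on plain lists:

data = list(range(9))  # stand-in for the list mx.nd.load returns
n = len(data) // 3
source, target, label = data[:n], data[n:2 * n], data[2 * n:]
assert len(source) == len(target) == len(label)
print(source, target, label)  # -> [0, 1, 2] [3, 4, 5] [6, 7, 8]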
def do_fileplaceholder(parser, token):
    """
    Method that parses the fileplaceholder template tag.
    """
    name, params = parse_placeholder(parser, token)
    return FilePlaceholderNode(name, **params)
def function[do_fileplaceholder, parameter[parser, token]]: constant[ Method that parses the fileplaceholder template tag. ] <ast.Tuple object at 0x7da18f58c910> assign[=] call[name[parse_placeholder], parameter[name[parser], name[token]]] return[call[name[FilePlaceholderNode], parameter[name[name]]]]
keyword[def] identifier[do_fileplaceholder] ( identifier[parser] , identifier[token] ): literal[string] identifier[name] , identifier[params] = identifier[parse_placeholder] ( identifier[parser] , identifier[token] ) keyword[return] identifier[FilePlaceholderNode] ( identifier[name] ,** identifier[params] )
def do_fileplaceholder(parser, token):
    """
    Method that parses the fileplaceholder template tag.
    """
    (name, params) = parse_placeholder(parser, token)
    return FilePlaceholderNode(name, **params)
def extract_parameters(Pressure, PressureErr, A, AErr, Gamma0, Gamma0Err, method="chang"):
    """
    Calculates the radius, mass and conversion factor and their uncertainties.
    For values to be correct data must have been taken with feedback off and
    at pressures of around 1mbar (this is because the equations assume
    harmonic motion and at lower pressures the uncooled particle experiences
    anharmonic motion (due to exploring further outside the middle of the trap).
    When cooled the value of Gamma (the damping) is a combination of the
    environmental damping and feedback damping and so is not the correct value
    for use in this equation (as it requires the environmental damping).
    Environmental damping can be predicted though as A=const*Gamma0. By fitting
    to 1mbar data one can find the value of the const and therefore
    Gamma0 = A/const

    Parameters
    ----------
    Pressure : float
        Pressure in mbar when the data was taken
    PressureErr : float
        Error in the Pressure as a decimal (e.g. 15% error is 0.15)
    A : float
        Fitting constant A
        A = γ**2*2*Γ_0*(K_b*T_0)/(π*m)
        where:
            γ = conversionFactor
            Γ_0 = Damping factor due to environment
            π = pi
    AErr : float
        Error in Fitting constant A
    Gamma0 : float
        The environmental damping factor Gamma_0 = Γ_0
    Gamma0Err : float
        The error in the environmental damping factor Gamma_0 = Γ_0

    Returns:
    Params : list
        [radius, mass, conversionFactor]
        The extracted parameters
    ParamsError : list
        [radiusError, massError, conversionFactorError]
        The error in the extracted parameters
    """
    Pressure = 100 * Pressure  # conversion to Pascals

    rho = 1800  # as quoted by Microspheres and Nanospheres # kgm^3
    dm = 0.372e-9  # m O'Hanlon, 2003
    T0 = 300  # kelvin
    kB = Boltzmann  # m^2 kg s^-2 K-1
    eta = 18.27e-6  # Pa s, viscosity of air

    method = method.lower()
    if method == "rashid":
        radius = (0.619 * 9 * pi * eta * dm**2) / \
            (_np.sqrt(2) * rho * kB * T0) * (Pressure/Gamma0)
    m_air = 4.81e-26  # molecular mass of air is 28.97 g/mol and Avogadro's Number 6.0221409^23
    if method == "chang":
        vbar = (8*kB*T0/(pi*m_air))**0.5
        radius = 16/(rho*pi*vbar)*(Pressure/Gamma0)/4
        # CORRECTION FACTOR OF 4 APPLIED!!!!
    # see section 4.1.1 of Muddassar Rashid's 2016 Thesis for
    # derivation of this
    # see also page 132 of Jan Giesler's Thesis
    err_radius = radius * \
        _np.sqrt(((PressureErr * Pressure) / Pressure)**2 +
                 (Gamma0Err / Gamma0)**2)
    mass = rho * ((4 * pi * radius**3) / 3)
    err_mass = mass * 3 * err_radius / radius
    conversionFactor = _np.sqrt(A * mass / (4 * kB * T0 * Gamma0))
    err_conversionFactor = conversionFactor * \
        _np.sqrt((AErr / A)**2 + (err_mass / mass)**2 +
                 (Gamma0Err / Gamma0)**2)
    return [radius, mass, conversionFactor], [err_radius, err_mass, err_conversionFactor]
def function[extract_parameters, parameter[Pressure, PressureErr, A, AErr, Gamma0, Gamma0Err, method]]: constant[ Calculates the radius, mass and conversion factor and their uncertainties. For values to be correct data must have been taken with feedback off and at pressures of around 1mbar (this is because the equations assume harmonic motion and at lower pressures the uncooled particle experiences anharmonic motion (due to exploring further outside the middle of the trap). When cooled the value of Gamma (the damping) is a combination of the environmental damping and feedback damping and so is not the correct value for use in this equation (as it requires the environmental damping). Environmental damping can be predicted though as A=const*Gamma0. By fitting to 1mbar data one can find the value of the const and therefore Gamma0 = A/const Parameters ---------- Pressure : float Pressure in mbar when the data was taken PressureErr : float Error in the Pressure as a decimal (e.g. 15% error is 0.15) A : float Fitting constant A A = γ**2*2*Γ_0*(K_b*T_0)/(π*m) where: γ = conversionFactor Γ_0 = Damping factor due to environment π = pi AErr : float Error in Fitting constant A Gamma0 : float The environmental damping factor Gamma_0 = Γ_0 Gamma0Err : float The error in the environmental damping factor Gamma_0 = Γ_0 Returns: Params : list [radius, mass, conversionFactor] The extracted parameters ParamsError : list [radiusError, massError, conversionFactorError] The error in the extracted parameters ] variable[Pressure] assign[=] binary_operation[constant[100] * name[Pressure]] variable[rho] assign[=] constant[1800] variable[dm] assign[=] constant[3.72e-10] variable[T0] assign[=] constant[300] variable[kB] assign[=] name[Boltzmann] variable[eta] assign[=] constant[1.827e-05] variable[method] assign[=] call[name[method].lower, parameter[]] if compare[name[method] equal[==] constant[rashid]] begin[:] variable[radius] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[0.619] * constant[9]] * name[pi]] * name[eta]] * binary_operation[name[dm] ** constant[2]]] / binary_operation[binary_operation[binary_operation[call[name[_np].sqrt, parameter[constant[2]]] * name[rho]] * name[kB]] * name[T0]]] * binary_operation[name[Pressure] / name[Gamma0]]] variable[m_air] assign[=] constant[4.81e-26] if compare[name[method] equal[==] constant[chang]] begin[:] variable[vbar] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[constant[8] * name[kB]] * name[T0]] / binary_operation[name[pi] * name[m_air]]] ** constant[0.5]] variable[radius] assign[=] binary_operation[binary_operation[binary_operation[constant[16] / binary_operation[binary_operation[name[rho] * name[pi]] * name[vbar]]] * binary_operation[name[Pressure] / name[Gamma0]]] / constant[4]] variable[err_radius] assign[=] binary_operation[name[radius] * call[name[_np].sqrt, parameter[binary_operation[binary_operation[binary_operation[binary_operation[name[PressureErr] * name[Pressure]] / name[Pressure]] ** constant[2]] + binary_operation[binary_operation[name[Gamma0Err] / name[Gamma0]] ** constant[2]]]]]] variable[mass] assign[=] binary_operation[name[rho] * binary_operation[binary_operation[binary_operation[constant[4] * name[pi]] * binary_operation[name[radius] ** constant[3]]] / constant[3]]] variable[err_mass] assign[=] binary_operation[binary_operation[binary_operation[name[mass] * constant[3]] * name[err_radius]] / name[radius]] variable[conversionFactor] assign[=] call[name[_np].sqrt, parameter[binary_operation[binary_operation[name[A] * name[mass]] / binary_operation[binary_operation[binary_operation[constant[4] * name[kB]] * name[T0]] * name[Gamma0]]]]] variable[err_conversionFactor] assign[=] binary_operation[name[conversionFactor] * call[name[_np].sqrt, parameter[binary_operation[binary_operation[binary_operation[binary_operation[name[AErr] / name[A]] ** constant[2]] + binary_operation[binary_operation[name[err_mass] / name[mass]] ** constant[2]]] + binary_operation[binary_operation[name[Gamma0Err] / name[Gamma0]] ** constant[2]]]]]] return[tuple[[<ast.List object at 0x7da1b28fbfd0>, <ast.List object at 0x7da1b28fbf10>]]]
keyword[def] identifier[extract_parameters] ( identifier[Pressure] , identifier[PressureErr] , identifier[A] , identifier[AErr] , identifier[Gamma0] , identifier[Gamma0Err] , identifier[method] = literal[string] ): literal[string] identifier[Pressure] = literal[int] * identifier[Pressure] identifier[rho] = literal[int] identifier[dm] = literal[int] identifier[T0] = literal[int] identifier[kB] = identifier[Boltzmann] identifier[eta] = literal[int] identifier[method] = identifier[method] . identifier[lower] () keyword[if] identifier[method] == literal[string] : identifier[radius] =( literal[int] * literal[int] * identifier[pi] * identifier[eta] * identifier[dm] ** literal[int] )/( identifier[_np] . identifier[sqrt] ( literal[int] )* identifier[rho] * identifier[kB] * identifier[T0] )*( identifier[Pressure] / identifier[Gamma0] ) identifier[m_air] = literal[int] keyword[if] identifier[method] == literal[string] : identifier[vbar] =( literal[int] * identifier[kB] * identifier[T0] /( identifier[pi] * identifier[m_air] ))** literal[int] identifier[radius] = literal[int] /( identifier[rho] * identifier[pi] * identifier[vbar] )*( identifier[Pressure] / identifier[Gamma0] )/ literal[int] identifier[err_radius] = identifier[radius] * identifier[_np] . identifier[sqrt] ((( identifier[PressureErr] * identifier[Pressure] )/ identifier[Pressure] ) ** literal[int] +( identifier[Gamma0Err] / identifier[Gamma0] )** literal[int] ) identifier[mass] = identifier[rho] *(( literal[int] * identifier[pi] * identifier[radius] ** literal[int] )/ literal[int] ) identifier[err_mass] = identifier[mass] * literal[int] * identifier[err_radius] / identifier[radius] identifier[conversionFactor] = identifier[_np] . identifier[sqrt] ( identifier[A] * identifier[mass] /( literal[int] * identifier[kB] * identifier[T0] * identifier[Gamma0] )) identifier[err_conversionFactor] = identifier[conversionFactor] * identifier[_np] . identifier[sqrt] (( identifier[AErr] / identifier[A] )** literal[int] +( identifier[err_mass] / identifier[mass] ) ** literal[int] +( identifier[Gamma0Err] / identifier[Gamma0] )** literal[int] ) keyword[return] [ identifier[radius] , identifier[mass] , identifier[conversionFactor] ],[ identifier[err_radius] , identifier[err_mass] , identifier[err_conversionFactor] ]
def extract_parameters(Pressure, PressureErr, A, AErr, Gamma0, Gamma0Err, method='chang'):
    """
    Calculates the radius, mass and conversion factor and their uncertainties.
    For values to be correct data must have been taken with feedback off and
    at pressures of around 1mbar (this is because the equations assume
    harmonic motion and at lower pressures the uncooled particle experiences
    anharmonic motion (due to exploring further outside the middle of the trap).
    When cooled the value of Gamma (the damping) is a combination of the
    environmental damping and feedback damping and so is not the correct value
    for use in this equation (as it requires the environmental damping).
    Environmental damping can be predicted though as A=const*Gamma0. By fitting
    to 1mbar data one can find the value of the const and therefore
    Gamma0 = A/const

    Parameters
    ----------
    Pressure : float
        Pressure in mbar when the data was taken
    PressureErr : float
        Error in the Pressure as a decimal (e.g. 15% error is 0.15)
    A : float
        Fitting constant A
        A = γ**2*2*Γ_0*(K_b*T_0)/(π*m)
        where:
            γ = conversionFactor
            Γ_0 = Damping factor due to environment
            π = pi
    AErr : float
        Error in Fitting constant A
    Gamma0 : float
        The environmental damping factor Gamma_0 = Γ_0
    Gamma0Err : float
        The error in the environmental damping factor Gamma_0 = Γ_0

    Returns:
    Params : list
        [radius, mass, conversionFactor]
        The extracted parameters
    ParamsError : list
        [radiusError, massError, conversionFactorError]
        The error in the extracted parameters
    """
    Pressure = 100 * Pressure # conversion to Pascals
    rho = 1800 # as quoted by Microspheres and Nanospheres # kgm^3
    dm = 3.72e-10 # m O'Hanlon, 2003
    T0 = 300 # kelvin
    kB = Boltzmann # m^2 kg s^-2 K-1
    eta = 1.827e-05 # Pa s, viscosity of air
    method = method.lower()
    if method == 'rashid':
        radius = 0.619 * 9 * pi * eta * dm ** 2 / (_np.sqrt(2) * rho * kB * T0) * (Pressure / Gamma0) # depends on [control=['if'], data=[]]
    m_air = 4.81e-26 # molecular mass of air is 28.97 g/mol and Avogadro's Number 6.0221409^23
    if method == 'chang':
        vbar = (8 * kB * T0 / (pi * m_air)) ** 0.5
        radius = 16 / (rho * pi * vbar) * (Pressure / Gamma0) / 4 # CORRECTION FACTOR OF 4 APPLIED!!!! # depends on [control=['if'], data=[]]
    # see section 4.1.1 of Muddassar Rashid's 2016 Thesis for
    # derivation of this
    # see also page 132 of Jan Giesler's Thesis
    err_radius = radius * _np.sqrt((PressureErr * Pressure / Pressure) ** 2 + (Gamma0Err / Gamma0) ** 2)
    mass = rho * (4 * pi * radius ** 3 / 3)
    err_mass = mass * 3 * err_radius / radius
    conversionFactor = _np.sqrt(A * mass / (4 * kB * T0 * Gamma0))
    err_conversionFactor = conversionFactor * _np.sqrt((AErr / A) ** 2 + (err_mass / mass) ** 2 + (Gamma0Err / Gamma0) ** 2)
    return ([radius, mass, conversionFactor], [err_radius, err_mass, err_conversionFactor])
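A worked numeric check of the 'chang' branch above; the pressure and damping values here are illustrative assumptions, not from the source:

from math import pi, sqrt

kB, T0, m_air, rho = 1.380649e-23, 300.0, 4.81e-26, 1800.0
pressure_pa = 100.0  # 1 mbar after the 100x conversion to Pascals
gamma0 = 1.5e3       # assumed environmental damping, rad/s

vbar = sqrt(8 * kB * T0 / (pi * m_air))  # mean gas speed, ~468 m/s
radius = 16 / (rho * pi * vbar) * (pressure_pa / gamma0) / 4
mass = rho * (4 * pi * radius**3) / 3

print('radius %.1f nm, mass %.2e kg' % (radius * 1e9, mass))
# -> radius 100.7 nm, mass 7.70e-18 kg for these inputs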
def merge_leaderboards(self, destination, keys, aggregate='SUM'): ''' Merge leaderboards given by keys with this leaderboard into a named destination leaderboard. @param destination [String] Destination leaderboard name. @param keys [Array] Leaderboards to be merged with the current leaderboard. @param options [Hash] Options for merging the leaderboards. ''' keys.insert(0, self.leaderboard_name) self.redis_connection.zunionstore(destination, keys, aggregate)
def function[merge_leaderboards, parameter[self, destination, keys, aggregate]]: constant[ Merge leaderboards given by keys with this leaderboard into a named destination leaderboard. @param destination [String] Destination leaderboard name. @param keys [Array] Leaderboards to be merged with the current leaderboard. @param options [Hash] Options for merging the leaderboards. ] call[name[keys].insert, parameter[constant[0], name[self].leaderboard_name]] call[name[self].redis_connection.zunionstore, parameter[name[destination], name[keys], name[aggregate]]]
keyword[def] identifier[merge_leaderboards] ( identifier[self] , identifier[destination] , identifier[keys] , identifier[aggregate] = literal[string] ): literal[string] identifier[keys] . identifier[insert] ( literal[int] , identifier[self] . identifier[leaderboard_name] ) identifier[self] . identifier[redis_connection] . identifier[zunionstore] ( identifier[destination] , identifier[keys] , identifier[aggregate] )
def merge_leaderboards(self, destination, keys, aggregate='SUM'): """ Merge leaderboards given by keys with this leaderboard into a named destination leaderboard. @param destination [String] Destination leaderboard name. @param keys [Array] Leaderboards to be merged with the current leaderboard. @param options [Hash] Options for merging the leaderboards. """ keys.insert(0, self.leaderboard_name) self.redis_connection.zunionstore(destination, keys, aggregate)
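A hedged usage sketch of the ZUNIONSTORE call behind merge_leaderboards, written against redis-py 3.x; the key names, scores, and a locally reachable Redis server are all assumptions:

import redis  # assumes redis-py 3.x and a server on localhost

r = redis.Redis(host='localhost', port=6379)
r.zadd('daily', {'alice': 10, 'bob': 5})
r.zadd('weekly', {'alice': 3, 'carol': 7})

# SUM aggregation adds scores for members present in several inputs.
r.zunionstore('combined', ['daily', 'weekly'], aggregate='SUM')
print(r.zrange('combined', 0, -1, withscores=True))
# -> [(b'bob', 5.0), (b'carol', 7.0), (b'alice', 13.0)]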
def read_packet(self, timeout=3.0): """read one packet, timeout if one packet is not available in the timeout period""" try: return self.queue.get(timeout=timeout) except Empty: raise InternalTimeoutError("Timeout waiting for packet in AsyncPacketBuffer")
def function[read_packet, parameter[self, timeout]]: constant[read one packet, timeout if one packet is not available in the timeout period] <ast.Try object at 0x7da204621fc0>
keyword[def] identifier[read_packet] ( identifier[self] , identifier[timeout] = literal[int] ): literal[string] keyword[try] : keyword[return] identifier[self] . identifier[queue] . identifier[get] ( identifier[timeout] = identifier[timeout] ) keyword[except] identifier[Empty] : keyword[raise] identifier[InternalTimeoutError] ( literal[string] )
def read_packet(self, timeout=3.0): """read one packet, timeout if one packet is not available in the timeout period""" try: return self.queue.get(timeout=timeout) # depends on [control=['try'], data=[]] except Empty: raise InternalTimeoutError('Timeout waiting for packet in AsyncPacketBuffer') # depends on [control=['except'], data=[]]
def parse_reaction_table_file(path, f, default_compartment): """Parse a tab-separated file containing reaction IDs and properties The reaction properties are parsed according to the header which specifies which property is contained in each column. """ context = FilePathContext(path) for lineno, row in enumerate(csv.DictReader(f, delimiter=str('\t'))): if 'id' not in row or row['id'].strip() == '': raise ParseError('Expected `id` column in table') props = {key: value for key, value in iteritems(row) if value != ''} if 'equation' in props: props['equation'] = parse_reaction_equation_string( props['equation'], default_compartment) mark = FileMark(context, lineno + 2, 0) yield ReactionEntry(props, mark)
def function[parse_reaction_table_file, parameter[path, f, default_compartment]]: constant[Parse a tab-separated file containing reaction IDs and properties The reaction properties are parsed according to the header which specifies which property is contained in each column. ] variable[context] assign[=] call[name[FilePathContext], parameter[name[path]]] for taget[tuple[[<ast.Name object at 0x7da1b254d480>, <ast.Name object at 0x7da1b254eb00>]]] in starred[call[name[enumerate], parameter[call[name[csv].DictReader, parameter[name[f]]]]]] begin[:] if <ast.BoolOp object at 0x7da1b254f700> begin[:] <ast.Raise object at 0x7da1b254f100> variable[props] assign[=] <ast.DictComp object at 0x7da1b254eaa0> if compare[constant[equation] in name[props]] begin[:] call[name[props]][constant[equation]] assign[=] call[name[parse_reaction_equation_string], parameter[call[name[props]][constant[equation]], name[default_compartment]]] variable[mark] assign[=] call[name[FileMark], parameter[name[context], binary_operation[name[lineno] + constant[2]], constant[0]]] <ast.Yield object at 0x7da1b254fac0>
keyword[def] identifier[parse_reaction_table_file] ( identifier[path] , identifier[f] , identifier[default_compartment] ): literal[string] identifier[context] = identifier[FilePathContext] ( identifier[path] ) keyword[for] identifier[lineno] , identifier[row] keyword[in] identifier[enumerate] ( identifier[csv] . identifier[DictReader] ( identifier[f] , identifier[delimiter] = identifier[str] ( literal[string] ))): keyword[if] literal[string] keyword[not] keyword[in] identifier[row] keyword[or] identifier[row] [ literal[string] ]. identifier[strip] ()== literal[string] : keyword[raise] identifier[ParseError] ( literal[string] ) identifier[props] ={ identifier[key] : identifier[value] keyword[for] identifier[key] , identifier[value] keyword[in] identifier[iteritems] ( identifier[row] ) keyword[if] identifier[value] != literal[string] } keyword[if] literal[string] keyword[in] identifier[props] : identifier[props] [ literal[string] ]= identifier[parse_reaction_equation_string] ( identifier[props] [ literal[string] ], identifier[default_compartment] ) identifier[mark] = identifier[FileMark] ( identifier[context] , identifier[lineno] + literal[int] , literal[int] ) keyword[yield] identifier[ReactionEntry] ( identifier[props] , identifier[mark] )
def parse_reaction_table_file(path, f, default_compartment): """Parse a tab-separated file containing reaction IDs and properties The reaction properties are parsed according to the header which specifies which property is contained in each column. """ context = FilePathContext(path) for (lineno, row) in enumerate(csv.DictReader(f, delimiter=str('\t'))): if 'id' not in row or row['id'].strip() == '': raise ParseError('Expected `id` column in table') # depends on [control=['if'], data=[]] props = {key: value for (key, value) in iteritems(row) if value != ''} if 'equation' in props: props['equation'] = parse_reaction_equation_string(props['equation'], default_compartment) # depends on [control=['if'], data=['props']] mark = FileMark(context, lineno + 2, 0) yield ReactionEntry(props, mark) # depends on [control=['for'], data=[]]
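The project-specific FilePathContext/FileMark/ReactionEntry classes are not shown in this record, so the following stand-alone sketch reproduces only the stdlib parsing loop: csv.DictReader with a tab delimiter, the `id` check, and the empty-value filtering. The sample rows are made up.

import csv
import io

tsv = 'id\tequation\tname\nR1\tA => B\treaction one\nR2\t\treaction two\n'
for lineno, row in enumerate(csv.DictReader(io.StringIO(tsv), delimiter='\t')):
    if 'id' not in row or row['id'].strip() == '':
        raise ValueError('Expected `id` column in table')
    props = {key: value for key, value in row.items() if value != ''}
    print(lineno + 2, props)  # +2: 1-based line numbers plus the header row
# 2 {'id': 'R1', 'equation': 'A => B', 'name': 'reaction one'}
# 3 {'id': 'R2', 'name': 'reaction two'}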
def ipv4_reassembly(packet, *, count=NotImplemented): """Make data for IPv4 reassembly.""" ipv4 = getattr(packet, 'ip', None) if ipv4 is not None: if ipv4.df: # dismiss non-fragmented packets return False, None data = dict( bufid=( ipaddress.ip_address(ipv4.src), # source IP address ipaddress.ip_address(ipv4.dst), # destination IP address ipv4.id, # identification TP_PROTO.get(ipv4.p).name, # payload protocol type ), num=count, # original packet range number fo=ipv4.off, # fragment offset ihl=ipv4.__hdr_len__, # internet header length mf=bool(ipv4.mf), # more fragments flag tl=ipv4.len, # total length, header included header=bytearray(ipv4.pack()[:ipv4.__hdr_len__]), # raw bytearray type header payload=bytearray(ipv4.pack()[ipv4.__hdr_len__:]), # raw bytearray type payload ) return True, data return False, None
def function[ipv4_reassembly, parameter[packet]]: constant[Make data for IPv4 reassembly.] variable[ipv4] assign[=] call[name[getattr], parameter[name[packet], constant[ip], constant[None]]] if compare[name[ipv4] is_not constant[None]] begin[:] if name[ipv4].df begin[:] return[tuple[[<ast.Constant object at 0x7da1b07d35e0>, <ast.Constant object at 0x7da1b07d1120>]]] variable[data] assign[=] call[name[dict], parameter[]] return[tuple[[<ast.Constant object at 0x7da1b07d2fe0>, <ast.Name object at 0x7da1b07d38e0>]]] return[tuple[[<ast.Constant object at 0x7da1b07d0550>, <ast.Constant object at 0x7da1b07d15a0>]]]
keyword[def] identifier[ipv4_reassembly] ( identifier[packet] ,*, identifier[count] = identifier[NotImplemented] ): literal[string] identifier[ipv4] = identifier[getattr] ( identifier[packet] , literal[string] , keyword[None] ) keyword[if] identifier[ipv4] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[ipv4] . identifier[df] : keyword[return] keyword[False] , keyword[None] identifier[data] = identifier[dict] ( identifier[bufid] =( identifier[ipaddress] . identifier[ip_address] ( identifier[ipv4] . identifier[src] ), identifier[ipaddress] . identifier[ip_address] ( identifier[ipv4] . identifier[dst] ), identifier[ipv4] . identifier[id] , identifier[TP_PROTO] . identifier[get] ( identifier[ipv4] . identifier[p] ). identifier[name] , ), identifier[num] = identifier[count] , identifier[fo] = identifier[ipv4] . identifier[off] , identifier[ihl] = identifier[ipv4] . identifier[__hdr_len__] , identifier[mf] = identifier[bool] ( identifier[ipv4] . identifier[mf] ), identifier[tl] = identifier[ipv4] . identifier[len] , identifier[header] = identifier[bytearray] ( identifier[ipv4] . identifier[pack] ()[: identifier[ipv4] . identifier[__hdr_len__] ]), identifier[payload] = identifier[bytearray] ( identifier[ipv4] . identifier[pack] ()[ identifier[ipv4] . identifier[__hdr_len__] :]), ) keyword[return] keyword[True] , identifier[data] keyword[return] keyword[False] , keyword[None]
def ipv4_reassembly(packet, *, count=NotImplemented): """Make data for IPv4 reassembly.""" ipv4 = getattr(packet, 'ip', None) if ipv4 is not None: if ipv4.df: # dismiss non-fragmented packets return (False, None) # depends on [control=['if'], data=[]] # source IP address # destination IP address # identification # payload protocol type # original packet range number # fragment offset # internet header length # more fragments flag # total length, header included # raw bytearray type header # raw bytearray type payload data = dict(bufid=(ipaddress.ip_address(ipv4.src), ipaddress.ip_address(ipv4.dst), ipv4.id, TP_PROTO.get(ipv4.p).name), num=count, fo=ipv4.off, ihl=ipv4.__hdr_len__, mf=bool(ipv4.mf), tl=ipv4.len, header=bytearray(ipv4.pack()[:ipv4.__hdr_len__]), payload=bytearray(ipv4.pack()[ipv4.__hdr_len__:])) return (True, data) # depends on [control=['if'], data=['ipv4']] return (False, None)
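Per RFC 791, fragments belong to the same datagram when source address, destination address, identification and protocol all match, which is exactly what the `bufid` tuple above encodes. A small sketch with the stdlib ipaddress module; the field values are made up.

import ipaddress

src, dst, ident, proto = '192.0.2.1', '198.51.100.7', 0x1c46, 'UDP'
bufid = (ipaddress.ip_address(src), ipaddress.ip_address(dst), ident, proto)
buffers = {}                            # bufid -> list of (offset, payload)
buffers.setdefault(bufid, []).append((0, b'first fragment payload'))
print(bufid in buffers)                 # True: later fragments find the buffer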
def read_coils(slave_id, starting_address, quantity): """ Return ADU for Modbus function code 01: Read Coils. :param slave_id: Number of slave. :return: Byte array with ADU. """ function = ReadCoils() function.starting_address = starting_address function.quantity = quantity return _create_request_adu(slave_id, function.request_pdu)
def function[read_coils, parameter[slave_id, starting_address, quantity]]: constant[ Return ADU for Modbus function code 01: Read Coils. :param slave_id: Number of slave. :return: Byte array with ADU. ] variable[function] assign[=] call[name[ReadCoils], parameter[]] name[function].starting_address assign[=] name[starting_address] name[function].quantity assign[=] name[quantity] return[call[name[_create_request_adu], parameter[name[slave_id], name[function].request_pdu]]]
keyword[def] identifier[read_coils] ( identifier[slave_id] , identifier[starting_address] , identifier[quantity] ): literal[string] identifier[function] = identifier[ReadCoils] () identifier[function] . identifier[starting_address] = identifier[starting_address] identifier[function] . identifier[quantity] = identifier[quantity] keyword[return] identifier[_create_request_adu] ( identifier[slave_id] , identifier[function] . identifier[request_pdu] )
def read_coils(slave_id, starting_address, quantity): """ Return ADU for Modbus function code 01: Read Coils. :param slave_id: Number of slave. :return: Byte array with ADU. """ function = ReadCoils() function.starting_address = starting_address function.quantity = quantity return _create_request_adu(slave_id, function.request_pdu)
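The request PDU that ReadCoils serializes is specified by Modbus as function code 0x01 followed by a big-endian starting address and quantity; the ADU framing added by `_create_request_adu` (slave id plus transport-specific header or CRC) is omitted in this struct-based sketch.

import struct

# Read Coils request PDU: function code 0x01, then big-endian
# starting address and quantity (Modbus Application Protocol spec).
def read_coils_pdu(starting_address, quantity):
    return struct.pack('>BHH', 0x01, starting_address, quantity)

print(read_coils_pdu(100, 3).hex())  # 0100640003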
def has_content_in(page, language): """Filter that returns ``True`` if the page has any content in a particular language. :param page: the current page :param language: the language you want to look at """ if page is None: return False return Content.objects.filter(page=page, language=language).count() > 0
def function[has_content_in, parameter[page, language]]: constant[Fitler that return ``True`` if the page has any content in a particular language. :param page: the current page :param language: the language you want to look at ] if compare[name[page] is constant[None]] begin[:] return[constant[False]] return[compare[call[call[name[Content].objects.filter, parameter[]].count, parameter[]] greater[>] constant[0]]]
keyword[def] identifier[has_content_in] ( identifier[page] , identifier[language] ): literal[string] keyword[if] identifier[page] keyword[is] keyword[None] : keyword[return] keyword[False] keyword[return] identifier[Content] . identifier[objects] . identifier[filter] ( identifier[page] = identifier[page] , identifier[language] = identifier[language] ). identifier[count] ()> literal[int]
def has_content_in(page, language): """Filter that returns ``True`` if the page has any content in a particular language. :param page: the current page :param language: the language you want to look at """ if page is None: return False # depends on [control=['if'], data=[]] return Content.objects.filter(page=page, language=language).count() > 0
def _saturation(color, **kwargs): """ Get saturation value of HSL color. """ s = colorsys.rgb_to_hls(*[x / 255.0 for x in color.value[:3]])[2] return NumberValue((s * 100, '%'))
def function[_saturation, parameter[color]]: constant[ Get saturation value of HSL color. ] variable[s] assign[=] call[call[name[colorsys].rgb_to_hls, parameter[<ast.Starred object at 0x7da1b27ed6c0>]]][constant[2]] return[call[name[NumberValue], parameter[tuple[[<ast.BinOp object at 0x7da1b27eda80>, <ast.Constant object at 0x7da1b27eceb0>]]]]]
keyword[def] identifier[_saturation] ( identifier[color] ,** identifier[kwargs] ): literal[string] identifier[s] = identifier[colorsys] . identifier[rgb_to_hls] (*[ identifier[x] / literal[int] keyword[for] identifier[x] keyword[in] identifier[color] . identifier[value] [: literal[int] ]])[ literal[int] ] keyword[return] identifier[NumberValue] (( identifier[s] * literal[int] , literal[string] ))
def _saturation(color, **kwargs): """ Get saturation value of HSL color. """ s = colorsys.rgb_to_hls(*[x / 255.0 for x in color.value[:3]])[2] return NumberValue((s * 100, '%'))
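colorsys.rgb_to_hls returns the tuple (hue, lightness, saturation), so the `[2]` index above really is the saturation channel; a quick sanity check with pure red:

import colorsys

h, l, s = colorsys.rgb_to_hls(255 / 255.0, 0 / 255.0, 0 / 255.0)
print(h, l, s)        # 0.0 0.5 1.0 -> saturation is the third element
print(s * 100, '%')   # 100.0 % , the value _saturation would report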
def template_exception_handler(fn, error_context, filename=None): """Calls the given function, attempting to catch any template-related errors, and converts the error to a Statik TemplateError instance. Returns the result returned by the function itself.""" error_message = None if filename: error_context.update(filename=filename) try: return fn() except jinja2.TemplateSyntaxError as exc: error_context.update(filename=exc.filename, line_no=exc.lineno) error_message = exc.message except jinja2.TemplateError as exc: error_message = exc.message except Exception as exc: error_message = "%s" % exc raise TemplateError(message=error_message, context=error_context)
def function[template_exception_handler, parameter[fn, error_context, filename]]: constant[Calls the given function, attempting to catch any template-related errors, and converts the error to a Statik TemplateError instance. Returns the result returned by the function itself.] variable[error_message] assign[=] constant[None] if name[filename] begin[:] call[name[error_context].update, parameter[]] <ast.Try object at 0x7da1b1228760> <ast.Raise object at 0x7da1b124e920>
keyword[def] identifier[template_exception_handler] ( identifier[fn] , identifier[error_context] , identifier[filename] = keyword[None] ): literal[string] identifier[error_message] = keyword[None] keyword[if] identifier[filename] : identifier[error_context] . identifier[update] ( identifier[filename] = identifier[filename] ) keyword[try] : keyword[return] identifier[fn] () keyword[except] identifier[jinja2] . identifier[TemplateSyntaxError] keyword[as] identifier[exc] : identifier[error_context] . identifier[update] ( identifier[filename] = identifier[exc] . identifier[filename] , identifier[line_no] = identifier[exc] . identifier[lineno] ) identifier[error_message] = identifier[exc] . identifier[message] keyword[except] identifier[jinja2] . identifier[TemplateError] keyword[as] identifier[exc] : identifier[error_message] = identifier[exc] . identifier[message] keyword[except] identifier[Exception] keyword[as] identifier[exc] : identifier[error_message] = literal[string] % identifier[exc] keyword[raise] identifier[TemplateError] ( identifier[message] = identifier[error_message] , identifier[context] = identifier[error_context] )
def template_exception_handler(fn, error_context, filename=None): """Calls the given function, attempting to catch any template-related errors, and converts the error to a Statik TemplateError instance. Returns the result returned by the function itself.""" error_message = None if filename: error_context.update(filename=filename) # depends on [control=['if'], data=[]] try: return fn() # depends on [control=['try'], data=[]] except jinja2.TemplateSyntaxError as exc: error_context.update(filename=exc.filename, line_no=exc.lineno) error_message = exc.message # depends on [control=['except'], data=['exc']] except jinja2.TemplateError as exc: error_message = exc.message # depends on [control=['except'], data=['exc']] except Exception as exc: error_message = '%s' % exc # depends on [control=['except'], data=['exc']] raise TemplateError(message=error_message, context=error_context)
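A sketch of the attributes the handler above reads from a real jinja2 syntax error; it assumes jinja2 is installed, and the broken template text is illustrative.

import jinja2

try:
    jinja2.Template('{% broken %}')
except jinja2.TemplateSyntaxError as exc:
    # The handler above copies these into its error context.
    print(exc.lineno, exc.filename, exc.message)
    # e.g. 1 None Encountered unknown tag 'broken'. ...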
def find_n50(self): """ Calculate the N50 for each strain. N50 is defined as the largest contig such that at least half of the total genome size is contained in contigs equal to or larger than this contig """ for sample in self.metadata: # Initialise the N50 attribute in case there is no assembly, and the attribute is not created in the loop sample[self.analysistype].n50 = '-' # Initialise a variable to store a running total of contig lengths currentlength = 0 for contig_length in sample[self.analysistype].contig_lengths: # Increment the current length with the length of the current contig currentlength += contig_length # If the current length is now greater than the total genome / 2, the current contig length is the N50 if currentlength >= sample[self.analysistype].genome_length * 0.5: # Populate the dictionary, and break the loop sample[self.analysistype].n50 = contig_length break
def function[find_n50, parameter[self]]: constant[ Calculate the N50 for each strain. N50 is defined as the largest contig such that at least half of the total genome size is contained in contigs equal to or larger than this contig ] for taget[name[sample]] in starred[name[self].metadata] begin[:] call[name[sample]][name[self].analysistype].n50 assign[=] constant[-] variable[currentlength] assign[=] constant[0] for taget[name[contig_length]] in starred[call[name[sample]][name[self].analysistype].contig_lengths] begin[:] <ast.AugAssign object at 0x7da1b1ec90f0> if compare[name[currentlength] greater_or_equal[>=] binary_operation[call[name[sample]][name[self].analysistype].genome_length * constant[0.5]]] begin[:] call[name[sample]][name[self].analysistype].n50 assign[=] name[contig_length] break
keyword[def] identifier[find_n50] ( identifier[self] ): literal[string] keyword[for] identifier[sample] keyword[in] identifier[self] . identifier[metadata] : identifier[sample] [ identifier[self] . identifier[analysistype] ]. identifier[n50] = literal[string] identifier[currentlength] = literal[int] keyword[for] identifier[contig_length] keyword[in] identifier[sample] [ identifier[self] . identifier[analysistype] ]. identifier[contig_lengths] : identifier[currentlength] += identifier[contig_length] keyword[if] identifier[currentlength] >= identifier[sample] [ identifier[self] . identifier[analysistype] ]. identifier[genome_length] * literal[int] : identifier[sample] [ identifier[self] . identifier[analysistype] ]. identifier[n50] = identifier[contig_length] keyword[break]
def find_n50(self): """ Calculate the N50 for each strain. N50 is defined as the largest contig such that at least half of the total genome size is contained in contigs equal to or larger than this contig """ for sample in self.metadata: # Initialise the N50 attribute in case there is no assembly, and the attribute is not created in the loop sample[self.analysistype].n50 = '-' # Initialise a variable to store a running total of contig lengths currentlength = 0 for contig_length in sample[self.analysistype].contig_lengths: # Increment the current length with the length of the current contig currentlength += contig_length # If the current length is now greater than the total genome / 2, the current contig length is the N50 if currentlength >= sample[self.analysistype].genome_length * 0.5: # Populate the dictionary, and break the loop sample[self.analysistype].n50 = contig_length break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['contig_length']] # depends on [control=['for'], data=['sample']]
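The loop above relies on contig_lengths already being sorted largest-first, as assemblers typically emit them. A stand-alone N50 that makes the sort explicit, with a small worked example:

def n50(contig_lengths):
    half = sum(contig_lengths) / 2.0
    running = 0
    # Walk contigs largest-first until at least half the genome is covered.
    for length in sorted(contig_lengths, reverse=True):
        running += length
        if running >= half:
            return length

print(n50([80, 70, 50, 40, 30, 20]))  # 70 (80 + 70 = 150 >= 290 / 2)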
def prepend_rez_path(self): """Prepend rez path to $PATH.""" if system.rez_bin_path: self.env.PATH.prepend(system.rez_bin_path)
def function[prepend_rez_path, parameter[self]]: constant[Prepend rez path to $PATH.] if name[system].rez_bin_path begin[:] call[name[self].env.PATH.prepend, parameter[name[system].rez_bin_path]]
keyword[def] identifier[prepend_rez_path] ( identifier[self] ): literal[string] keyword[if] identifier[system] . identifier[rez_bin_path] : identifier[self] . identifier[env] . identifier[PATH] . identifier[prepend] ( identifier[system] . identifier[rez_bin_path] )
def prepend_rez_path(self): """Prepend rez path to $PATH.""" if system.rez_bin_path: self.env.PATH.prepend(system.rez_bin_path) # depends on [control=['if'], data=[]]
def update(self, iterable): """ Update bag with all elements in iterable. >>> s = pbag([1]) >>> s.update([1, 2]) pbag([1, 1, 2]) """ if iterable: return PBag(reduce(_add_to_counters, iterable, self._counts)) return self
def function[update, parameter[self, iterable]]: constant[ Update bag with all elements in iterable. >>> s = pbag([1]) >>> s.update([1, 2]) pbag([1, 1, 2]) ] if name[iterable] begin[:] return[call[name[PBag], parameter[call[name[reduce], parameter[name[_add_to_counters], name[iterable], name[self]._counts]]]]] return[name[self]]
keyword[def] identifier[update] ( identifier[self] , identifier[iterable] ): literal[string] keyword[if] identifier[iterable] : keyword[return] identifier[PBag] ( identifier[reduce] ( identifier[_add_to_counters] , identifier[iterable] , identifier[self] . identifier[_counts] )) keyword[return] identifier[self]
def update(self, iterable): """ Update bag with all elements in iterable. >>> s = pbag([1]) >>> s.update([1, 2]) pbag([1, 1, 2]) """ if iterable: return PBag(reduce(_add_to_counters, iterable, self._counts)) # depends on [control=['if'], data=[]] return self
def rsa_pkcs1v15_sign(private_key, data, hash_algorithm): """ Generates an RSASSA-PKCS-v1.5 signature. When the hash_algorithm is "raw", the operation is identical to RSA private key encryption. That is: the data is not hashed and no ASN.1 structure with an algorithm identifier of the hash algorithm is placed in the encrypted byte string. :param private_key: The PrivateKey to generate the signature with :param data: A byte string of the data the signature is for :param hash_algorithm: A unicode string of "md5", "sha1", "sha256", "sha384", "sha512" or "raw" :raises: ValueError - when any of the parameters contain an invalid value TypeError - when any of the parameters are of the wrong type OSError - when an error is returned by the OS crypto library :return: A byte string of the signature """ if private_key.algorithm != 'rsa': raise ValueError('The key specified is not an RSA private key') return _sign(private_key, data, hash_algorithm)
def function[rsa_pkcs1v15_sign, parameter[private_key, data, hash_algorithm]]: constant[ Generates an RSASSA-PKCS-v1.5 signature. When the hash_algorithm is "raw", the operation is identical to RSA private key encryption. That is: the data is not hashed and no ASN.1 structure with an algorithm identifier of the hash algorithm is placed in the encrypted byte string. :param private_key: The PrivateKey to generate the signature with :param data: A byte string of the data the signature is for :param hash_algorithm: A unicode string of "md5", "sha1", "sha256", "sha384", "sha512" or "raw" :raises: ValueError - when any of the parameters contain an invalid value TypeError - when any of the parameters are of the wrong type OSError - when an error is returned by the OS crypto library :return: A byte string of the signature ] if compare[name[private_key].algorithm not_equal[!=] constant[rsa]] begin[:] <ast.Raise object at 0x7da1b0009180> return[call[name[_sign], parameter[name[private_key], name[data], name[hash_algorithm]]]]
keyword[def] identifier[rsa_pkcs1v15_sign] ( identifier[private_key] , identifier[data] , identifier[hash_algorithm] ): literal[string] keyword[if] identifier[private_key] . identifier[algorithm] != literal[string] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[return] identifier[_sign] ( identifier[private_key] , identifier[data] , identifier[hash_algorithm] )
def rsa_pkcs1v15_sign(private_key, data, hash_algorithm): """ Generates an RSASSA-PKCS-v1.5 signature. When the hash_algorithm is "raw", the operation is identical to RSA private key encryption. That is: the data is not hashed and no ASN.1 structure with an algorithm identifier of the hash algorithm is placed in the encrypted byte string. :param private_key: The PrivateKey to generate the signature with :param data: A byte string of the data the signature is for :param hash_algorithm: A unicode string of "md5", "sha1", "sha256", "sha384", "sha512" or "raw" :raises: ValueError - when any of the parameters contain an invalid value TypeError - when any of the parameters are of the wrong type OSError - when an error is returned by the OS crypto library :return: A byte string of the signature """ if private_key.algorithm != 'rsa': raise ValueError('The key specified is not an RSA private key') # depends on [control=['if'], data=[]] return _sign(private_key, data, hash_algorithm)
def calculate_bottom_margin(self): """ Calculate the margin in pixels below the plot area, setting border_bottom. """ bb = 7 if self.key and self.key_position == 'bottom': bb += len(self.data) * (self.font_size + 5) bb += 10 if self.show_x_labels: max_x_label_height_px = self.x_label_font_size if self.rotate_x_labels: label_lengths = map(len, self.get_x_labels()) max_x_label_len = functools.reduce(max, label_lengths) max_x_label_height_px *= 0.6 * max_x_label_len bb += max_x_label_height_px if self.stagger_x_labels: bb += max_x_label_height_px + 10 if self.show_x_title: bb += self.x_title_font_size + 5 self.border_bottom = bb
def function[calculate_bottom_margin, parameter[self]]: constant[ Calculate the margin in pixels below the plot area, setting border_bottom. ] variable[bb] assign[=] constant[7] if <ast.BoolOp object at 0x7da1b032b4f0> begin[:] <ast.AugAssign object at 0x7da1b032b6a0> <ast.AugAssign object at 0x7da1b0217310> if name[self].show_x_labels begin[:] variable[max_x_label_height_px] assign[=] name[self].x_label_font_size if name[self].rotate_x_labels begin[:] variable[label_lengths] assign[=] call[name[map], parameter[name[len], call[name[self].get_x_labels, parameter[]]]] variable[max_x_label_len] assign[=] call[name[functools].reduce, parameter[name[max], name[label_lengths]]] <ast.AugAssign object at 0x7da1b0215e40> <ast.AugAssign object at 0x7da1b0216bf0> if name[self].stagger_x_labels begin[:] <ast.AugAssign object at 0x7da1b0217100> if name[self].show_x_title begin[:] <ast.AugAssign object at 0x7da1b02157b0> name[self].border_bottom assign[=] name[bb]
keyword[def] identifier[calculate_bottom_margin] ( identifier[self] ): literal[string] identifier[bb] = literal[int] keyword[if] identifier[self] . identifier[key] keyword[and] identifier[self] . identifier[key_position] == literal[string] : identifier[bb] += identifier[len] ( identifier[self] . identifier[data] )*( identifier[self] . identifier[font_size] + literal[int] ) identifier[bb] += literal[int] keyword[if] identifier[self] . identifier[show_x_labels] : identifier[max_x_label_height_px] = identifier[self] . identifier[x_label_font_size] keyword[if] identifier[self] . identifier[rotate_x_labels] : identifier[label_lengths] = identifier[map] ( identifier[len] , identifier[self] . identifier[get_x_labels] ()) identifier[max_x_label_len] = identifier[functools] . identifier[reduce] ( identifier[max] , identifier[label_lengths] ) identifier[max_x_label_height_px] *= literal[int] * identifier[max_x_label_len] identifier[bb] += identifier[max_x_label_height_px] keyword[if] identifier[self] . identifier[stagger_x_labels] : identifier[bb] += identifier[max_x_label_height_px] + literal[int] keyword[if] identifier[self] . identifier[show_x_title] : identifier[bb] += identifier[self] . identifier[x_title_font_size] + literal[int] identifier[self] . identifier[border_bottom] = identifier[bb]
def calculate_bottom_margin(self): """ Calculate the margin in pixels below the plot area, setting border_bottom. """ bb = 7 if self.key and self.key_position == 'bottom': bb += len(self.data) * (self.font_size + 5) bb += 10 # depends on [control=['if'], data=[]] if self.show_x_labels: max_x_label_height_px = self.x_label_font_size if self.rotate_x_labels: label_lengths = map(len, self.get_x_labels()) max_x_label_len = functools.reduce(max, label_lengths) max_x_label_height_px *= 0.6 * max_x_label_len # depends on [control=['if'], data=[]] bb += max_x_label_height_px if self.stagger_x_labels: bb += max_x_label_height_px + 10 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if self.show_x_title: bb += self.x_title_font_size + 5 # depends on [control=['if'], data=[]] self.border_bottom = bb
def properties(self): # type: () -> list """ Returns: (list[str]) List of public properties """ _type = type(self) return [_property for _property in dir(_type) if self._is_property(_property)]
def function[properties, parameter[self]]: constant[ Returns: (list[str]) List of public properties ] variable[_type] assign[=] call[name[type], parameter[name[self]]] return[<ast.ListComp object at 0x7da1b16475b0>]
keyword[def] identifier[properties] ( identifier[self] ): literal[string] identifier[_type] = identifier[type] ( identifier[self] ) keyword[return] [ identifier[_property] keyword[for] identifier[_property] keyword[in] identifier[dir] ( identifier[_type] ) keyword[if] identifier[self] . identifier[_is_property] ( identifier[_property] )]
def properties(self): # type: () -> list '\n Returns:\n (list[str]) List of public properties\n ' _type = type(self) return [_property for _property in dir(_type) if self._is_property(_property)]
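`_is_property` is not shown in this record; a plausible reading is that it keeps public names backed by a `property` descriptor on the class. A stand-alone sketch under that assumption:

# Assumes _is_property means: public name whose class attribute is a
# `property` descriptor. The real check may differ.
class Sample:
    def __init__(self):
        self._hidden = 1

    @property
    def visible(self):
        return self._hidden

def public_properties(obj):
    cls = type(obj)
    return [name for name in dir(cls)
            if not name.startswith('_') and isinstance(getattr(cls, name), property)]

print(public_properties(Sample()))  # ['visible']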
def delete_items(self, url, container, container_object=None): """Deletes an object in a container. :param url: :param container: """ headers, container_uri = self._return_base_data( url=url, container=container, container_object=container_object ) return self._deleter(uri=container_uri, headers=headers)
def function[delete_items, parameter[self, url, container, container_object]]: constant[Deletes an objects in a container. :param url: :param container: ] <ast.Tuple object at 0x7da1b27247c0> assign[=] call[name[self]._return_base_data, parameter[]] return[call[name[self]._deleter, parameter[]]]
keyword[def] identifier[delete_items] ( identifier[self] , identifier[url] , identifier[container] , identifier[container_object] = keyword[None] ): literal[string] identifier[headers] , identifier[container_uri] = identifier[self] . identifier[_return_base_data] ( identifier[url] = identifier[url] , identifier[container] = identifier[container] , identifier[container_object] = identifier[container_object] ) keyword[return] identifier[self] . identifier[_deleter] ( identifier[uri] = identifier[container_uri] , identifier[headers] = identifier[headers] )
def delete_items(self, url, container, container_object=None): """Deletes an object in a container. :param url: :param container: """ (headers, container_uri) = self._return_base_data(url=url, container=container, container_object=container_object) return self._deleter(uri=container_uri, headers=headers)
def flow(self)->FlowField: "Access the flow-field grid after applying queued affine transforms." if self._flow is None: self._flow = _affine_grid(self.shape) if self._affine_mat is not None: self._flow = _affine_mult(self._flow,self._affine_mat) self._affine_mat = None return self._flow
def function[flow, parameter[self]]: constant[Access the flow-field grid after applying queued affine transforms.] if compare[name[self]._flow is constant[None]] begin[:] name[self]._flow assign[=] call[name[_affine_grid], parameter[name[self].shape]] if compare[name[self]._affine_mat is_not constant[None]] begin[:] name[self]._flow assign[=] call[name[_affine_mult], parameter[name[self]._flow, name[self]._affine_mat]] name[self]._affine_mat assign[=] constant[None] return[name[self]._flow]
keyword[def] identifier[flow] ( identifier[self] )-> identifier[FlowField] : literal[string] keyword[if] identifier[self] . identifier[_flow] keyword[is] keyword[None] : identifier[self] . identifier[_flow] = identifier[_affine_grid] ( identifier[self] . identifier[shape] ) keyword[if] identifier[self] . identifier[_affine_mat] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[_flow] = identifier[_affine_mult] ( identifier[self] . identifier[_flow] , identifier[self] . identifier[_affine_mat] ) identifier[self] . identifier[_affine_mat] = keyword[None] keyword[return] identifier[self] . identifier[_flow]
def flow(self) -> FlowField: """Access the flow-field grid after applying queued affine transforms.""" if self._flow is None: self._flow = _affine_grid(self.shape) # depends on [control=['if'], data=[]] if self._affine_mat is not None: self._flow = _affine_mult(self._flow, self._affine_mat) self._affine_mat = None # depends on [control=['if'], data=[]] return self._flow
def datasets(dataset, node, ll=None, ur=None, start_date=None, end_date=None, api_key=None): """ This method is used to find datasets available for searching. By passing no parameters except node, all available datasets are returned. Additional parameters such as temporal range and spatial bounding box can be used to find datasets that provide more specific data. The dataset name parameter can be used to limit the results based on matching the supplied value against the dataset name with assumed wildcards at the beginning and end. All parameters are optional except for the 'node' parameter. :param dataset: Dataset Identifier :param ll: Lower left corner of an AOI bounding box - in decimal form Longitude/Latitude dictionary e.g. { "longitude": 0.0, "latitude": 0.0 } :param ur: Upper right corner of an AOI bounding box - in decimal form Longitude/Latitude dictionary e.g. { "longitude": 0.0, "latitude": 0.0 } :param start_date: Used for searching scene acquisition - will accept anything that the PHP strtotime function can understand :param end_date: Used for searching scene acquisition - will accept anything that the PHP strtotime function can understand :param node: The requested Catalog :param api_key: API key is not required. """ payload = { "node": node, "apiKey": api_key } if dataset: payload["datasetName"] = dataset if ll and ur: payload["lowerLeft"] = { "latitude": ll["latitude"], "longitude": ll["longitude"] } payload["upperRight"] = { "latitude": ur["latitude"], "longitude": ur["longitude"] } if start_date: payload["startDate"] = start_date if end_date: payload["endDate"] = end_date return json.dumps(payload)
def function[datasets, parameter[dataset, node, ll, ur, start_date, end_date, api_key]]: constant[ This method is used to find datasets available for searching. By passing no parameters except node, all available datasets are returned. Additional parameters such as temporal range and spatial bounding box can be used to find datasets that provide more specific data. The dataset name parameter can be used to limit the results based on matching the supplied value against the dataset name with assumed wildcards at the beginning and end. All parameters are optional except for the 'node' parameter. :param dataset: Dataset Identifier :param ll: Lower left corner of an AOI bounding box - in decimal form Longitude/Latitude dictionary e.g. { "longitude": 0.0, "latitude": 0.0 } :param ur: Upper right corner of an AOI bounding box - in decimal form Longitude/Latitude dictionary e.g. { "longitude": 0.0, "latitude": 0.0 } :param start_date: Used for searching scene acquisition - will accept anything that the PHP strtotime function can understand :param end_date: Used for searching scene acquisition - will accept anything that the PHP strtotime function can understand :param node: The requested Catalog :param api_key: API key is not required. ] variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da1b2726ad0>, <ast.Constant object at 0x7da1b27257e0>], [<ast.Name object at 0x7da1b2727760>, <ast.Name object at 0x7da1b27276d0>]] if name[dataset] begin[:] call[name[payload]][constant[datasetName]] assign[=] name[dataset] if <ast.BoolOp object at 0x7da1b2724ac0> begin[:] call[name[payload]][constant[lowerLeft]] assign[=] dictionary[[<ast.Constant object at 0x7da1b2724ee0>, <ast.Constant object at 0x7da1b2727130>], [<ast.Subscript object at 0x7da1b2725360>, <ast.Subscript object at 0x7da1b2727400>]] call[name[payload]][constant[upperRight]] assign[=] dictionary[[<ast.Constant object at 0x7da1b2725fc0>, <ast.Constant object at 0x7da1b27248b0>], [<ast.Subscript object at 0x7da1b27259c0>, <ast.Subscript object at 0x7da1b2724460>]] if name[start_date] begin[:] call[name[payload]][constant[startDate]] assign[=] name[start_date] if name[end_date] begin[:] call[name[payload]][constant[endDate]] assign[=] name[end_date] return[call[name[json].dumps, parameter[name[payload]]]]
keyword[def] identifier[datasets] ( identifier[dataset] , identifier[node] , identifier[ll] = keyword[None] , identifier[ur] = keyword[None] , identifier[start_date] = keyword[None] , identifier[end_date] = keyword[None] , identifier[api_key] = keyword[None] ): literal[string] identifier[payload] ={ literal[string] : identifier[node] , literal[string] : identifier[api_key] } keyword[if] identifier[dataset] : identifier[payload] [ literal[string] ]= identifier[dataset] keyword[if] identifier[ll] keyword[and] identifier[ur] : identifier[payload] [ literal[string] ]={ literal[string] : identifier[ll] [ literal[string] ], literal[string] : identifier[ll] [ literal[string] ] } identifier[payload] [ literal[string] ]={ literal[string] : identifier[ur] [ literal[string] ], literal[string] : identifier[ur] [ literal[string] ] } keyword[if] identifier[start_date] : identifier[payload] [ literal[string] ]= identifier[start_date] keyword[if] identifier[end_date] : identifier[payload] [ literal[string] ]= identifier[end_date] keyword[return] identifier[json] . identifier[dumps] ( identifier[payload] )
def datasets(dataset, node, ll=None, ur=None, start_date=None, end_date=None, api_key=None): """ This method is used to find datasets available for searching. By passing no parameters except node, all available datasets are returned. Additional parameters such as temporal range and spatial bounding box can be used to find datasets that provide more specific data. The dataset name parameter can be used to limit the results based on matching the supplied value against the dataset name with assumed wildcards at the beginning and end. All parameters are optional except for the 'node' parameter. :param dataset: Dataset Identifier :param ll: Lower left corner of an AOI bounding box - in decimal form Longitude/Latitude dictionary e.g. { "longitude": 0.0, "latitude": 0.0 } :param ur: Upper right corner of an AOI bounding box - in decimal form Longitude/Latitude dictionary e.g. { "longitude": 0.0, "latitude": 0.0 } :param start_date: Used for searching scene acquisition - will accept anything that the PHP strtotime function can understand :param end_date: Used for searching scene acquisition - will accept anything that the PHP strtotime function can understand :param node: The requested Catalog :param api_key: API key is not required. """ payload = {'node': node, 'apiKey': api_key} if dataset: payload['datasetName'] = dataset # depends on [control=['if'], data=[]] if ll and ur: payload['lowerLeft'] = {'latitude': ll['latitude'], 'longitude': ll['longitude']} payload['upperRight'] = {'latitude': ur['latitude'], 'longitude': ur['longitude']} # depends on [control=['if'], data=[]] if start_date: payload['startDate'] = start_date # depends on [control=['if'], data=[]] if end_date: payload['endDate'] = end_date # depends on [control=['if'], data=[]] return json.dumps(payload)
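With the `datasets` builder above in scope, a call with illustrative values shows the payload shape; the dataset and node identifiers here are examples, not authoritative catalog names.

import json

payload = datasets('LANDSAT_8', 'EE',
                   ll={'longitude': -120.0, 'latitude': 35.0},
                   ur={'longitude': -119.0, 'latitude': 36.0},
                   start_date='2015-01-01', end_date='2015-12-31')
parsed = json.loads(payload)
print(parsed['datasetName'], parsed['lowerLeft'])
# LANDSAT_8 {'latitude': 35.0, 'longitude': -120.0}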
def convert_instancenorm(node, **kwargs): """Map MXNet's InstanceNorm operator attributes to onnx's InstanceNormalization operator based on the input node's attributes and return the created node. """ name, input_nodes, attrs = get_inputs(node, kwargs) eps = float(attrs.get("eps", 0.001)) node = onnx.helper.make_node( 'InstanceNormalization', inputs=input_nodes, outputs=[name], name=name, epsilon=eps) return [node]
def function[convert_instancenorm, parameter[node]]: constant[Map MXNet's InstanceNorm operator attributes to onnx's InstanceNormalization operator based on the input node's attributes and return the created node. ] <ast.Tuple object at 0x7da1b204d0c0> assign[=] call[name[get_inputs], parameter[name[node], name[kwargs]]] variable[eps] assign[=] call[name[float], parameter[call[name[attrs].get, parameter[constant[eps], constant[0.001]]]]] variable[node] assign[=] call[name[onnx].helper.make_node, parameter[constant[InstanceNormalization]]] return[list[[<ast.Name object at 0x7da1b204cac0>]]]
keyword[def] identifier[convert_instancenorm] ( identifier[node] ,** identifier[kwargs] ): literal[string] identifier[name] , identifier[input_nodes] , identifier[attrs] = identifier[get_inputs] ( identifier[node] , identifier[kwargs] ) identifier[eps] = identifier[float] ( identifier[attrs] . identifier[get] ( literal[string] , literal[int] )) identifier[node] = identifier[onnx] . identifier[helper] . identifier[make_node] ( literal[string] , identifier[inputs] = identifier[input_nodes] , identifier[outputs] =[ identifier[name] ], identifier[name] = identifier[name] , identifier[epsilon] = identifier[eps] ) keyword[return] [ identifier[node] ]
def convert_instancenorm(node, **kwargs): """Map MXNet's InstanceNorm operator attributes to onnx's InstanceNormalization operator based on the input node's attributes and return the created node. """ (name, input_nodes, attrs) = get_inputs(node, kwargs) eps = float(attrs.get('eps', 0.001)) node = onnx.helper.make_node('InstanceNormalization', inputs=input_nodes, outputs=[name], name=name, epsilon=eps) return [node]
def iteritems(self, pipe=None): """Return an iterator over the dictionary's ``(key, value)`` pairs.""" pipe = self.redis if pipe is None else pipe for k, v in self._data(pipe).items(): yield k, self.cache.get(k, v)
def function[iteritems, parameter[self, pipe]]: constant[Return an iterator over the dictionary's ``(key, value)`` pairs.] variable[pipe] assign[=] <ast.IfExp object at 0x7da20c6c5a50> for taget[tuple[[<ast.Name object at 0x7da20c6c6050>, <ast.Name object at 0x7da20c6c7160>]]] in starred[call[call[name[self]._data, parameter[name[pipe]]].items, parameter[]]] begin[:] <ast.Yield object at 0x7da20c6c7400>
keyword[def] identifier[iteritems] ( identifier[self] , identifier[pipe] = keyword[None] ): literal[string] identifier[pipe] = identifier[self] . identifier[redis] keyword[if] identifier[pipe] keyword[is] keyword[None] keyword[else] identifier[pipe] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[_data] ( identifier[pipe] ). identifier[items] (): keyword[yield] identifier[k] , identifier[self] . identifier[cache] . identifier[get] ( identifier[k] , identifier[v] )
def iteritems(self, pipe=None): """Return an iterator over the dictionary's ``(key, value)`` pairs.""" pipe = self.redis if pipe is None else pipe for (k, v) in self._data(pipe).items(): yield (k, self.cache.get(k, v)) # depends on [control=['for'], data=[]]
def as_pandas(self, with_metadata=False): """Return this as a pd.DataFrame Parameters ---------- with_metadata : bool, default False or dict if True, join data with all meta columns; if a dict, discover meaningful meta columns from values (in key-value) """ if with_metadata: cols = self._discover_meta_cols(**with_metadata) \ if isinstance(with_metadata, dict) else self.meta.columns return ( self.data .set_index(META_IDX) .join(self.meta[cols]) .reset_index() ) else: return self.data.copy()
def function[as_pandas, parameter[self, with_metadata]]: constant[Return this as a pd.DataFrame Parameters ---------- with_metadata : bool, default False or dict if True, join data with all meta columns; if a dict, discover meaningful meta columns from values (in key-value) ] if name[with_metadata] begin[:] variable[cols] assign[=] <ast.IfExp object at 0x7da2044c0f40> return[call[call[call[name[self].data.set_index, parameter[name[META_IDX]]].join, parameter[call[name[self].meta][name[cols]]]].reset_index, parameter[]]]
keyword[def] identifier[as_pandas] ( identifier[self] , identifier[with_metadata] = keyword[False] ): literal[string] keyword[if] identifier[with_metadata] : identifier[cols] = identifier[self] . identifier[_discover_meta_cols] (** identifier[with_metadata] ) keyword[if] identifier[isinstance] ( identifier[with_metadata] , identifier[dict] ) keyword[else] identifier[self] . identifier[meta] . identifier[columns] keyword[return] ( identifier[self] . identifier[data] . identifier[set_index] ( identifier[META_IDX] ) . identifier[join] ( identifier[self] . identifier[meta] [ identifier[cols] ]) . identifier[reset_index] () ) keyword[else] : keyword[return] identifier[self] . identifier[data] . identifier[copy] ()
def as_pandas(self, with_metadata=False): """Return this as a pd.DataFrame Parameters ---------- with_metadata : bool, default False or dict if True, join data with all meta columns; if a dict, discover meaningful meta columns from values (in key-value) """ if with_metadata: cols = self._discover_meta_cols(**with_metadata) if isinstance(with_metadata, dict) else self.meta.columns return self.data.set_index(META_IDX).join(self.meta[cols]).reset_index() # depends on [control=['if'], data=[]] else: return self.data.copy()
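A stand-alone reproduction of the set_index/join/reset_index pattern above with toy frames; META_IDX is assumed to be the ('model', 'scenario') pair purely for illustration.

import pandas as pd

META_IDX = ['model', 'scenario']  # assumed index columns, for illustration
data = pd.DataFrame({'model': ['m1', 'm1'], 'scenario': ['s1', 's2'],
                     'value': [1.0, 2.0]})
meta = pd.DataFrame(
    {'category': ['a', 'b']},
    index=pd.MultiIndex.from_tuples([('m1', 's1'), ('m1', 's2')], names=META_IDX))
# Join meta columns onto the data rows sharing the same (model, scenario).
print(data.set_index(META_IDX).join(meta[['category']]).reset_index())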
def constant(self, val, ty): """ Creates a constant as a VexValue :param val: The value, as an integer :param ty: The type of the resulting VexValue :return: a VexValue """ if isinstance(val, VexValue) and not isinstance(val, IRExpr): raise Exception('Constant cannot be made from VexValue or IRExpr') rdt = self.irsb_c.mkconst(val, ty) return VexValue(self.irsb_c, rdt)
def function[constant, parameter[self, val, ty]]: constant[ Creates a constant as a VexValue :param val: The value, as an integer :param ty: The type of the resulting VexValue :return: a VexValue ] if <ast.BoolOp object at 0x7da1b155f7c0> begin[:] <ast.Raise object at 0x7da1b155f250> variable[rdt] assign[=] call[name[self].irsb_c.mkconst, parameter[name[val], name[ty]]] return[call[name[VexValue], parameter[name[self].irsb_c, name[rdt]]]]
keyword[def] identifier[constant] ( identifier[self] , identifier[val] , identifier[ty] ): literal[string] keyword[if] identifier[isinstance] ( identifier[val] , identifier[VexValue] ) keyword[and] keyword[not] identifier[isinstance] ( identifier[val] , identifier[IRExpr] ): keyword[raise] identifier[Exception] ( literal[string] ) identifier[rdt] = identifier[self] . identifier[irsb_c] . identifier[mkconst] ( identifier[val] , identifier[ty] ) keyword[return] identifier[VexValue] ( identifier[self] . identifier[irsb_c] , identifier[rdt] )
def constant(self, val, ty): """ Creates a constant as a VexValue :param val: The value, as an integer :param ty: The type of the resulting VexValue :return: a VexValue """ if isinstance(val, VexValue) and (not isinstance(val, IRExpr)): raise Exception('Constant cannot be made from VexValue or IRExpr') # depends on [control=['if'], data=[]] rdt = self.irsb_c.mkconst(val, ty) return VexValue(self.irsb_c, rdt)
def merge_from_master(git_action, doc_id, auth_info, parent_sha, doctype_display_name="document"): """Merge from master into the WIP branch for this document/author. This is needed to allow a worker's future saves to be merged seamlessly into master. """ gh_user = get_user_author(auth_info)[0] acquire_lock_raise(git_action, fail_msg="Could not acquire lock to merge %s #%s" % (doctype_display_name, doc_id)) try: git_action.checkout_master() written_fp = git_action.path_for_doc(doc_id) if os.path.exists(written_fp): master_file_blob_sha = git_action.get_blob_sha_for_file(written_fp) else: raise GitWorkflowError('{t} "{i}" does not exist on master'.format(t=doctype_display_name, i=doc_id)) branch = git_action.create_or_checkout_branch(gh_user, doc_id, parent_sha) new_sha = git_action.merge('master', branch) finally: git_action.release_lock() # What other useful information should be returned on a successful write? return { "error": 0, "resource_id": doc_id, "branch_name": branch, "description": "Updated %s #%s" % (doctype_display_name, doc_id), "sha": new_sha, "merged_sha": master_file_blob_sha, }
def function[merge_from_master, parameter[git_action, doc_id, auth_info, parent_sha, doctype_display_name]]: constant[merge from master into the WIP for this document/author this is needed to allow a worker's future saves to be merged seamlessly into master ] variable[gh_user] assign[=] call[call[name[get_user_author], parameter[name[auth_info]]]][constant[0]] call[name[acquire_lock_raise], parameter[name[git_action]]] <ast.Try object at 0x7da20c6a80d0> return[dictionary[[<ast.Constant object at 0x7da20c6ab5b0>, <ast.Constant object at 0x7da20c6a8b20>, <ast.Constant object at 0x7da20c6aba30>, <ast.Constant object at 0x7da20c6a9cc0>, <ast.Constant object at 0x7da20c6ab8b0>, <ast.Constant object at 0x7da20c6a9c00>], [<ast.Constant object at 0x7da20c6a9420>, <ast.Name object at 0x7da20c6aabc0>, <ast.Name object at 0x7da20c6ab3d0>, <ast.BinOp object at 0x7da20c6aaf50>, <ast.Name object at 0x7da20c6a8040>, <ast.Name object at 0x7da20c6a8760>]]]
keyword[def] identifier[merge_from_master] ( identifier[git_action] , identifier[doc_id] , identifier[auth_info] , identifier[parent_sha] , identifier[doctype_display_name] = literal[string] ): literal[string] identifier[gh_user] = identifier[get_user_author] ( identifier[auth_info] )[ literal[int] ] identifier[acquire_lock_raise] ( identifier[git_action] , identifier[fail_msg] = literal[string] %( identifier[doctype_display_name] , identifier[doc_id] )) keyword[try] : identifier[git_action] . identifier[checkout_master] () identifier[written_fp] = identifier[git_action] . identifier[path_for_doc] ( identifier[doc_id] ) keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[written_fp] ): identifier[master_file_blob_sha] = identifier[git_action] . identifier[get_blob_sha_for_file] ( identifier[written_fp] ) keyword[else] : keyword[raise] identifier[GitWorkflowError] ( literal[string] . identifier[format] ( identifier[t] = identifier[doctype_display_name] , identifier[i] = identifier[doc_id] )) identifier[branch] = identifier[git_action] . identifier[create_or_checkout_branch] ( identifier[gh_user] , identifier[doc_id] , identifier[parent_sha] ) identifier[new_sha] = identifier[git_action] . identifier[merge] ( literal[string] , identifier[branch] ) keyword[finally] : identifier[git_action] . identifier[release_lock] () keyword[return] { literal[string] : literal[int] , literal[string] : identifier[doc_id] , literal[string] : identifier[branch] , literal[string] : literal[string] %( identifier[doctype_display_name] , identifier[doc_id] ), literal[string] : identifier[new_sha] , literal[string] : identifier[master_file_blob_sha] , }
def merge_from_master(git_action, doc_id, auth_info, parent_sha, doctype_display_name='document'): """Merge from master into the WIP branch for this document/author. This is needed to allow a worker's future saves to be merged seamlessly into master. """ gh_user = get_user_author(auth_info)[0] acquire_lock_raise(git_action, fail_msg='Could not acquire lock to merge %s #%s' % (doctype_display_name, doc_id)) try: git_action.checkout_master() written_fp = git_action.path_for_doc(doc_id) if os.path.exists(written_fp): master_file_blob_sha = git_action.get_blob_sha_for_file(written_fp) # depends on [control=['if'], data=[]] else: raise GitWorkflowError('{t} "{i}" does not exist on master'.format(t=doctype_display_name, i=doc_id)) branch = git_action.create_or_checkout_branch(gh_user, doc_id, parent_sha) new_sha = git_action.merge('master', branch) # depends on [control=['try'], data=[]] finally: git_action.release_lock() # What other useful information should be returned on a successful write? return {'error': 0, 'resource_id': doc_id, 'branch_name': branch, 'description': 'Updated %s #%s' % (doctype_display_name, doc_id), 'sha': new_sha, 'merged_sha': master_file_blob_sha}
def _read_pyMatch(fn, precursors): """ read pyMatch file and perform realignment of hits """ with open(fn) as handle: reads = defaultdict(realign) for line in handle: query_name, seq, chrom, reference_start, end, mism, add = line.split() reference_start = int(reference_start) # chrom = handle.getrname(cols[1]) # print("%s %s %s %s" % (line.query_name, line.reference_start, line.query_sequence, chrom)) if query_name not in reads: reads[query_name].sequence = seq iso = isomir() iso.align = line iso.start = reference_start iso.subs, iso.add = _realign(reads[query_name].sequence, precursors[chrom], reference_start) logger.debug("%s %s %s %s %s" % (query_name, reference_start, chrom, iso.subs, iso.add)) if len(iso.subs) > 1: continue reads[query_name].set_precursor(chrom, iso) reads = _clean_hits(reads) return reads
def function[_read_pyMatch, parameter[fn, precursors]]: constant[ read pyMatch file and perform realignment of hits ] with call[name[open], parameter[name[fn]]] begin[:] variable[reads] assign[=] call[name[defaultdict], parameter[name[realign]]] for taget[name[line]] in starred[name[handle]] begin[:] <ast.Tuple object at 0x7da1b0341de0> assign[=] call[name[line].split, parameter[]] variable[reference_start] assign[=] call[name[int], parameter[name[reference_start]]] if compare[name[query_name] <ast.NotIn object at 0x7da2590d7190> name[reads]] begin[:] call[name[reads]][name[query_name]].sequence assign[=] name[seq] variable[iso] assign[=] call[name[isomir], parameter[]] name[iso].align assign[=] name[line] name[iso].start assign[=] name[reference_start] <ast.Tuple object at 0x7da1b0338460> assign[=] call[name[_realign], parameter[call[name[reads]][name[query_name]].sequence, call[name[precursors]][name[chrom]], name[reference_start]]] call[name[logger].debug, parameter[binary_operation[constant[%s %s %s %s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b033b040>, <ast.Name object at 0x7da1b0339d20>, <ast.Name object at 0x7da1b033a1a0>, <ast.Attribute object at 0x7da1b0339f30>, <ast.Attribute object at 0x7da1b0338ac0>]]]]] if compare[call[name[len], parameter[name[iso].subs]] greater[>] constant[1]] begin[:] continue call[call[name[reads]][name[query_name]].set_precursor, parameter[name[chrom], name[iso]]] variable[reads] assign[=] call[name[_clean_hits], parameter[name[reads]]] return[name[reads]]
keyword[def] identifier[_read_pyMatch] ( identifier[fn] , identifier[precursors] ): literal[string] keyword[with] identifier[open] ( identifier[fn] ) keyword[as] identifier[handle] : identifier[reads] = identifier[defaultdict] ( identifier[realign] ) keyword[for] identifier[line] keyword[in] identifier[handle] : identifier[query_name] , identifier[seq] , identifier[chrom] , identifier[reference_start] , identifier[end] , identifier[mism] , identifier[add] = identifier[line] . identifier[split] () identifier[reference_start] = identifier[int] ( identifier[reference_start] ) keyword[if] identifier[query_name] keyword[not] keyword[in] identifier[reads] : identifier[reads] [ identifier[query_name] ]. identifier[sequence] = identifier[seq] identifier[iso] = identifier[isomir] () identifier[iso] . identifier[align] = identifier[line] identifier[iso] . identifier[start] = identifier[reference_start] identifier[iso] . identifier[subs] , identifier[iso] . identifier[add] = identifier[_realign] ( identifier[reads] [ identifier[query_name] ]. identifier[sequence] , identifier[precursors] [ identifier[chrom] ], identifier[reference_start] ) identifier[logger] . identifier[debug] ( literal[string] %( identifier[query_name] , identifier[reference_start] , identifier[chrom] , identifier[iso] . identifier[subs] , identifier[iso] . identifier[add] )) keyword[if] identifier[len] ( identifier[iso] . identifier[subs] )> literal[int] : keyword[continue] identifier[reads] [ identifier[query_name] ]. identifier[set_precursor] ( identifier[chrom] , identifier[iso] ) identifier[reads] = identifier[_clean_hits] ( identifier[reads] ) keyword[return] identifier[reads]
def _read_pyMatch(fn, precursors): """ read pyMatch file and perform realignment of hits """ with open(fn) as handle: reads = defaultdict(realign) for line in handle: (query_name, seq, chrom, reference_start, end, mism, add) = line.split() reference_start = int(reference_start) # chrom = handle.getrname(cols[1]) # print("%s %s %s %s" % (line.query_name, line.reference_start, line.query_sequence, chrom)) if query_name not in reads: reads[query_name].sequence = seq # depends on [control=['if'], data=['query_name', 'reads']] iso = isomir() iso.align = line iso.start = reference_start (iso.subs, iso.add) = _realign(reads[query_name].sequence, precursors[chrom], reference_start) logger.debug('%s %s %s %s %s' % (query_name, reference_start, chrom, iso.subs, iso.add)) if len(iso.subs) > 1: continue # depends on [control=['if'], data=[]] reads[query_name].set_precursor(chrom, iso) # depends on [control=['for'], data=['line']] reads = _clean_hits(reads) # depends on [control=['with'], data=['handle']] return reads
def set_close_callback(self, callback: Optional[Callable[[], None]]) -> None: """Call the given callback when the stream is closed. This mostly is not necessary for applications that use the `.Future` interface; all outstanding ``Futures`` will resolve with a `StreamClosedError` when the stream is closed. However, it is still useful as a way to signal that the stream has been closed while no other read or write is in progress. Unlike other callback-based interfaces, ``set_close_callback`` was not removed in Tornado 6.0. """ self._close_callback = callback self._maybe_add_error_listener()
def function[set_close_callback, parameter[self, callback]]: constant[Call the given callback when the stream is closed. This mostly is not necessary for applications that use the `.Future` interface; all outstanding ``Futures`` will resolve with a `StreamClosedError` when the stream is closed. However, it is still useful as a way to signal that the stream has been closed while no other read or write is in progress. Unlike other callback-based interfaces, ``set_close_callback`` was not removed in Tornado 6.0. ] name[self]._close_callback assign[=] name[callback] call[name[self]._maybe_add_error_listener, parameter[]]
keyword[def] identifier[set_close_callback] ( identifier[self] , identifier[callback] : identifier[Optional] [ identifier[Callable] [[], keyword[None] ]])-> keyword[None] : literal[string] identifier[self] . identifier[_close_callback] = identifier[callback] identifier[self] . identifier[_maybe_add_error_listener] ()
def set_close_callback(self, callback: Optional[Callable[[], None]]) -> None: """Call the given callback when the stream is closed. This mostly is not necessary for applications that use the `.Future` interface; all outstanding ``Futures`` will resolve with a `StreamClosedError` when the stream is closed. However, it is still useful as a way to signal that the stream has been closed while no other read or write is in progress. Unlike other callback-based interfaces, ``set_close_callback`` was not removed in Tornado 6.0. """ self._close_callback = callback self._maybe_add_error_listener()
def modify_permissions(self, permissions): """Modify the user's permissions.""" group = Group.objects.get(name='Admin') if permissions == 'admin': self.groups.add(group) else: self.groups.remove(group)
def function[modify_permissions, parameter[self, permissions]]: constant[Modify the user's permissions.] variable[group] assign[=] call[name[Group].objects.get, parameter[]] if compare[name[permissions] equal[==] constant[admin]] begin[:] call[name[self].groups.add, parameter[name[group]]]
keyword[def] identifier[modify_permissions] ( identifier[self] , identifier[permissions] ): literal[string] identifier[group] = identifier[Group] . identifier[objects] . identifier[get] ( identifier[name] = literal[string] ) keyword[if] identifier[permissions] == literal[string] : identifier[self] . identifier[groups] . identifier[add] ( identifier[group] ) keyword[else] : identifier[self] . identifier[groups] . identifier[remove] ( identifier[group] )
def modify_permissions(self, permissions): """Modify the user's permissions.""" group = Group.objects.get(name='Admin') if permissions == 'admin': self.groups.add(group) # depends on [control=['if'], data=[]] else: self.groups.remove(group)
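A hedged sketch of calling modify_permissions from inside a configured Django project; MyUser is a hypothetical user model that defines the method above, and the 'Admin' group must already exist:

from django.contrib.auth.models import Group

Group.objects.get_or_create(name='Admin')    # ensure the group exists
user = MyUser.objects.get(username='alice')  # MyUser is hypothetical
user.modify_permissions('admin')             # adds the Admin group
user.modify_permissions('viewer')            # any other value removes it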
def _lats(self): """ Return a vector containing the latitudes (in degrees) of each row of the gridded data. """ lats = 90. - _np.arccos(self.zeros) * 180. / _np.pi return lats
def function[_lats, parameter[self]]: constant[ Return a vector containing the latitudes (in degrees) of each row of the gridded data. ] variable[lats] assign[=] binary_operation[constant[90.0] - binary_operation[binary_operation[call[name[_np].arccos, parameter[name[self].zeros]] * constant[180.0]] / name[_np].pi]] return[name[lats]]
keyword[def] identifier[_lats] ( identifier[self] ): literal[string] identifier[lats] = literal[int] - identifier[_np] . identifier[arccos] ( identifier[self] . identifier[zeros] )* literal[int] / identifier[_np] . identifier[pi] keyword[return] identifier[lats]
def _lats(self): """ Return a vector containing the latitudes (in degrees) of each row of the gridded data. """ lats = 90.0 - _np.arccos(self.zeros) * 180.0 / _np.pi return lats
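A standalone numeric check of the same formula, assuming self.zeros holds the cos(colatitude) nodes of a Gauss-Legendre quadrature (as in pyshtools-style GLQ grids):

import numpy as np

zeros, _ = np.polynomial.legendre.leggauss(4)  # 4 nodes in [-1, 1]
lats = 90.0 - np.arccos(zeros) * 180.0 / np.pi
print(lats)  # four latitudes in degrees, symmetric about the equator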
def in_(self, value): """ Sets the operator type to Query.Op.IsIn and sets the value to the inputted value. :param value <variant> :return <Query> :usage |>>> from orb import Query as Q |>>> query = Q('test').isIn([1,2]) |>>> print query |test is_in [1,2] """ newq = self.copy() newq.setOp(Query.Op.IsIn) if isinstance(value, orb.Collection): newq.setValue(value) elif not isinstance(value, (set, list, tuple)): newq.setValue((value,)) else: newq.setValue(tuple(value)) return newq
def function[in_, parameter[self, value]]: constant[ Sets the operator type to Query.Op.IsIn and sets the value to the inputted value. :param value <variant> :return <Query> :usage |>>> from orb import Query as Q |>>> query = Q('test').isIn([1,2]) |>>> print query |test is_in [1,2] ] variable[newq] assign[=] call[name[self].copy, parameter[]] call[name[newq].setOp, parameter[name[Query].Op.IsIn]] if call[name[isinstance], parameter[name[value], name[orb].Collection]] begin[:] call[name[newq].setValue, parameter[name[value]]] return[name[newq]]
keyword[def] identifier[in_] ( identifier[self] , identifier[value] ): literal[string] identifier[newq] = identifier[self] . identifier[copy] () identifier[newq] . identifier[setOp] ( identifier[Query] . identifier[Op] . identifier[IsIn] ) keyword[if] identifier[isinstance] ( identifier[value] , identifier[orb] . identifier[Collection] ): identifier[newq] . identifier[setValue] ( identifier[value] ) keyword[elif] keyword[not] identifier[isinstance] ( identifier[value] ,( identifier[set] , identifier[list] , identifier[tuple] )): identifier[newq] . identifier[setValue] (( identifier[value] ,)) keyword[else] : identifier[newq] . identifier[setValue] ( identifier[tuple] ( identifier[value] )) keyword[return] identifier[newq]
def in_(self, value): """ Sets the operator type to Query.Op.IsIn and sets the value to the inputted value. :param value <variant> :return <Query> :usage |>>> from orb import Query as Q |>>> query = Q('test').isIn([1,2]) |>>> print query |test is_in [1,2] """ newq = self.copy() newq.setOp(Query.Op.IsIn) if isinstance(value, orb.Collection): newq.setValue(value) # depends on [control=['if'], data=[]] elif not isinstance(value, (set, list, tuple)): newq.setValue((value,)) # depends on [control=['if'], data=[]] else: newq.setValue(tuple(value)) return newq
def initChild(cls, obj, name, subContext, parent=None):
    """Implementation of initChild."""
    addr = statsId(obj)
    if addr not in cls.containerMap:
        if not parent:
            # Find out the parent of the calling object by going back through the call stack until a self != this.
            f = inspect.currentframe()
            while not cls.__getSelf(f):
                f = f.f_back
            this = cls.__getSelf(f)
            f = f.f_back
            while cls.__getSelf(f) == this or not cls.__getSelf(f):
                f = f.f_back
            parent = cls.__getSelf(f)

        # Default subcontext to an autoincrementing ID.
        if subContext is None:
            cls.subId += 1
            subContext = cls.subId
        if subContext != '':
            path = '%s/%s' % (name, subContext)
        else:
            path = name

        # Now that we have the name, create an entry for this object.
        cls.parentMap[addr] = parent
        container = cls.getContainerForObject(statsId(parent))
        if not container and isinstance(parent, unittest.TestCase):
            cls.init(parent, '/test-case')
        cls.containerMap[addr] = cls.__getStatContainer(path, cls.getContainerForObject(statsId(parent)))
    return cls.containerMap[addr]
def function[initChild, parameter[cls, obj, name, subContext, parent]]: constant[Implementation of initChild.] variable[addr] assign[=] call[name[statsId], parameter[name[obj]]] if compare[name[addr] <ast.NotIn object at 0x7da2590d7190> name[cls].containerMap] begin[:] if <ast.UnaryOp object at 0x7da20c6e7fa0> begin[:] variable[f] assign[=] call[name[inspect].currentframe, parameter[]] while <ast.UnaryOp object at 0x7da20c6e4940> begin[:] variable[f] assign[=] name[f].f_back variable[this] assign[=] call[name[cls].__getSelf, parameter[name[f]]] variable[f] assign[=] name[f].f_back while <ast.BoolOp object at 0x7da20c6e5750> begin[:] variable[f] assign[=] name[f].f_back variable[parent] assign[=] call[name[cls].__getSelf, parameter[name[f]]] if compare[name[subContext] is constant[None]] begin[:] <ast.AugAssign object at 0x7da20c6e6830> variable[subContext] assign[=] name[cls].subId if compare[name[subContext] is_not constant[]] begin[:] variable[path] assign[=] binary_operation[constant[%s/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0efe6b0>, <ast.Name object at 0x7da1b0efe530>]]] call[name[cls].parentMap][name[addr]] assign[=] name[parent] variable[container] assign[=] call[name[cls].getContainerForObject, parameter[call[name[statsId], parameter[name[parent]]]]] if <ast.BoolOp object at 0x7da1b0efee60> begin[:] call[name[cls].init, parameter[name[parent], constant[/test-case]]] call[name[cls].containerMap][name[addr]] assign[=] call[name[cls].__getStatContainer, parameter[name[path], call[name[cls].getContainerForObject, parameter[call[name[statsId], parameter[name[parent]]]]]]] return[call[name[cls].containerMap][name[addr]]]
keyword[def] identifier[initChild] ( identifier[cls] , identifier[obj] , identifier[name] , identifier[subContext] , identifier[parent] = keyword[None] ): literal[string] identifier[addr] = identifier[statsId] ( identifier[obj] ) keyword[if] identifier[addr] keyword[not] keyword[in] identifier[cls] . identifier[containerMap] : keyword[if] keyword[not] identifier[parent] : identifier[f] = identifier[inspect] . identifier[currentframe] () keyword[while] keyword[not] identifier[cls] . identifier[__getSelf] ( identifier[f] ): identifier[f] = identifier[f] . identifier[f_back] identifier[this] = identifier[cls] . identifier[__getSelf] ( identifier[f] ) identifier[f] = identifier[f] . identifier[f_back] keyword[while] identifier[cls] . identifier[__getSelf] ( identifier[f] )== identifier[this] keyword[or] keyword[not] identifier[cls] . identifier[__getSelf] ( identifier[f] ): identifier[f] = identifier[f] . identifier[f_back] identifier[parent] = identifier[cls] . identifier[__getSelf] ( identifier[f] ) keyword[if] identifier[subContext] keyword[is] keyword[None] : identifier[cls] . identifier[subId] += literal[int] identifier[subContext] = identifier[cls] . identifier[subId] keyword[if] identifier[subContext] keyword[is] keyword[not] literal[string] : identifier[path] = literal[string] %( identifier[name] , identifier[subContext] ) keyword[else] : identifier[path] = identifier[name] identifier[cls] . identifier[parentMap] [ identifier[addr] ]= identifier[parent] identifier[container] = identifier[cls] . identifier[getContainerForObject] ( identifier[statsId] ( identifier[parent] )) keyword[if] keyword[not] identifier[container] keyword[and] identifier[isinstance] ( identifier[parent] , identifier[unittest] . identifier[TestCase] ): identifier[cls] . identifier[init] ( identifier[parent] , literal[string] ) identifier[cls] . identifier[containerMap] [ identifier[addr] ]= identifier[cls] . identifier[__getStatContainer] ( identifier[path] , identifier[cls] . identifier[getContainerForObject] ( identifier[statsId] ( identifier[parent] ))) keyword[return] identifier[cls] . identifier[containerMap] [ identifier[addr] ]
def initChild(cls, obj, name, subContext, parent=None):
    """Implementation of initChild."""
    addr = statsId(obj)
    if addr not in cls.containerMap:
        if not parent:
            # Find out the parent of the calling object by going back through the call stack until a self != this.
            f = inspect.currentframe()
            while not cls.__getSelf(f):
                f = f.f_back # depends on [control=['while'], data=[]]
            this = cls.__getSelf(f)
            f = f.f_back
            while cls.__getSelf(f) == this or not cls.__getSelf(f):
                f = f.f_back # depends on [control=['while'], data=[]]
            parent = cls.__getSelf(f) # depends on [control=['if'], data=[]]
        # Default subcontext to an autoincrementing ID.
        if subContext is None:
            cls.subId += 1
            subContext = cls.subId # depends on [control=['if'], data=['subContext']]
        if subContext != '':
            path = '%s/%s' % (name, subContext) # depends on [control=['if'], data=['subContext']]
        else:
            path = name
        # Now that we have the name, create an entry for this object.
        cls.parentMap[addr] = parent
        container = cls.getContainerForObject(statsId(parent))
        if not container and isinstance(parent, unittest.TestCase):
            cls.init(parent, '/test-case') # depends on [control=['if'], data=[]]
        cls.containerMap[addr] = cls.__getStatContainer(path, cls.getContainerForObject(statsId(parent))) # depends on [control=['if'], data=['addr']]
    return cls.containerMap[addr]
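The stack walk above is the subtle part; a minimal standalone sketch of the same trick (a hypothetical helper, not part of the original API) that recovers the calling method's self from frame locals:

import inspect


def calling_self():
    frame = inspect.currentframe()
    for _ in range(2):  # skip this frame and our immediate caller
        frame = frame.f_back if frame else None
    return frame.f_locals.get('self') if frame else None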
def compare_clades(pw): """ print min. pident within each clade and then matrix of between-clade max. """ names = sorted(set([i for i in pw])) for i in range(0, 4): wi, bt = {}, {} for a in names: for b in pw[a]: if ';' not in a or ';' not in b: continue pident = pw[a][b] cA, cB = a.split(';')[i], b.split(';')[i] if i == 0 and '_' in cA and '_' in cB: cA = cA.rsplit('_', 1)[1] cB = cB.rsplit('_', 1)[1] elif '>' in cA or '>' in cB: cA = cA.split('>')[1] cB = cB.split('>')[1] if cA == cB: if cA not in wi: wi[cA] = [] wi[cA].append(pident) else: if cA not in bt: bt[cA] = {} if cB not in bt[cA]: bt[cA][cB] = [] bt[cA][cB].append(pident) print('\n# min. within') for clade, pidents in list(wi.items()): print('\t'.join(['wi:%s' % str(i), clade, str(min(pidents))])) # print matrix of maximum between groups comps = [] print('\n# max. between') for comp in print_pairwise(bt): if comp is not None: print('\t'.join(['bt:%s' % str(i)] + [str(j) for j in comp])) if comp[0] != '#': comps.extend([j for j in comp[1:] if j != '-']) print_comps(comps) # print matrix of median between groups comps = [] print('\n# median between') for comp in print_pairwise(bt, median = True): if comp is not None: print('\t'.join(['bt:%s' % str(i)] + [str(j) for j in comp])) if comp[0] != '#': comps.extend([j for j in comp[1:] if j != '-']) print_comps(comps)
def function[compare_clades, parameter[pw]]: constant[ print min. pident within each clade and then matrix of between-clade max. ] variable[names] assign[=] call[name[sorted], parameter[call[name[set], parameter[<ast.ListComp object at 0x7da1b2440a90>]]]] for taget[name[i]] in starred[call[name[range], parameter[constant[0], constant[4]]]] begin[:] <ast.Tuple object at 0x7da18f721450> assign[=] tuple[[<ast.Dict object at 0x7da18f722b90>, <ast.Dict object at 0x7da18f721480>]] for taget[name[a]] in starred[name[names]] begin[:] for taget[name[b]] in starred[call[name[pw]][name[a]]] begin[:] if <ast.BoolOp object at 0x7da18f7231f0> begin[:] continue variable[pident] assign[=] call[call[name[pw]][name[a]]][name[b]] <ast.Tuple object at 0x7da18f723a90> assign[=] tuple[[<ast.Subscript object at 0x7da18f7204c0>, <ast.Subscript object at 0x7da18f722230>]] if <ast.BoolOp object at 0x7da18f720820> begin[:] variable[cA] assign[=] call[call[name[cA].rsplit, parameter[constant[_], constant[1]]]][constant[1]] variable[cB] assign[=] call[call[name[cB].rsplit, parameter[constant[_], constant[1]]]][constant[1]] if compare[name[cA] equal[==] name[cB]] begin[:] if compare[name[cA] <ast.NotIn object at 0x7da2590d7190> name[wi]] begin[:] call[name[wi]][name[cA]] assign[=] list[[]] call[call[name[wi]][name[cA]].append, parameter[name[pident]]] call[name[print], parameter[constant[ # min. within]]] for taget[tuple[[<ast.Name object at 0x7da18f720ac0>, <ast.Name object at 0x7da18f722dd0>]]] in starred[call[name[list], parameter[call[name[wi].items, parameter[]]]]] begin[:] call[name[print], parameter[call[constant[ ].join, parameter[list[[<ast.BinOp object at 0x7da18f58d840>, <ast.Name object at 0x7da18f58eb90>, <ast.Call object at 0x7da18f58c280>]]]]]] variable[comps] assign[=] list[[]] call[name[print], parameter[constant[ # max. between]]] for taget[name[comp]] in starred[call[name[print_pairwise], parameter[name[bt]]]] begin[:] if compare[name[comp] is_not constant[None]] begin[:] call[name[print], parameter[call[constant[ ].join, parameter[binary_operation[list[[<ast.BinOp object at 0x7da18f58fd60>]] + <ast.ListComp object at 0x7da18f58f220>]]]]] if compare[call[name[comp]][constant[0]] not_equal[!=] constant[#]] begin[:] call[name[comps].extend, parameter[<ast.ListComp object at 0x7da18f58f4f0>]] call[name[print_comps], parameter[name[comps]]] variable[comps] assign[=] list[[]] call[name[print], parameter[constant[ # median between]]] for taget[name[comp]] in starred[call[name[print_pairwise], parameter[name[bt]]]] begin[:] if compare[name[comp] is_not constant[None]] begin[:] call[name[print], parameter[call[constant[ ].join, parameter[binary_operation[list[[<ast.BinOp object at 0x7da18f58cd60>]] + <ast.ListComp object at 0x7da18f58d570>]]]]] if compare[call[name[comp]][constant[0]] not_equal[!=] constant[#]] begin[:] call[name[comps].extend, parameter[<ast.ListComp object at 0x7da18f58e560>]] call[name[print_comps], parameter[name[comps]]]
keyword[def] identifier[compare_clades] ( identifier[pw] ): literal[string] identifier[names] = identifier[sorted] ( identifier[set] ([ identifier[i] keyword[for] identifier[i] keyword[in] identifier[pw] ])) keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , literal[int] ): identifier[wi] , identifier[bt] ={},{} keyword[for] identifier[a] keyword[in] identifier[names] : keyword[for] identifier[b] keyword[in] identifier[pw] [ identifier[a] ]: keyword[if] literal[string] keyword[not] keyword[in] identifier[a] keyword[or] literal[string] keyword[not] keyword[in] identifier[b] : keyword[continue] identifier[pident] = identifier[pw] [ identifier[a] ][ identifier[b] ] identifier[cA] , identifier[cB] = identifier[a] . identifier[split] ( literal[string] )[ identifier[i] ], identifier[b] . identifier[split] ( literal[string] )[ identifier[i] ] keyword[if] identifier[i] == literal[int] keyword[and] literal[string] keyword[in] identifier[cA] keyword[and] literal[string] keyword[in] identifier[cB] : identifier[cA] = identifier[cA] . identifier[rsplit] ( literal[string] , literal[int] )[ literal[int] ] identifier[cB] = identifier[cB] . identifier[rsplit] ( literal[string] , literal[int] )[ literal[int] ] keyword[elif] literal[string] keyword[in] identifier[cA] keyword[or] literal[string] keyword[in] identifier[cB] : identifier[cA] = identifier[cA] . identifier[split] ( literal[string] )[ literal[int] ] identifier[cB] = identifier[cB] . identifier[split] ( literal[string] )[ literal[int] ] keyword[if] identifier[cA] == identifier[cB] : keyword[if] identifier[cA] keyword[not] keyword[in] identifier[wi] : identifier[wi] [ identifier[cA] ]=[] identifier[wi] [ identifier[cA] ]. identifier[append] ( identifier[pident] ) keyword[else] : keyword[if] identifier[cA] keyword[not] keyword[in] identifier[bt] : identifier[bt] [ identifier[cA] ]={} keyword[if] identifier[cB] keyword[not] keyword[in] identifier[bt] [ identifier[cA] ]: identifier[bt] [ identifier[cA] ][ identifier[cB] ]=[] identifier[bt] [ identifier[cA] ][ identifier[cB] ]. identifier[append] ( identifier[pident] ) identifier[print] ( literal[string] ) keyword[for] identifier[clade] , identifier[pidents] keyword[in] identifier[list] ( identifier[wi] . identifier[items] ()): identifier[print] ( literal[string] . identifier[join] ([ literal[string] % identifier[str] ( identifier[i] ), identifier[clade] , identifier[str] ( identifier[min] ( identifier[pidents] ))])) identifier[comps] =[] identifier[print] ( literal[string] ) keyword[for] identifier[comp] keyword[in] identifier[print_pairwise] ( identifier[bt] ): keyword[if] identifier[comp] keyword[is] keyword[not] keyword[None] : identifier[print] ( literal[string] . identifier[join] ([ literal[string] % identifier[str] ( identifier[i] )]+[ identifier[str] ( identifier[j] ) keyword[for] identifier[j] keyword[in] identifier[comp] ])) keyword[if] identifier[comp] [ literal[int] ]!= literal[string] : identifier[comps] . identifier[extend] ([ identifier[j] keyword[for] identifier[j] keyword[in] identifier[comp] [ literal[int] :] keyword[if] identifier[j] != literal[string] ]) identifier[print_comps] ( identifier[comps] ) identifier[comps] =[] identifier[print] ( literal[string] ) keyword[for] identifier[comp] keyword[in] identifier[print_pairwise] ( identifier[bt] , identifier[median] = keyword[True] ): keyword[if] identifier[comp] keyword[is] keyword[not] keyword[None] : identifier[print] ( literal[string] . identifier[join] ([ literal[string] % identifier[str] ( identifier[i] )]+[ identifier[str] ( identifier[j] ) keyword[for] identifier[j] keyword[in] identifier[comp] ])) keyword[if] identifier[comp] [ literal[int] ]!= literal[string] : identifier[comps] . identifier[extend] ([ identifier[j] keyword[for] identifier[j] keyword[in] identifier[comp] [ literal[int] :] keyword[if] identifier[j] != literal[string] ]) identifier[print_comps] ( identifier[comps] )
def compare_clades(pw): """ print min. pident within each clade and then matrix of between-clade max. """ names = sorted(set([i for i in pw])) for i in range(0, 4): (wi, bt) = ({}, {}) for a in names: for b in pw[a]: if ';' not in a or ';' not in b: continue # depends on [control=['if'], data=[]] pident = pw[a][b] (cA, cB) = (a.split(';')[i], b.split(';')[i]) if i == 0 and '_' in cA and ('_' in cB): cA = cA.rsplit('_', 1)[1] cB = cB.rsplit('_', 1)[1] # depends on [control=['if'], data=[]] elif '>' in cA or '>' in cB: cA = cA.split('>')[1] cB = cB.split('>')[1] # depends on [control=['if'], data=[]] if cA == cB: if cA not in wi: wi[cA] = [] # depends on [control=['if'], data=['cA', 'wi']] wi[cA].append(pident) # depends on [control=['if'], data=['cA']] else: if cA not in bt: bt[cA] = {} # depends on [control=['if'], data=['cA', 'bt']] if cB not in bt[cA]: bt[cA][cB] = [] # depends on [control=['if'], data=['cB']] bt[cA][cB].append(pident) # depends on [control=['for'], data=['b']] # depends on [control=['for'], data=['a']] print('\n# min. within') for (clade, pidents) in list(wi.items()): print('\t'.join(['wi:%s' % str(i), clade, str(min(pidents))])) # depends on [control=['for'], data=[]] # print matrix of maximum between groups comps = [] print('\n# max. between') for comp in print_pairwise(bt): if comp is not None: print('\t'.join(['bt:%s' % str(i)] + [str(j) for j in comp])) if comp[0] != '#': comps.extend([j for j in comp[1:] if j != '-']) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['comp']] # depends on [control=['for'], data=['comp']] print_comps(comps) # print matrix of median between groups comps = [] print('\n# median between') for comp in print_pairwise(bt, median=True): if comp is not None: print('\t'.join(['bt:%s' % str(i)] + [str(j) for j in comp])) if comp[0] != '#': comps.extend([j for j in comp[1:] if j != '-']) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['comp']] # depends on [control=['for'], data=['comp']] print_comps(comps) # depends on [control=['for'], data=['i']]
def put_metric_data(self, namespace, name, value=None, timestamp=None, unit=None, dimensions=None, statistics=None): """ Publishes metric data points to Amazon CloudWatch. Amazon Cloudwatch associates the data points with the specified metric. If the specified metric does not exist, Amazon CloudWatch creates the metric. If a list is specified for some, but not all, of the arguments, the remaining arguments are repeated a corresponding number of times. :type namespace: str :param namespace: The namespace of the metric. :type name: str or list :param name: The name of the metric. :type value: float or list :param value: The value for the metric. :type timestamp: datetime or list :param timestamp: The time stamp used for the metric. If not specified, the default value is set to the time the metric data was received. :type unit: string or list :param unit: The unit of the metric. Valid Values: Seconds | Microseconds | Milliseconds | Bytes | Kilobytes | Megabytes | Gigabytes | Terabytes | Bits | Kilobits | Megabits | Gigabits | Terabits | Percent | Count | Bytes/Second | Kilobytes/Second | Megabytes/Second | Gigabytes/Second | Terabytes/Second | Bits/Second | Kilobits/Second | Megabits/Second | Gigabits/Second | Terabits/Second | Count/Second | None :type dimensions: dict :param dimensions: Add extra name value pairs to associate with the metric, i.e.: {'name1': value1, 'name2': (value2, value3)} :type statistics: dict or list :param statistics: Use a statistic set instead of a value, for example:: {'maximum': 30, 'minimum': 1, 'samplecount': 100, 'sum': 10000} """ params = {'Namespace': namespace} self.build_put_params(params, name, value=value, timestamp=timestamp, unit=unit, dimensions=dimensions, statistics=statistics) return self.get_status('PutMetricData', params)
def function[put_metric_data, parameter[self, namespace, name, value, timestamp, unit, dimensions, statistics]]: constant[ Publishes metric data points to Amazon CloudWatch. Amazon Cloudwatch associates the data points with the specified metric. If the specified metric does not exist, Amazon CloudWatch creates the metric. If a list is specified for some, but not all, of the arguments, the remaining arguments are repeated a corresponding number of times. :type namespace: str :param namespace: The namespace of the metric. :type name: str or list :param name: The name of the metric. :type value: float or list :param value: The value for the metric. :type timestamp: datetime or list :param timestamp: The time stamp used for the metric. If not specified, the default value is set to the time the metric data was received. :type unit: string or list :param unit: The unit of the metric. Valid Values: Seconds | Microseconds | Milliseconds | Bytes | Kilobytes | Megabytes | Gigabytes | Terabytes | Bits | Kilobits | Megabits | Gigabits | Terabits | Percent | Count | Bytes/Second | Kilobytes/Second | Megabytes/Second | Gigabytes/Second | Terabytes/Second | Bits/Second | Kilobits/Second | Megabits/Second | Gigabits/Second | Terabits/Second | Count/Second | None :type dimensions: dict :param dimensions: Add extra name value pairs to associate with the metric, i.e.: {'name1': value1, 'name2': (value2, value3)} :type statistics: dict or list :param statistics: Use a statistic set instead of a value, for example:: {'maximum': 30, 'minimum': 1, 'samplecount': 100, 'sum': 10000} ] variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b269ece0>], [<ast.Name object at 0x7da1b269cee0>]] call[name[self].build_put_params, parameter[name[params], name[name]]] return[call[name[self].get_status, parameter[constant[PutMetricData], name[params]]]]
keyword[def] identifier[put_metric_data] ( identifier[self] , identifier[namespace] , identifier[name] , identifier[value] = keyword[None] , identifier[timestamp] = keyword[None] , identifier[unit] = keyword[None] , identifier[dimensions] = keyword[None] , identifier[statistics] = keyword[None] ): literal[string] identifier[params] ={ literal[string] : identifier[namespace] } identifier[self] . identifier[build_put_params] ( identifier[params] , identifier[name] , identifier[value] = identifier[value] , identifier[timestamp] = identifier[timestamp] , identifier[unit] = identifier[unit] , identifier[dimensions] = identifier[dimensions] , identifier[statistics] = identifier[statistics] ) keyword[return] identifier[self] . identifier[get_status] ( literal[string] , identifier[params] )
def put_metric_data(self, namespace, name, value=None, timestamp=None, unit=None, dimensions=None, statistics=None): """ Publishes metric data points to Amazon CloudWatch. Amazon Cloudwatch associates the data points with the specified metric. If the specified metric does not exist, Amazon CloudWatch creates the metric. If a list is specified for some, but not all, of the arguments, the remaining arguments are repeated a corresponding number of times. :type namespace: str :param namespace: The namespace of the metric. :type name: str or list :param name: The name of the metric. :type value: float or list :param value: The value for the metric. :type timestamp: datetime or list :param timestamp: The time stamp used for the metric. If not specified, the default value is set to the time the metric data was received. :type unit: string or list :param unit: The unit of the metric. Valid Values: Seconds | Microseconds | Milliseconds | Bytes | Kilobytes | Megabytes | Gigabytes | Terabytes | Bits | Kilobits | Megabits | Gigabits | Terabits | Percent | Count | Bytes/Second | Kilobytes/Second | Megabytes/Second | Gigabytes/Second | Terabytes/Second | Bits/Second | Kilobits/Second | Megabits/Second | Gigabits/Second | Terabits/Second | Count/Second | None :type dimensions: dict :param dimensions: Add extra name value pairs to associate with the metric, i.e.: {'name1': value1, 'name2': (value2, value3)} :type statistics: dict or list :param statistics: Use a statistic set instead of a value, for example:: {'maximum': 30, 'minimum': 1, 'samplecount': 100, 'sum': 10000} """ params = {'Namespace': namespace} self.build_put_params(params, name, value=value, timestamp=timestamp, unit=unit, dimensions=dimensions, statistics=statistics) return self.get_status('PutMetricData', params)
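A hedged usage sketch for the legacy boto2 CloudWatch API above; it needs real AWS credentials to run, and the namespace, metric name, and dimension are placeholders:

import boto.ec2.cloudwatch

conn = boto.ec2.cloudwatch.connect_to_region('us-east-1')
conn.put_metric_data(
    namespace='MyApp',
    name='Latency',
    value=123.0,
    unit='Milliseconds',
    dimensions={'Host': 'web-1'},
)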
def from_array(cls, arr, index=None, name=None, dtype=None, copy=False,
               fastpath=False):
    """
    Construct Series from array.

    .. deprecated:: 0.23.0
        Use pd.Series(..) constructor instead.

    """
    warnings.warn("'from_array' is deprecated and will be removed in a "
                  "future version. Please use the pd.Series(..) "
                  "constructor instead.", FutureWarning, stacklevel=2)
    if isinstance(arr, ABCSparseArray):
        from pandas.core.sparse.series import SparseSeries
        cls = SparseSeries
    return cls(arr, index=index, name=name, dtype=dtype, copy=copy,
               fastpath=fastpath)
def function[from_array, parameter[cls, arr, index, name, dtype, copy, fastpath]]: constant[ Construct Series from array. .. deprecated :: 0.23.0 Use pd.Series(..) constructor instead. ] call[name[warnings].warn, parameter[constant['from_array' is deprecated and will be removed in a future version. Please use the pd.Series(..) constructor instead.], name[FutureWarning]]] if call[name[isinstance], parameter[name[arr], name[ABCSparseArray]]] begin[:] from relative_module[pandas.core.sparse.series] import module[SparseSeries] variable[cls] assign[=] name[SparseSeries] return[call[name[cls], parameter[name[arr]]]]
keyword[def] identifier[from_array] ( identifier[cls] , identifier[arr] , identifier[index] = keyword[None] , identifier[name] = keyword[None] , identifier[dtype] = keyword[None] , identifier[copy] = keyword[False] , identifier[fastpath] = keyword[False] ): literal[string] identifier[warnings] . identifier[warn] ( literal[string] literal[string] literal[string] , identifier[FutureWarning] , identifier[stacklevel] = literal[int] ) keyword[if] identifier[isinstance] ( identifier[arr] , identifier[ABCSparseArray] ): keyword[from] identifier[pandas] . identifier[core] . identifier[sparse] . identifier[series] keyword[import] identifier[SparseSeries] identifier[cls] = identifier[SparseSeries] keyword[return] identifier[cls] ( identifier[arr] , identifier[index] = identifier[index] , identifier[name] = identifier[name] , identifier[dtype] = identifier[dtype] , identifier[copy] = identifier[copy] , identifier[fastpath] = identifier[fastpath] )
def from_array(cls, arr, index=None, name=None, dtype=None, copy=False, fastpath=False):
    """
    Construct Series from array.

    .. deprecated:: 0.23.0
        Use pd.Series(..) constructor instead.

    """
    warnings.warn("'from_array' is deprecated and will be removed in a future version. Please use the pd.Series(..) constructor instead.", FutureWarning, stacklevel=2)
    if isinstance(arr, ABCSparseArray):
        from pandas.core.sparse.series import SparseSeries
        cls = SparseSeries # depends on [control=['if'], data=[]]
    return cls(arr, index=index, name=name, dtype=dtype, copy=copy, fastpath=fastpath)
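Per the deprecation notice, the plain constructor is the replacement; a minimal sketch:

import numpy as np
import pandas as pd

arr = np.array([1.0, 2.0, 3.0])
s = pd.Series(arr, index=['a', 'b', 'c'], name='values')
print(s.dtype)  # float64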
def copy_file(filename): """Copy the file and put the correct tag""" print("Updating file %s" % filename) out_dir = os.path.abspath(DIRECTORY) tags = filename[:-4].split("-") tags[-2] = tags[-2].replace("m", "") new_name = "-".join(tags) + ".whl" wheel_flag = "-".join(tags[2:]) with InWheelCtx(os.path.join(DIRECTORY, filename)) as ctx: info_fname = os.path.join(_dist_info_dir(ctx.path), 'WHEEL') infos = pkginfo.read_pkg_info(info_fname) print("Changing Tag %s to %s" % (infos["Tag"], wheel_flag)) del infos['Tag'] infos.add_header('Tag', wheel_flag) pkginfo.write_pkg_info(info_fname, infos) ctx.out_wheel = os.path.join(out_dir, new_name) print("Saving new wheel into %s" % ctx.out_wheel)
def function[copy_file, parameter[filename]]: constant[Copy the file and put the correct tag] call[name[print], parameter[binary_operation[constant[Updating file %s] <ast.Mod object at 0x7da2590d6920> name[filename]]]] variable[out_dir] assign[=] call[name[os].path.abspath, parameter[name[DIRECTORY]]] variable[tags] assign[=] call[call[name[filename]][<ast.Slice object at 0x7da2049606a0>].split, parameter[constant[-]]] call[name[tags]][<ast.UnaryOp object at 0x7da204963d30>] assign[=] call[call[name[tags]][<ast.UnaryOp object at 0x7da2049612a0>].replace, parameter[constant[m], constant[]]] variable[new_name] assign[=] binary_operation[call[constant[-].join, parameter[name[tags]]] + constant[.whl]] variable[wheel_flag] assign[=] call[constant[-].join, parameter[call[name[tags]][<ast.Slice object at 0x7da204963130>]]] with call[name[InWheelCtx], parameter[call[name[os].path.join, parameter[name[DIRECTORY], name[filename]]]]] begin[:] variable[info_fname] assign[=] call[name[os].path.join, parameter[call[name[_dist_info_dir], parameter[name[ctx].path]], constant[WHEEL]]] variable[infos] assign[=] call[name[pkginfo].read_pkg_info, parameter[name[info_fname]]] call[name[print], parameter[binary_operation[constant[Changing Tag %s to %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da2054a6980>, <ast.Name object at 0x7da2054a4cd0>]]]]] <ast.Delete object at 0x7da2054a68f0> call[name[infos].add_header, parameter[constant[Tag], name[wheel_flag]]] call[name[pkginfo].write_pkg_info, parameter[name[info_fname], name[infos]]] name[ctx].out_wheel assign[=] call[name[os].path.join, parameter[name[out_dir], name[new_name]]] call[name[print], parameter[binary_operation[constant[Saving new wheel into %s] <ast.Mod object at 0x7da2590d6920> name[ctx].out_wheel]]]
keyword[def] identifier[copy_file] ( identifier[filename] ): literal[string] identifier[print] ( literal[string] % identifier[filename] ) identifier[out_dir] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[DIRECTORY] ) identifier[tags] = identifier[filename] [:- literal[int] ]. identifier[split] ( literal[string] ) identifier[tags] [- literal[int] ]= identifier[tags] [- literal[int] ]. identifier[replace] ( literal[string] , literal[string] ) identifier[new_name] = literal[string] . identifier[join] ( identifier[tags] )+ literal[string] identifier[wheel_flag] = literal[string] . identifier[join] ( identifier[tags] [ literal[int] :]) keyword[with] identifier[InWheelCtx] ( identifier[os] . identifier[path] . identifier[join] ( identifier[DIRECTORY] , identifier[filename] )) keyword[as] identifier[ctx] : identifier[info_fname] = identifier[os] . identifier[path] . identifier[join] ( identifier[_dist_info_dir] ( identifier[ctx] . identifier[path] ), literal[string] ) identifier[infos] = identifier[pkginfo] . identifier[read_pkg_info] ( identifier[info_fname] ) identifier[print] ( literal[string] %( identifier[infos] [ literal[string] ], identifier[wheel_flag] )) keyword[del] identifier[infos] [ literal[string] ] identifier[infos] . identifier[add_header] ( literal[string] , identifier[wheel_flag] ) identifier[pkginfo] . identifier[write_pkg_info] ( identifier[info_fname] , identifier[infos] ) identifier[ctx] . identifier[out_wheel] = identifier[os] . identifier[path] . identifier[join] ( identifier[out_dir] , identifier[new_name] ) identifier[print] ( literal[string] % identifier[ctx] . identifier[out_wheel] )
def copy_file(filename): """Copy the file and put the correct tag""" print('Updating file %s' % filename) out_dir = os.path.abspath(DIRECTORY) tags = filename[:-4].split('-') tags[-2] = tags[-2].replace('m', '') new_name = '-'.join(tags) + '.whl' wheel_flag = '-'.join(tags[2:]) with InWheelCtx(os.path.join(DIRECTORY, filename)) as ctx: info_fname = os.path.join(_dist_info_dir(ctx.path), 'WHEEL') infos = pkginfo.read_pkg_info(info_fname) print('Changing Tag %s to %s' % (infos['Tag'], wheel_flag)) del infos['Tag'] infos.add_header('Tag', wheel_flag) pkginfo.write_pkg_info(info_fname, infos) ctx.out_wheel = os.path.join(out_dir, new_name) print('Saving new wheel into %s' % ctx.out_wheel) # depends on [control=['with'], data=['ctx']]
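The filename surgery in copy_file is easier to see in isolation; a standalone illustration of just the tag rewrite, with a made-up wheel name and no wheel I/O:

filename = 'pkg-1.0-cp37-cp37m-linux_x86_64.whl'
tags = filename[:-4].split('-')
tags[-2] = tags[-2].replace('m', '')  # ABI tag cp37m -> cp37
new_name = '-'.join(tags) + '.whl'    # pkg-1.0-cp37-cp37-linux_x86_64.whl
wheel_flag = '-'.join(tags[2:])       # cp37-cp37-linux_x86_64
print(new_name, wheel_flag)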
def launchDashboardOverlay(self, pchAppKey): """ Launches the dashboard overlay application if it is not already running. This call is only valid for dashboard overlay applications. """ fn = self.function_table.launchDashboardOverlay result = fn(pchAppKey) return result
def function[launchDashboardOverlay, parameter[self, pchAppKey]]: constant[ Launches the dashboard overlay application if it is not already running. This call is only valid for dashboard overlay applications. ] variable[fn] assign[=] name[self].function_table.launchDashboardOverlay variable[result] assign[=] call[name[fn], parameter[name[pchAppKey]]] return[name[result]]
keyword[def] identifier[launchDashboardOverlay] ( identifier[self] , identifier[pchAppKey] ): literal[string] identifier[fn] = identifier[self] . identifier[function_table] . identifier[launchDashboardOverlay] identifier[result] = identifier[fn] ( identifier[pchAppKey] ) keyword[return] identifier[result]
def launchDashboardOverlay(self, pchAppKey): """ Launches the dashboard overlay application if it is not already running. This call is only valid for dashboard overlay applications. """ fn = self.function_table.launchDashboardOverlay result = fn(pchAppKey) return result
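A hedged sketch with pyopenvr, assuming SteamVR is running and the overlay app key (a placeholder string here) is already registered:

import openvr

openvr.init(openvr.VRApplication_Overlay)
apps = openvr.VRApplications()  # exposes the wrapper above
apps.launchDashboardOverlay('my.company.overlay')
openvr.shutdown()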
def clear_matplotlib_ticks(ax=None, axis="both"): """ Clears the default matplotlib axes, or the one specified by the axis argument. Parameters ---------- ax: Matplotlib AxesSubplot, None The subplot to draw on. axis: string, "both" The axis to clear: "x" or "horizontal", "y" or "vertical", or "both" """ if not ax: return if axis.lower() in ["both", "x", "horizontal"]: ax.set_xticks([], []) if axis.lower() in ["both", "y", "vertical"]: ax.set_yticks([], [])
def function[clear_matplotlib_ticks, parameter[ax, axis]]: constant[ Clears the default matplotlib axes, or the one specified by the axis argument. Parameters ---------- ax: Matplotlib AxesSubplot, None The subplot to draw on. axis: string, "both" The axis to clear: "x" or "horizontal", "y" or "vertical", or "both" ] if <ast.UnaryOp object at 0x7da2047eada0> begin[:] return[None] if compare[call[name[axis].lower, parameter[]] in list[[<ast.Constant object at 0x7da2047e81c0>, <ast.Constant object at 0x7da2047e8760>, <ast.Constant object at 0x7da2047ea4d0>]]] begin[:] call[name[ax].set_xticks, parameter[list[[]], list[[]]]] if compare[call[name[axis].lower, parameter[]] in list[[<ast.Constant object at 0x7da2047eb550>, <ast.Constant object at 0x7da2047e9810>, <ast.Constant object at 0x7da2047ea770>]]] begin[:] call[name[ax].set_yticks, parameter[list[[]], list[[]]]]
keyword[def] identifier[clear_matplotlib_ticks] ( identifier[ax] = keyword[None] , identifier[axis] = literal[string] ): literal[string] keyword[if] keyword[not] identifier[ax] : keyword[return] keyword[if] identifier[axis] . identifier[lower] () keyword[in] [ literal[string] , literal[string] , literal[string] ]: identifier[ax] . identifier[set_xticks] ([],[]) keyword[if] identifier[axis] . identifier[lower] () keyword[in] [ literal[string] , literal[string] , literal[string] ]: identifier[ax] . identifier[set_yticks] ([],[])
def clear_matplotlib_ticks(ax=None, axis='both'): """ Clears the default matplotlib axes, or the one specified by the axis argument. Parameters ---------- ax: Matplotlib AxesSubplot, None The subplot to draw on. axis: string, "both" The axis to clear: "x" or "horizontal", "y" or "vertical", or "both" """ if not ax: return # depends on [control=['if'], data=[]] if axis.lower() in ['both', 'x', 'horizontal']: ax.set_xticks([], []) # depends on [control=['if'], data=[]] if axis.lower() in ['both', 'y', 'vertical']: ax.set_yticks([], []) # depends on [control=['if'], data=[]]
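A usage sketch for the helper above:

import matplotlib.pyplot as plt

fig, ax = plt.subplots()
ax.plot([0, 1, 2], [0, 1, 4])
clear_matplotlib_ticks(ax=ax, axis='x')  # x ticks removed, y ticks kept
plt.show()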
def apply_transformer_types(network): """Calculate transformer electrical parameters x, r, b, g from standard types. """ trafos_with_types_b = network.transformers.type != "" if trafos_with_types_b.zsum() == 0: return missing_types = (pd.Index(network.transformers.loc[trafos_with_types_b, 'type'].unique()) .difference(network.transformer_types.index)) assert missing_types.empty, ("The type(s) {} do(es) not exist in network.transformer_types" .format(", ".join(missing_types))) # Get a copy of the transformers data # (joining pulls in "phase_shift", "s_nom", "tap_side" from TransformerType) t = (network.transformers.loc[trafos_with_types_b, ["type", "tap_position", "num_parallel"]] .join(network.transformer_types, on='type')) t["r"] = t["vscr"] /100. t["x"] = np.sqrt((t["vsc"]/100.)**2 - t["r"]**2) #NB: b and g are per unit of s_nom t["g"] = t["pfe"]/(1000. * t["s_nom"]) #for some bizarre reason, some of the standard types in pandapower have i0^2 < g^2 t["b"] = - np.sqrt(((t["i0"]/100.)**2 - t["g"]**2).clip(lower=0)) for attr in ["r","x"]: t[attr] /= t["num_parallel"] for attr in ["b","g"]: t[attr] *= t["num_parallel"] #deal with tap positions t["tap_ratio"] = 1. + (t["tap_position"] - t["tap_neutral"]) * (t["tap_step"]/100.) # now set calculated values on live transformers for attr in ["r", "x", "g", "b", "phase_shift", "s_nom", "tap_side", "tap_ratio"]: network.transformers.loc[trafos_with_types_b, attr] = t[attr]
def function[apply_transformer_types, parameter[network]]: constant[Calculate transformer electrical parameters x, r, b, g from standard types. ] variable[trafos_with_types_b] assign[=] compare[name[network].transformers.type not_equal[!=] constant[]] if compare[call[name[trafos_with_types_b].zsum, parameter[]] equal[==] constant[0]] begin[:] return[None] variable[missing_types] assign[=] call[call[name[pd].Index, parameter[call[call[name[network].transformers.loc][tuple[[<ast.Name object at 0x7da2054a7ac0>, <ast.Constant object at 0x7da2054a6ef0>]]].unique, parameter[]]]].difference, parameter[name[network].transformer_types.index]] assert[name[missing_types].empty] variable[t] assign[=] call[call[name[network].transformers.loc][tuple[[<ast.Name object at 0x7da2054a6e90>, <ast.List object at 0x7da2054a7700>]]].join, parameter[name[network].transformer_types]] call[name[t]][constant[r]] assign[=] binary_operation[call[name[t]][constant[vscr]] / constant[100.0]] call[name[t]][constant[x]] assign[=] call[name[np].sqrt, parameter[binary_operation[binary_operation[binary_operation[call[name[t]][constant[vsc]] / constant[100.0]] ** constant[2]] - binary_operation[call[name[t]][constant[r]] ** constant[2]]]]] call[name[t]][constant[g]] assign[=] binary_operation[call[name[t]][constant[pfe]] / binary_operation[constant[1000.0] * call[name[t]][constant[s_nom]]]] call[name[t]][constant[b]] assign[=] <ast.UnaryOp object at 0x7da2054a5150> for taget[name[attr]] in starred[list[[<ast.Constant object at 0x7da2054a5ed0>, <ast.Constant object at 0x7da2054a7ee0>]]] begin[:] <ast.AugAssign object at 0x7da2054a42b0> for taget[name[attr]] in starred[list[[<ast.Constant object at 0x7da2054a7910>, <ast.Constant object at 0x7da2054a6650>]]] begin[:] <ast.AugAssign object at 0x7da2054a6830> call[name[t]][constant[tap_ratio]] assign[=] binary_operation[constant[1.0] + binary_operation[binary_operation[call[name[t]][constant[tap_position]] - call[name[t]][constant[tap_neutral]]] * binary_operation[call[name[t]][constant[tap_step]] / constant[100.0]]]] for taget[name[attr]] in starred[list[[<ast.Constant object at 0x7da2054a46d0>, <ast.Constant object at 0x7da2054a7e80>, <ast.Constant object at 0x7da2054a7e20>, <ast.Constant object at 0x7da2054a54e0>, <ast.Constant object at 0x7da2054a4f10>, <ast.Constant object at 0x7da2054a7460>, <ast.Constant object at 0x7da2054a5030>, <ast.Constant object at 0x7da2054a4490>]]] begin[:] call[name[network].transformers.loc][tuple[[<ast.Name object at 0x7da2054a6020>, <ast.Name object at 0x7da2054a6c50>]]] assign[=] call[name[t]][name[attr]]
keyword[def] identifier[apply_transformer_types] ( identifier[network] ): literal[string] identifier[trafos_with_types_b] = identifier[network] . identifier[transformers] . identifier[type] != literal[string] keyword[if] identifier[trafos_with_types_b] . identifier[zsum] ()== literal[int] : keyword[return] identifier[missing_types] =( identifier[pd] . identifier[Index] ( identifier[network] . identifier[transformers] . identifier[loc] [ identifier[trafos_with_types_b] , literal[string] ]. identifier[unique] ()) . identifier[difference] ( identifier[network] . identifier[transformer_types] . identifier[index] )) keyword[assert] identifier[missing_types] . identifier[empty] ,( literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[missing_types] ))) identifier[t] =( identifier[network] . identifier[transformers] . identifier[loc] [ identifier[trafos_with_types_b] ,[ literal[string] , literal[string] , literal[string] ]] . identifier[join] ( identifier[network] . identifier[transformer_types] , identifier[on] = literal[string] )) identifier[t] [ literal[string] ]= identifier[t] [ literal[string] ]/ literal[int] identifier[t] [ literal[string] ]= identifier[np] . identifier[sqrt] (( identifier[t] [ literal[string] ]/ literal[int] )** literal[int] - identifier[t] [ literal[string] ]** literal[int] ) identifier[t] [ literal[string] ]= identifier[t] [ literal[string] ]/( literal[int] * identifier[t] [ literal[string] ]) identifier[t] [ literal[string] ]=- identifier[np] . identifier[sqrt] ((( identifier[t] [ literal[string] ]/ literal[int] )** literal[int] - identifier[t] [ literal[string] ]** literal[int] ). identifier[clip] ( identifier[lower] = literal[int] )) keyword[for] identifier[attr] keyword[in] [ literal[string] , literal[string] ]: identifier[t] [ identifier[attr] ]/= identifier[t] [ literal[string] ] keyword[for] identifier[attr] keyword[in] [ literal[string] , literal[string] ]: identifier[t] [ identifier[attr] ]*= identifier[t] [ literal[string] ] identifier[t] [ literal[string] ]= literal[int] +( identifier[t] [ literal[string] ]- identifier[t] [ literal[string] ])*( identifier[t] [ literal[string] ]/ literal[int] ) keyword[for] identifier[attr] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]: identifier[network] . identifier[transformers] . identifier[loc] [ identifier[trafos_with_types_b] , identifier[attr] ]= identifier[t] [ identifier[attr] ]
def apply_transformer_types(network): """Calculate transformer electrical parameters x, r, b, g from standard types. """ trafos_with_types_b = network.transformers.type != '' if trafos_with_types_b.zsum() == 0: return # depends on [control=['if'], data=[]] missing_types = pd.Index(network.transformers.loc[trafos_with_types_b, 'type'].unique()).difference(network.transformer_types.index) assert missing_types.empty, 'The type(s) {} do(es) not exist in network.transformer_types'.format(', '.join(missing_types)) # Get a copy of the transformers data # (joining pulls in "phase_shift", "s_nom", "tap_side" from TransformerType) t = network.transformers.loc[trafos_with_types_b, ['type', 'tap_position', 'num_parallel']].join(network.transformer_types, on='type') t['r'] = t['vscr'] / 100.0 t['x'] = np.sqrt((t['vsc'] / 100.0) ** 2 - t['r'] ** 2) #NB: b and g are per unit of s_nom t['g'] = t['pfe'] / (1000.0 * t['s_nom']) #for some bizarre reason, some of the standard types in pandapower have i0^2 < g^2 t['b'] = -np.sqrt(((t['i0'] / 100.0) ** 2 - t['g'] ** 2).clip(lower=0)) for attr in ['r', 'x']: t[attr] /= t['num_parallel'] # depends on [control=['for'], data=['attr']] for attr in ['b', 'g']: t[attr] *= t['num_parallel'] # depends on [control=['for'], data=['attr']] #deal with tap positions t['tap_ratio'] = 1.0 + (t['tap_position'] - t['tap_neutral']) * (t['tap_step'] / 100.0) # now set calculated values on live transformers for attr in ['r', 'x', 'g', 'b', 'phase_shift', 's_nom', 'tap_side', 'tap_ratio']: network.transformers.loc[trafos_with_types_b, attr] = t[attr] # depends on [control=['for'], data=['attr']]
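A worked numeric check of the per-unit short-circuit formulas above for a single transformer; the percentage values are illustrative, not a real standard type:

import numpy as np

vsc, vscr, pfe, i0, s_nom = 12.0, 0.41, 14.0, 0.07, 40.0  # %, %, kW, %, MVA
r = vscr / 100.0                          # 0.0041 p.u.
x = np.sqrt((vsc / 100.0) ** 2 - r ** 2)  # ~0.11993 p.u.
g = pfe / (1000.0 * s_nom)                # 0.00035 p.u. of s_nom
b = -np.sqrt(max((i0 / 100.0) ** 2 - g ** 2, 0.0))
print(r, x, g, b)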
def document(self, document_tree, backend=None): """Create a :class:`DocumentTemplate` object based on the given document tree and this template configuration Args: document_tree (DocumentTree): tree of the document's contents backend: the backend to use when rendering the document """ return self.template(document_tree, configuration=self, backend=backend)
def function[document, parameter[self, document_tree, backend]]: constant[Create a :class:`DocumentTemplate` object based on the given document tree and this template configuration Args: document_tree (DocumentTree): tree of the document's contents backend: the backend to use when rendering the document ] return[call[name[self].template, parameter[name[document_tree]]]]
keyword[def] identifier[document] ( identifier[self] , identifier[document_tree] , identifier[backend] = keyword[None] ): literal[string] keyword[return] identifier[self] . identifier[template] ( identifier[document_tree] , identifier[configuration] = identifier[self] , identifier[backend] = identifier[backend] )
def document(self, document_tree, backend=None): """Create a :class:`DocumentTemplate` object based on the given document tree and this template configuration Args: document_tree (DocumentTree): tree of the document's contents backend: the backend to use when rendering the document """ return self.template(document_tree, configuration=self, backend=backend)
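A hedged sketch of the call above; config is assumed to be an existing rinohtype template configuration and tree a DocumentTree produced by one of the library's frontends (their construction is omitted):

doc = config.document(tree)  # a DocumentTemplate built from this configuration
doc.render('output')         # rinohtype renders the document to PDF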
def str2bool(string_, default='raise'): """ Convert a string to a bool. Parameters ---------- string_ : str default : {'raise', False} Default behaviour if none of the "true" strings is detected. Returns ------- boolean : bool Examples -------- >>> str2bool('True') True >>> str2bool('1') True >>> str2bool('0') False """ true = ['true', 't', '1', 'y', 'yes', 'enabled', 'enable', 'on'] false = ['false', 'f', '0', 'n', 'no', 'disabled', 'disable', 'off'] if string_.lower() in true: return True elif string_.lower() in false or (not default): return False else: raise ValueError('The value \'{}\' cannot be mapped to boolean.' .format(string_))
def function[str2bool, parameter[string_, default]]: constant[ Convert a string to a bool. Parameters ---------- string_ : str default : {'raise', False} Default behaviour if none of the "true" strings is detected. Returns ------- boolean : bool Examples -------- >>> str2bool('True') True >>> str2bool('1') True >>> str2bool('0') False ] variable[true] assign[=] list[[<ast.Constant object at 0x7da18ede6e90>, <ast.Constant object at 0x7da18ede5450>, <ast.Constant object at 0x7da18ede75b0>, <ast.Constant object at 0x7da18ede6ec0>, <ast.Constant object at 0x7da18ede4cd0>, <ast.Constant object at 0x7da18ede4a60>, <ast.Constant object at 0x7da18ede7910>, <ast.Constant object at 0x7da18ede4700>]] variable[false] assign[=] list[[<ast.Constant object at 0x7da18ede7a60>, <ast.Constant object at 0x7da18ede4310>, <ast.Constant object at 0x7da18ede5ea0>, <ast.Constant object at 0x7da18ede4250>, <ast.Constant object at 0x7da18ede44c0>, <ast.Constant object at 0x7da18ede5810>, <ast.Constant object at 0x7da18ede6f50>, <ast.Constant object at 0x7da18ede7730>]] if compare[call[name[string_].lower, parameter[]] in name[true]] begin[:] return[constant[True]]
keyword[def] identifier[str2bool] ( identifier[string_] , identifier[default] = literal[string] ): literal[string] identifier[true] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ] identifier[false] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ] keyword[if] identifier[string_] . identifier[lower] () keyword[in] identifier[true] : keyword[return] keyword[True] keyword[elif] identifier[string_] . identifier[lower] () keyword[in] identifier[false] keyword[or] ( keyword[not] identifier[default] ): keyword[return] keyword[False] keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[string_] ))
def str2bool(string_, default='raise'): """ Convert a string to a bool. Parameters ---------- string_ : str default : {'raise', False} Default behaviour if none of the "true" strings is detected. Returns ------- boolean : bool Examples -------- >>> str2bool('True') True >>> str2bool('1') True >>> str2bool('0') False """ true = ['true', 't', '1', 'y', 'yes', 'enabled', 'enable', 'on'] false = ['false', 'f', '0', 'n', 'no', 'disabled', 'disable', 'off'] if string_.lower() in true: return True # depends on [control=['if'], data=[]] elif string_.lower() in false or not default: return False # depends on [control=['if'], data=[]] else: raise ValueError("The value '{}' cannot be mapped to boolean.".format(string_))
def create_jwt(integration_id, private_key_path):
    """Create a JSON Web Token to authenticate a GitHub Integration or
    installation.

    Parameters
    ----------
    integration_id : `int`
        Integration ID. This is available from the GitHub integration's
        homepage.
    private_key_path : `str`
        Path to the integration's private key (a ``.pem`` file).

    Returns
    -------
    jwt : `bytes`
        JSON Web Token that is good for 9 minutes.

    Notes
    -----
    The JWT is encoded with the RS256 algorithm. It includes a payload with
    fields:

    - ``'iat'``: The current time, as an `int` timestamp.
    - ``'exp'``: Expiration time, as an `int` timestamp. The expiration time
      is set to 9 minutes in the future (maximum allowance is 10 minutes).
    - ``'iss'``: The integration ID (`int`).

    For more information, see
    https://developer.github.com/early-access/integrations/authentication/.
    """
    integration_id = int(integration_id)

    with open(private_key_path, 'rb') as f:
        cert_bytes = f.read()

    now = datetime.datetime.now()
    expiration_time = now + datetime.timedelta(minutes=9)
    payload = {
        # Issued at time
        'iat': int(now.timestamp()),
        # JWT expiration time (10 minute maximum)
        'exp': int(expiration_time.timestamp()),
        # Integration's GitHub identifier
        'iss': integration_id
    }

    return jwt.encode(payload, cert_bytes, algorithm='RS256')
def function[create_jwt, parameter[integration_id, private_key_path]]: constant[Create a JSON Web Token to authenticate a GitHub Integration or installation. Parameters ---------- integration_id : `int` Integration ID. This is available from the GitHub integration's homepage. private_key_path : `str` Path to the integration's private key (a ``.pem`` file). Returns ------- jwt : `bytes` JSON Web Token that is good for 9 minutes. Notes ----- The JWT is encoded with the RS256 algorithm. It includes a payload with fields: - ``'iat'``: The current time, as an `int` timestamp. - ``'exp'``: Expiration time, as an `int timestamp. The expiration time is set of 9 minutes in the future (maximum allowance is 10 minutes). - ``'iss'``: The integration ID (`int`). For more information, see https://developer.github.com/early-access/integrations/authentication/. ] variable[integration_id] assign[=] call[name[int], parameter[name[integration_id]]] with call[name[open], parameter[name[private_key_path], constant[rb]]] begin[:] variable[cert_bytes] assign[=] call[name[f].read, parameter[]] variable[now] assign[=] call[name[datetime].datetime.now, parameter[]] variable[expiration_time] assign[=] binary_operation[name[now] + call[name[datetime].timedelta, parameter[]]] variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da1aff6e920>, <ast.Constant object at 0x7da1aff6ec50>, <ast.Constant object at 0x7da1aff6fb80>], [<ast.Call object at 0x7da1aff6c790>, <ast.Call object at 0x7da1aff6fa30>, <ast.Name object at 0x7da1aff6fe20>]] return[call[name[jwt].encode, parameter[name[payload], name[cert_bytes]]]]
keyword[def] identifier[create_jwt] ( identifier[integration_id] , identifier[private_key_path] ): literal[string] identifier[integration_id] = identifier[int] ( identifier[integration_id] ) keyword[with] identifier[open] ( identifier[private_key_path] , literal[string] ) keyword[as] identifier[f] : identifier[cert_bytes] = identifier[f] . identifier[read] () identifier[now] = identifier[datetime] . identifier[datetime] . identifier[now] () identifier[expiration_time] = identifier[now] + identifier[datetime] . identifier[timedelta] ( identifier[minutes] = literal[int] ) identifier[payload] ={ literal[string] : identifier[int] ( identifier[now] . identifier[timestamp] ()), literal[string] : identifier[int] ( identifier[expiration_time] . identifier[timestamp] ()), literal[string] : identifier[integration_id] } keyword[return] identifier[jwt] . identifier[encode] ( identifier[payload] , identifier[cert_bytes] , identifier[algorithm] = literal[string] )
def create_jwt(integration_id, private_key_path):
    """Create a JSON Web Token to authenticate a GitHub Integration or
    installation.

    Parameters
    ----------
    integration_id : `int`
        Integration ID. This is available from the GitHub integration's
        homepage.
    private_key_path : `str`
        Path to the integration's private key (a ``.pem`` file).

    Returns
    -------
    jwt : `bytes`
        JSON Web Token that is good for 9 minutes.

    Notes
    -----
    The JWT is encoded with the RS256 algorithm. It includes a payload with
    fields:

    - ``'iat'``: The current time, as an `int` timestamp.
    - ``'exp'``: Expiration time, as an `int` timestamp. The expiration time
      is set to 9 minutes in the future (maximum allowance is 10 minutes).
    - ``'iss'``: The integration ID (`int`).

    For more information, see
    https://developer.github.com/early-access/integrations/authentication/.
    """
    integration_id = int(integration_id)
    with open(private_key_path, 'rb') as f:
        cert_bytes = f.read() # depends on [control=['with'], data=['f']]
    now = datetime.datetime.now()
    expiration_time = now + datetime.timedelta(minutes=9)
    # Issued at time
    # JWT expiration time (10 minute maximum)
    # Integration's GitHub identifier
    payload = {'iat': int(now.timestamp()), 'exp': int(expiration_time.timestamp()), 'iss': integration_id}
    return jwt.encode(payload, cert_bytes, algorithm='RS256')
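A hedged usage sketch; the key path and integration ID are placeholders, and the .decode() assumes PyJWT < 2.0, where jwt.encode returns bytes:

token = create_jwt(12345, 'integration.pem')
headers = {
    'Authorization': 'Bearer {}'.format(token.decode('utf-8')),
    'Accept': 'application/vnd.github.machine-man-preview+json',
}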
def truncate_to_issuer(self, cert): """ Remove all certificates in the path after the issuer of the cert specified, as defined by this path :param cert: An asn1crypto.x509.Certificate object to find the issuer of :raises: LookupError - when the issuer of the certificate could not be found :return: The current ValidationPath object, for chaining """ issuer_index = None for index, entry in enumerate(self): if entry.subject == cert.issuer: if entry.key_identifier and cert.authority_key_identifier: if entry.key_identifier == cert.authority_key_identifier: issuer_index = index break else: issuer_index = index break if issuer_index is None: raise LookupError('Unable to find the issuer of the certificate specified') while len(self) > issuer_index + 1: self.pop() return self
def function[truncate_to_issuer, parameter[self, cert]]: constant[ Remove all certificates in the path after the issuer of the cert specified, as defined by this path :param cert: An asn1crypto.x509.Certificate object to find the issuer of :raises: LookupError - when the issuer of the certificate could not be found :return: The current ValidationPath object, for chaining ] variable[issuer_index] assign[=] constant[None] for taget[tuple[[<ast.Name object at 0x7da1b0da2d10>, <ast.Name object at 0x7da1b0da0760>]]] in starred[call[name[enumerate], parameter[name[self]]]] begin[:] if compare[name[entry].subject equal[==] name[cert].issuer] begin[:] if <ast.BoolOp object at 0x7da1b0da1090> begin[:] if compare[name[entry].key_identifier equal[==] name[cert].authority_key_identifier] begin[:] variable[issuer_index] assign[=] name[index] break if compare[name[issuer_index] is constant[None]] begin[:] <ast.Raise object at 0x7da1b0da2740> while compare[call[name[len], parameter[name[self]]] greater[>] binary_operation[name[issuer_index] + constant[1]]] begin[:] call[name[self].pop, parameter[]] return[name[self]]
keyword[def] identifier[truncate_to_issuer] ( identifier[self] , identifier[cert] ): literal[string] identifier[issuer_index] = keyword[None] keyword[for] identifier[index] , identifier[entry] keyword[in] identifier[enumerate] ( identifier[self] ): keyword[if] identifier[entry] . identifier[subject] == identifier[cert] . identifier[issuer] : keyword[if] identifier[entry] . identifier[key_identifier] keyword[and] identifier[cert] . identifier[authority_key_identifier] : keyword[if] identifier[entry] . identifier[key_identifier] == identifier[cert] . identifier[authority_key_identifier] : identifier[issuer_index] = identifier[index] keyword[break] keyword[else] : identifier[issuer_index] = identifier[index] keyword[break] keyword[if] identifier[issuer_index] keyword[is] keyword[None] : keyword[raise] identifier[LookupError] ( literal[string] ) keyword[while] identifier[len] ( identifier[self] )> identifier[issuer_index] + literal[int] : identifier[self] . identifier[pop] () keyword[return] identifier[self]
def truncate_to_issuer(self, cert): """ Remove all certificates in the path after the issuer of the cert specified, as defined by this path :param cert: An asn1crypto.x509.Certificate object to find the issuer of :raises: LookupError - when the issuer of the certificate could not be found :return: The current ValidationPath object, for chaining """ issuer_index = None for (index, entry) in enumerate(self): if entry.subject == cert.issuer: if entry.key_identifier and cert.authority_key_identifier: if entry.key_identifier == cert.authority_key_identifier: issuer_index = index break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: issuer_index = index break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] if issuer_index is None: raise LookupError('Unable to find the issuer of the certificate specified') # depends on [control=['if'], data=[]] while len(self) > issuer_index + 1: self.pop() # depends on [control=['while'], data=[]] return self
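A hedged usage sketch for truncate_to_issuer; path and leaf_cert are hypothetical stand-ins for a root-first ValidationPath and an asn1crypto.x509.Certificate.

try:
    path.truncate_to_issuer(leaf_cert)  # drops every entry after leaf_cert's issuer
except LookupError:
    print('no certificate in the path issued leaf_cert')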
def _parse(self, pattern): """Parse string of comma-separated x-y/step-like ranges""" # Comma separated ranges if pattern.find(',') < 0: subranges = [pattern] else: subranges = pattern.split(',') for subrange in subranges: if subrange.find('/') < 0: step = 1 baserange = subrange else: baserange, step = subrange.split('/', 1) try: step = int(step) except ValueError: raise RangeSetParseError(subrange, "cannot convert string to integer") if baserange.find('-') < 0: if step != 1: raise RangeSetParseError(subrange, "invalid step usage") begin = end = baserange else: begin, end = baserange.split('-', 1) # compute padding and return node range info tuple try: pad = 0 if int(begin) != 0: begins = begin.lstrip("0") if len(begin) - len(begins) > 0: pad = len(begin) start = int(begins) else: if len(begin) > 1: pad = len(begin) start = 0 if int(end) != 0: ends = end.lstrip("0") else: ends = end stop = int(ends) except ValueError: raise RangeSetParseError(subrange, "cannot convert string to integer") # check preconditions if stop > 1e100 or start > stop or step < 1: raise RangeSetParseError(subrange, "invalid values in range") self.add_range(start, stop + 1, step, pad)
def function[_parse, parameter[self, pattern]]: constant[Parse string of comma-separated x-y/step-like ranges] if compare[call[name[pattern].find, parameter[constant[,]]] less[<] constant[0]] begin[:] variable[subranges] assign[=] list[[<ast.Name object at 0x7da1b1039180>]] for taget[name[subrange]] in starred[name[subranges]] begin[:] if compare[call[name[subrange].find, parameter[constant[/]]] less[<] constant[0]] begin[:] variable[step] assign[=] constant[1] variable[baserange] assign[=] name[subrange] <ast.Try object at 0x7da1b103b4c0> if compare[call[name[baserange].find, parameter[constant[-]]] less[<] constant[0]] begin[:] if compare[name[step] not_equal[!=] constant[1]] begin[:] <ast.Raise object at 0x7da1b103b5e0> variable[begin] assign[=] name[baserange] <ast.Try object at 0x7da1b10387f0> if <ast.BoolOp object at 0x7da1b103b310> begin[:] <ast.Raise object at 0x7da1b1061240> call[name[self].add_range, parameter[name[start], binary_operation[name[stop] + constant[1]], name[step], name[pad]]]
keyword[def] identifier[_parse] ( identifier[self] , identifier[pattern] ): literal[string] keyword[if] identifier[pattern] . identifier[find] ( literal[string] )< literal[int] : identifier[subranges] =[ identifier[pattern] ] keyword[else] : identifier[subranges] = identifier[pattern] . identifier[split] ( literal[string] ) keyword[for] identifier[subrange] keyword[in] identifier[subranges] : keyword[if] identifier[subrange] . identifier[find] ( literal[string] )< literal[int] : identifier[step] = literal[int] identifier[baserange] = identifier[subrange] keyword[else] : identifier[baserange] , identifier[step] = identifier[subrange] . identifier[split] ( literal[string] , literal[int] ) keyword[try] : identifier[step] = identifier[int] ( identifier[step] ) keyword[except] identifier[ValueError] : keyword[raise] identifier[RangeSetParseError] ( identifier[subrange] , literal[string] ) keyword[if] identifier[baserange] . identifier[find] ( literal[string] )< literal[int] : keyword[if] identifier[step] != literal[int] : keyword[raise] identifier[RangeSetParseError] ( identifier[subrange] , literal[string] ) identifier[begin] = identifier[end] = identifier[baserange] keyword[else] : identifier[begin] , identifier[end] = identifier[baserange] . identifier[split] ( literal[string] , literal[int] ) keyword[try] : identifier[pad] = literal[int] keyword[if] identifier[int] ( identifier[begin] )!= literal[int] : identifier[begins] = identifier[begin] . identifier[lstrip] ( literal[string] ) keyword[if] identifier[len] ( identifier[begin] )- identifier[len] ( identifier[begins] )> literal[int] : identifier[pad] = identifier[len] ( identifier[begin] ) identifier[start] = identifier[int] ( identifier[begins] ) keyword[else] : keyword[if] identifier[len] ( identifier[begin] )> literal[int] : identifier[pad] = identifier[len] ( identifier[begin] ) identifier[start] = literal[int] keyword[if] identifier[int] ( identifier[end] )!= literal[int] : identifier[ends] = identifier[end] . identifier[lstrip] ( literal[string] ) keyword[else] : identifier[ends] = identifier[end] identifier[stop] = identifier[int] ( identifier[ends] ) keyword[except] identifier[ValueError] : keyword[raise] identifier[RangeSetParseError] ( identifier[subrange] , literal[string] ) keyword[if] identifier[stop] > literal[int] keyword[or] identifier[start] > identifier[stop] keyword[or] identifier[step] < literal[int] : keyword[raise] identifier[RangeSetParseError] ( identifier[subrange] , literal[string] ) identifier[self] . identifier[add_range] ( identifier[start] , identifier[stop] + literal[int] , identifier[step] , identifier[pad] )
def _parse(self, pattern): """Parse string of comma-separated x-y/step-like ranges""" # Comma separated ranges if pattern.find(',') < 0: subranges = [pattern] # depends on [control=['if'], data=[]] else: subranges = pattern.split(',') for subrange in subranges: if subrange.find('/') < 0: step = 1 baserange = subrange # depends on [control=['if'], data=[]] else: (baserange, step) = subrange.split('/', 1) try: step = int(step) # depends on [control=['try'], data=[]] except ValueError: raise RangeSetParseError(subrange, 'cannot convert string to integer') # depends on [control=['except'], data=[]] if baserange.find('-') < 0: if step != 1: raise RangeSetParseError(subrange, 'invalid step usage') # depends on [control=['if'], data=[]] begin = end = baserange # depends on [control=['if'], data=[]] else: (begin, end) = baserange.split('-', 1) # compute padding and return node range info tuple try: pad = 0 if int(begin) != 0: begins = begin.lstrip('0') if len(begin) - len(begins) > 0: pad = len(begin) # depends on [control=['if'], data=[]] start = int(begins) # depends on [control=['if'], data=[]] else: if len(begin) > 1: pad = len(begin) # depends on [control=['if'], data=[]] start = 0 if int(end) != 0: ends = end.lstrip('0') # depends on [control=['if'], data=[]] else: ends = end stop = int(ends) # depends on [control=['try'], data=[]] except ValueError: raise RangeSetParseError(subrange, 'cannot convert string to integer') # depends on [control=['except'], data=[]] # check preconditions if stop > 1e+100 or start > stop or step < 1: raise RangeSetParseError(subrange, 'invalid values in range') # depends on [control=['if'], data=[]] self.add_range(start, stop + 1, step, pad) # depends on [control=['for'], data=['subrange']]
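The zero-padding arithmetic in _parse can be checked in isolation; this self-contained sketch mirrors the begin/end handling for a zero-padded subrange such as '040-042'.

begin, end = '040-042'.split('-', 1)
begins = begin.lstrip('0')
pad = len(begin) if len(begin) - len(begins) > 0 else 0
start, stop = int(begins), int(end.lstrip('0'))
print(pad, start, stop)  # 3 40 42, i.e. add_range(40, 43, 1, 3)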
def sell_margin(self): """ [float] sell-side margin """ return sum(position.sell_margin for position in six.itervalues(self._positions))
def function[sell_margin, parameter[self]]: constant[ [float] sell-side margin ] return[call[name[sum], parameter[<ast.GeneratorExp object at 0x7da1b212dc90>]]]
keyword[def] identifier[sell_margin] ( identifier[self] ): literal[string] keyword[return] identifier[sum] ( identifier[position] . identifier[sell_margin] keyword[for] identifier[position] keyword[in] identifier[six] . identifier[itervalues] ( identifier[self] . identifier[_positions] ))
def sell_margin(self): """ [float] sell-side margin """ return sum((position.sell_margin for position in six.itervalues(self._positions)))
def finished(tokens, s): """Parser(a, None) Throws an exception if any tokens are left in the input unparsed. """ if s.pos >= len(tokens): return None, s else: raise NoParseError(u'should have reached <EOF>', s)
def function[finished, parameter[tokens, s]]: constant[Parser(a, None) Throws an exception if any tokens are left in the input unparsed. ] if compare[name[s].pos greater_or_equal[>=] call[name[len], parameter[name[tokens]]]] begin[:] return[tuple[[<ast.Constant object at 0x7da18f09f7c0>, <ast.Name object at 0x7da18f09c7c0>]]]
keyword[def] identifier[finished] ( identifier[tokens] , identifier[s] ): literal[string] keyword[if] identifier[s] . identifier[pos] >= identifier[len] ( identifier[tokens] ): keyword[return] keyword[None] , identifier[s] keyword[else] : keyword[raise] identifier[NoParseError] ( literal[string] , identifier[s] )
def finished(tokens, s): """Parser(a, None) Throws an exception if any tokens are left in the input unparsed. """ if s.pos >= len(tokens): return (None, s) # depends on [control=['if'], data=[]] else: raise NoParseError(u'should have reached <EOF>', s)
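A hedged sketch of finished in action; _State is a hypothetical minimal stand-in for the parser state object, which only needs a pos attribute here.

class _State:
    def __init__(self, pos):
        self.pos = pos

tokens = ['a', 'b']
print(finished(tokens, _State(2)))  # (None, <_State ...>): all input consumed
# finished(tokens, _State(1)) would raise NoParseError(u'should have reached <EOF>', s)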
def _reorder_csv(d, filename=""): """ Preserve the csv column ordering before writing back out to CSV file. Keep column data consistent with JSONLD column number alignment. { "var1" : {"number": 1, "values": [] }, "var2": {"number": 2, "values": [] } } :param dict d: csv data :param str filename: Filename :return list: csv data """ _ensemble = is_ensemble(d) _d2 = [] try: if _ensemble: # 1 column ensemble: realizations if len(d) == 1: for var, data in d.items(): if "values" in data: _d2 = data["values"] # 2 column ensemble: depth and realizations else: _count = 0 # count up how many columns total, and how many placeholders to make in our list for var, data in d.items(): if isinstance(data["number"], list): _curr_count = len(data["number"]) _count += _curr_count elif isinstance(data["number"], (int, float, str)): _count += 1 # make a list with X number of placeholders _d2 = [None for i in range(0, _count)] # Loop again and start combining all columns into one list of lists for var, data in d.items(): # realizations: insert at (hopefully) index 1,2...1001 if isinstance(data["number"], list): for idx, number in enumerate(data["number"]): # we can't trust the number entries. sometimes they start at "number 1", # which isn't true, because DEPTH is number 1. Use enumerate index instead. _insert_at = int(idx) + 1 # Insert at one above the index. Grab values at exact index _d2[_insert_at] = data["values"][idx-1] # depth column: insert at (hopefully) index 0 else: # we can trust to use the number entry as an index placement _insert_at = int(data["number"]) - 1 # insert at one below number, to compensate for 0-index _d2[_insert_at] = data["values"] else: _count = len(d) _d2 = [None for i in range(0, _count)] for key, data in d.items(): _insert_at = int(data["number"]) - 1 _d2[_insert_at] = data["values"] except Exception as e: print("Error: Unable to write CSV: There was an error trying to prep the values for file write: {}".format(e)) logger_csvs.error("reorder_csvs: Unable to write CSV file: {}, {}".format(filename, e)) return _d2
def function[_reorder_csv, parameter[d, filename]]: constant[ Preserve the csv column ordering before writing back out to CSV file. Keep column data consistent with JSONLD column number alignment. { "var1" : {"number": 1, "values": [] }, "var2": {"number": 2, "values": [] } } :param dict d: csv data :param str filename: Filename :return list: csv data ] variable[_ensemble] assign[=] call[name[is_ensemble], parameter[name[d]]] variable[_d2] assign[=] list[[]] <ast.Try object at 0x7da18ede4490> return[name[_d2]]
keyword[def] identifier[_reorder_csv] ( identifier[d] , identifier[filename] = literal[string] ): literal[string] identifier[_ensemble] = identifier[is_ensemble] ( identifier[d] ) identifier[_d2] =[] keyword[try] : keyword[if] identifier[_ensemble] : keyword[if] identifier[len] ( identifier[d] )== literal[int] : keyword[for] identifier[var] , identifier[data] keyword[in] identifier[d] . identifier[items] (): keyword[if] literal[string] keyword[in] identifier[data] : identifier[_d2] = identifier[data] [ literal[string] ] keyword[else] : identifier[_count] = literal[int] keyword[for] identifier[var] , identifier[data] keyword[in] identifier[d] . identifier[items] (): keyword[if] identifier[isinstance] ( identifier[data] [ literal[string] ], identifier[list] ): identifier[_curr_count] = identifier[len] ( identifier[data] [ literal[string] ]) identifier[_count] += identifier[_curr_count] keyword[elif] identifier[isinstance] ( identifier[data] [ literal[string] ],( identifier[int] , identifier[float] , identifier[str] )): identifier[_count] += literal[int] identifier[_d2] =[ keyword[None] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[_count] )] keyword[for] identifier[var] , identifier[data] keyword[in] identifier[d] . identifier[items] (): keyword[if] identifier[isinstance] ( identifier[data] [ literal[string] ], identifier[list] ): keyword[for] identifier[idx] , identifier[number] keyword[in] identifier[enumerate] ( identifier[data] [ literal[string] ]): identifier[_insert_at] = identifier[int] ( identifier[idx] )+ literal[int] identifier[_d2] [ identifier[_insert_at] ]= identifier[data] [ literal[string] ][ identifier[idx] - literal[int] ] keyword[else] : identifier[_insert_at] = identifier[int] ( identifier[data] [ literal[string] ])- literal[int] identifier[_d2] [ identifier[_insert_at] ]= identifier[data] [ literal[string] ] keyword[else] : identifier[_count] = identifier[len] ( identifier[d] ) identifier[_d2] =[ keyword[None] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[_count] )] keyword[for] identifier[key] , identifier[data] keyword[in] identifier[d] . identifier[items] (): identifier[_insert_at] = identifier[int] ( identifier[data] [ literal[string] ])- literal[int] identifier[_d2] [ identifier[_insert_at] ]= identifier[data] [ literal[string] ] keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[print] ( literal[string] . identifier[format] ( identifier[e] )) identifier[logger_csvs] . identifier[error] ( literal[string] . identifier[format] ( identifier[filename] , identifier[e] )) keyword[return] identifier[_d2]
def _reorder_csv(d, filename=''): """ Preserve the csv column ordering before writing back out to CSV file. Keep column data consistent with JSONLD column number alignment. { "var1" : {"number": 1, "values": [] }, "var2": {"number": 2, "values": [] } } :param dict d: csv data :param str filename: Filename :return list: csv data """ _ensemble = is_ensemble(d) _d2 = [] try: if _ensemble: # 1 column ensemble: realizations if len(d) == 1: for (var, data) in d.items(): if 'values' in data: _d2 = data['values'] # depends on [control=['if'], data=['data']] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] else: # 2 column ensemble: depth and realizations _count = 0 # count up how many columns total, and how many placeholders to make in our list for (var, data) in d.items(): if isinstance(data['number'], list): _curr_count = len(data['number']) _count += _curr_count # depends on [control=['if'], data=[]] elif isinstance(data['number'], (int, float, str)): _count += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # make a list with X number of placeholders _d2 = [None for i in range(0, _count)] # Loop again and start combining all columns into one list of lists for (var, data) in d.items(): # realizations: insert at (hopefully) index 1,2...1001 if isinstance(data['number'], list): for (idx, number) in enumerate(data['number']): # we can't trust the number entries. sometimes they start at "number 1", # which isn't true, because DEPTH is number 1. Use enumerate index instead. _insert_at = int(idx) + 1 # Insert at one above the index. Grab values at exact index _d2[_insert_at] = data['values'][idx - 1] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] else: # depth column: insert at (hopefully) index 0 # we can trust to use the number entry as an index placement _insert_at = int(data['number']) - 1 # insert at one below number, to compensate for 0-index _d2[_insert_at] = data['values'] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] else: _count = len(d) _d2 = [None for i in range(0, _count)] for (key, data) in d.items(): _insert_at = int(data['number']) - 1 _d2[_insert_at] = data['values'] # depends on [control=['for'], data=[]] # depends on [control=['try'], data=[]] except Exception as e: print('Error: Unable to write CSV: There was an error trying to prep the values for file write: {}'.format(e)) logger_csvs.error('reorder_csvs: Unable to write CSV file: {}, {}'.format(filename, e)) # depends on [control=['except'], data=['e']] return _d2
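For the non-ensemble branch of _reorder_csv, each column's values land at index number - 1; a hedged illustration (assuming is_ensemble returns False for this input):

d = {
    'temp': {'number': 2, 'values': [5.1, 5.3]},
    'depth': {'number': 1, 'values': [0, 1]},
}
# expected: _reorder_csv(d) == [[0, 1], [5.1, 5.3]]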
def HSL_to_RGB(cobj, target_rgb, *args, **kwargs): """ HSL to RGB conversion. """ H = cobj.hsl_h S = cobj.hsl_s L = cobj.hsl_l if L < 0.5: var_q = L * (1.0 + S) else: var_q = L + S - (L * S) var_p = 2.0 * L - var_q # H normalized to range [0,1] h_sub_k = (H / 360.0) t_sub_R = h_sub_k + (1.0 / 3.0) t_sub_G = h_sub_k t_sub_B = h_sub_k - (1.0 / 3.0) rgb_r = __Calc_HSL_to_RGB_Components(var_q, var_p, t_sub_R) rgb_g = __Calc_HSL_to_RGB_Components(var_q, var_p, t_sub_G) rgb_b = __Calc_HSL_to_RGB_Components(var_q, var_p, t_sub_B) # TODO: Investigate intent of following code block. # In the event that they define an HSV color and want to convert it to # a particular RGB space, let them override it here. # if target_rgb is not None: # rgb_type = target_rgb # else: # rgb_type = cobj.rgb_type return target_rgb(rgb_r, rgb_g, rgb_b)
def function[HSL_to_RGB, parameter[cobj, target_rgb]]: constant[ HSL to RGB conversion. ] variable[H] assign[=] name[cobj].hsl_h variable[S] assign[=] name[cobj].hsl_s variable[L] assign[=] name[cobj].hsl_l if compare[name[L] less[<] constant[0.5]] begin[:] variable[var_q] assign[=] binary_operation[name[L] * binary_operation[constant[1.0] + name[S]]] variable[var_p] assign[=] binary_operation[binary_operation[constant[2.0] * name[L]] - name[var_q]] variable[h_sub_k] assign[=] binary_operation[name[H] / constant[360.0]] variable[t_sub_R] assign[=] binary_operation[name[h_sub_k] + binary_operation[constant[1.0] / constant[3.0]]] variable[t_sub_G] assign[=] name[h_sub_k] variable[t_sub_B] assign[=] binary_operation[name[h_sub_k] - binary_operation[constant[1.0] / constant[3.0]]] variable[rgb_r] assign[=] call[name[__Calc_HSL_to_RGB_Components], parameter[name[var_q], name[var_p], name[t_sub_R]]] variable[rgb_g] assign[=] call[name[__Calc_HSL_to_RGB_Components], parameter[name[var_q], name[var_p], name[t_sub_G]]] variable[rgb_b] assign[=] call[name[__Calc_HSL_to_RGB_Components], parameter[name[var_q], name[var_p], name[t_sub_B]]] return[call[name[target_rgb], parameter[name[rgb_r], name[rgb_g], name[rgb_b]]]]
keyword[def] identifier[HSL_to_RGB] ( identifier[cobj] , identifier[target_rgb] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[H] = identifier[cobj] . identifier[hsl_h] identifier[S] = identifier[cobj] . identifier[hsl_s] identifier[L] = identifier[cobj] . identifier[hsl_l] keyword[if] identifier[L] < literal[int] : identifier[var_q] = identifier[L] *( literal[int] + identifier[S] ) keyword[else] : identifier[var_q] = identifier[L] + identifier[S] -( identifier[L] * identifier[S] ) identifier[var_p] = literal[int] * identifier[L] - identifier[var_q] identifier[h_sub_k] =( identifier[H] / literal[int] ) identifier[t_sub_R] = identifier[h_sub_k] +( literal[int] / literal[int] ) identifier[t_sub_G] = identifier[h_sub_k] identifier[t_sub_B] = identifier[h_sub_k] -( literal[int] / literal[int] ) identifier[rgb_r] = identifier[__Calc_HSL_to_RGB_Components] ( identifier[var_q] , identifier[var_p] , identifier[t_sub_R] ) identifier[rgb_g] = identifier[__Calc_HSL_to_RGB_Components] ( identifier[var_q] , identifier[var_p] , identifier[t_sub_G] ) identifier[rgb_b] = identifier[__Calc_HSL_to_RGB_Components] ( identifier[var_q] , identifier[var_p] , identifier[t_sub_B] ) keyword[return] identifier[target_rgb] ( identifier[rgb_r] , identifier[rgb_g] , identifier[rgb_b] )
def HSL_to_RGB(cobj, target_rgb, *args, **kwargs): """ HSL to RGB conversion. """ H = cobj.hsl_h S = cobj.hsl_s L = cobj.hsl_l if L < 0.5: var_q = L * (1.0 + S) # depends on [control=['if'], data=['L']] else: var_q = L + S - L * S var_p = 2.0 * L - var_q # H normalized to range [0,1] h_sub_k = H / 360.0 t_sub_R = h_sub_k + 1.0 / 3.0 t_sub_G = h_sub_k t_sub_B = h_sub_k - 1.0 / 3.0 rgb_r = __Calc_HSL_to_RGB_Components(var_q, var_p, t_sub_R) rgb_g = __Calc_HSL_to_RGB_Components(var_q, var_p, t_sub_G) rgb_b = __Calc_HSL_to_RGB_Components(var_q, var_p, t_sub_B) # TODO: Investigate intent of following code block. # In the event that they define an HSV color and want to convert it to # a particular RGB space, let them override it here. # if target_rgb is not None: # rgb_type = target_rgb # else: # rgb_type = cobj.rgb_type return target_rgb(rgb_r, rgb_g, rgb_b)
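Working the intermediates by hand for pure red (H=0, S=1, L=0.5) clarifies the branch above; a minimal arithmetic sketch:

H, S, L = 0.0, 1.0, 0.5
var_q = L * (1.0 + S) if L < 0.5 else L + S - (L * S)  # 1.0 (L == 0.5 takes the else branch)
var_p = 2.0 * L - var_q                                # 0.0
h_sub_k = H / 360.0                                    # 0.0
# per-component offsets: t_R = 1/3, t_G = 0.0, t_B = -1/3 (wrapped into [0, 1] downstream)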
def _init_glyph(self, plot, mapping, properties): """ Returns a Bokeh glyph object. """ properties = mpl_to_bokeh(properties) properties = dict(properties, **mapping) if 'xs' in mapping: renderer = plot.patches(**properties) else: renderer = plot.quad(**properties) if self.colorbar and 'color_mapper' in self.handles: self._draw_colorbar(plot, self.handles['color_mapper']) return renderer, renderer.glyph
def function[_init_glyph, parameter[self, plot, mapping, properties]]: constant[ Returns a Bokeh glyph object. ] variable[properties] assign[=] call[name[mpl_to_bokeh], parameter[name[properties]]] variable[properties] assign[=] call[name[dict], parameter[name[properties]]] if compare[constant[xs] in name[mapping]] begin[:] variable[renderer] assign[=] call[name[plot].patches, parameter[]] if <ast.BoolOp object at 0x7da18f00da50> begin[:] call[name[self]._draw_colorbar, parameter[name[plot], call[name[self].handles][constant[color_mapper]]]] return[tuple[[<ast.Name object at 0x7da204963460>, <ast.Attribute object at 0x7da204962b30>]]]
keyword[def] identifier[_init_glyph] ( identifier[self] , identifier[plot] , identifier[mapping] , identifier[properties] ): literal[string] identifier[properties] = identifier[mpl_to_bokeh] ( identifier[properties] ) identifier[properties] = identifier[dict] ( identifier[properties] ,** identifier[mapping] ) keyword[if] literal[string] keyword[in] identifier[mapping] : identifier[renderer] = identifier[plot] . identifier[patches] (** identifier[properties] ) keyword[else] : identifier[renderer] = identifier[plot] . identifier[quad] (** identifier[properties] ) keyword[if] identifier[self] . identifier[colorbar] keyword[and] literal[string] keyword[in] identifier[self] . identifier[handles] : identifier[self] . identifier[_draw_colorbar] ( identifier[plot] , identifier[self] . identifier[handles] [ literal[string] ]) keyword[return] identifier[renderer] , identifier[renderer] . identifier[glyph]
def _init_glyph(self, plot, mapping, properties): """ Returns a Bokeh glyph object. """ properties = mpl_to_bokeh(properties) properties = dict(properties, **mapping) if 'xs' in mapping: renderer = plot.patches(**properties) # depends on [control=['if'], data=[]] else: renderer = plot.quad(**properties) if self.colorbar and 'color_mapper' in self.handles: self._draw_colorbar(plot, self.handles['color_mapper']) # depends on [control=['if'], data=[]] return (renderer, renderer.glyph)
def request(self, content='xml', filter=None, detail=False): """Get config from Alu router *content* Content layer. cli or xml *filter* specifies the portion of the configuration to retrieve (by default entire configuration is retrieved) *detail* Show detailed config in CLI layer""" node = new_ele('get-config') node.append(util.datastore_or_url('source', 'running', self._assert)) if filter is not None: if content == 'xml': node.append(util.build_filter(('subtree', filter))) elif content == 'cli': rep = new_ele('filter') sub_filter = sub_ele(rep, 'config-format-cli-block') if filter is not None: for item in filter: if detail: sub_ele(sub_filter, 'cli-info-detail').text = item else: sub_ele(sub_filter, 'cli-info').text = item else: if detail: sub_ele(sub_filter, 'cli-info-detail') else: sub_ele(sub_filter, 'cli-info') node.append(validated_element(rep)) return self._request(node)
def function[request, parameter[self, content, filter, detail]]: constant[Get config from Alu router *content* Content layer. cli or xml *filter* specifies the portion of the configuration to retrieve (by default entire configuration is retrieved) *detail* Show detailed config in CLI -layer] variable[node] assign[=] call[name[new_ele], parameter[constant[get-config]]] call[name[node].append, parameter[call[name[util].datastore_or_url, parameter[constant[source], constant[running], name[self]._assert]]]] if compare[name[filter] is_not constant[None]] begin[:] if compare[name[content] equal[==] constant[xml]] begin[:] call[name[node].append, parameter[call[name[util].build_filter, parameter[tuple[[<ast.Constant object at 0x7da1b18ad060>, <ast.Name object at 0x7da1b18ac8e0>]]]]]] return[call[name[self]._request, parameter[name[node]]]]
keyword[def] identifier[request] ( identifier[self] , identifier[content] = literal[string] , identifier[filter] = keyword[None] , identifier[detail] = keyword[False] ): literal[string] identifier[node] = identifier[new_ele] ( literal[string] ) identifier[node] . identifier[append] ( identifier[util] . identifier[datastore_or_url] ( literal[string] , literal[string] , identifier[self] . identifier[_assert] )) keyword[if] identifier[filter] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[content] == literal[string] : identifier[node] . identifier[append] ( identifier[util] . identifier[build_filter] (( literal[string] , identifier[filter] ))) keyword[elif] identifier[content] == literal[string] : identifier[rep] = identifier[new_ele] ( literal[string] ) identifier[sub_filter] = identifier[sub_ele] ( identifier[rep] , literal[string] ) keyword[if] identifier[filter] keyword[is] keyword[not] keyword[None] : keyword[for] identifier[item] keyword[in] identifier[filter] : keyword[if] identifier[detail] : identifier[sub_ele] ( identifier[sub_filter] , literal[string] ). identifier[text] = identifier[item] keyword[else] : identifier[sub_ele] ( identifier[sub_filter] , literal[string] ). identifier[text] = identifier[item] keyword[else] : keyword[if] identifier[detail] : identifier[sub_ele] ( identifier[sub_filter] , literal[string] ) keyword[else] : identifier[sub_ele] ( identifier[sub_filter] , literal[string] ) identifier[node] . identifier[append] ( identifier[validated_element] ( identifier[rep] )) keyword[return] identifier[self] . identifier[_request] ( identifier[node] )
def request(self, content='xml', filter=None, detail=False): """Get config from Alu router *content* Content layer. cli or xml *filter* specifies the portion of the configuration to retrieve (by default entire configuration is retrieved) *detail* Show detailed config in CLI layer""" node = new_ele('get-config') node.append(util.datastore_or_url('source', 'running', self._assert)) if filter is not None: if content == 'xml': node.append(util.build_filter(('subtree', filter))) # depends on [control=['if'], data=[]] elif content == 'cli': rep = new_ele('filter') sub_filter = sub_ele(rep, 'config-format-cli-block') if filter is not None: for item in filter: if detail: sub_ele(sub_filter, 'cli-info-detail').text = item # depends on [control=['if'], data=[]] else: sub_ele(sub_filter, 'cli-info').text = item # depends on [control=['for'], data=['item']] # depends on [control=['if'], data=['filter']] elif detail: sub_ele(sub_filter, 'cli-info-detail') # depends on [control=['if'], data=[]] else: sub_ele(sub_filter, 'cli-info') node.append(validated_element(rep)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['filter']] return self._request(node)
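A hedged sketch of what the cli branch assembles for two subtrees with detail=True; the calling convention is illustrative, but the element names come from the code above:

filter_items = ['port', 'router']
# reply = conn.get_configuration(content='cli', filter=filter_items, detail=True)  # hypothetical binding
# Roughly: <get-config><source><running/></source><filter><config-format-cli-block>
#   <cli-info-detail>port</cli-info-detail><cli-info-detail>router</cli-info-detail>
# </config-format-cli-block></filter></get-config>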
def diagnostics(self): """ Return an iterable (and indexable) object containing the diagnostics. """ class DiagIterator: def __init__(self, tu): self.tu = tu def __len__(self): return int(conf.lib.clang_getNumDiagnostics(self.tu)) def __getitem__(self, key): diag = conf.lib.clang_getDiagnostic(self.tu, key) if not diag: raise IndexError return Diagnostic(diag) return DiagIterator(self)
def function[diagnostics, parameter[self]]: constant[ Return an iterable (and indexable) object containing the diagnostics. ] class class[DiagIterator, parameter[]] begin[:] def function[__init__, parameter[self, tu]]: name[self].tu assign[=] name[tu] def function[__len__, parameter[self]]: return[call[name[int], parameter[call[name[conf].lib.clang_getNumDiagnostics, parameter[name[self].tu]]]]] def function[__getitem__, parameter[self, key]]: variable[diag] assign[=] call[name[conf].lib.clang_getDiagnostic, parameter[name[self].tu, name[key]]] if <ast.UnaryOp object at 0x7da204620460> begin[:] <ast.Raise object at 0x7da2046215a0> return[call[name[Diagnostic], parameter[name[diag]]]] return[call[name[DiagIterator], parameter[name[self]]]]
keyword[def] identifier[diagnostics] ( identifier[self] ): literal[string] keyword[class] identifier[DiagIterator] : keyword[def] identifier[__init__] ( identifier[self] , identifier[tu] ): identifier[self] . identifier[tu] = identifier[tu] keyword[def] identifier[__len__] ( identifier[self] ): keyword[return] identifier[int] ( identifier[conf] . identifier[lib] . identifier[clang_getNumDiagnostics] ( identifier[self] . identifier[tu] )) keyword[def] identifier[__getitem__] ( identifier[self] , identifier[key] ): identifier[diag] = identifier[conf] . identifier[lib] . identifier[clang_getDiagnostic] ( identifier[self] . identifier[tu] , identifier[key] ) keyword[if] keyword[not] identifier[diag] : keyword[raise] identifier[IndexError] keyword[return] identifier[Diagnostic] ( identifier[diag] ) keyword[return] identifier[DiagIterator] ( identifier[self] )
def diagnostics(self): """ Return an iterable (and indexable) object containing the diagnostics. """ class DiagIterator: def __init__(self, tu): self.tu = tu def __len__(self): return int(conf.lib.clang_getNumDiagnostics(self.tu)) def __getitem__(self, key): diag = conf.lib.clang_getDiagnostic(self.tu, key) if not diag: raise IndexError # depends on [control=['if'], data=[]] return Diagnostic(diag) return DiagIterator(self)
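A hedged usage sketch against libclang's Python bindings; assumes clang.cindex is importable and test.c is a hypothetical source file:

import clang.cindex

tu = clang.cindex.Index.create().parse('test.c')
for diag in tu.diagnostics:  # the DiagIterator above also supports len() and indexing
    print(diag.severity, diag.spelling)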
def get_subscriptions(self): """Return a list of subscriptions currently active for this WVA device :raises WVAError: if there is a problem getting the subscription list from the WVA :returns: A list of :class:`WVASubscription` instances """ # Example: {'subscriptions': ['subscriptions/TripDistance~sub', 'subscriptions/FuelRate~sub', ]} subscriptions = [] for uri in self.get_http_client().get("subscriptions").get('subscriptions'): subscriptions.append(self.get_subscription(uri.split("/")[-1])) return subscriptions
def function[get_subscriptions, parameter[self]]: constant[Return a list of subscriptions currently active for this WVA device :raises WVAError: if there is a problem getting the subscription list from the WVA :returns: A list of :class:`WVASubscription` instances ] variable[subscriptions] assign[=] list[[]] for taget[name[uri]] in starred[call[call[call[name[self].get_http_client, parameter[]].get, parameter[constant[subscriptions]]].get, parameter[constant[subscriptions]]]] begin[:] call[name[subscriptions].append, parameter[call[name[self].get_subscription, parameter[call[call[name[uri].split, parameter[constant[/]]]][<ast.UnaryOp object at 0x7da204564f10>]]]]] return[name[subscriptions]]
keyword[def] identifier[get_subscriptions] ( identifier[self] ): literal[string] identifier[subscriptions] =[] keyword[for] identifier[uri] keyword[in] identifier[self] . identifier[get_http_client] (). identifier[get] ( literal[string] ). identifier[get] ( literal[string] ): identifier[subscriptions] . identifier[append] ( identifier[self] . identifier[get_subscription] ( identifier[uri] . identifier[split] ( literal[string] )[- literal[int] ])) keyword[return] identifier[subscriptions]
def get_subscriptions(self): """Return a list of subscriptions currently active for this WVA device :raises WVAError: if there is a problem getting the subscription list from the WVA :returns: A list of :class:`WVASubscription` instances """ # Example: {'subscriptions': ['subscriptions/TripDistance~sub', 'subscriptions/FuelRate~sub', ]} subscriptions = [] for uri in self.get_http_client().get('subscriptions').get('subscriptions'): subscriptions.append(self.get_subscription(uri.split('/')[-1])) # depends on [control=['for'], data=['uri']] return subscriptions
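A hedged usage sketch; wva stands in for an already-constructed WVA client with its HTTP credentials configured:

for sub in wva.get_subscriptions():
    print(sub)  # one WVASubscription per short name, e.g. the 'TripDistance~sub' suffix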
def get_yield_curve(date=None, tenor=None): """ Get the yield curve levels of a given market on a specified date. The data cover ChinaBond treasury yield curves from 2002 to the present, sourced from China Central Depository & Clearing Co., Ltd. :param date: query date; defaults to the day before the strategy's current date :type date: `str` | `date` | `datetime` | `pandas.Timestamp` :param str tenor: standard tenor, '0S' for overnight, '1M' for 1 month, '1Y' for 1 year; defaults to all tenors :return: `pandas.DataFrame` - risk-free yield curve over the queried period :example: .. code-block:: python3 :linenos: [In] get_yield_curve('20130104') [Out] 0S 1M 2M 3M 6M 9M 1Y 2Y \ 2013-01-04 0.0196 0.0253 0.0288 0.0279 0.0280 0.0283 0.0292 0.0310 3Y 4Y ... 6Y 7Y 8Y 9Y 10Y \ 2013-01-04 0.0314 0.0318 ... 0.0342 0.0350 0.0353 0.0357 0.0361 ... """ env = Environment.get_instance() trading_date = env.trading_dt.date() yesterday = env.data_proxy.get_previous_trading_date(trading_date) if date is None: date = yesterday else: date = pd.Timestamp(date) if date > yesterday: raise RQInvalidArgument( "get_yield_curve: {} >= now({})".format(date, yesterday) ) return env.data_proxy.get_yield_curve(start_date=date, end_date=date, tenor=tenor)
def function[get_yield_curve, parameter[date, tenor]]: constant[ Get the yield curve levels of a given market on a specified date. The data cover ChinaBond treasury yield curves from 2002 to the present, sourced from China Central Depository & Clearing Co., Ltd. :param date: query date; defaults to the day before the strategy's current date :type date: `str` | `date` | `datetime` | `pandas.Timestamp` :param str tenor: standard tenor, '0S' for overnight, '1M' for 1 month, '1Y' for 1 year; defaults to all tenors :return: `pandas.DataFrame` - risk-free yield curve over the queried period :example: .. code-block:: python3 :linenos: [In] get_yield_curve('20130104') [Out] 0S 1M 2M 3M 6M 9M 1Y 2Y 2013-01-04 0.0196 0.0253 0.0288 0.0279 0.0280 0.0283 0.0292 0.0310 3Y 4Y ... 6Y 7Y 8Y 9Y 10Y 2013-01-04 0.0314 0.0318 ... 0.0342 0.0350 0.0353 0.0357 0.0361 ... ] variable[env] assign[=] call[name[Environment].get_instance, parameter[]] variable[trading_date] assign[=] call[name[env].trading_dt.date, parameter[]] variable[yesterday] assign[=] call[name[env].data_proxy.get_previous_trading_date, parameter[name[trading_date]]] if compare[name[date] is constant[None]] begin[:] variable[date] assign[=] name[yesterday] return[call[name[env].data_proxy.get_yield_curve, parameter[]]]
keyword[def] identifier[get_yield_curve] ( identifier[date] = keyword[None] , identifier[tenor] = keyword[None] ): literal[string] identifier[env] = identifier[Environment] . identifier[get_instance] () identifier[trading_date] = identifier[env] . identifier[trading_dt] . identifier[date] () identifier[yesterday] = identifier[env] . identifier[data_proxy] . identifier[get_previous_trading_date] ( identifier[trading_date] ) keyword[if] identifier[date] keyword[is] keyword[None] : identifier[date] = identifier[yesterday] keyword[else] : identifier[date] = identifier[pd] . identifier[Timestamp] ( identifier[date] ) keyword[if] identifier[date] > identifier[yesterday] : keyword[raise] identifier[RQInvalidArgument] ( literal[string] . identifier[format] ( identifier[date] , identifier[yesterday] ) ) keyword[return] identifier[env] . identifier[data_proxy] . identifier[get_yield_curve] ( identifier[start_date] = identifier[date] , identifier[end_date] = identifier[date] , identifier[tenor] = identifier[tenor] )
def get_yield_curve(date=None, tenor=None): """ Get the yield curve levels of a given market on a specified date. The data cover ChinaBond treasury yield curves from 2002 to the present, sourced from China Central Depository & Clearing Co., Ltd. :param date: query date; defaults to the day before the strategy's current date :type date: `str` | `date` | `datetime` | `pandas.Timestamp` :param str tenor: standard tenor, '0S' for overnight, '1M' for 1 month, '1Y' for 1 year; defaults to all tenors :return: `pandas.DataFrame` - risk-free yield curve over the queried period :example: .. code-block:: python3 :linenos: [In] get_yield_curve('20130104') [Out] 0S 1M 2M 3M 6M 9M 1Y 2Y 2013-01-04 0.0196 0.0253 0.0288 0.0279 0.0280 0.0283 0.0292 0.0310 3Y 4Y ... 6Y 7Y 8Y 9Y 10Y 2013-01-04 0.0314 0.0318 ... 0.0342 0.0350 0.0353 0.0357 0.0361 ... """ env = Environment.get_instance() trading_date = env.trading_dt.date() yesterday = env.data_proxy.get_previous_trading_date(trading_date) if date is None: date = yesterday # depends on [control=['if'], data=['date']] else: date = pd.Timestamp(date) if date > yesterday: raise RQInvalidArgument('get_yield_curve: {} >= now({})'.format(date, yesterday)) # depends on [control=['if'], data=['date', 'yesterday']] return env.data_proxy.get_yield_curve(start_date=date, end_date=date, tenor=tenor)
def _set_isns_discovery_domain(self, v, load=False): """ Setter method for isns_discovery_domain, mapped from YANG variable /isns/isns_vrf/isns_discovery_domain (list) If this variable is read-only (config: false) in the source YANG file, then _set_isns_discovery_domain is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_isns_discovery_domain() directly. YANG Description: This specifies configurations of Discovery Domain. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=YANGListType("isns_discovery_domain_name",isns_discovery_domain.isns_discovery_domain, yang_name="isns-discovery-domain", rest_name="discovery-domain", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='isns-discovery-domain-name', extensions={u'tailf-common': {u'info': u'Configure Discovery Domain Parameters', u'cli-no-key-completion': None, u'alt-name': u'discovery-domain', u'hidden': u'isns-discovery-domain', u'callpoint': u'isns_discovery_domain_cp', u'cli-mode-name': u'config-dd-$(isns-discovery-domain-name)'}}), is_container='list', yang_name="isns-discovery-domain", rest_name="discovery-domain", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Discovery Domain Parameters', u'cli-no-key-completion': None, u'alt-name': u'discovery-domain', u'hidden': u'isns-discovery-domain', u'callpoint': u'isns_discovery_domain_cp', u'cli-mode-name': u'config-dd-$(isns-discovery-domain-name)'}}, namespace='urn:brocade.com:mgmt:brocade-isns', defining_module='brocade-isns', yang_type='list', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """isns_discovery_domain must be of a type compatible with list""", 'defined-type': "list", 'generated-type': """YANGDynClass(base=YANGListType("isns_discovery_domain_name",isns_discovery_domain.isns_discovery_domain, yang_name="isns-discovery-domain", rest_name="discovery-domain", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='isns-discovery-domain-name', extensions={u'tailf-common': {u'info': u'Configure Discovery Domain Parameters', u'cli-no-key-completion': None, u'alt-name': u'discovery-domain', u'hidden': u'isns-discovery-domain', u'callpoint': u'isns_discovery_domain_cp', u'cli-mode-name': u'config-dd-$(isns-discovery-domain-name)'}}), is_container='list', yang_name="isns-discovery-domain", rest_name="discovery-domain", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Discovery Domain Parameters', u'cli-no-key-completion': None, u'alt-name': u'discovery-domain', u'hidden': u'isns-discovery-domain', u'callpoint': u'isns_discovery_domain_cp', u'cli-mode-name': u'config-dd-$(isns-discovery-domain-name)'}}, namespace='urn:brocade.com:mgmt:brocade-isns', defining_module='brocade-isns', yang_type='list', is_config=True)""", }) self.__isns_discovery_domain = t if hasattr(self, '_set'): self._set()
def function[_set_isns_discovery_domain, parameter[self, v, load]]: constant[ Setter method for isns_discovery_domain, mapped from YANG variable /isns/isns_vrf/isns_discovery_domain (list) If this variable is read-only (config: false) in the source YANG file, then _set_isns_discovery_domain is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_isns_discovery_domain() directly. YANG Description: This specifies configurations of Discovery Domain. ] if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:] variable[v] assign[=] call[name[v]._utype, parameter[name[v]]] <ast.Try object at 0x7da20c6c7670> name[self].__isns_discovery_domain assign[=] name[t] if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:] call[name[self]._set, parameter[]]
keyword[def] identifier[_set_isns_discovery_domain] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ): literal[string] keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ): identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] ) keyword[try] : identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[YANGListType] ( literal[string] , identifier[isns_discovery_domain] . identifier[isns_discovery_domain] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[is_container] = literal[string] , identifier[user_ordered] = keyword[False] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[yang_keys] = literal[string] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] }}), identifier[is_container] = literal[string] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] ) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): keyword[raise] identifier[ValueError] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , }) identifier[self] . identifier[__isns_discovery_domain] = identifier[t] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[_set] ()
def _set_isns_discovery_domain(self, v, load=False): """ Setter method for isns_discovery_domain, mapped from YANG variable /isns/isns_vrf/isns_discovery_domain (list) If this variable is read-only (config: false) in the source YANG file, then _set_isns_discovery_domain is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_isns_discovery_domain() directly. YANG Description: This specifies configurations of Discovery Domain. """ if hasattr(v, '_utype'): v = v._utype(v) # depends on [control=['if'], data=[]] try: t = YANGDynClass(v, base=YANGListType('isns_discovery_domain_name', isns_discovery_domain.isns_discovery_domain, yang_name='isns-discovery-domain', rest_name='discovery-domain', parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='isns-discovery-domain-name', extensions={u'tailf-common': {u'info': u'Configure Discovery Domain Parameters', u'cli-no-key-completion': None, u'alt-name': u'discovery-domain', u'hidden': u'isns-discovery-domain', u'callpoint': u'isns_discovery_domain_cp', u'cli-mode-name': u'config-dd-$(isns-discovery-domain-name)'}}), is_container='list', yang_name='isns-discovery-domain', rest_name='discovery-domain', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Discovery Domain Parameters', u'cli-no-key-completion': None, u'alt-name': u'discovery-domain', u'hidden': u'isns-discovery-domain', u'callpoint': u'isns_discovery_domain_cp', u'cli-mode-name': u'config-dd-$(isns-discovery-domain-name)'}}, namespace='urn:brocade.com:mgmt:brocade-isns', defining_module='brocade-isns', yang_type='list', is_config=True) # depends on [control=['try'], data=[]] except (TypeError, ValueError): raise ValueError({'error-string': 'isns_discovery_domain must be of a type compatible with list', 'defined-type': 'list', 'generated-type': 'YANGDynClass(base=YANGListType("isns_discovery_domain_name",isns_discovery_domain.isns_discovery_domain, yang_name="isns-discovery-domain", rest_name="discovery-domain", parent=self, is_container=\'list\', user_ordered=False, path_helper=self._path_helper, yang_keys=\'isns-discovery-domain-name\', extensions={u\'tailf-common\': {u\'info\': u\'Configure Discovery Domain Parameters\', u\'cli-no-key-completion\': None, u\'alt-name\': u\'discovery-domain\', u\'hidden\': u\'isns-discovery-domain\', u\'callpoint\': u\'isns_discovery_domain_cp\', u\'cli-mode-name\': u\'config-dd-$(isns-discovery-domain-name)\'}}), is_container=\'list\', yang_name="isns-discovery-domain", rest_name="discovery-domain", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'Configure Discovery Domain Parameters\', u\'cli-no-key-completion\': None, u\'alt-name\': u\'discovery-domain\', u\'hidden\': u\'isns-discovery-domain\', u\'callpoint\': u\'isns_discovery_domain_cp\', u\'cli-mode-name\': u\'config-dd-$(isns-discovery-domain-name)\'}}, namespace=\'urn:brocade.com:mgmt:brocade-isns\', defining_module=\'brocade-isns\', yang_type=\'list\', is_config=True)'}) # depends on [control=['except'], data=[]] self.__isns_discovery_domain = t if hasattr(self, '_set'): self._set() # depends on [control=['if'], data=[]]
def parser_helper(key, val): ''' Helper for parser function @param key: @param val: ''' start_bracket = key.find("[") end_bracket = key.find("]") pdict = {} if has_variable_name(key): # var['key'][3] pdict[key[:key.find("[")]] = parser_helper(key[start_bracket:], val) elif more_than_one_index(key): # ['key'][3] newkey = get_key(key) newkey = int(newkey) if is_number(newkey) else newkey pdict[newkey] = parser_helper(key[end_bracket + 1:], val) else: # key = val or ['key'] newkey = key if start_bracket != -1: # ['key'] newkey = get_key(key) if newkey is None: raise MalformedQueryStringError newkey = int(newkey) if is_number(newkey) else newkey if key == u'[]': # val is the array key val = int(val) if is_number(val) else val pdict[newkey] = val return pdict
def function[parser_helper, parameter[key, val]]: constant[ Helper for parser function @param key: @param val: ] variable[start_bracket] assign[=] call[name[key].find, parameter[constant[[]]] variable[end_bracket] assign[=] call[name[key].find, parameter[constant[]]]] variable[pdict] assign[=] dictionary[[], []] if call[name[has_variable_name], parameter[name[key]]] begin[:] call[name[pdict]][call[name[key]][<ast.Slice object at 0x7da2041d9ba0>]] assign[=] call[name[parser_helper], parameter[call[name[key]][<ast.Slice object at 0x7da2041d85e0>], name[val]]] return[name[pdict]]
keyword[def] identifier[parser_helper] ( identifier[key] , identifier[val] ): literal[string] identifier[start_bracket] = identifier[key] . identifier[find] ( literal[string] ) identifier[end_bracket] = identifier[key] . identifier[find] ( literal[string] ) identifier[pdict] ={} keyword[if] identifier[has_variable_name] ( identifier[key] ): identifier[pdict] [ identifier[key] [: identifier[key] . identifier[find] ( literal[string] )]]= identifier[parser_helper] ( identifier[key] [ identifier[start_bracket] :], identifier[val] ) keyword[elif] identifier[more_than_one_index] ( identifier[key] ): identifier[newkey] = identifier[get_key] ( identifier[key] ) identifier[newkey] = identifier[int] ( identifier[newkey] ) keyword[if] identifier[is_number] ( identifier[newkey] ) keyword[else] identifier[newkey] identifier[pdict] [ identifier[newkey] ]= identifier[parser_helper] ( identifier[key] [ identifier[end_bracket] + literal[int] :], identifier[val] ) keyword[else] : identifier[newkey] = identifier[key] keyword[if] identifier[start_bracket] !=- literal[int] : identifier[newkey] = identifier[get_key] ( identifier[key] ) keyword[if] identifier[newkey] keyword[is] keyword[None] : keyword[raise] identifier[MalformedQueryStringError] identifier[newkey] = identifier[int] ( identifier[newkey] ) keyword[if] identifier[is_number] ( identifier[newkey] ) keyword[else] identifier[newkey] keyword[if] identifier[key] == literal[string] : identifier[val] = identifier[int] ( identifier[val] ) keyword[if] identifier[is_number] ( identifier[val] ) keyword[else] identifier[val] identifier[pdict] [ identifier[newkey] ]= identifier[val] keyword[return] identifier[pdict]
def parser_helper(key, val): """ Helper for parser function @param key: @param val: """ start_bracket = key.find('[') end_bracket = key.find(']') pdict = {} if has_variable_name(key): # var['key'][3] pdict[key[:key.find('[')]] = parser_helper(key[start_bracket:], val) # depends on [control=['if'], data=[]] elif more_than_one_index(key): # ['key'][3] newkey = get_key(key) newkey = int(newkey) if is_number(newkey) else newkey pdict[newkey] = parser_helper(key[end_bracket + 1:], val) # depends on [control=['if'], data=[]] else: # key = val or ['key'] newkey = key if start_bracket != -1: # ['key'] newkey = get_key(key) if newkey is None: raise MalformedQueryStringError # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] newkey = int(newkey) if is_number(newkey) else newkey if key == u'[]': # val is the array key val = int(val) if is_number(val) else val # depends on [control=['if'], data=[]] pdict[newkey] = val return pdict
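Assuming the helpers has_variable_name, more_than_one_index, get_key, and is_number behave as their names suggest, parser_helper nests bracketed keys like so (a hedged trace, not verified output):

print(parser_helper("var['a'][0]", '7'))
# -> {'var': {'a': {0: '7'}}}
# note: val is cast to a number only when the trailing key is '[]'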
def prior_from_config(cp, variable_params, prior_section, constraint_section): """Gets arguments and keyword arguments from a config file. Parameters ---------- cp : WorkflowConfigParser Config file parser to read. variable_params : list List of model parameter names. prior_section : str Section to read prior(s) from. constraint_section : str Section to read constraint(s) from. Returns ------- pycbc.distributions.JointDistribution The prior. """ # get prior distribution for each variable parameter logging.info("Setting up priors for each parameter") dists = distributions.read_distributions_from_config(cp, prior_section) constraints = distributions.read_constraints_from_config( cp, constraint_section) return distributions.JointDistribution(variable_params, *dists, constraints=constraints)
def function[prior_from_config, parameter[cp, variable_params, prior_section, constraint_section]]: constant[Gets arguments and keyword arguments from a config file. Parameters ---------- cp : WorkflowConfigParser Config file parser to read. variable_params : list List of model parameter names. prior_section : str Section to read prior(s) from. constraint_section : str Section to read constraint(s) from. Returns ------- pycbc.distributions.JointDistribution The prior. ] call[name[logging].info, parameter[constant[Setting up priors for each parameter]]] variable[dists] assign[=] call[name[distributions].read_distributions_from_config, parameter[name[cp], name[prior_section]]] variable[constraints] assign[=] call[name[distributions].read_constraints_from_config, parameter[name[cp], name[constraint_section]]] return[call[name[distributions].JointDistribution, parameter[name[variable_params], <ast.Starred object at 0x7da18f813850>]]]
keyword[def] identifier[prior_from_config] ( identifier[cp] , identifier[variable_params] , identifier[prior_section] , identifier[constraint_section] ): literal[string] identifier[logging] . identifier[info] ( literal[string] ) identifier[dists] = identifier[distributions] . identifier[read_distributions_from_config] ( identifier[cp] , identifier[prior_section] ) identifier[constraints] = identifier[distributions] . identifier[read_constraints_from_config] ( identifier[cp] , identifier[constraint_section] ) keyword[return] identifier[distributions] . identifier[JointDistribution] ( identifier[variable_params] ,* identifier[dists] , identifier[constraints] = identifier[constraints] )
def prior_from_config(cp, variable_params, prior_section, constraint_section): """Gets arguments and keyword arguments from a config file. Parameters ---------- cp : WorkflowConfigParser Config file parser to read. variable_params : list List of model parameter names. prior_section : str Section to read prior(s) from. constraint_section : str Section to read constraint(s) from. Returns ------- pycbc.distributions.JointDistribution The prior. """ # get prior distribution for each variable parameter logging.info('Setting up priors for each parameter') dists = distributions.read_distributions_from_config(cp, prior_section) constraints = distributions.read_constraints_from_config(cp, constraint_section) return distributions.JointDistribution(variable_params, *dists, constraints=constraints)
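A hedged end-to-end sketch; the ini file name and section names are illustrative and assume pycbc's WorkflowConfigParser conventions:

# from pycbc.workflow import WorkflowConfigParser
# cp = WorkflowConfigParser(['inference.ini'])  # ini defines [prior-mass1], [prior-mass2], ...
# prior = prior_from_config(cp, ['mass1', 'mass2'], 'prior', 'constraint')
# samples = prior.rvs(size=10)  # JointDistribution can draw samples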
def _ip2country(ip): """Get user country.""" if ip: match = geolite2.reader().get(ip) return match.get('country', {}).get('iso_code') if match else None
def function[_ip2country, parameter[ip]]: constant[Get user country.] if name[ip] begin[:] variable[match] assign[=] call[call[name[geolite2].reader, parameter[]].get, parameter[name[ip]]] return[<ast.IfExp object at 0x7da2044c1180>]
keyword[def] identifier[_ip2country] ( identifier[ip] ): literal[string] keyword[if] identifier[ip] : identifier[match] = identifier[geolite2] . identifier[reader] (). identifier[get] ( identifier[ip] ) keyword[return] identifier[match] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] ) keyword[if] identifier[match] keyword[else] keyword[None]
def _ip2country(ip): """Get user country.""" if ip: match = geolite2.reader().get(ip) return match.get('country', {}).get('iso_code') if match else None # depends on [control=['if'], data=[]]
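A hedged usage sketch, assuming the geolite2 reader and its database are available; note the function returns None implicitly when ip is falsy:

print(_ip2country('8.8.8.8'))  # e.g. 'US', or None if the address is not in the database
print(_ip2country(None))       # None -- the if-branch never runs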
def derive_configuration(cls): """ Collect the nearest type variables and effective parameters from the type, its bases, and their origins as necessary. """ base_params = cls.base.__parameters__ if hasattr(cls.type, '__args__'): # typing as of commit abefbe4 tvars = {p: p for p in base_params} types = {} for t in iter_generic_bases(cls.type): if t is cls.base: type_vars = tuple(tvars[p] for p in base_params) parameters = (types.get(tvar, tvar) for tvar in type_vars) break if t.__args__: for arg, tvar in zip(t.__args__, t.__origin__.__parameters__): if isinstance(arg, typing.TypeVar): tvars[tvar] = tvars.get(arg, arg) else: types[tvar] = arg else: # typing 3.5.0 tvars = [None] * len(base_params) for t in iter_generic_bases(cls.type): for i, p in enumerate(t.__parameters__): if tvars[i] is None and isinstance(p, typing.TypeVar): tvars[i] = p if all(tvars): type_vars = tvars parameters = cls.type.__parameters__ break cls.type_vars = type_vars cls.parameters = tuple(normalize_type(p, 1) for p in parameters)
def function[derive_configuration, parameter[cls]]: constant[ Collect the nearest type variables and effective parameters from the type, its bases, and their origins as necessary. ] variable[base_params] assign[=] name[cls].base.__parameters__ if call[name[hasattr], parameter[name[cls].type, constant[__args__]]] begin[:] variable[tvars] assign[=] <ast.DictComp object at 0x7da1b25449a0> variable[types] assign[=] dictionary[[], []] for taget[name[t]] in starred[call[name[iter_generic_bases], parameter[name[cls].type]]] begin[:] if compare[name[t] is name[cls].base] begin[:] variable[type_vars] assign[=] call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da1b25443a0>]] variable[parameters] assign[=] <ast.GeneratorExp object at 0x7da1b2547790> break if name[t].__args__ begin[:] for taget[tuple[[<ast.Name object at 0x7da1b2547c70>, <ast.Name object at 0x7da1b2546d40>]]] in starred[call[name[zip], parameter[name[t].__args__, name[t].__origin__.__parameters__]]] begin[:] if call[name[isinstance], parameter[name[arg], name[typing].TypeVar]] begin[:] call[name[tvars]][name[tvar]] assign[=] call[name[tvars].get, parameter[name[arg], name[arg]]] name[cls].type_vars assign[=] name[type_vars] name[cls].parameters assign[=] call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da1b2545de0>]]
keyword[def] identifier[derive_configuration] ( identifier[cls] ): literal[string] identifier[base_params] = identifier[cls] . identifier[base] . identifier[__parameters__] keyword[if] identifier[hasattr] ( identifier[cls] . identifier[type] , literal[string] ): identifier[tvars] ={ identifier[p] : identifier[p] keyword[for] identifier[p] keyword[in] identifier[base_params] } identifier[types] ={} keyword[for] identifier[t] keyword[in] identifier[iter_generic_bases] ( identifier[cls] . identifier[type] ): keyword[if] identifier[t] keyword[is] identifier[cls] . identifier[base] : identifier[type_vars] = identifier[tuple] ( identifier[tvars] [ identifier[p] ] keyword[for] identifier[p] keyword[in] identifier[base_params] ) identifier[parameters] =( identifier[types] . identifier[get] ( identifier[tvar] , identifier[tvar] ) keyword[for] identifier[tvar] keyword[in] identifier[type_vars] ) keyword[break] keyword[if] identifier[t] . identifier[__args__] : keyword[for] identifier[arg] , identifier[tvar] keyword[in] identifier[zip] ( identifier[t] . identifier[__args__] , identifier[t] . identifier[__origin__] . identifier[__parameters__] ): keyword[if] identifier[isinstance] ( identifier[arg] , identifier[typing] . identifier[TypeVar] ): identifier[tvars] [ identifier[tvar] ]= identifier[tvars] . identifier[get] ( identifier[arg] , identifier[arg] ) keyword[else] : identifier[types] [ identifier[tvar] ]= identifier[arg] keyword[else] : identifier[tvars] =[ keyword[None] ]* identifier[len] ( identifier[base_params] ) keyword[for] identifier[t] keyword[in] identifier[iter_generic_bases] ( identifier[cls] . identifier[type] ): keyword[for] identifier[i] , identifier[p] keyword[in] identifier[enumerate] ( identifier[t] . identifier[__parameters__] ): keyword[if] identifier[tvars] [ identifier[i] ] keyword[is] keyword[None] keyword[and] identifier[isinstance] ( identifier[p] , identifier[typing] . identifier[TypeVar] ): identifier[tvars] [ identifier[i] ]= identifier[p] keyword[if] identifier[all] ( identifier[tvars] ): identifier[type_vars] = identifier[tvars] identifier[parameters] = identifier[cls] . identifier[type] . identifier[__parameters__] keyword[break] identifier[cls] . identifier[type_vars] = identifier[type_vars] identifier[cls] . identifier[parameters] = identifier[tuple] ( identifier[normalize_type] ( identifier[p] , literal[int] ) keyword[for] identifier[p] keyword[in] identifier[parameters] )
def derive_configuration(cls): """ Collect the nearest type variables and effective parameters from the type, its bases, and their origins as necessary. """ base_params = cls.base.__parameters__ if hasattr(cls.type, '__args__'): # typing as of commit abefbe4 tvars = {p: p for p in base_params} types = {} for t in iter_generic_bases(cls.type): if t is cls.base: type_vars = tuple((tvars[p] for p in base_params)) parameters = (types.get(tvar, tvar) for tvar in type_vars) break # depends on [control=['if'], data=[]] if t.__args__: for (arg, tvar) in zip(t.__args__, t.__origin__.__parameters__): if isinstance(arg, typing.TypeVar): tvars[tvar] = tvars.get(arg, arg) # depends on [control=['if'], data=[]] else: types[tvar] = arg # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['t']] # depends on [control=['if'], data=[]] else: # typing 3.5.0 tvars = [None] * len(base_params) for t in iter_generic_bases(cls.type): for (i, p) in enumerate(t.__parameters__): if tvars[i] is None and isinstance(p, typing.TypeVar): tvars[i] = p # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] if all(tvars): type_vars = tvars parameters = cls.type.__parameters__ break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['t']] cls.type_vars = type_vars cls.parameters = tuple((normalize_type(p, 1) for p in parameters))
def layer(output_shape=None, new_parameters=None):
  """Create a layer class from a function."""

  def layer_decorator(call):
    """Decorating the call function."""

    def output_shape_fun(self, input_shape):
      if output_shape is None:
        return input_shape
      kwargs = self._init_kwargs  # pylint: disable=protected-access
      return output_shape(input_shape, **kwargs)

    def new_parameters_fun(self, input_shape, rng):
      if new_parameters is None:
        return ()
      kwargs = self._init_kwargs  # pylint: disable=protected-access
      return new_parameters(input_shape, rng, **kwargs)

    def call_fun(self, x, params=(), **kwargs):
      """The call function of the created class, derived from call."""
      # Merge on-call kwargs with class-kwargs.
      call_kwargs = kwargs.copy()
      call_kwargs.update(self._init_kwargs)  # pylint: disable=protected-access
      # Call with the merged kwargs.
      return call(x, params=params, **call_kwargs)

    # Set doc for python help, but only when a custom function was supplied.
    call_fun.__doc__ = call.__doc__
    if output_shape is not None:
      output_shape_fun.__doc__ = output_shape.__doc__
    if new_parameters is not None:
      new_parameters_fun.__doc__ = new_parameters.__doc__

    # Create the class.
    cls = type(call.__name__, (Layer,),
               {'call': call_fun,
                'output_shape': output_shape_fun,
                'new_parameters': new_parameters_fun})
    return cls

  return layer_decorator
def function[layer, parameter[output_shape, new_parameters]]: constant[Create a layer class from a function.] def function[layer_decorator, parameter[call]]: constant[Decorating the call function.] def function[output_shape_fun, parameter[self, input_shape]]: if compare[name[output_shape] is constant[None]] begin[:] return[name[input_shape]] variable[kwargs] assign[=] name[self]._init_kwargs return[call[name[output_shape], parameter[name[input_shape]]]] def function[new_parameters_fun, parameter[self, input_shape, rng]]: if compare[name[new_parameters] is constant[None]] begin[:] return[tuple[[]]] variable[kwargs] assign[=] name[self]._init_kwargs return[call[name[new_parameters], parameter[name[input_shape], name[rng]]]] def function[call_fun, parameter[self, x, params]]: constant[The call function of the created class, derived from call.] variable[call_kwargs] assign[=] call[name[kwargs].copy, parameter[]] call[name[call_kwargs].update, parameter[name[self]._init_kwargs]] return[call[name[call], parameter[name[x]]]] name[call_fun].__doc__ assign[=] name[call].__doc__ if compare[name[output_shape] is constant[None]] begin[:] name[output_shape_fun].__doc__ assign[=] name[output_shape].__doc__ if compare[name[new_parameters] is constant[None]] begin[:] name[new_parameters_fun].__doc__ assign[=] name[new_parameters].__doc__ variable[cls] assign[=] call[name[type], parameter[name[call].__name__, tuple[[<ast.Name object at 0x7da1b203f760>]], dictionary[[<ast.Constant object at 0x7da1b203d5d0>, <ast.Constant object at 0x7da1b203f250>, <ast.Constant object at 0x7da1b203c910>], [<ast.Name object at 0x7da1b209b520>, <ast.Name object at 0x7da1b2098640>, <ast.Name object at 0x7da1b209ab30>]]]] return[name[cls]] return[name[layer_decorator]]
keyword[def] identifier[layer] ( identifier[output_shape] = keyword[None] , identifier[new_parameters] = keyword[None] ): literal[string] keyword[def] identifier[layer_decorator] ( identifier[call] ): literal[string] keyword[def] identifier[output_shape_fun] ( identifier[self] , identifier[input_shape] ): keyword[if] identifier[output_shape] keyword[is] keyword[None] : keyword[return] identifier[input_shape] identifier[kwargs] = identifier[self] . identifier[_init_kwargs] keyword[return] identifier[output_shape] ( identifier[input_shape] ,** identifier[kwargs] ) keyword[def] identifier[new_parameters_fun] ( identifier[self] , identifier[input_shape] , identifier[rng] ): keyword[if] identifier[new_parameters] keyword[is] keyword[None] : keyword[return] () identifier[kwargs] = identifier[self] . identifier[_init_kwargs] keyword[return] identifier[new_parameters] ( identifier[input_shape] , identifier[rng] ,** identifier[kwargs] ) keyword[def] identifier[call_fun] ( identifier[self] , identifier[x] , identifier[params] =(),** identifier[kwargs] ): literal[string] identifier[call_kwargs] = identifier[kwargs] . identifier[copy] () identifier[call_kwargs] . identifier[update] ( identifier[self] . identifier[_init_kwargs] ) keyword[return] identifier[call] ( identifier[x] , identifier[params] = identifier[params] ,** identifier[call_kwargs] ) identifier[call_fun] . identifier[__doc__] = identifier[call] . identifier[__doc__] keyword[if] identifier[output_shape] keyword[is] keyword[None] : identifier[output_shape_fun] . identifier[__doc__] = identifier[output_shape] . identifier[__doc__] keyword[if] identifier[new_parameters] keyword[is] keyword[None] : identifier[new_parameters_fun] . identifier[__doc__] = identifier[new_parameters] . identifier[__doc__] identifier[cls] = identifier[type] ( identifier[call] . identifier[__name__] ,( identifier[Layer] ,), { literal[string] : identifier[call_fun] , literal[string] : identifier[output_shape_fun] , literal[string] : identifier[new_parameters_fun] }) keyword[return] identifier[cls] keyword[return] identifier[layer_decorator]
def layer(output_shape=None, new_parameters=None):
    """Create a layer class from a function."""

    def layer_decorator(call):
        """Decorating the call function."""

        def output_shape_fun(self, input_shape):
            if output_shape is None:
                return input_shape # depends on [control=['if'], data=[]]
            kwargs = self._init_kwargs # pylint: disable=protected-access
            return output_shape(input_shape, **kwargs)

        def new_parameters_fun(self, input_shape, rng):
            if new_parameters is None:
                return () # depends on [control=['if'], data=[]]
            kwargs = self._init_kwargs # pylint: disable=protected-access
            return new_parameters(input_shape, rng, **kwargs)

        def call_fun(self, x, params=(), **kwargs):
            """The call function of the created class, derived from call.""" # Merge on-call kwargs with class-kwargs.
            call_kwargs = kwargs.copy()
            call_kwargs.update(self._init_kwargs) # pylint: disable=protected-access # Call with the merged kwargs.
            return call(x, params=params, **call_kwargs) # Set doc for python help.
        call_fun.__doc__ = call.__doc__
        if output_shape is not None:
            output_shape_fun.__doc__ = output_shape.__doc__ # depends on [control=['if'], data=['output_shape']]
        if new_parameters is not None:
            new_parameters_fun.__doc__ = new_parameters.__doc__ # depends on [control=['if'], data=['new_parameters']]
        # Create the class.
        cls = type(call.__name__, (Layer,), {'call': call_fun, 'output_shape': output_shape_fun, 'new_parameters': new_parameters_fun})
        return cls
    return layer_decorator
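A hedged sketch of how the factory above is meant to be used. It assumes the module's `Layer` base class is in scope and that its constructor stores keyword arguments as `_init_kwargs`; the `Relu` name is illustrative, not from the source.

import numpy as np

@layer()  # no output_shape given: the input shape is passed through unchanged
def Relu(x, params=(), **kwargs):
    """Elementwise rectified linear activation."""
    return np.maximum(x, 0)

relu = Relu()                         # `layer()` returned a Layer subclass
y = relu.call(np.array([-1.0, 2.0]))  # -> array([0., 2.])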
def flatten(in_list): """given a list of values in_list, flatten returns the list obtained by flattening the top-level elements of in_list.""" out_list = [] for val in in_list: if isinstance(val, list): out_list.extend(val) else: out_list.append(val) return out_list
def function[flatten, parameter[in_list]]: constant[given a list of values in_list, flatten returns the list obtained by flattening the top-level elements of in_list.] variable[out_list] assign[=] list[[]] for taget[name[val]] in starred[name[in_list]] begin[:] if call[name[isinstance], parameter[name[val], name[list]]] begin[:] call[name[out_list].extend, parameter[name[val]]] return[name[out_list]]
keyword[def] identifier[flatten] ( identifier[in_list] ): literal[string] identifier[out_list] =[] keyword[for] identifier[val] keyword[in] identifier[in_list] : keyword[if] identifier[isinstance] ( identifier[val] , identifier[list] ): identifier[out_list] . identifier[extend] ( identifier[val] ) keyword[else] : identifier[out_list] . identifier[append] ( identifier[val] ) keyword[return] identifier[out_list]
def flatten(in_list): """given a list of values in_list, flatten returns the list obtained by flattening the top-level elements of in_list.""" out_list = [] for val in in_list: if isinstance(val, list): out_list.extend(val) # depends on [control=['if'], data=[]] else: out_list.append(val) # depends on [control=['for'], data=['val']] return out_list
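Worth noting that the flattening above is only one level deep, so nested lists below the top level survive intact:

print(flatten([1, [2, 3], [4, [5, 6]]]))  # -> [1, 2, 3, 4, [5, 6]]
print(flatten(['a', ['b'], 'c']))         # -> ['a', 'b', 'c']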
async def keepalive_ping(self) -> None: """ Send a Ping frame and wait for a Pong frame at regular intervals. This coroutine exits when the connection terminates and one of the following happens: - :meth:`ping` raises :exc:`ConnectionClosed`, or - :meth:`close_connection` cancels :attr:`keepalive_ping_task`. """ if self.ping_interval is None: return try: while True: await asyncio.sleep(self.ping_interval, loop=self.loop) # ping() cannot raise ConnectionClosed, only CancelledError: # - If the connection is CLOSING, keepalive_ping_task will be # canceled by close_connection() before ping() returns. # - If the connection is CLOSED, keepalive_ping_task must be # canceled already. ping_waiter = await self.ping() if self.ping_timeout is not None: try: await asyncio.wait_for( ping_waiter, self.ping_timeout, loop=self.loop ) except asyncio.TimeoutError: logger.debug("%s ! timed out waiting for pong", self.side) self.fail_connection(1011) break except asyncio.CancelledError: raise except Exception: logger.warning("Unexpected exception in keepalive ping task", exc_info=True)
<ast.AsyncFunctionDef object at 0x7da18eb54280>
keyword[async] keyword[def] identifier[keepalive_ping] ( identifier[self] )-> keyword[None] : literal[string] keyword[if] identifier[self] . identifier[ping_interval] keyword[is] keyword[None] : keyword[return] keyword[try] : keyword[while] keyword[True] : keyword[await] identifier[asyncio] . identifier[sleep] ( identifier[self] . identifier[ping_interval] , identifier[loop] = identifier[self] . identifier[loop] ) identifier[ping_waiter] = keyword[await] identifier[self] . identifier[ping] () keyword[if] identifier[self] . identifier[ping_timeout] keyword[is] keyword[not] keyword[None] : keyword[try] : keyword[await] identifier[asyncio] . identifier[wait_for] ( identifier[ping_waiter] , identifier[self] . identifier[ping_timeout] , identifier[loop] = identifier[self] . identifier[loop] ) keyword[except] identifier[asyncio] . identifier[TimeoutError] : identifier[logger] . identifier[debug] ( literal[string] , identifier[self] . identifier[side] ) identifier[self] . identifier[fail_connection] ( literal[int] ) keyword[break] keyword[except] identifier[asyncio] . identifier[CancelledError] : keyword[raise] keyword[except] identifier[Exception] : identifier[logger] . identifier[warning] ( literal[string] , identifier[exc_info] = keyword[True] )
async def keepalive_ping(self) -> None: """ Send a Ping frame and wait for a Pong frame at regular intervals. This coroutine exits when the connection terminates and one of the following happens: - :meth:`ping` raises :exc:`ConnectionClosed`, or - :meth:`close_connection` cancels :attr:`keepalive_ping_task`. """ if self.ping_interval is None: return # depends on [control=['if'], data=[]] try: while True: await asyncio.sleep(self.ping_interval, loop=self.loop) # ping() cannot raise ConnectionClosed, only CancelledError: # - If the connection is CLOSING, keepalive_ping_task will be # canceled by close_connection() before ping() returns. # - If the connection is CLOSED, keepalive_ping_task must be # canceled already. ping_waiter = await self.ping() if self.ping_timeout is not None: try: await asyncio.wait_for(ping_waiter, self.ping_timeout, loop=self.loop) # depends on [control=['try'], data=[]] except asyncio.TimeoutError: logger.debug('%s ! timed out waiting for pong', self.side) self.fail_connection(1011) break # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] # depends on [control=['try'], data=[]] except asyncio.CancelledError: raise # depends on [control=['except'], data=[]] except Exception: logger.warning('Unexpected exception in keepalive ping task', exc_info=True) # depends on [control=['except'], data=[]]
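In the websockets library this coroutine is driven by the `ping_interval` and `ping_timeout` protocol options; a minimal client-side sketch (the URI is illustrative):

import asyncio
import websockets

async def main():
    # A Ping goes out every 20 s; if no Pong arrives within 10 s,
    # keepalive_ping fails the connection with code 1011.
    async with websockets.connect('ws://localhost:8765',
                                  ping_interval=20, ping_timeout=10) as ws:
        await ws.send('hello')

asyncio.get_event_loop().run_until_complete(main())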
def deploy_branch_exists(deploy_branch): """ Check if there is a remote branch with name specified in ``deploy_branch``. Note that default ``deploy_branch`` is ``gh-pages`` for regular repos and ``master`` for ``github.io`` repos. This isn't completely robust. If there are multiple remotes and you have a ``deploy_branch`` branch on the non-default remote, this won't see it. """ remote_name = 'doctr_remote' branch_names = subprocess.check_output(['git', 'branch', '-r']).decode('utf-8').split() return '{}/{}'.format(remote_name, deploy_branch) in branch_names
def function[deploy_branch_exists, parameter[deploy_branch]]: constant[ Check if there is a remote branch with name specified in ``deploy_branch``. Note that default ``deploy_branch`` is ``gh-pages`` for regular repos and ``master`` for ``github.io`` repos. This isn't completely robust. If there are multiple remotes and you have a ``deploy_branch`` branch on the non-default remote, this won't see it. ] variable[remote_name] assign[=] constant[doctr_remote] variable[branch_names] assign[=] call[call[call[name[subprocess].check_output, parameter[list[[<ast.Constant object at 0x7da1b101b5e0>, <ast.Constant object at 0x7da1b101add0>, <ast.Constant object at 0x7da1b101a680>]]]].decode, parameter[constant[utf-8]]].split, parameter[]] return[compare[call[constant[{}/{}].format, parameter[name[remote_name], name[deploy_branch]]] in name[branch_names]]]
keyword[def] identifier[deploy_branch_exists] ( identifier[deploy_branch] ): literal[string] identifier[remote_name] = literal[string] identifier[branch_names] = identifier[subprocess] . identifier[check_output] ([ literal[string] , literal[string] , literal[string] ]). identifier[decode] ( literal[string] ). identifier[split] () keyword[return] literal[string] . identifier[format] ( identifier[remote_name] , identifier[deploy_branch] ) keyword[in] identifier[branch_names]
def deploy_branch_exists(deploy_branch): """ Check if there is a remote branch with name specified in ``deploy_branch``. Note that default ``deploy_branch`` is ``gh-pages`` for regular repos and ``master`` for ``github.io`` repos. This isn't completely robust. If there are multiple remotes and you have a ``deploy_branch`` branch on the non-default remote, this won't see it. """ remote_name = 'doctr_remote' branch_names = subprocess.check_output(['git', 'branch', '-r']).decode('utf-8').split() return '{}/{}'.format(remote_name, deploy_branch) in branch_names
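The membership test boils down to a whitespace split of `git branch -r`, illustrated here with hard-coded tokens:

# Tokens from `git branch -r` after .split(); note that lines like
# 'origin/HEAD -> origin/master' contribute stray '->' tokens, which is
# one reason the check is not completely robust.
branch_names = ['origin/HEAD', '->', 'origin/master', 'doctr_remote/gh-pages']
print('{}/{}'.format('doctr_remote', 'gh-pages') in branch_names)  # True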
def set_attribute(attribute, attribute_value, instance_name=None, instance_id=None, region=None, key=None, keyid=None, profile=None, filters=None): ''' Set an EC2 instance attribute. Returns whether the operation succeeded or not. CLI Example: .. code-block:: bash salt myminion boto_ec2.set_attribute sourceDestCheck False instance_name=my_instance Available attributes: * instanceType * kernel * ramdisk * userData * disableApiTermination * instanceInitiatedShutdownBehavior * rootDeviceName * blockDeviceMapping * productCodes * sourceDestCheck * groupSet * ebsOptimized * sriovNetSupport ''' conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) attribute_list = ['instanceType', 'kernel', 'ramdisk', 'userData', 'disableApiTermination', 'instanceInitiatedShutdownBehavior', 'rootDeviceName', 'blockDeviceMapping', 'productCodes', 'sourceDestCheck', 'groupSet', 'ebsOptimized', 'sriovNetSupport'] if not any((instance_name, instance_id)): raise SaltInvocationError('At least one of the following must be specified: instance_name or instance_id.') if instance_name and instance_id: raise SaltInvocationError('Both instance_name and instance_id can not be specified in the same command.') if attribute not in attribute_list: raise SaltInvocationError('Attribute must be one of: {0}.'.format(attribute_list)) try: if instance_name: instances = find_instances(name=instance_name, region=region, key=key, keyid=keyid, profile=profile, filters=filters) if len(instances) != 1: raise CommandExecutionError('Found more than one EC2 instance matching the criteria.') instance_id = instances[0] attribute = conn.modify_instance_attribute(instance_id, attribute, attribute_value) if not attribute: return False return attribute except boto.exception.BotoServerError as exc: log.error(exc) return False
def function[set_attribute, parameter[attribute, attribute_value, instance_name, instance_id, region, key, keyid, profile, filters]]: constant[ Set an EC2 instance attribute. Returns whether the operation succeeded or not. CLI Example: .. code-block:: bash salt myminion boto_ec2.set_attribute sourceDestCheck False instance_name=my_instance Available attributes: * instanceType * kernel * ramdisk * userData * disableApiTermination * instanceInitiatedShutdownBehavior * rootDeviceName * blockDeviceMapping * productCodes * sourceDestCheck * groupSet * ebsOptimized * sriovNetSupport ] variable[conn] assign[=] call[name[_get_conn], parameter[]] variable[attribute_list] assign[=] list[[<ast.Constant object at 0x7da1b20bac50>, <ast.Constant object at 0x7da1b20b8190>, <ast.Constant object at 0x7da1b20bb5e0>, <ast.Constant object at 0x7da1b20b9e70>, <ast.Constant object at 0x7da1b20bb9a0>, <ast.Constant object at 0x7da1b20b97b0>, <ast.Constant object at 0x7da1b20bb790>, <ast.Constant object at 0x7da1b20b9f60>, <ast.Constant object at 0x7da1b20bb9d0>, <ast.Constant object at 0x7da1b20b9360>, <ast.Constant object at 0x7da1b20b9210>, <ast.Constant object at 0x7da1b20ba080>, <ast.Constant object at 0x7da1b20b8d90>]] if <ast.UnaryOp object at 0x7da1b20b8640> begin[:] <ast.Raise object at 0x7da1b20b88b0> if <ast.BoolOp object at 0x7da1b20ba6b0> begin[:] <ast.Raise object at 0x7da1b20b81f0> if compare[name[attribute] <ast.NotIn object at 0x7da2590d7190> name[attribute_list]] begin[:] <ast.Raise object at 0x7da18ede5f90> <ast.Try object at 0x7da18ede5060>
keyword[def] identifier[set_attribute] ( identifier[attribute] , identifier[attribute_value] , identifier[instance_name] = keyword[None] , identifier[instance_id] = keyword[None] , identifier[region] = keyword[None] , identifier[key] = keyword[None] , identifier[keyid] = keyword[None] , identifier[profile] = keyword[None] , identifier[filters] = keyword[None] ): literal[string] identifier[conn] = identifier[_get_conn] ( identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] ) identifier[attribute_list] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ] keyword[if] keyword[not] identifier[any] (( identifier[instance_name] , identifier[instance_id] )): keyword[raise] identifier[SaltInvocationError] ( literal[string] ) keyword[if] identifier[instance_name] keyword[and] identifier[instance_id] : keyword[raise] identifier[SaltInvocationError] ( literal[string] ) keyword[if] identifier[attribute] keyword[not] keyword[in] identifier[attribute_list] : keyword[raise] identifier[SaltInvocationError] ( literal[string] . identifier[format] ( identifier[attribute_list] )) keyword[try] : keyword[if] identifier[instance_name] : identifier[instances] = identifier[find_instances] ( identifier[name] = identifier[instance_name] , identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] , identifier[filters] = identifier[filters] ) keyword[if] identifier[len] ( identifier[instances] )!= literal[int] : keyword[raise] identifier[CommandExecutionError] ( literal[string] ) identifier[instance_id] = identifier[instances] [ literal[int] ] identifier[attribute] = identifier[conn] . identifier[modify_instance_attribute] ( identifier[instance_id] , identifier[attribute] , identifier[attribute_value] ) keyword[if] keyword[not] identifier[attribute] : keyword[return] keyword[False] keyword[return] identifier[attribute] keyword[except] identifier[boto] . identifier[exception] . identifier[BotoServerError] keyword[as] identifier[exc] : identifier[log] . identifier[error] ( identifier[exc] ) keyword[return] keyword[False]
def set_attribute(attribute, attribute_value, instance_name=None, instance_id=None, region=None, key=None, keyid=None, profile=None, filters=None): """ Set an EC2 instance attribute. Returns whether the operation succeeded or not. CLI Example: .. code-block:: bash salt myminion boto_ec2.set_attribute sourceDestCheck False instance_name=my_instance Available attributes: * instanceType * kernel * ramdisk * userData * disableApiTermination * instanceInitiatedShutdownBehavior * rootDeviceName * blockDeviceMapping * productCodes * sourceDestCheck * groupSet * ebsOptimized * sriovNetSupport """ conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) attribute_list = ['instanceType', 'kernel', 'ramdisk', 'userData', 'disableApiTermination', 'instanceInitiatedShutdownBehavior', 'rootDeviceName', 'blockDeviceMapping', 'productCodes', 'sourceDestCheck', 'groupSet', 'ebsOptimized', 'sriovNetSupport'] if not any((instance_name, instance_id)): raise SaltInvocationError('At least one of the following must be specified: instance_name or instance_id.') # depends on [control=['if'], data=[]] if instance_name and instance_id: raise SaltInvocationError('Both instance_name and instance_id can not be specified in the same command.') # depends on [control=['if'], data=[]] if attribute not in attribute_list: raise SaltInvocationError('Attribute must be one of: {0}.'.format(attribute_list)) # depends on [control=['if'], data=['attribute_list']] try: if instance_name: instances = find_instances(name=instance_name, region=region, key=key, keyid=keyid, profile=profile, filters=filters) if len(instances) != 1: raise CommandExecutionError('Found more than one EC2 instance matching the criteria.') # depends on [control=['if'], data=[]] instance_id = instances[0] # depends on [control=['if'], data=[]] attribute = conn.modify_instance_attribute(instance_id, attribute, attribute_value) if not attribute: return False # depends on [control=['if'], data=[]] return attribute # depends on [control=['try'], data=[]] except boto.exception.BotoServerError as exc: log.error(exc) return False # depends on [control=['except'], data=['exc']]
def barycenter_graph(distance_matrix, X, reg=1e-3):
    """ Computes the barycenter weighted graph for points in X

    Parameters
    ----------
    distance_matrix: sparse Ndarray, (N_obs, N_obs) pairwise distance matrix.
    X : Ndarray (N_obs, N_dim) observed data matrix.
    reg : float, optional
        Amount of regularization added to the diagonal of the local Gram
        matrix when solving the least-squares problem for the weights.

    Returns
    -------
    W : sparse matrix in LIL format, shape = [n_samples, n_samples]
        W[i, j] is assigned the weight of edge that connects i to j.
    """
    (N, d_in) = X.shape
    (rows, cols) = distance_matrix.nonzero()
    W = sparse.lil_matrix((N, N))  # lil_matrix allows efficient W[i, nbrs_i] assignment
    for i in range(N):
        nbrs_i = cols[rows == i]
        n_neighbors_i = len(nbrs_i)
        v = np.ones(n_neighbors_i, dtype=X.dtype)
        C = X[nbrs_i] - X[i]
        G = np.dot(C, C.T)
        trace = np.trace(G)
        if trace > 0:
            R = reg * trace
        else:
            R = reg
        G.flat[::n_neighbors_i + 1] += R  # add the regularizer to the diagonal
        w = solve(G, v, sym_pos=True)
        W[i, nbrs_i] = w / np.sum(w)
    return W
def function[barycenter_graph, parameter[distance_matrix, X, reg]]: constant[ Computes the barycenter weighted graph for points in X Parameters ---------- distance_matrix: sparse Ndarray, (N_obs, N_obs) pairwise distance matrix. X : Ndarray (N_obs, N_dim) observed data matrix. reg : float, optional Amount of regularization when solving the least-squares problem. Only relevant if mode='barycenter'. If None, use the default. Returns ------- W : sparse matrix in CSR format, shape = [n_samples, n_samples] W[i, j] is assigned the weight of edge that connects i to j. ] <ast.Tuple object at 0x7da1b13a7040> assign[=] name[X].shape <ast.Tuple object at 0x7da1b13a6f20> assign[=] call[name[distance_matrix].nonzero, parameter[]] variable[W] assign[=] call[name[sparse].lil_matrix, parameter[tuple[[<ast.Name object at 0x7da1b13a76d0>, <ast.Name object at 0x7da1b13a6b60>]]]] for taget[name[i]] in starred[call[name[range], parameter[name[N]]]] begin[:] variable[nbrs_i] assign[=] call[name[cols]][compare[name[rows] equal[==] name[i]]] variable[n_neighbors_i] assign[=] call[name[len], parameter[name[nbrs_i]]] variable[v] assign[=] call[name[np].ones, parameter[name[n_neighbors_i]]] variable[C] assign[=] binary_operation[call[name[X]][name[nbrs_i]] - call[name[X]][name[i]]] variable[G] assign[=] call[name[np].dot, parameter[name[C], name[C].T]] variable[trace] assign[=] call[name[np].trace, parameter[name[G]]] if compare[name[trace] greater[>] constant[0]] begin[:] variable[R] assign[=] binary_operation[name[reg] * name[trace]] <ast.AugAssign object at 0x7da1b13a4af0> variable[w] assign[=] call[name[solve], parameter[name[G], name[v]]] call[name[W]][tuple[[<ast.Name object at 0x7da1b13a5d20>, <ast.Name object at 0x7da1b13a5d80>]]] assign[=] binary_operation[name[w] / call[name[np].sum, parameter[name[w]]]] return[name[W]]
keyword[def] identifier[barycenter_graph] ( identifier[distance_matrix] , identifier[X] , identifier[reg] = literal[int] ): literal[string] ( identifier[N] , identifier[d_in] )= identifier[X] . identifier[shape] ( identifier[rows] , identifier[cols] )= identifier[distance_matrix] . identifier[nonzero] () identifier[W] = identifier[sparse] . identifier[lil_matrix] (( identifier[N] , identifier[N] )) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[N] ): identifier[nbrs_i] = identifier[cols] [ identifier[rows] == identifier[i] ] identifier[n_neighbors_i] = identifier[len] ( identifier[nbrs_i] ) identifier[v] = identifier[np] . identifier[ones] ( identifier[n_neighbors_i] , identifier[dtype] = identifier[X] . identifier[dtype] ) identifier[C] = identifier[X] [ identifier[nbrs_i] ]- identifier[X] [ identifier[i] ] identifier[G] = identifier[np] . identifier[dot] ( identifier[C] , identifier[C] . identifier[T] ) identifier[trace] = identifier[np] . identifier[trace] ( identifier[G] ) keyword[if] identifier[trace] > literal[int] : identifier[R] = identifier[reg] * identifier[trace] keyword[else] : identifier[R] = identifier[reg] identifier[G] . identifier[flat] [:: identifier[n_neighbors_i] + literal[int] ]+= identifier[R] identifier[w] = identifier[solve] ( identifier[G] , identifier[v] , identifier[sym_pos] = keyword[True] ) identifier[W] [ identifier[i] , identifier[nbrs_i] ]= identifier[w] / identifier[np] . identifier[sum] ( identifier[w] ) keyword[return] identifier[W]
def barycenter_graph(distance_matrix, X, reg=0.001):
    """ Computes the barycenter weighted graph for points in X

    Parameters
    ----------
    distance_matrix: sparse Ndarray, (N_obs, N_obs) pairwise distance matrix.
    X : Ndarray (N_obs, N_dim) observed data matrix.
    reg : float, optional
        Amount of regularization added to the diagonal of the local Gram
        matrix when solving the least-squares problem for the weights.

    Returns
    -------
    W : sparse matrix in LIL format, shape = [n_samples, n_samples]
        W[i, j] is assigned the weight of edge that connects i to j.
    """
    (N, d_in) = X.shape
    (rows, cols) = distance_matrix.nonzero()
    W = sparse.lil_matrix((N, N))  # lil_matrix allows efficient W[i, nbrs_i] assignment
    for i in range(N):
        nbrs_i = cols[rows == i]
        n_neighbors_i = len(nbrs_i)
        v = np.ones(n_neighbors_i, dtype=X.dtype)
        C = X[nbrs_i] - X[i]
        G = np.dot(C, C.T)
        trace = np.trace(G)
        if trace > 0:
            R = reg * trace # depends on [control=['if'], data=['trace']]
        else:
            R = reg
        G.flat[::n_neighbors_i + 1] += R
        w = solve(G, v, sym_pos=True)
        W[i, nbrs_i] = w / np.sum(w) # depends on [control=['for'], data=['i']]
    return W
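A quick numeric check of the weights, as a minimal sketch. It assumes `np`, `sparse`, and `solve` are in scope for `barycenter_graph` itself, and an older SciPy where `solve(..., sym_pos=True)` is still accepted. For an interior point with two symmetric neighbors the barycenter weights come out at roughly one half each.

import numpy as np
from scipy import sparse

# Four collinear points; mark nearest-neighbor pairs in a sparse matrix.
X = np.array([[0.0], [1.0], [2.0], [3.0]])
D = sparse.lil_matrix((4, 4))
for i, j in [(0, 1), (1, 0), (1, 2), (2, 1), (2, 3), (3, 2)]:
    D[i, j] = 1.0

W = barycenter_graph(D.tocsr(), X)
print(W[1].toarray())  # ~[[0.5, 0.0, 0.5, 0.0]]: point 1 as a barycenter of 0 and 2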
def configure(self, argv=None):
    '''
    Configures this engine based on the options array passed into
    `argv`. If `argv` is ``None``, then ``sys.argv`` is used instead.
    During configuration, the command line options are merged with
    previously stored values. Then the logging subsystem and the
    database model are initialized, and all storable settings are
    serialized to configuration files.
    '''
    self._setupOptions()
    self._parseOptions(argv)
    self._setupLogging()
    self._setupModel()
    self.dbsession.commit()
    return self
def function[configure, parameter[self, argv]]: constant[ Configures this engine based on the options array passed into `argv`. If `argv` is ``None``, then ``sys.argv`` is used instead. During configuration, the command line options are merged with previously stored values. Then the logging subsystem and the database model are initialized, and all storable settings are serialized to configurations files. ] call[name[self]._setupOptions, parameter[]] call[name[self]._parseOptions, parameter[name[argv]]] call[name[self]._setupLogging, parameter[]] call[name[self]._setupModel, parameter[]] call[name[self].dbsession.commit, parameter[]] return[name[self]]
keyword[def] identifier[configure] ( identifier[self] , identifier[argv] = keyword[None] ): literal[string] identifier[self] . identifier[_setupOptions] () identifier[self] . identifier[_parseOptions] ( identifier[argv] ) identifier[self] . identifier[_setupLogging] () identifier[self] . identifier[_setupModel] () identifier[self] . identifier[dbsession] . identifier[commit] () keyword[return] identifier[self]
def configure(self, argv=None):
    """
    Configures this engine based on the options array passed into
    `argv`. If `argv` is ``None``, then ``sys.argv`` is used instead.
    During configuration, the command line options are merged with
    previously stored values. Then the logging subsystem and the
    database model are initialized, and all storable settings are
    serialized to configuration files.
    """
    self._setupOptions()
    self._parseOptions(argv)
    self._setupLogging()
    self._setupModel()
    self.dbsession.commit()
    return self
def parse_cadd(variant, transcripts):
    """Return the CADD phred score if annotated, otherwise 0.

    The INFO-level annotation takes precedence; failing that, the
    highest transcript-level value is used."""
    cadd = 0
    cadd_keys = ['CADD', 'CADD_PHRED']
    for key in cadd_keys:
        cadd = variant.INFO.get(key, 0)
        if cadd:
            return float(cadd)

    for transcript in transcripts:
        cadd_entry = transcript.get('cadd')
        if cadd_entry and cadd_entry > cadd:
            cadd = cadd_entry

    return cadd
def function[parse_cadd, parameter[variant, transcripts]]: constant[Check if the cadd phred score is annotated] variable[cadd] assign[=] constant[0] variable[cadd_keys] assign[=] list[[<ast.Constant object at 0x7da18f58ca00>, <ast.Constant object at 0x7da18f58dc60>]] for taget[name[key]] in starred[name[cadd_keys]] begin[:] variable[cadd] assign[=] call[name[variant].INFO.get, parameter[name[key], constant[0]]] if name[cadd] begin[:] return[call[name[float], parameter[name[cadd]]]] for taget[name[transcript]] in starred[name[transcripts]] begin[:] variable[cadd_entry] assign[=] call[name[transcript].get, parameter[constant[cadd]]] if <ast.BoolOp object at 0x7da18f58fe20> begin[:] variable[cadd] assign[=] name[cadd_entry] return[name[cadd]]
keyword[def] identifier[parse_cadd] ( identifier[variant] , identifier[transcripts] ): literal[string] identifier[cadd] = literal[int] identifier[cadd_keys] =[ literal[string] , literal[string] ] keyword[for] identifier[key] keyword[in] identifier[cadd_keys] : identifier[cadd] = identifier[variant] . identifier[INFO] . identifier[get] ( identifier[key] , literal[int] ) keyword[if] identifier[cadd] : keyword[return] identifier[float] ( identifier[cadd] ) keyword[for] identifier[transcript] keyword[in] identifier[transcripts] : identifier[cadd_entry] = identifier[transcript] . identifier[get] ( literal[string] ) keyword[if] ( identifier[cadd_entry] keyword[and] identifier[cadd_entry] > identifier[cadd] ): identifier[cadd] = identifier[cadd_entry] keyword[return] identifier[cadd]
def parse_cadd(variant, transcripts):
    """Return the CADD phred score if annotated, otherwise 0.

    The INFO-level annotation takes precedence; failing that, the
    highest transcript-level value is used."""
    cadd = 0
    cadd_keys = ['CADD', 'CADD_PHRED']
    for key in cadd_keys:
        cadd = variant.INFO.get(key, 0)
        if cadd:
            return float(cadd) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']]
    for transcript in transcripts:
        cadd_entry = transcript.get('cadd')
        if cadd_entry and cadd_entry > cadd:
            cadd = cadd_entry # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['transcript']]
    return cadd
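A minimal sketch with a stand-in variant object (the `FakeVariant` class is purely illustrative): the INFO-level annotation wins, otherwise the highest transcript-level value is returned.

class FakeVariant:
    def __init__(self, info):
        self.INFO = info

print(parse_cadd(FakeVariant({'CADD': '23.1'}), []))                  # 23.1
print(parse_cadd(FakeVariant({}), [{'cadd': 12.0}, {'cadd': 18.5}]))  # 18.5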
def check( state_engine, nameop, block_id, checked_ops ):
    """
    Given a NAME_IMPORT nameop, see if we can import it.
    * the name must be well-formed
    * the namespace must be revealed, but not ready
    * the name cannot have been imported yet
    * the sender must be the same as the namespace's sender

    Set the __preorder__ and __prior_history__ fields, since this is a state-creating operation.

    Return True if accepted
    Return False if not
    """

    from ..nameset import BlockstackDB

    name = str(nameop['name'])
    sender = str(nameop['sender'])
    sender_pubkey = None
    recipient = str(nameop['recipient'])
    recipient_address = str(nameop['recipient_address'])

    preorder_hash = hash_name( nameop['name'], sender, recipient_address )
    log.debug("preorder_hash = %s (%s, %s, %s)" % (preorder_hash, nameop['name'], sender, recipient_address))

    preorder_block_number = block_id
    name_block_number = block_id
    name_first_registered = block_id
    name_last_renewed = block_id

    # transfer_send_block_id = None

    if not nameop.has_key('sender_pubkey'):
        log.warning("Name import requires a sender_pubkey (i.e. use of a p2pkh transaction)")
        return False

    # name must be well-formed
    if not is_name_valid( name ):
        log.warning("Malformed name '%s'" % name)
        return False

    name_without_namespace = get_name_from_fq_name( name )
    namespace_id = get_namespace_from_name( name )

    # namespace must be revealed, but not ready
    if not state_engine.is_namespace_revealed( namespace_id ):
        log.warning("Namespace '%s' is not revealed" % namespace_id )
        return False

    namespace = state_engine.get_namespace_reveal( namespace_id )

    # sender p2pkh script must use a public key derived from the namespace revealer's public key
    sender_pubkey_hex = str(nameop['sender_pubkey'])
    sender_pubkey = virtualchain.BitcoinPublicKey( str(sender_pubkey_hex) )
    sender_address = sender_pubkey.address()

    import_addresses = BlockstackDB.load_import_keychain( state_engine.working_dir, namespace['namespace_id'] )

    if import_addresses is None:

        # the first name imported must be the revealer's address
        if sender_address != namespace['recipient_address']:
            log.warning("First NAME_IMPORT must come from the namespace revealer's address")
            return False

        # need to generate a keyring from the revealer's public key
        log.warning("Generating %s-key keychain for '%s'" % (NAME_IMPORT_KEYRING_SIZE, namespace_id))
        import_addresses = BlockstackDB.build_import_keychain( state_engine.working_dir, namespace['namespace_id'], sender_pubkey_hex )

    # sender must be the same as the person who revealed the namespace
    # (i.e. sender's address must be from one of the valid import addresses)
    if sender_address not in import_addresses:
        log.warning("Sender address '%s' is not in the import keychain" % (sender_address))
        return False

    # we can overwrite, but emit a warning
    # search *current* block as well as last block
    prev_name_rec = get_prev_imported( state_engine, checked_ops, name )
    if prev_name_rec is not None and is_earlier_than( prev_name_rec, block_id, nameop['vtxindex'] ):
        log.warning("Overwriting already-imported name '%s'" % name)

        # propagate preorder block number and hash...
        preorder_block_number = prev_name_rec['preorder_block_number']
        name_block_number = prev_name_rec['block_number']
        name_first_registered = prev_name_rec['first_registered']
        name_last_renewed = prev_name_rec['last_renewed']

        log.debug("use previous preorder_hash = %s" % prev_name_rec['preorder_hash'])
        preorder_hash = prev_name_rec['preorder_hash']

    # can never have been preordered
    state_create_put_preorder( nameop, None )

    # carry out the transition
    del nameop['recipient']
    del nameop['recipient_address']

    # set op_fee for BTC
    # set token_fee otherwise
    bitcoin_price = 0
    stacks_price = 0

    if namespace['version'] == NAMESPACE_VERSION_PAY_WITH_STACKS:
        # make sure we're in the right epoch
        epoch_features = get_epoch_features(block_id)
        if EPOCH_FEATURE_STACKS_BUY_NAMESPACES not in epoch_features or EPOCH_FEATURE_NAMEOPS_COST_TOKENS not in epoch_features:
            log.fatal('Have a namespace with STACKs enabled, but we\'re in the wrong epoch!')
            os.abort()

        stacks_price = price_name(name_without_namespace, namespace, block_id)
        stacks_price = int(stacks_price)

    else:
        # QUIRK: keep this as a float due to backwards-compatibility
        bitcoin_price = price_name(name_without_namespace, namespace, block_id)
        bitcoin_price = float(bitcoin_price)

    nameop['sender'] = recipient
    nameop['address'] = recipient_address
    nameop['importer'] = sender
    nameop['importer_address'] = sender_address
    nameop['op_fee'] = bitcoin_price
    nameop['token_fee'] = '{}'.format(stacks_price)
    nameop['namespace_block_number'] = namespace['block_number']
    nameop['consensus_hash'] = None
    nameop['preorder_hash'] = preorder_hash
    nameop['block_number'] = name_block_number
    nameop['first_registered'] = name_first_registered
    nameop['last_renewed'] = name_last_renewed
    nameop['preorder_block_number'] = preorder_block_number

    nameop['opcode'] = "NAME_IMPORT"

    # not required for consensus, but for SNV
    nameop['last_creation_op'] = NAME_IMPORT

    # good!
    return True
def function[check, parameter[state_engine, nameop, block_id, checked_ops]]: constant[ Given a NAME_IMPORT nameop, see if we can import it. * the name must be well-formed * the namespace must be revealed, but not ready * the name cannot have been imported yet * the sender must be the same as the namespace's sender Set the __preorder__ and __prior_history__ fields, since this is a state-creating operation. Return True if accepted Return False if not ] from relative_module[nameset] import module[BlockstackDB] variable[name] assign[=] call[name[str], parameter[call[name[nameop]][constant[name]]]] variable[sender] assign[=] call[name[str], parameter[call[name[nameop]][constant[sender]]]] variable[sender_pubkey] assign[=] constant[None] variable[recipient] assign[=] call[name[str], parameter[call[name[nameop]][constant[recipient]]]] variable[recipient_address] assign[=] call[name[str], parameter[call[name[nameop]][constant[recipient_address]]]] variable[preorder_hash] assign[=] call[name[hash_name], parameter[call[name[nameop]][constant[name]], name[sender], name[recipient_address]]] call[name[log].debug, parameter[binary_operation[constant[preorder_hash = %s (%s, %s, %s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c76ec20>, <ast.Subscript object at 0x7da20c76d810>, <ast.Name object at 0x7da20c76cc70>, <ast.Name object at 0x7da20c76c700>]]]]] variable[preorder_block_number] assign[=] name[block_id] variable[name_block_number] assign[=] name[block_id] variable[name_first_registered] assign[=] name[block_id] variable[name_last_renewed] assign[=] name[block_id] if <ast.UnaryOp object at 0x7da20c76e3b0> begin[:] call[name[log].warning, parameter[constant[Name import requires a sender_pubkey (i.e. use of a p2pkh transaction)]]] return[constant[False]] if <ast.UnaryOp object at 0x7da20c76c850> begin[:] call[name[log].warning, parameter[binary_operation[constant[Malformed name '%s'] <ast.Mod object at 0x7da2590d6920> name[name]]]] return[constant[False]] variable[name_without_namespace] assign[=] call[name[get_name_from_fq_name], parameter[name[name]]] variable[namespace_id] assign[=] call[name[get_namespace_from_name], parameter[name[name]]] if <ast.UnaryOp object at 0x7da20c76e4a0> begin[:] call[name[log].warning, parameter[binary_operation[constant[Namespace '%s' is not revealed] <ast.Mod object at 0x7da2590d6920> name[namespace_id]]]] return[constant[False]] variable[namespace] assign[=] call[name[state_engine].get_namespace_reveal, parameter[name[namespace_id]]] variable[sender_pubkey_hex] assign[=] call[name[str], parameter[call[name[nameop]][constant[sender_pubkey]]]] variable[sender_pubkey] assign[=] call[name[virtualchain].BitcoinPublicKey, parameter[call[name[str], parameter[name[sender_pubkey_hex]]]]] variable[sender_address] assign[=] call[name[sender_pubkey].address, parameter[]] variable[import_addresses] assign[=] call[name[BlockstackDB].load_import_keychain, parameter[name[state_engine].working_dir, call[name[namespace]][constant[namespace_id]]]] if compare[name[import_addresses] is constant[None]] begin[:] if compare[name[sender_address] not_equal[!=] call[name[namespace]][constant[recipient_address]]] begin[:] call[name[log].warning, parameter[constant[First NAME_IMPORT must come from the namespace revealer's address]]] return[constant[False]] call[name[log].warning, parameter[binary_operation[constant[Generating %s-key keychain for '%s'] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c76d750>, <ast.Name object at 0x7da20c76c9d0>]]]]] 
variable[import_addresses] assign[=] call[name[BlockstackDB].build_import_keychain, parameter[name[state_engine].working_dir, call[name[namespace]][constant[namespace_id]], name[sender_pubkey_hex]]] if compare[name[sender_address] <ast.NotIn object at 0x7da2590d7190> name[import_addresses]] begin[:] call[name[log].warning, parameter[binary_operation[constant[Sender address '%s' is not in the import keychain] <ast.Mod object at 0x7da2590d6920> name[sender_address]]]] return[constant[False]] variable[prev_name_rec] assign[=] call[name[get_prev_imported], parameter[name[state_engine], name[checked_ops], name[name]]] if <ast.BoolOp object at 0x7da18bcc8880> begin[:] call[name[log].warning, parameter[binary_operation[constant[Overwriting already-imported name '%s'] <ast.Mod object at 0x7da2590d6920> name[name]]]] variable[preorder_block_number] assign[=] call[name[prev_name_rec]][constant[preorder_block_number]] variable[name_block_number] assign[=] call[name[prev_name_rec]][constant[block_number]] variable[name_first_registered] assign[=] call[name[prev_name_rec]][constant[first_registered]] variable[name_last_renewed] assign[=] call[name[prev_name_rec]][constant[last_renewed]] call[name[log].debug, parameter[binary_operation[constant[use previous preorder_hash = %s] <ast.Mod object at 0x7da2590d6920> call[name[prev_name_rec]][constant[preorder_hash]]]]] variable[preorder_hash] assign[=] call[name[prev_name_rec]][constant[preorder_hash]] call[name[state_create_put_preorder], parameter[name[nameop], constant[None]]] <ast.Delete object at 0x7da18bcc8a90> <ast.Delete object at 0x7da18bcc95d0> variable[bitcoin_price] assign[=] constant[0] variable[stacks_price] assign[=] constant[0] if compare[call[name[namespace]][constant[version]] equal[==] name[NAMESPACE_VERSION_PAY_WITH_STACKS]] begin[:] variable[epoch_features] assign[=] call[name[get_epoch_features], parameter[name[block_id]]] if <ast.BoolOp object at 0x7da20e9601c0> begin[:] call[name[log].fatal, parameter[constant[Have a namespace with STACKs enabled, but we're in the wrong epoch!]]] call[name[os].abort, parameter[]] variable[stacks_price] assign[=] call[name[price_name], parameter[name[name_without_namespace], name[namespace], name[block_id]]] variable[stacks_price] assign[=] call[name[int], parameter[name[stacks_price]]] call[name[nameop]][constant[sender]] assign[=] name[recipient] call[name[nameop]][constant[address]] assign[=] name[recipient_address] call[name[nameop]][constant[importer]] assign[=] name[sender] call[name[nameop]][constant[importer_address]] assign[=] name[sender_address] call[name[nameop]][constant[op_fee]] assign[=] name[bitcoin_price] call[name[nameop]][constant[token_fee]] assign[=] call[constant[{}].format, parameter[name[stacks_price]]] call[name[nameop]][constant[namespace_block_number]] assign[=] call[name[namespace]][constant[block_number]] call[name[nameop]][constant[consensus_hash]] assign[=] constant[None] call[name[nameop]][constant[preorder_hash]] assign[=] name[preorder_hash] call[name[nameop]][constant[block_number]] assign[=] name[name_block_number] call[name[nameop]][constant[first_registered]] assign[=] name[name_first_registered] call[name[nameop]][constant[last_renewed]] assign[=] name[name_last_renewed] call[name[nameop]][constant[preorder_block_number]] assign[=] name[preorder_block_number] call[name[nameop]][constant[opcode]] assign[=] constant[NAME_IMPORT] call[name[nameop]][constant[last_creation_op]] assign[=] name[NAME_IMPORT] return[constant[True]]
keyword[def] identifier[check] ( identifier[state_engine] , identifier[nameop] , identifier[block_id] , identifier[checked_ops] ): literal[string] keyword[from] .. identifier[nameset] keyword[import] identifier[BlockstackDB] identifier[name] = identifier[str] ( identifier[nameop] [ literal[string] ]) identifier[sender] = identifier[str] ( identifier[nameop] [ literal[string] ]) identifier[sender_pubkey] = keyword[None] identifier[recipient] = identifier[str] ( identifier[nameop] [ literal[string] ]) identifier[recipient_address] = identifier[str] ( identifier[nameop] [ literal[string] ]) identifier[preorder_hash] = identifier[hash_name] ( identifier[nameop] [ literal[string] ], identifier[sender] , identifier[recipient_address] ) identifier[log] . identifier[debug] ( literal[string] %( identifier[preorder_hash] , identifier[nameop] [ literal[string] ], identifier[sender] , identifier[recipient_address] )) identifier[preorder_block_number] = identifier[block_id] identifier[name_block_number] = identifier[block_id] identifier[name_first_registered] = identifier[block_id] identifier[name_last_renewed] = identifier[block_id] keyword[if] keyword[not] identifier[nameop] . identifier[has_key] ( literal[string] ): identifier[log] . identifier[warning] ( literal[string] ) keyword[return] keyword[False] keyword[if] keyword[not] identifier[is_name_valid] ( identifier[name] ): identifier[log] . identifier[warning] ( literal[string] % identifier[name] ) keyword[return] keyword[False] identifier[name_without_namespace] = identifier[get_name_from_fq_name] ( identifier[name] ) identifier[namespace_id] = identifier[get_namespace_from_name] ( identifier[name] ) keyword[if] keyword[not] identifier[state_engine] . identifier[is_namespace_revealed] ( identifier[namespace_id] ): identifier[log] . identifier[warning] ( literal[string] % identifier[namespace_id] ) keyword[return] keyword[False] identifier[namespace] = identifier[state_engine] . identifier[get_namespace_reveal] ( identifier[namespace_id] ) identifier[sender_pubkey_hex] = identifier[str] ( identifier[nameop] [ literal[string] ]) identifier[sender_pubkey] = identifier[virtualchain] . identifier[BitcoinPublicKey] ( identifier[str] ( identifier[sender_pubkey_hex] )) identifier[sender_address] = identifier[sender_pubkey] . identifier[address] () identifier[import_addresses] = identifier[BlockstackDB] . identifier[load_import_keychain] ( identifier[state_engine] . identifier[working_dir] , identifier[namespace] [ literal[string] ]) keyword[if] identifier[import_addresses] keyword[is] keyword[None] : keyword[if] identifier[sender_address] != identifier[namespace] [ literal[string] ]: identifier[log] . identifier[warning] ( literal[string] ) keyword[return] keyword[False] identifier[log] . identifier[warning] ( literal[string] %( identifier[NAME_IMPORT_KEYRING_SIZE] , identifier[namespace_id] )) identifier[import_addresses] = identifier[BlockstackDB] . identifier[build_import_keychain] ( identifier[state_engine] . identifier[working_dir] , identifier[namespace] [ literal[string] ], identifier[sender_pubkey_hex] ) keyword[if] identifier[sender_address] keyword[not] keyword[in] identifier[import_addresses] : identifier[log] . 
identifier[warning] ( literal[string] %( identifier[sender_address] )) keyword[return] keyword[False] identifier[prev_name_rec] = identifier[get_prev_imported] ( identifier[state_engine] , identifier[checked_ops] , identifier[name] ) keyword[if] identifier[prev_name_rec] keyword[is] keyword[not] keyword[None] keyword[and] identifier[is_earlier_than] ( identifier[prev_name_rec] , identifier[block_id] , identifier[nameop] [ literal[string] ]): identifier[log] . identifier[warning] ( literal[string] % identifier[name] ) identifier[preorder_block_number] = identifier[prev_name_rec] [ literal[string] ] identifier[name_block_number] = identifier[prev_name_rec] [ literal[string] ] identifier[name_first_registered] = identifier[prev_name_rec] [ literal[string] ] identifier[name_last_renewed] = identifier[prev_name_rec] [ literal[string] ] identifier[log] . identifier[debug] ( literal[string] % identifier[prev_name_rec] [ literal[string] ]) identifier[preorder_hash] = identifier[prev_name_rec] [ literal[string] ] identifier[state_create_put_preorder] ( identifier[nameop] , keyword[None] ) keyword[del] identifier[nameop] [ literal[string] ] keyword[del] identifier[nameop] [ literal[string] ] identifier[bitcoin_price] = literal[int] identifier[stacks_price] = literal[int] keyword[if] identifier[namespace] [ literal[string] ]== identifier[NAMESPACE_VERSION_PAY_WITH_STACKS] : identifier[epoch_features] = identifier[get_epoch_features] ( identifier[block_id] ) keyword[if] identifier[EPOCH_FEATURE_STACKS_BUY_NAMESPACES] keyword[not] keyword[in] identifier[epoch_features] keyword[or] identifier[EPOCH_FEATURE_NAMEOPS_COST_TOKENS] keyword[not] keyword[in] identifier[epoch_features] : identifier[log] . identifier[fatal] ( literal[string] ) identifier[os] . identifier[abort] () identifier[stacks_price] = identifier[price_name] ( identifier[name_without_namespace] , identifier[namespace] , identifier[block_id] ) identifier[stacks_price] = identifier[int] ( identifier[stacks_price] ) keyword[else] : identifier[bitcoin_price] = identifier[price_name] ( identifier[name_without_namespace] , identifier[namespace] , identifier[block_id] ) identifier[bitcoin_price] = identifier[float] ( identifier[bitcoin_price] ) identifier[nameop] [ literal[string] ]= identifier[recipient] identifier[nameop] [ literal[string] ]= identifier[recipient_address] identifier[nameop] [ literal[string] ]= identifier[sender] identifier[nameop] [ literal[string] ]= identifier[sender_address] identifier[nameop] [ literal[string] ]= identifier[bitcoin_price] identifier[nameop] [ literal[string] ]= literal[string] . identifier[format] ( identifier[stacks_price] ) identifier[nameop] [ literal[string] ]= identifier[namespace] [ literal[string] ] identifier[nameop] [ literal[string] ]= keyword[None] identifier[nameop] [ literal[string] ]= identifier[preorder_hash] identifier[nameop] [ literal[string] ]= identifier[name_block_number] identifier[nameop] [ literal[string] ]= identifier[name_first_registered] identifier[nameop] [ literal[string] ]= identifier[name_last_renewed] identifier[nameop] [ literal[string] ]= identifier[preorder_block_number] identifier[nameop] [ literal[string] ]= literal[string] identifier[nameop] [ literal[string] ]= identifier[NAME_IMPORT] keyword[return] keyword[True]
def check(state_engine, nameop, block_id, checked_ops): """ Given a NAME_IMPORT nameop, see if we can import it. * the name must be well-formed * the namespace must be revealed, but not ready * the name cannot have been imported yet * the sender must be the same as the namespace's sender Set the __preorder__ and __prior_history__ fields, since this is a state-creating operation. Return True if accepted Return False if not """ from ..nameset import BlockstackDB name = str(nameop['name']) sender = str(nameop['sender']) sender_pubkey = None recipient = str(nameop['recipient']) recipient_address = str(nameop['recipient_address']) preorder_hash = hash_name(nameop['name'], sender, recipient_address) log.debug('preorder_hash = %s (%s, %s, %s)' % (preorder_hash, nameop['name'], sender, recipient_address)) preorder_block_number = block_id name_block_number = block_id name_first_registered = block_id name_last_renewed = block_id # transfer_send_block_id = None if not nameop.has_key('sender_pubkey'): log.warning('Name import requires a sender_pubkey (i.e. use of a p2pkh transaction)') return False # depends on [control=['if'], data=[]] # name must be well-formed if not is_name_valid(name): log.warning("Malformed name '%s'" % name) return False # depends on [control=['if'], data=[]] name_without_namespace = get_name_from_fq_name(name) namespace_id = get_namespace_from_name(name) # namespace must be revealed, but not ready if not state_engine.is_namespace_revealed(namespace_id): log.warning("Namespace '%s' is not revealed" % namespace_id) return False # depends on [control=['if'], data=[]] namespace = state_engine.get_namespace_reveal(namespace_id) # sender p2pkh script must use a public key derived from the namespace revealer's public key sender_pubkey_hex = str(nameop['sender_pubkey']) sender_pubkey = virtualchain.BitcoinPublicKey(str(sender_pubkey_hex)) sender_address = sender_pubkey.address() import_addresses = BlockstackDB.load_import_keychain(state_engine.working_dir, namespace['namespace_id']) if import_addresses is None: # the first name imported must be the revealer's address if sender_address != namespace['recipient_address']: log.warning("First NAME_IMPORT must come from the namespace revealer's address") return False # depends on [control=['if'], data=[]] # need to generate a keyring from the revealer's public key log.warning("Generating %s-key keychain for '%s'" % (NAME_IMPORT_KEYRING_SIZE, namespace_id)) import_addresses = BlockstackDB.build_import_keychain(state_engine.working_dir, namespace['namespace_id'], sender_pubkey_hex) # depends on [control=['if'], data=['import_addresses']] # sender must be the same as the the person who revealed the namespace # (i.e. sender's address must be from one of the valid import addresses) if sender_address not in import_addresses: log.warning("Sender address '%s' is not in the import keychain" % sender_address) return False # depends on [control=['if'], data=['sender_address']] # we can overwrite, but emit a warning # search *current* block as well as last block prev_name_rec = get_prev_imported(state_engine, checked_ops, name) if prev_name_rec is not None and is_earlier_than(prev_name_rec, block_id, nameop['vtxindex']): log.warning("Overwriting already-imported name '%s'" % name) # propagate preorder block number and hash... 
preorder_block_number = prev_name_rec['preorder_block_number'] name_block_number = prev_name_rec['block_number'] name_first_registered = prev_name_rec['first_registered'] name_last_renewed = prev_name_rec['last_renewed'] log.debug('use previous preorder_hash = %s' % prev_name_rec['preorder_hash']) preorder_hash = prev_name_rec['preorder_hash'] # depends on [control=['if'], data=[]] # can never have been preordered state_create_put_preorder(nameop, None) # carry out the transition del nameop['recipient'] del nameop['recipient_address'] # set op_fee for BTC # set token_fee otherwise bitcoin_price = 0 stacks_price = 0 if namespace['version'] == NAMESPACE_VERSION_PAY_WITH_STACKS: # make sure we're in the right epoch epoch_features = get_epoch_features(block_id) if EPOCH_FEATURE_STACKS_BUY_NAMESPACES not in epoch_features or EPOCH_FEATURE_NAMEOPS_COST_TOKENS not in epoch_features: log.fatal("Have a namespace with STACKs enabled, but we're in the wrong epoch!") os.abort() # depends on [control=['if'], data=[]] stacks_price = price_name(name_without_namespace, namespace, block_id) stacks_price = int(stacks_price) # depends on [control=['if'], data=[]] else: # QUIRK: keep this as a float due to backwards-compatibility bitcoin_price = price_name(name_without_namespace, namespace, block_id) bitcoin_price = float(bitcoin_price) nameop['sender'] = recipient nameop['address'] = recipient_address nameop['importer'] = sender nameop['importer_address'] = sender_address nameop['op_fee'] = bitcoin_price nameop['token_fee'] = '{}'.format(stacks_price) nameop['namespace_block_number'] = namespace['block_number'] nameop['consensus_hash'] = None nameop['preorder_hash'] = preorder_hash nameop['block_number'] = name_block_number nameop['first_registered'] = name_first_registered nameop['last_renewed'] = name_last_renewed nameop['preorder_block_number'] = preorder_block_number nameop['opcode'] = 'NAME_IMPORT' # not required for consensus, but for SNV nameop['last_creation_op'] = NAME_IMPORT # good! return True
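The acceptance check above splits the fully-qualified name into a label and a namespace ID before pricing it. Below is a minimal sketch of those split helpers, assuming the "label.namespace_id" layout; the real get_name_from_fq_name and get_namespace_from_name are not shown in this record, so these stand-ins are illustrative only.

# Hypothetical stand-ins for the helpers used by check(); assumes
# fully-qualified Blockstack names look like "label.namespace_id".
def get_name_from_fq_name(fqn):
    return fqn.split('.', 1)[0]

def get_namespace_from_name(fqn):
    return fqn.split('.', 1)[1]

assert get_name_from_fq_name('alice.id') == 'alice'
assert get_namespace_from_name('alice.id') == 'id'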
def _get_veths(net_data):
    '''
    Parse the nic setup inside lxc conf tuples back to a dictionary indexed
    by network interface
    '''
    if isinstance(net_data, dict):
        net_data = list(net_data.items())
    nics = salt.utils.odict.OrderedDict()
    current_nic = salt.utils.odict.OrderedDict()
    no_names = True
    for item in net_data:
        if item and isinstance(item, dict):
            item = list(item.items())[0]
        # skip LXC configuration comment lines, and work only with tuple confs
        elif isinstance(item, six.string_types):
            # deal with reflection of commented lxc configs
            sitem = item.strip()
            if sitem.startswith('#') or not sitem:
                continue
            elif '=' in item:
                item = tuple([a.strip() for a in item.split('=', 1)])
        if item[0] == 'lxc.network.type':
            current_nic = salt.utils.odict.OrderedDict()
        if item[0] == 'lxc.network.name':
            no_names = False
            nics[item[1].strip()] = current_nic
        current_nic[item[0].strip()] = item[1].strip()
    # if no ethernet card name has been collected, assume we collected
    # data for eth0
    if no_names and current_nic:
        nics[DEFAULT_NIC] = current_nic
    return nics
def function[_get_veths, parameter[net_data]]: constant[ Parse the nic setup inside lxc conf tuples back to a dictionary indexed by network interface ] if call[name[isinstance], parameter[name[net_data], name[dict]]] begin[:] variable[net_data] assign[=] call[name[list], parameter[call[name[net_data].items, parameter[]]]] variable[nics] assign[=] call[name[salt].utils.odict.OrderedDict, parameter[]] variable[current_nic] assign[=] call[name[salt].utils.odict.OrderedDict, parameter[]] variable[no_names] assign[=] constant[True] for taget[name[item]] in starred[name[net_data]] begin[:] if <ast.BoolOp object at 0x7da1b21f3220> begin[:] variable[item] assign[=] call[call[name[list], parameter[call[name[item].items, parameter[]]]]][constant[0]] if compare[call[name[item]][constant[0]] equal[==] constant[lxc.network.type]] begin[:] variable[current_nic] assign[=] call[name[salt].utils.odict.OrderedDict, parameter[]] if compare[call[name[item]][constant[0]] equal[==] constant[lxc.network.name]] begin[:] variable[no_names] assign[=] constant[False] call[name[nics]][call[call[name[item]][constant[1]].strip, parameter[]]] assign[=] name[current_nic] call[name[current_nic]][call[call[name[item]][constant[0]].strip, parameter[]]] assign[=] call[call[name[item]][constant[1]].strip, parameter[]] if <ast.BoolOp object at 0x7da1b21f27d0> begin[:] call[name[nics]][name[DEFAULT_NIC]] assign[=] name[current_nic] return[name[nics]]
keyword[def] identifier[_get_veths] ( identifier[net_data] ): literal[string] keyword[if] identifier[isinstance] ( identifier[net_data] , identifier[dict] ): identifier[net_data] = identifier[list] ( identifier[net_data] . identifier[items] ()) identifier[nics] = identifier[salt] . identifier[utils] . identifier[odict] . identifier[OrderedDict] () identifier[current_nic] = identifier[salt] . identifier[utils] . identifier[odict] . identifier[OrderedDict] () identifier[no_names] = keyword[True] keyword[for] identifier[item] keyword[in] identifier[net_data] : keyword[if] identifier[item] keyword[and] identifier[isinstance] ( identifier[item] , identifier[dict] ): identifier[item] = identifier[list] ( identifier[item] . identifier[items] ())[ literal[int] ] keyword[elif] identifier[isinstance] ( identifier[item] , identifier[six] . identifier[string_types] ): identifier[sitem] = identifier[item] . identifier[strip] () keyword[if] identifier[sitem] . identifier[startswith] ( literal[string] ) keyword[or] keyword[not] identifier[sitem] : keyword[continue] keyword[elif] literal[string] keyword[in] identifier[item] : identifier[item] = identifier[tuple] ([ identifier[a] . identifier[strip] () keyword[for] identifier[a] keyword[in] identifier[item] . identifier[split] ( literal[string] , literal[int] )]) keyword[if] identifier[item] [ literal[int] ]== literal[string] : identifier[current_nic] = identifier[salt] . identifier[utils] . identifier[odict] . identifier[OrderedDict] () keyword[if] identifier[item] [ literal[int] ]== literal[string] : identifier[no_names] = keyword[False] identifier[nics] [ identifier[item] [ literal[int] ]. identifier[strip] ()]= identifier[current_nic] identifier[current_nic] [ identifier[item] [ literal[int] ]. identifier[strip] ()]= identifier[item] [ literal[int] ]. identifier[strip] () keyword[if] identifier[no_names] keyword[and] identifier[current_nic] : identifier[nics] [ identifier[DEFAULT_NIC] ]= identifier[current_nic] keyword[return] identifier[nics]
def _get_veths(net_data):
    """
    Parse the nic setup inside lxc conf tuples back to a dictionary indexed
    by network interface
    """
    if isinstance(net_data, dict):
        net_data = list(net_data.items()) # depends on [control=['if'], data=[]]
    nics = salt.utils.odict.OrderedDict()
    current_nic = salt.utils.odict.OrderedDict()
    no_names = True
    for item in net_data:
        if item and isinstance(item, dict):
            item = list(item.items())[0] # depends on [control=['if'], data=[]]
        # skip LXC configuration comment lines, and work only with tuple confs
        elif isinstance(item, six.string_types):
            # deal with reflection of commented lxc configs
            sitem = item.strip()
            if sitem.startswith('#') or not sitem:
                continue # depends on [control=['if'], data=[]]
            elif '=' in item:
                item = tuple([a.strip() for a in item.split('=', 1)]) # depends on [control=['if'], data=['item']] # depends on [control=['if'], data=[]]
        if item[0] == 'lxc.network.type':
            current_nic = salt.utils.odict.OrderedDict() # depends on [control=['if'], data=[]]
        if item[0] == 'lxc.network.name':
            no_names = False
            nics[item[1].strip()] = current_nic # depends on [control=['if'], data=[]]
        current_nic[item[0].strip()] = item[1].strip() # depends on [control=['for'], data=['item']]
    # if no ethernet card name has been collected, assume we collected
    # data for eth0
    if no_names and current_nic:
        nics[DEFAULT_NIC] = current_nic # depends on [control=['if'], data=[]]
    return nics
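A stdlib-only sketch of the same grouping idea, with collections.OrderedDict standing in for salt.utils.odict.OrderedDict and made-up conf lines: a new lxc.network.type entry starts a fresh card, and lxc.network.name indexes it.

# Self-contained demo of the grouping logic; input lines are invented.
from collections import OrderedDict

lines = [
    '# comment lines are skipped',
    'lxc.network.type = veth',
    'lxc.network.name = eth0',
    'lxc.network.link = br0',
]
nics, current = OrderedDict(), OrderedDict()
for line in lines:
    line = line.strip()
    if not line or line.startswith('#'):
        continue
    key, value = (part.strip() for part in line.split('=', 1))
    if key == 'lxc.network.type':
        current = OrderedDict()   # a new card definition starts here
    if key == 'lxc.network.name':
        nics[value] = current     # index the card by its interface name
    current[key] = value
print(dict(nics['eth0']))
# {'lxc.network.type': 'veth', 'lxc.network.name': 'eth0', 'lxc.network.link': 'br0'}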
def vendor_runtime(chroot, dest_basedir, label, root_module_names): """Includes portions of vendored distributions in a chroot. The portion to include is selected by root module name. If the module is a file, just it is included. If the module represents a package, the package and all its sub-packages are added recursively. :param chroot: The chroot to add vendored code to. :type chroot: :class:`pex.common.Chroot` :param str dest_basedir: The prefix to store the vendored code under in the ``chroot``. :param str label: The chroot label for the vendored code fileset. :param root_module_names: The names of the root vendored modules to include in the chroot. :type root_module_names: :class:`collections.Iterable` of str :raise: :class:`ValueError` if any of the given ``root_module_names`` could not be found amongst the vendored code and added to the chroot. """ vendor_module_names = {root_module_name: False for root_module_name in root_module_names} for spec in iter_vendor_specs(): for root, dirs, files in os.walk(spec.target_dir): if root == spec.target_dir: dirs[:] = [pkg_name for pkg_name in dirs if pkg_name in vendor_module_names] files[:] = [mod_name for mod_name in files if mod_name[:-3] in vendor_module_names] vendored_names = dirs + files if vendored_names: pkg_path = '' for pkg in spec.relpath.split(os.sep): pkg_path = os.path.join(pkg_path, pkg) pkg_file = os.path.join(pkg_path, '__init__.py') src = os.path.join(VendorSpec.ROOT, pkg_file) dest = os.path.join(dest_basedir, pkg_file) if os.path.exists(src): chroot.copy(src, dest, label) else: # We delete `pex/vendor/_vendored/<dist>/__init__.py` when isolating third_party. chroot.touch(dest, label) for name in vendored_names: vendor_module_names[name] = True TRACER.log('Vendoring {} from {} @ {}'.format(name, spec, spec.target_dir), V=3) for filename in files: if not filename.endswith('.pyc'): # Sources and data only. src = os.path.join(root, filename) dest = os.path.join(dest_basedir, spec.relpath, os.path.relpath(src, spec.target_dir)) chroot.copy(src, dest, label) if not all(vendor_module_names.values()): raise ValueError('Failed to extract {module_names} from:\n\t{specs}'.format( module_names=', '.join(module for module, written in vendor_module_names.items() if not written), specs='\n\t'.join('{} @ {}'.format(spec, spec.target_dir) for spec in iter_vendor_specs())))
def function[vendor_runtime, parameter[chroot, dest_basedir, label, root_module_names]]: constant[Includes portions of vendored distributions in a chroot. The portion to include is selected by root module name. If the module is a file, just it is included. If the module represents a package, the package and all its sub-packages are added recursively. :param chroot: The chroot to add vendored code to. :type chroot: :class:`pex.common.Chroot` :param str dest_basedir: The prefix to store the vendored code under in the ``chroot``. :param str label: The chroot label for the vendored code fileset. :param root_module_names: The names of the root vendored modules to include in the chroot. :type root_module_names: :class:`collections.Iterable` of str :raise: :class:`ValueError` if any of the given ``root_module_names`` could not be found amongst the vendored code and added to the chroot. ] variable[vendor_module_names] assign[=] <ast.DictComp object at 0x7da18fe90ee0> for taget[name[spec]] in starred[call[name[iter_vendor_specs], parameter[]]] begin[:] for taget[tuple[[<ast.Name object at 0x7da18fe902b0>, <ast.Name object at 0x7da18fe92230>, <ast.Name object at 0x7da18fe90a00>]]] in starred[call[name[os].walk, parameter[name[spec].target_dir]]] begin[:] if compare[name[root] equal[==] name[spec].target_dir] begin[:] call[name[dirs]][<ast.Slice object at 0x7da18fe939a0>] assign[=] <ast.ListComp object at 0x7da18fe91270> call[name[files]][<ast.Slice object at 0x7da18fe90fa0>] assign[=] <ast.ListComp object at 0x7da18fe92ce0> variable[vendored_names] assign[=] binary_operation[name[dirs] + name[files]] if name[vendored_names] begin[:] variable[pkg_path] assign[=] constant[] for taget[name[pkg]] in starred[call[name[spec].relpath.split, parameter[name[os].sep]]] begin[:] variable[pkg_path] assign[=] call[name[os].path.join, parameter[name[pkg_path], name[pkg]]] variable[pkg_file] assign[=] call[name[os].path.join, parameter[name[pkg_path], constant[__init__.py]]] variable[src] assign[=] call[name[os].path.join, parameter[name[VendorSpec].ROOT, name[pkg_file]]] variable[dest] assign[=] call[name[os].path.join, parameter[name[dest_basedir], name[pkg_file]]] if call[name[os].path.exists, parameter[name[src]]] begin[:] call[name[chroot].copy, parameter[name[src], name[dest], name[label]]] for taget[name[name]] in starred[name[vendored_names]] begin[:] call[name[vendor_module_names]][name[name]] assign[=] constant[True] call[name[TRACER].log, parameter[call[constant[Vendoring {} from {} @ {}].format, parameter[name[name], name[spec], name[spec].target_dir]]]] for taget[name[filename]] in starred[name[files]] begin[:] if <ast.UnaryOp object at 0x7da18fe90190> begin[:] variable[src] assign[=] call[name[os].path.join, parameter[name[root], name[filename]]] variable[dest] assign[=] call[name[os].path.join, parameter[name[dest_basedir], name[spec].relpath, call[name[os].path.relpath, parameter[name[src], name[spec].target_dir]]]] call[name[chroot].copy, parameter[name[src], name[dest], name[label]]] if <ast.UnaryOp object at 0x7da18fe90be0> begin[:] <ast.Raise object at 0x7da18fe90820>
keyword[def] identifier[vendor_runtime] ( identifier[chroot] , identifier[dest_basedir] , identifier[label] , identifier[root_module_names] ): literal[string] identifier[vendor_module_names] ={ identifier[root_module_name] : keyword[False] keyword[for] identifier[root_module_name] keyword[in] identifier[root_module_names] } keyword[for] identifier[spec] keyword[in] identifier[iter_vendor_specs] (): keyword[for] identifier[root] , identifier[dirs] , identifier[files] keyword[in] identifier[os] . identifier[walk] ( identifier[spec] . identifier[target_dir] ): keyword[if] identifier[root] == identifier[spec] . identifier[target_dir] : identifier[dirs] [:]=[ identifier[pkg_name] keyword[for] identifier[pkg_name] keyword[in] identifier[dirs] keyword[if] identifier[pkg_name] keyword[in] identifier[vendor_module_names] ] identifier[files] [:]=[ identifier[mod_name] keyword[for] identifier[mod_name] keyword[in] identifier[files] keyword[if] identifier[mod_name] [:- literal[int] ] keyword[in] identifier[vendor_module_names] ] identifier[vendored_names] = identifier[dirs] + identifier[files] keyword[if] identifier[vendored_names] : identifier[pkg_path] = literal[string] keyword[for] identifier[pkg] keyword[in] identifier[spec] . identifier[relpath] . identifier[split] ( identifier[os] . identifier[sep] ): identifier[pkg_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[pkg_path] , identifier[pkg] ) identifier[pkg_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[pkg_path] , literal[string] ) identifier[src] = identifier[os] . identifier[path] . identifier[join] ( identifier[VendorSpec] . identifier[ROOT] , identifier[pkg_file] ) identifier[dest] = identifier[os] . identifier[path] . identifier[join] ( identifier[dest_basedir] , identifier[pkg_file] ) keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[src] ): identifier[chroot] . identifier[copy] ( identifier[src] , identifier[dest] , identifier[label] ) keyword[else] : identifier[chroot] . identifier[touch] ( identifier[dest] , identifier[label] ) keyword[for] identifier[name] keyword[in] identifier[vendored_names] : identifier[vendor_module_names] [ identifier[name] ]= keyword[True] identifier[TRACER] . identifier[log] ( literal[string] . identifier[format] ( identifier[name] , identifier[spec] , identifier[spec] . identifier[target_dir] ), identifier[V] = literal[int] ) keyword[for] identifier[filename] keyword[in] identifier[files] : keyword[if] keyword[not] identifier[filename] . identifier[endswith] ( literal[string] ): identifier[src] = identifier[os] . identifier[path] . identifier[join] ( identifier[root] , identifier[filename] ) identifier[dest] = identifier[os] . identifier[path] . identifier[join] ( identifier[dest_basedir] , identifier[spec] . identifier[relpath] , identifier[os] . identifier[path] . identifier[relpath] ( identifier[src] , identifier[spec] . identifier[target_dir] )) identifier[chroot] . identifier[copy] ( identifier[src] , identifier[dest] , identifier[label] ) keyword[if] keyword[not] identifier[all] ( identifier[vendor_module_names] . identifier[values] ()): keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[module_names] = literal[string] . identifier[join] ( identifier[module] keyword[for] identifier[module] , identifier[written] keyword[in] identifier[vendor_module_names] . identifier[items] () keyword[if] keyword[not] identifier[written] ), identifier[specs] = literal[string] . 
identifier[join] ( literal[string] . identifier[format] ( identifier[spec] , identifier[spec] . identifier[target_dir] ) keyword[for] identifier[spec] keyword[in] identifier[iter_vendor_specs] ())))
def vendor_runtime(chroot, dest_basedir, label, root_module_names): """Includes portions of vendored distributions in a chroot. The portion to include is selected by root module name. If the module is a file, just it is included. If the module represents a package, the package and all its sub-packages are added recursively. :param chroot: The chroot to add vendored code to. :type chroot: :class:`pex.common.Chroot` :param str dest_basedir: The prefix to store the vendored code under in the ``chroot``. :param str label: The chroot label for the vendored code fileset. :param root_module_names: The names of the root vendored modules to include in the chroot. :type root_module_names: :class:`collections.Iterable` of str :raise: :class:`ValueError` if any of the given ``root_module_names`` could not be found amongst the vendored code and added to the chroot. """ vendor_module_names = {root_module_name: False for root_module_name in root_module_names} for spec in iter_vendor_specs(): for (root, dirs, files) in os.walk(spec.target_dir): if root == spec.target_dir: dirs[:] = [pkg_name for pkg_name in dirs if pkg_name in vendor_module_names] files[:] = [mod_name for mod_name in files if mod_name[:-3] in vendor_module_names] vendored_names = dirs + files if vendored_names: pkg_path = '' for pkg in spec.relpath.split(os.sep): pkg_path = os.path.join(pkg_path, pkg) pkg_file = os.path.join(pkg_path, '__init__.py') src = os.path.join(VendorSpec.ROOT, pkg_file) dest = os.path.join(dest_basedir, pkg_file) if os.path.exists(src): chroot.copy(src, dest, label) # depends on [control=['if'], data=[]] else: # We delete `pex/vendor/_vendored/<dist>/__init__.py` when isolating third_party. chroot.touch(dest, label) # depends on [control=['for'], data=['pkg']] for name in vendored_names: vendor_module_names[name] = True TRACER.log('Vendoring {} from {} @ {}'.format(name, spec, spec.target_dir), V=3) # depends on [control=['for'], data=['name']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] for filename in files: if not filename.endswith('.pyc'): # Sources and data only. src = os.path.join(root, filename) dest = os.path.join(dest_basedir, spec.relpath, os.path.relpath(src, spec.target_dir)) chroot.copy(src, dest, label) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['filename']] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['spec']] if not all(vendor_module_names.values()): raise ValueError('Failed to extract {module_names} from:\n\t{specs}'.format(module_names=', '.join((module for (module, written) in vendor_module_names.items() if not written)), specs='\n\t'.join(('{} @ {}'.format(spec, spec.target_dir) for spec in iter_vendor_specs())))) # depends on [control=['if'], data=[]]
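The traversal above relies on os.walk's in-place dirs[:] assignment to prune subtrees it never needs to visit. Here is a minimal, self-contained demo of that trick, using a throwaway directory tree with made-up names.

# Pruning os.walk in place: assigning to dirs[:] (not rebinding dirs)
# controls which subdirectories walk() descends into.
import os
import tempfile

root = tempfile.mkdtemp()
for d in ('keep', 'skip'):
    os.makedirs(os.path.join(root, d, 'sub'))
for top, dirs, files in os.walk(root):
    if top == root:
        dirs[:] = [d for d in dirs if d == 'keep']  # prune the traversal
    print(os.path.relpath(top, root))
# prints '.', 'keep', 'keep/sub'; 'skip' is never descended into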
def get_task_filelist(cls, task_factory, courseid, taskid): """ Returns a flattened version of all the files inside the task directory, excluding the files task.* and hidden files. It returns a list of tuples, of the type (Integer Level, Boolean IsDirectory, String Name, String CompleteName) """ task_fs = task_factory.get_task_fs(courseid, taskid) if not task_fs.exists(): return [] tmp_out = {} entries = task_fs.list(True, True, True) for entry in entries: if os.path.splitext(entry)[0] == "task" and os.path.splitext(entry)[1][1:] in task_factory.get_available_task_file_extensions(): continue data = entry.split("/") is_directory = False if data[-1] == "": is_directory = True data = data[0:len(data)-1] cur_pos = 0 tree_pos = tmp_out while cur_pos != len(data): if data[cur_pos] not in tree_pos: tree_pos[data[cur_pos]] = {} if is_directory or cur_pos != len(data) - 1 else None tree_pos = tree_pos[data[cur_pos]] cur_pos += 1 def recur_print(current, level, current_name): iteritems = sorted(current.items()) # First, the files recur_print.flattened += [(level, False, f, current_name+"/"+f) for f, t in iteritems if t is None] # Then, the dirs for name, sub in iteritems: if sub is not None: recur_print.flattened.append((level, True, name, current_name+"/"+name+"/")) recur_print(sub, level + 1, current_name + "/" + name) recur_print.flattened = [] recur_print(tmp_out, 0, '') return recur_print.flattened
def function[get_task_filelist, parameter[cls, task_factory, courseid, taskid]]: constant[ Returns a flattened version of all the files inside the task directory, excluding the files task.* and hidden files. It returns a list of tuples, of the type (Integer Level, Boolean IsDirectory, String Name, String CompleteName) ] variable[task_fs] assign[=] call[name[task_factory].get_task_fs, parameter[name[courseid], name[taskid]]] if <ast.UnaryOp object at 0x7da204963e50> begin[:] return[list[[]]] variable[tmp_out] assign[=] dictionary[[], []] variable[entries] assign[=] call[name[task_fs].list, parameter[constant[True], constant[True], constant[True]]] for taget[name[entry]] in starred[name[entries]] begin[:] if <ast.BoolOp object at 0x7da204962500> begin[:] continue variable[data] assign[=] call[name[entry].split, parameter[constant[/]]] variable[is_directory] assign[=] constant[False] if compare[call[name[data]][<ast.UnaryOp object at 0x7da204960640>] equal[==] constant[]] begin[:] variable[is_directory] assign[=] constant[True] variable[data] assign[=] call[name[data]][<ast.Slice object at 0x7da204960790>] variable[cur_pos] assign[=] constant[0] variable[tree_pos] assign[=] name[tmp_out] while compare[name[cur_pos] not_equal[!=] call[name[len], parameter[name[data]]]] begin[:] if compare[call[name[data]][name[cur_pos]] <ast.NotIn object at 0x7da2590d7190> name[tree_pos]] begin[:] call[name[tree_pos]][call[name[data]][name[cur_pos]]] assign[=] <ast.IfExp object at 0x7da204961810> variable[tree_pos] assign[=] call[name[tree_pos]][call[name[data]][name[cur_pos]]] <ast.AugAssign object at 0x7da204620250> def function[recur_print, parameter[current, level, current_name]]: variable[iteritems] assign[=] call[name[sorted], parameter[call[name[current].items, parameter[]]]] <ast.AugAssign object at 0x7da204620700> for taget[tuple[[<ast.Name object at 0x7da204621750>, <ast.Name object at 0x7da204622b00>]]] in starred[name[iteritems]] begin[:] if compare[name[sub] is_not constant[None]] begin[:] call[name[recur_print].flattened.append, parameter[tuple[[<ast.Name object at 0x7da204623bb0>, <ast.Constant object at 0x7da204620100>, <ast.Name object at 0x7da204622fe0>, <ast.BinOp object at 0x7da204623fd0>]]]] call[name[recur_print], parameter[name[sub], binary_operation[name[level] + constant[1]], binary_operation[binary_operation[name[current_name] + constant[/]] + name[name]]]] name[recur_print].flattened assign[=] list[[]] call[name[recur_print], parameter[name[tmp_out], constant[0], constant[]]] return[name[recur_print].flattened]
keyword[def] identifier[get_task_filelist] ( identifier[cls] , identifier[task_factory] , identifier[courseid] , identifier[taskid] ): literal[string] identifier[task_fs] = identifier[task_factory] . identifier[get_task_fs] ( identifier[courseid] , identifier[taskid] ) keyword[if] keyword[not] identifier[task_fs] . identifier[exists] (): keyword[return] [] identifier[tmp_out] ={} identifier[entries] = identifier[task_fs] . identifier[list] ( keyword[True] , keyword[True] , keyword[True] ) keyword[for] identifier[entry] keyword[in] identifier[entries] : keyword[if] identifier[os] . identifier[path] . identifier[splitext] ( identifier[entry] )[ literal[int] ]== literal[string] keyword[and] identifier[os] . identifier[path] . identifier[splitext] ( identifier[entry] )[ literal[int] ][ literal[int] :] keyword[in] identifier[task_factory] . identifier[get_available_task_file_extensions] (): keyword[continue] identifier[data] = identifier[entry] . identifier[split] ( literal[string] ) identifier[is_directory] = keyword[False] keyword[if] identifier[data] [- literal[int] ]== literal[string] : identifier[is_directory] = keyword[True] identifier[data] = identifier[data] [ literal[int] : identifier[len] ( identifier[data] )- literal[int] ] identifier[cur_pos] = literal[int] identifier[tree_pos] = identifier[tmp_out] keyword[while] identifier[cur_pos] != identifier[len] ( identifier[data] ): keyword[if] identifier[data] [ identifier[cur_pos] ] keyword[not] keyword[in] identifier[tree_pos] : identifier[tree_pos] [ identifier[data] [ identifier[cur_pos] ]]={} keyword[if] identifier[is_directory] keyword[or] identifier[cur_pos] != identifier[len] ( identifier[data] )- literal[int] keyword[else] keyword[None] identifier[tree_pos] = identifier[tree_pos] [ identifier[data] [ identifier[cur_pos] ]] identifier[cur_pos] += literal[int] keyword[def] identifier[recur_print] ( identifier[current] , identifier[level] , identifier[current_name] ): identifier[iteritems] = identifier[sorted] ( identifier[current] . identifier[items] ()) identifier[recur_print] . identifier[flattened] +=[( identifier[level] , keyword[False] , identifier[f] , identifier[current_name] + literal[string] + identifier[f] ) keyword[for] identifier[f] , identifier[t] keyword[in] identifier[iteritems] keyword[if] identifier[t] keyword[is] keyword[None] ] keyword[for] identifier[name] , identifier[sub] keyword[in] identifier[iteritems] : keyword[if] identifier[sub] keyword[is] keyword[not] keyword[None] : identifier[recur_print] . identifier[flattened] . identifier[append] (( identifier[level] , keyword[True] , identifier[name] , identifier[current_name] + literal[string] + identifier[name] + literal[string] )) identifier[recur_print] ( identifier[sub] , identifier[level] + literal[int] , identifier[current_name] + literal[string] + identifier[name] ) identifier[recur_print] . identifier[flattened] =[] identifier[recur_print] ( identifier[tmp_out] , literal[int] , literal[string] ) keyword[return] identifier[recur_print] . identifier[flattened]
def get_task_filelist(cls, task_factory, courseid, taskid): """ Returns a flattened version of all the files inside the task directory, excluding the files task.* and hidden files. It returns a list of tuples, of the type (Integer Level, Boolean IsDirectory, String Name, String CompleteName) """ task_fs = task_factory.get_task_fs(courseid, taskid) if not task_fs.exists(): return [] # depends on [control=['if'], data=[]] tmp_out = {} entries = task_fs.list(True, True, True) for entry in entries: if os.path.splitext(entry)[0] == 'task' and os.path.splitext(entry)[1][1:] in task_factory.get_available_task_file_extensions(): continue # depends on [control=['if'], data=[]] data = entry.split('/') is_directory = False if data[-1] == '': is_directory = True data = data[0:len(data) - 1] # depends on [control=['if'], data=[]] cur_pos = 0 tree_pos = tmp_out while cur_pos != len(data): if data[cur_pos] not in tree_pos: tree_pos[data[cur_pos]] = {} if is_directory or cur_pos != len(data) - 1 else None # depends on [control=['if'], data=['tree_pos']] tree_pos = tree_pos[data[cur_pos]] cur_pos += 1 # depends on [control=['while'], data=['cur_pos']] # depends on [control=['for'], data=['entry']] def recur_print(current, level, current_name): iteritems = sorted(current.items()) # First, the files recur_print.flattened += [(level, False, f, current_name + '/' + f) for (f, t) in iteritems if t is None] # Then, the dirs for (name, sub) in iteritems: if sub is not None: recur_print.flattened.append((level, True, name, current_name + '/' + name + '/')) recur_print(sub, level + 1, current_name + '/' + name) # depends on [control=['if'], data=['sub']] # depends on [control=['for'], data=[]] recur_print.flattened = [] recur_print(tmp_out, 0, '') return recur_print.flattened
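The helper builds a nested dict tree from slash-separated paths (None marks a file, a dict marks a directory) and then flattens it depth-first, files before directories at each level. A self-contained sketch of that two-step pattern, with hypothetical entries:

# Build the tree, then flatten it into (level, is_dir, name, full_name).
entries = ['run', 'tests/', 'tests/test_a.py']

tree = {}
for entry in entries:
    parts = entry.rstrip('/').split('/')
    node = tree
    for i, part in enumerate(parts):
        if i == len(parts) - 1 and not entry.endswith('/'):
            node[part] = None                 # a file
        else:
            node = node.setdefault(part, {})  # a directory

def flatten(node, level=0, prefix=''):
    out = [(level, False, name, prefix + '/' + name)
           for name, sub in sorted(node.items()) if sub is None]
    for name, sub in sorted(node.items()):
        if sub is not None:
            out.append((level, True, name, prefix + '/' + name + '/'))
            out += flatten(sub, level + 1, prefix + '/' + name)
    return out

print(flatten(tree))
# [(0, False, 'run', '/run'), (0, True, 'tests', '/tests/'),
#  (1, False, 'test_a.py', '/tests/test_a.py')]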
def object_deserializer(obj): """Helper to deserialize a raw result dict into a proper dict. :param obj: The dict. """ for key, val in obj.items(): if isinstance(val, six.string_types) and DATETIME_REGEX.search(val): try: obj[key] = dates.localize_datetime(parser.parse(val)) except ValueError: obj[key] = val return obj
def function[object_deserializer, parameter[obj]]: constant[Helper to deserialize a raw result dict into a proper dict. :param obj: The dict. ] for taget[tuple[[<ast.Name object at 0x7da1b0ca6c80>, <ast.Name object at 0x7da1b0ca63e0>]]] in starred[call[name[obj].items, parameter[]]] begin[:] if <ast.BoolOp object at 0x7da1b0ca6560> begin[:] <ast.Try object at 0x7da1b0ca5930> return[name[obj]]
keyword[def] identifier[object_deserializer] ( identifier[obj] ): literal[string] keyword[for] identifier[key] , identifier[val] keyword[in] identifier[obj] . identifier[items] (): keyword[if] identifier[isinstance] ( identifier[val] , identifier[six] . identifier[string_types] ) keyword[and] identifier[DATETIME_REGEX] . identifier[search] ( identifier[val] ): keyword[try] : identifier[obj] [ identifier[key] ]= identifier[dates] . identifier[localize_datetime] ( identifier[parser] . identifier[parse] ( identifier[val] )) keyword[except] identifier[ValueError] : identifier[obj] [ identifier[key] ]= identifier[val] keyword[return] identifier[obj]
def object_deserializer(obj): """Helper to deserialize a raw result dict into a proper dict. :param obj: The dict. """ for (key, val) in obj.items(): if isinstance(val, six.string_types) and DATETIME_REGEX.search(val): try: obj[key] = dates.localize_datetime(parser.parse(val)) # depends on [control=['try'], data=[]] except ValueError: obj[key] = val # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return obj
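This is the classic json object_hook pattern: every decoded dict passes through the deserializer, which upgrades datetime-looking strings in place. A stdlib-only sketch follows; the original leans on dateutil's parser, six, and helpers such as DATETIME_REGEX and dates.localize_datetime that are not shown in this record, so a simplified regex and strptime stand in here.

import json
import re
from datetime import datetime

ISO = re.compile(r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}$')

def hook(obj):
    # upgrade ISO-looking string values to datetime objects
    for key, val in obj.items():
        if isinstance(val, str) and ISO.match(val):
            try:
                obj[key] = datetime.strptime(val, '%Y-%m-%dT%H:%M:%S')
            except ValueError:
                pass  # leave unparseable strings untouched
    return obj

doc = json.loads('{"created": "2021-04-01T12:30:00"}', object_hook=hook)
print(type(doc['created']))  # <class 'datetime.datetime'>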
async def kick(self, user_id: base.Integer, until_date: typing.Union[base.Integer, None] = None): """ Use this method to kick a user from a group, a supergroup or a channel. In the case of supergroups and channels, the user will not be able to return to the group on their own using invite links, etc., unless unbanned first. The bot must be an administrator in the chat for this to work and must have the appropriate admin rights. Note: In regular groups (non-supergroups), this method will only work if the ‘All Members Are Admins’ setting is off in the target group. Otherwise members may only be removed by the group's creator or by the member that added them. Source: https://core.telegram.org/bots/api#kickchatmember :param user_id: Unique identifier of the target user :type user_id: :obj:`base.Integer` :param until_date: Date when the user will be unbanned, unix time. :type until_date: :obj:`typing.Union[base.Integer, None]` :return: Returns True on success. :rtype: :obj:`base.Boolean` """ return await self.bot.kick_chat_member(self.id, user_id=user_id, until_date=until_date)
<ast.AsyncFunctionDef object at 0x7da1b1782bc0>
keyword[async] keyword[def] identifier[kick] ( identifier[self] , identifier[user_id] : identifier[base] . identifier[Integer] , identifier[until_date] : identifier[typing] . identifier[Union] [ identifier[base] . identifier[Integer] , keyword[None] ]= keyword[None] ): literal[string] keyword[return] keyword[await] identifier[self] . identifier[bot] . identifier[kick_chat_member] ( identifier[self] . identifier[id] , identifier[user_id] = identifier[user_id] , identifier[until_date] = identifier[until_date] )
async def kick(self, user_id: base.Integer, until_date: typing.Union[base.Integer, None]=None): """ Use this method to kick a user from a group, a supergroup or a channel. In the case of supergroups and channels, the user will not be able to return to the group on their own using invite links, etc., unless unbanned first. The bot must be an administrator in the chat for this to work and must have the appropriate admin rights. Note: In regular groups (non-supergroups), this method will only work if the ‘All Members Are Admins’ setting is off in the target group. Otherwise members may only be removed by the group's creator or by the member that added them. Source: https://core.telegram.org/bots/api#kickchatmember :param user_id: Unique identifier of the target user :type user_id: :obj:`base.Integer` :param until_date: Date when the user will be unbanned, unix time. :type until_date: :obj:`typing.Union[base.Integer, None]` :return: Returns True on success. :rtype: :obj:`base.Boolean` """ return await self.bot.kick_chat_member(self.id, user_id=user_id, until_date=until_date)
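A hypothetical way this convenience wrapper might be used inside a handler, assuming aiogram 2.x where Chat exposes kick(); the one-hour ban window and the message fields are illustrative only, and the bot must hold admin rights in the chat.

import time

async def on_spam(message):
    # ban the offending member for one hour; until_date is a unix timestamp
    await message.chat.kick(user_id=message.from_user.id,
                            until_date=int(time.time()) + 3600)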
def set_row_gap(self, value): """Sets the gap value between rows Args: value (int or str): gap value (i.e. 10 or "10px") """ value = str(value) + 'px' value = value.replace('pxpx', 'px') self.style['grid-row-gap'] = value
def function[set_row_gap, parameter[self, value]]: constant[Sets the gap value between rows Args: value (int or str): gap value (i.e. 10 or "10px") ] variable[value] assign[=] binary_operation[call[name[str], parameter[name[value]]] + constant[px]] variable[value] assign[=] call[name[value].replace, parameter[constant[pxpx], constant[px]]] call[name[self].style][constant[grid-row-gap]] assign[=] name[value]
keyword[def] identifier[set_row_gap] ( identifier[self] , identifier[value] ): literal[string] identifier[value] = identifier[str] ( identifier[value] )+ literal[string] identifier[value] = identifier[value] . identifier[replace] ( literal[string] , literal[string] ) identifier[self] . identifier[style] [ literal[string] ]= identifier[value]
def set_row_gap(self, value): """Sets the gap value between rows Args: value (int or str): gap value (i.e. 10 or "10px") """ value = str(value) + 'px' value = value.replace('pxpx', 'px') self.style['grid-row-gap'] = value
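The method normalizes its input by always appending 'px' and then collapsing the accidental 'pxpx', so bare integers and already-suffixed strings come out the same. The trick in isolation:

# Both 10 and '10px' normalize to '10px'.
for value in (10, '10px'):
    normalized = (str(value) + 'px').replace('pxpx', 'px')
    print(normalized)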
def export(self, out_filename): """Export desired threads as a zipfile to out_filename. """ with zipfile.ZipFile(out_filename, 'w', zipfile.ZIP_DEFLATED) as arc: id_list = list(self.get_thread_info()) for num, my_info in enumerate(id_list): logging.info('Working on item %i : %s', num, my_info['number']) my_thread = GitHubCommentThread( self.gh_info.owner, self.gh_info.realm, my_info['title'], self.gh_info.user, self.gh_info.token, thread_id=my_info['number']) csec = my_thread.get_comment_section() cdict = [item.to_dict() for item in csec.comments] my_json = json.dumps(cdict) arc.writestr('%i__%s' % (my_info['number'], my_info['title']), my_json)
def function[export, parameter[self, out_filename]]: constant[Export desired threads as a zipfile to out_filename. ] with call[name[zipfile].ZipFile, parameter[name[out_filename], constant[w], name[zipfile].ZIP_DEFLATED]] begin[:] variable[id_list] assign[=] call[name[list], parameter[call[name[self].get_thread_info, parameter[]]]] for taget[tuple[[<ast.Name object at 0x7da204961150>, <ast.Name object at 0x7da204960610>]]] in starred[call[name[enumerate], parameter[name[id_list]]]] begin[:] call[name[logging].info, parameter[constant[Working on item %i : %s], name[num], call[name[my_info]][constant[number]]]] variable[my_thread] assign[=] call[name[GitHubCommentThread], parameter[name[self].gh_info.owner, name[self].gh_info.realm, call[name[my_info]][constant[title]], name[self].gh_info.user, name[self].gh_info.token]] variable[csec] assign[=] call[name[my_thread].get_comment_section, parameter[]] variable[cdict] assign[=] <ast.ListComp object at 0x7da2049608b0> variable[my_json] assign[=] call[name[json].dumps, parameter[name[cdict]]] call[name[arc].writestr, parameter[binary_operation[constant[%i__%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da204963af0>, <ast.Subscript object at 0x7da204961c30>]]], name[my_json]]]
keyword[def] identifier[export] ( identifier[self] , identifier[out_filename] ): literal[string] keyword[with] identifier[zipfile] . identifier[ZipFile] ( identifier[out_filename] , literal[string] , identifier[zipfile] . identifier[ZIP_DEFLATED] ) keyword[as] identifier[arc] : identifier[id_list] = identifier[list] ( identifier[self] . identifier[get_thread_info] ()) keyword[for] identifier[num] , identifier[my_info] keyword[in] identifier[enumerate] ( identifier[id_list] ): identifier[logging] . identifier[info] ( literal[string] , identifier[num] , identifier[my_info] [ literal[string] ]) identifier[my_thread] = identifier[GitHubCommentThread] ( identifier[self] . identifier[gh_info] . identifier[owner] , identifier[self] . identifier[gh_info] . identifier[realm] , identifier[my_info] [ literal[string] ], identifier[self] . identifier[gh_info] . identifier[user] , identifier[self] . identifier[gh_info] . identifier[token] , identifier[thread_id] = identifier[my_info] [ literal[string] ]) identifier[csec] = identifier[my_thread] . identifier[get_comment_section] () identifier[cdict] =[ identifier[item] . identifier[to_dict] () keyword[for] identifier[item] keyword[in] identifier[csec] . identifier[comments] ] identifier[my_json] = identifier[json] . identifier[dumps] ( identifier[cdict] ) identifier[arc] . identifier[writestr] ( literal[string] %( identifier[my_info] [ literal[string] ], identifier[my_info] [ literal[string] ]), identifier[my_json] )
def export(self, out_filename): """Export desired threads as a zipfile to out_filename. """ with zipfile.ZipFile(out_filename, 'w', zipfile.ZIP_DEFLATED) as arc: id_list = list(self.get_thread_info()) for (num, my_info) in enumerate(id_list): logging.info('Working on item %i : %s', num, my_info['number']) my_thread = GitHubCommentThread(self.gh_info.owner, self.gh_info.realm, my_info['title'], self.gh_info.user, self.gh_info.token, thread_id=my_info['number']) csec = my_thread.get_comment_section() cdict = [item.to_dict() for item in csec.comments] my_json = json.dumps(cdict) arc.writestr('%i__%s' % (my_info['number'], my_info['title']), my_json) # depends on [control=['for'], data=[]] # depends on [control=['with'], data=['arc']]
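The export loop boils down to zipfile.ZipFile.writestr with a '<number>__<title>' member name per thread. A runnable, stdlib-only demo of that pattern with made-up thread data:

import io
import json
import zipfile

buf = io.BytesIO()
with zipfile.ZipFile(buf, 'w', zipfile.ZIP_DEFLATED) as arc:
    for num, title in [(1, 'bug'), (2, 'feature')]:
        comments = [{'body': 'comment from thread %i' % num}]
        arc.writestr('%i__%s' % (num, title), json.dumps(comments))
with zipfile.ZipFile(buf) as arc:
    print(arc.namelist())  # ['1__bug', '2__feature']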
def _encode(self, value, path_from_root): """Normalize, compress, and encode sub-objects for backend storage. value: Object to encode. path_from_root: `tuple` of key strings from the top-level summary to the current `value`. Returns: A new tree of dict's with large objects replaced with dictionaries with "_type" entries that say which type the original data was. """ # Constructs a new `dict` tree in `json_value` that discards and/or # encodes objects that aren't JSON serializable. if isinstance(value, dict): json_value = {} for key, value in six.iteritems(value): json_value[key] = self._encode(value, path_from_root + (key,)) return json_value else: path = ".".join(path_from_root) if util.is_pandas_data_frame(value): return util.encode_data_frame(path, value, self._run) else: friendly_value, converted = util.json_friendly(data_types.val_to_json(path, value)) json_value, compressed = util.maybe_compress_summary(friendly_value, util.get_h5_typename(value)) if compressed: self.write_h5(path_from_root, friendly_value) return json_value """ if isinstance(value, dict): json_child[key], converted = util.json_friendly( self._encode(value, path_from_root + [key])) else: """
def function[_encode, parameter[self, value, path_from_root]]: constant[Normalize, compress, and encode sub-objects for backend storage. value: Object to encode. path_from_root: `tuple` of key strings from the top-level summary to the current `value`. Returns: A new tree of dict's with large objects replaced with dictionaries with "_type" entries that say which type the original data was. ] if call[name[isinstance], parameter[name[value], name[dict]]] begin[:] variable[json_value] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da1b08df520>, <ast.Name object at 0x7da1b08dff70>]]] in starred[call[name[six].iteritems, parameter[name[value]]]] begin[:] call[name[json_value]][name[key]] assign[=] call[name[self]._encode, parameter[name[value], binary_operation[name[path_from_root] + tuple[[<ast.Name object at 0x7da1b08de830>]]]]] return[name[json_value]] constant[ if isinstance(value, dict): json_child[key], converted = util.json_friendly( self._encode(value, path_from_root + [key])) else: ]
keyword[def] identifier[_encode] ( identifier[self] , identifier[value] , identifier[path_from_root] ): literal[string] keyword[if] identifier[isinstance] ( identifier[value] , identifier[dict] ): identifier[json_value] ={} keyword[for] identifier[key] , identifier[value] keyword[in] identifier[six] . identifier[iteritems] ( identifier[value] ): identifier[json_value] [ identifier[key] ]= identifier[self] . identifier[_encode] ( identifier[value] , identifier[path_from_root] +( identifier[key] ,)) keyword[return] identifier[json_value] keyword[else] : identifier[path] = literal[string] . identifier[join] ( identifier[path_from_root] ) keyword[if] identifier[util] . identifier[is_pandas_data_frame] ( identifier[value] ): keyword[return] identifier[util] . identifier[encode_data_frame] ( identifier[path] , identifier[value] , identifier[self] . identifier[_run] ) keyword[else] : identifier[friendly_value] , identifier[converted] = identifier[util] . identifier[json_friendly] ( identifier[data_types] . identifier[val_to_json] ( identifier[path] , identifier[value] )) identifier[json_value] , identifier[compressed] = identifier[util] . identifier[maybe_compress_summary] ( identifier[friendly_value] , identifier[util] . identifier[get_h5_typename] ( identifier[value] )) keyword[if] identifier[compressed] : identifier[self] . identifier[write_h5] ( identifier[path_from_root] , identifier[friendly_value] ) keyword[return] identifier[json_value] literal[string]
def _encode(self, value, path_from_root): """Normalize, compress, and encode sub-objects for backend storage. value: Object to encode. path_from_root: `tuple` of key strings from the top-level summary to the current `value`. Returns: A new tree of dict's with large objects replaced with dictionaries with "_type" entries that say which type the original data was. """ # Constructs a new `dict` tree in `json_value` that discards and/or # encodes objects that aren't JSON serializable. if isinstance(value, dict): json_value = {} for (key, value) in six.iteritems(value): json_value[key] = self._encode(value, path_from_root + (key,)) # depends on [control=['for'], data=[]] return json_value # depends on [control=['if'], data=[]] else: path = '.'.join(path_from_root) if util.is_pandas_data_frame(value): return util.encode_data_frame(path, value, self._run) # depends on [control=['if'], data=[]] else: (friendly_value, converted) = util.json_friendly(data_types.val_to_json(path, value)) (json_value, compressed) = util.maybe_compress_summary(friendly_value, util.get_h5_typename(value)) if compressed: self.write_h5(path_from_root, friendly_value) # depends on [control=['if'], data=[]] return json_value '\n if isinstance(value, dict):\n json_child[key], converted = util.json_friendly(\n self._encode(value, path_from_root + [key]))\n else:\n '
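The encoder's core move is a recursive dict walk that threads a path_from_root tuple down to every leaf. Below is a self-contained sketch of just that walk; the wandb-specific helpers (util.json_friendly, maybe_compress_summary, the data-frame and h5 handling) are replaced by a trivial str() fallback, so this is an illustration of the traversal, not the real encoding.

def encode(value, path_from_root=()):
    if isinstance(value, dict):
        # recurse, extending the key path at each level
        return {key: encode(val, path_from_root + (key,))
                for key, val in value.items()}
    print('encoding %s' % '.'.join(path_from_root))
    return value if isinstance(value, (int, float, str)) else str(value)

print(encode({'loss': 0.1, 'sys': {'gpu': 1}}))
# encoding loss / encoding sys.gpu, then {'loss': 0.1, 'sys': {'gpu': 1}}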